[Logs UI] Provide support for Kibana Index Patterns in the logs source configuration (#96454) (#97274)

* Provide support for Kibana Index Patterns in the logs source configuration

Co-authored-by: Felix Stürmer <weltenwort@users.noreply.github.com>

Co-authored-by: Felix Stürmer <weltenwort@users.noreply.github.com>
This commit is contained in:
Kerry Gallagher 2021-04-15 18:50:56 +01:00 committed by GitHub
parent 12d943c8f4
commit cb730d3c40
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
62 changed files with 758 additions and 278 deletions

View file

@ -7,7 +7,7 @@
import * as rt from 'io-ts';
import { logEntryCursorRT, logEntryRT } from '../../log_entry';
import { logSourceColumnConfigurationRT } from '../log_sources';
import { logSourceColumnConfigurationRT } from '../../log_sources/log_source_configuration';
export const LOG_ENTRIES_HIGHLIGHTS_PATH = '/api/log_entries/highlights';

View file

@ -0,0 +1,11 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
/** Base path under which all log source configuration API routes live. */
export const LOG_SOURCE_CONFIGURATION_PATH_PREFIX = '/api/infra/log_source_configurations';

/** Route-registration pattern; `{sourceId}` is the path-parameter placeholder. */
export const LOG_SOURCE_CONFIGURATION_PATH = `${LOG_SOURCE_CONFIGURATION_PATH_PREFIX}/{sourceId}`;

/**
 * Builds the concrete request path for a specific log source configuration.
 */
export function getLogSourceConfigurationPath(sourceId: string): string {
  return `${LOG_SOURCE_CONFIGURATION_PATH_PREFIX}/${sourceId}`;
}

View file

@ -7,7 +7,7 @@
import * as rt from 'io-ts';
import { badRequestErrorRT, forbiddenErrorRT, routeTimingMetadataRT } from '../shared';
import { logSourceConfigurationRT } from './log_source_configuration';
import { logSourceConfigurationRT } from '../../log_sources/log_source_configuration';
/**
* request

View file

@ -7,10 +7,7 @@
import * as rt from 'io-ts';
import { routeTimingMetadataRT } from '../shared';
import {
getLogSourceConfigurationPath,
LOG_SOURCE_CONFIGURATION_PATH,
} from './log_source_configuration';
import { getLogSourceConfigurationPath, LOG_SOURCE_CONFIGURATION_PATH } from './common';
export const LOG_SOURCE_STATUS_PATH_SUFFIX = 'status';
export const LOG_SOURCE_STATUS_PATH = `${LOG_SOURCE_CONFIGURATION_PATH}/${LOG_SOURCE_STATUS_PATH_SUFFIX}`;
@ -50,7 +47,6 @@ const logIndexStatusRT = rt.keyof({
export type LogIndexStatus = rt.TypeOf<typeof logIndexStatusRT>;
const logSourceStatusRT = rt.strict({
logIndexFields: rt.array(logIndexFieldRT),
logIndexStatus: logIndexStatusRT,
});

View file

@ -7,5 +7,5 @@
export * from './get_log_source_configuration';
export * from './get_log_source_status';
export * from './log_source_configuration';
export * from './patch_log_source_configuration';
export * from './common';

View file

@ -8,7 +8,7 @@
import * as rt from 'io-ts';
import { badRequestErrorRT, forbiddenErrorRT } from '../shared';
import { getLogSourceConfigurationSuccessResponsePayloadRT } from './get_log_source_configuration';
import { logSourceConfigurationPropertiesRT } from './log_source_configuration';
import { logSourceConfigurationPropertiesRT } from '../../log_sources/log_source_configuration';
/**
* request

View file

@ -0,0 +1,9 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
// Barrel file for the shared log-sources domain model.
// Raw (stored) source configuration shapes and io-ts codecs.
export * from './log_source_configuration';
// Resolution of a stored configuration into concrete indices/fields.
export * from './resolved_log_source_configuration';

View file

@ -7,11 +7,6 @@
import * as rt from 'io-ts';
export const LOG_SOURCE_CONFIGURATION_PATH_PREFIX = '/api/infra/log_source_configurations';
export const LOG_SOURCE_CONFIGURATION_PATH = `${LOG_SOURCE_CONFIGURATION_PATH_PREFIX}/{sourceId}`;
export const getLogSourceConfigurationPath = (sourceId: string) =>
`${LOG_SOURCE_CONFIGURATION_PATH_PREFIX}/${sourceId}`;
export const logSourceConfigurationOriginRT = rt.keyof({
fallback: null,
internal: null,
@ -26,6 +21,7 @@ const logSourceFieldsConfigurationRT = rt.strict({
pod: rt.string,
timestamp: rt.string,
tiebreaker: rt.string,
message: rt.array(rt.string),
});
const logSourceCommonColumnConfigurationRT = rt.strict({
@ -56,10 +52,24 @@ export const logSourceColumnConfigurationRT = rt.union([
]);
export type LogSourceColumnConfiguration = rt.TypeOf<typeof logSourceColumnConfigurationRT>;
// Kibana index pattern
const logIndexPatternReferenceRT = rt.type({
type: rt.literal('index_pattern'),
indexPatternId: rt.string,
});
// Legacy support
const logIndexNameReferenceRT = rt.type({
type: rt.literal('index_name'),
indexName: rt.string,
});
export const logIndexReferenceRT = rt.union([logIndexPatternReferenceRT, logIndexNameReferenceRT]);
export const logSourceConfigurationPropertiesRT = rt.strict({
name: rt.string,
description: rt.string,
logAlias: rt.string,
logIndices: logIndexReferenceRT,
fields: logSourceFieldsConfigurationRT,
logColumns: rt.array(logSourceColumnConfigurationRT),
});

View file

@ -1,19 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { LogSourceConfigurationProperties } from '../http_api/log_sources';
// NOTE: Type will change, see below.
type ResolvedLogsSourceConfiguration = LogSourceConfigurationProperties;
// NOTE: This will handle real resolution for https://github.com/elastic/kibana/issues/92650, via the index patterns service, but for now just
// hands back properties from the saved object (and therefore looks pointless...).
export const resolveLogSourceConfiguration = (
sourceConfiguration: LogSourceConfigurationProperties
): ResolvedLogsSourceConfiguration => {
return sourceConfiguration;
};

View file

@ -0,0 +1,83 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import {
LogSourceConfigurationProperties,
LogSourceColumnConfiguration,
} from './log_source_configuration';
import { IndexPatternsContract, IndexPattern } from '../../../../../src/plugins/data/common';
/**
 * A log source configuration whose index reference has been resolved into
 * concrete values, intended for the vast majority of "read" scenarios.
 */
export interface ResolvedLogSourceConfiguration {
  name: string;
  description: string;
  // Index pattern string to query (the Kibana index pattern's title, or the
  // legacy index name for `index_name` references).
  indices: string;
  timestampField: string;
  // Resolved to '_doc' for Kibana index pattern references; presumably used
  // to break ties between equal timestamps — confirm against consumers.
  tiebreakerField: string;
  messageField: string[];
  // Field metadata, shaped like a Kibana index pattern's field list.
  fields: IndexPattern['fields'];
  columns: LogSourceColumnConfiguration[];
}
/**
 * Resolves a stored log source configuration into concrete indices, fields
 * and columns, dispatching on the kind of index reference it contains:
 * a legacy `index_name` reference or a Kibana `index_pattern` reference.
 */
export const resolveLogSourceConfiguration = async (
  sourceConfiguration: LogSourceConfigurationProperties,
  indexPatternsService: IndexPatternsContract
): Promise<ResolvedLogSourceConfiguration> => {
  const resolve =
    sourceConfiguration.logIndices.type === 'index_name'
      ? resolveLegacyReference
      : resolveKibanaIndexPatternReference;
  return await resolve(sourceConfiguration, indexPatternsService);
};
/**
 * Resolves a legacy (`index_name`) reference by asking the index patterns
 * service for the fields matching the configured wildcard index name.
 * Throws when handed a configuration that is not a legacy reference.
 */
const resolveLegacyReference = async (
  sourceConfiguration: LogSourceConfigurationProperties,
  indexPatternsService: IndexPatternsContract
): Promise<ResolvedLogSourceConfiguration> => {
  const { logIndices, fields, logColumns, name, description } = sourceConfiguration;
  if (logIndices.type !== 'index_name') {
    throw new Error('This function can only resolve legacy references');
  }
  // allowNoIndex: an index name that matches nothing must not fail resolution.
  const resolvedFields = await indexPatternsService.getFieldsForWildcard({
    pattern: logIndices.indexName,
    allowNoIndex: true,
  });
  return {
    name,
    description,
    indices: logIndices.indexName,
    timestampField: fields.timestamp,
    tiebreakerField: fields.tiebreaker,
    messageField: fields.message,
    fields: resolvedFields,
    columns: logColumns,
  };
};
/**
 * Resolves a Kibana `index_pattern` reference by loading the referenced
 * index pattern and deriving indices, timestamp field and field list from it.
 * Throws when handed a configuration that is not an index pattern reference.
 */
const resolveKibanaIndexPatternReference = async (
  sourceConfiguration: LogSourceConfigurationProperties,
  indexPatternsService: IndexPatternsContract
): Promise<ResolvedLogSourceConfiguration> => {
  const { logIndices, logColumns, name, description } = sourceConfiguration;
  if (logIndices.type !== 'index_pattern') {
    throw new Error('This function can only resolve Kibana Index Pattern references');
  }
  const indexPattern = await indexPatternsService.get(logIndices.indexPatternId);
  return {
    name,
    description,
    indices: indexPattern.title,
    // Fall back to '@timestamp' when the index pattern has no time field.
    timestampField: indexPattern.timeFieldName ?? '@timestamp',
    tiebreakerField: '_doc',
    messageField: ['message'],
    fields: indexPattern.fields,
    columns: logColumns,
  };
};

View file

@ -7,7 +7,7 @@
import * as rt from 'io-ts';
import { DslQuery } from '../../../../../../src/plugins/data/common';
import { logSourceColumnConfigurationRT } from '../../http_api/log_sources';
import { logSourceColumnConfigurationRT } from '../../log_sources/log_source_configuration';
import {
logEntryAfterCursorRT,
logEntryBeforeCursorRT,

View file

@ -22,6 +22,7 @@ import * as rt from 'io-ts';
import moment from 'moment';
import { pipe } from 'fp-ts/lib/pipeable';
import { chain } from 'fp-ts/lib/Either';
import { logIndexReferenceRT } from '../log_sources';
export const TimestampFromString = new rt.Type<number, string>(
'TimestampFromString',
@ -39,6 +40,33 @@ export const TimestampFromString = new rt.Type<number, string>(
(output) => new Date(output).toISOString()
);
/**
* Source configuration config file properties.
* These are properties that can appear in the kibana.yml file.
* This is a legacy method of providing properties, and will be deprecated in the future (v 8.0.0).
*/
// Runtime decoder for the legacy `xpack.infra.sources.default` kibana.yml
// settings. Every leaf property is optional (rt.partial), since each may be
// omitted from the config file independently.
export const sourceConfigurationConfigFilePropertiesRT = rt.type({
  sources: rt.type({
    default: rt.partial({
      logAlias: rt.string, // Cannot be deprecated until 8.0.0. Will be converted to an indexName reference.
      metricAlias: rt.string,
      fields: rt.partial({
        timestamp: rt.string,
        // Note: message is a list of candidate field names, not a single field.
        message: rt.array(rt.string),
        tiebreaker: rt.string,
        host: rt.string,
        container: rt.string,
        pod: rt.string,
      }),
    }),
  }),
});

// Static type derived from the decoder above.
export type SourceConfigurationConfigFileProperties = rt.TypeOf<
  typeof sourceConfigurationConfigFilePropertiesRT
>;
/**
* Log columns
*/
@ -103,7 +131,7 @@ export const SourceConfigurationRT = rt.type({
name: rt.string,
description: rt.string,
metricAlias: rt.string,
logAlias: rt.string,
logIndices: logIndexReferenceRT,
inventoryDefaultView: rt.string,
metricsExplorerDefaultView: rt.string,
fields: SourceConfigurationFieldsRT,

View file

@ -102,7 +102,11 @@ export const ExpressionEditor: React.FC<
<Editor {...props} />
</SourceStatusWrapper>
) : (
<LogSourceProvider sourceId={sourceId} fetch={http!.fetch}>
<LogSourceProvider
sourceId={sourceId}
fetch={http!.fetch}
indexPatternsService={props.data.indexPatterns}
>
<SourceStatusWrapper {...props}>
<Editor {...props} />
</SourceStatusWrapper>
@ -115,10 +119,10 @@ export const ExpressionEditor: React.FC<
export const SourceStatusWrapper: React.FC = ({ children }) => {
const {
initialize,
isLoadingSourceStatus,
loadSource,
isLoadingSourceConfiguration,
hasFailedLoadingSource,
isUninitialized,
hasFailedLoadingSourceStatus,
loadSourceStatus,
} = useLogSourceContext();
useMount(() => {
@ -127,13 +131,13 @@ export const SourceStatusWrapper: React.FC = ({ children }) => {
return (
<>
{isLoadingSourceStatus || isUninitialized ? (
{isLoadingSourceConfiguration || isUninitialized ? (
<div>
<EuiSpacer size="m" />
<EuiLoadingSpinner size="l" />
<EuiSpacer size="m" />
</div>
) : hasFailedLoadingSourceStatus ? (
) : hasFailedLoadingSource ? (
<EuiCallOut
title={i18n.translate('xpack.infra.logs.alertFlyout.sourceStatusError', {
defaultMessage: 'Sorry, there was a problem loading field information',
@ -141,7 +145,7 @@ export const SourceStatusWrapper: React.FC = ({ children }) => {
color="danger"
iconType="alert"
>
<EuiButton onClick={loadSourceStatus} iconType="refresh">
<EuiButton onClick={loadSource} iconType="refresh">
{i18n.translate('xpack.infra.logs.alertFlyout.sourceStatusErrorTryAgain', {
defaultMessage: 'Try again',
})}
@ -159,7 +163,7 @@ export const Editor: React.FC<
> = (props) => {
const { setAlertParams, alertParams, errors } = props;
const [hasSetDefaults, setHasSetDefaults] = useState<boolean>(false);
const { sourceId, sourceStatus } = useLogSourceContext();
const { sourceId, resolvedSourceConfiguration } = useLogSourceContext();
const {
criteria: criteriaErrors,
@ -169,24 +173,24 @@ export const Editor: React.FC<
} = useMemo(() => decodeOrThrow(errorsRT)(errors), [errors]);
const supportedFields = useMemo(() => {
if (sourceStatus?.logIndexFields) {
return sourceStatus.logIndexFields.filter((field) => {
if (resolvedSourceConfiguration?.fields) {
return resolvedSourceConfiguration.fields.filter((field) => {
return (field.type === 'string' || field.type === 'number') && field.searchable;
});
} else {
return [];
}
}, [sourceStatus]);
}, [resolvedSourceConfiguration]);
const groupByFields = useMemo(() => {
if (sourceStatus?.logIndexFields) {
return sourceStatus.logIndexFields.filter((field) => {
if (resolvedSourceConfiguration?.fields) {
return resolvedSourceConfiguration.fields.filter((field) => {
return field.type === 'string' && field.aggregatable;
});
} else {
return [];
}
}, [sourceStatus]);
}, [resolvedSourceConfiguration]);
const updateThreshold = useCallback(
(thresholdParams) => {

View file

@ -7,8 +7,8 @@
import React, { useMemo, useCallback, useEffect } from 'react';
import { noop } from 'lodash';
import type { DataPublicPluginStart } from '../../../../../../src/plugins/data/public';
import { euiStyled } from '../../../../../../src/plugins/kibana_react/common';
import { LogEntryCursor } from '../../../common/log_entry';
import { useKibana } from '../../../../../../src/plugins/kibana_react/public';
@ -20,6 +20,10 @@ import { LogColumnRenderConfiguration } from '../../utils/log_column_render_conf
import { JsonValue } from '../../../../../../src/plugins/kibana_utils/common';
import { Query } from '../../../../../../src/plugins/data/common';
interface LogStreamPluginDeps {
data: DataPublicPluginStart;
}
const PAGE_THRESHOLD = 2;
interface CommonColumnDefinition {
@ -80,8 +84,8 @@ export const LogStream: React.FC<LogStreamProps> = ({
);
// source boilerplate
const { services } = useKibana();
if (!services?.http?.fetch) {
const { services } = useKibana<LogStreamPluginDeps>();
if (!services?.http?.fetch || !services?.data?.indexPatterns) {
throw new Error(
`<LogStream /> cannot access kibana core services.
@ -98,6 +102,7 @@ Read more at https://github.com/elastic/kibana/blob/master/src/plugins/kibana_re
} = useLogSource({
sourceId,
fetch: services.http.fetch,
indexPatternsService: services.data.indexPatterns,
});
// Internal state

View file

@ -20,19 +20,24 @@ export const createUninitializedUseLogSourceMock: CreateUseLogSource = ({
},
hasFailedLoadingSource: false,
hasFailedLoadingSourceStatus: false,
hasFailedResolvingSourceConfiguration: false,
initialize: jest.fn(),
isLoading: false,
isLoadingSourceConfiguration: false,
isLoadingSourceStatus: false,
isResolvingSourceConfiguration: false,
isUninitialized: true,
loadSource: jest.fn(),
loadSourceConfiguration: jest.fn(),
loadSourceFailureMessage: undefined,
resolveSourceFailureMessage: undefined,
loadSourceStatus: jest.fn(),
sourceConfiguration: undefined,
sourceId,
sourceStatus: undefined,
updateSourceConfiguration: jest.fn(),
resolvedSourceConfiguration: undefined,
loadResolveLogSourceConfiguration: jest.fn(),
});
export const createLoadingUseLogSourceMock: CreateUseLogSource = ({
@ -42,6 +47,7 @@ export const createLoadingUseLogSourceMock: CreateUseLogSource = ({
isLoading: true,
isLoadingSourceConfiguration: true,
isLoadingSourceStatus: true,
isResolvingSourceConfiguration: true,
});
export const createLoadedUseLogSourceMock: CreateUseLogSource = ({
@ -60,7 +66,10 @@ export const createBasicSourceConfiguration = (sourceId: string): LogSourceConfi
origin: 'stored',
configuration: {
description: `description for ${sourceId}`,
logAlias: 'LOG_INDICES',
logIndices: {
type: 'index_pattern',
indexPatternId: 'some-id',
},
logColumns: [],
fields: {
container: 'CONTAINER_FIELD',
@ -68,12 +77,12 @@ export const createBasicSourceConfiguration = (sourceId: string): LogSourceConfi
pod: 'POD_FIELD',
tiebreaker: 'TIEBREAKER_FIELD',
timestamp: 'TIMESTAMP_FIELD',
message: ['MESSAGE_FIELD'],
},
name: sourceId,
},
});
export const createAvailableSourceStatus = (logIndexFields = []): LogSourceStatus => ({
logIndexFields,
logIndexStatus: 'available',
});

View file

@ -11,15 +11,20 @@ import useMountedState from 'react-use/lib/useMountedState';
import type { HttpHandler } from 'src/core/public';
import {
LogIndexField,
LogSourceConfiguration,
LogSourceConfigurationProperties,
LogSourceConfigurationPropertiesPatch,
LogSourceStatus,
} from '../../../../common/http_api/log_sources';
import {
LogSourceConfiguration,
LogSourceConfigurationProperties,
ResolvedLogSourceConfiguration,
resolveLogSourceConfiguration,
} from '../../../../common/log_sources';
import { useTrackedPromise } from '../../../utils/use_tracked_promise';
import { callFetchLogSourceConfigurationAPI } from './api/fetch_log_source_configuration';
import { callFetchLogSourceStatusAPI } from './api/fetch_log_source_status';
import { callPatchLogSourceConfigurationAPI } from './api/patch_log_source_configuration';
import { IndexPatternsContract } from '../../../../../../../src/plugins/data/common';
export {
LogIndexField,
@ -29,47 +34,78 @@ export {
LogSourceStatus,
};
export const useLogSource = ({ sourceId, fetch }: { sourceId: string; fetch: HttpHandler }) => {
export const useLogSource = ({
sourceId,
fetch,
indexPatternsService,
}: {
sourceId: string;
fetch: HttpHandler;
indexPatternsService: IndexPatternsContract;
}) => {
const getIsMounted = useMountedState();
const [sourceConfiguration, setSourceConfiguration] = useState<
LogSourceConfiguration | undefined
>(undefined);
const [resolvedSourceConfiguration, setResolvedSourceConfiguration] = useState<
ResolvedLogSourceConfiguration | undefined
>(undefined);
const [sourceStatus, setSourceStatus] = useState<LogSourceStatus | undefined>(undefined);
const [loadSourceConfigurationRequest, loadSourceConfiguration] = useTrackedPromise(
{
cancelPreviousOn: 'resolution',
createPromise: async () => {
return await callFetchLogSourceConfigurationAPI(sourceId, fetch);
const { data: sourceConfigurationResponse } = await callFetchLogSourceConfigurationAPI(
sourceId,
fetch
);
const resolvedSourceConfigurationResponse = await resolveLogSourceConfiguration(
sourceConfigurationResponse?.configuration,
indexPatternsService
);
return { sourceConfigurationResponse, resolvedSourceConfigurationResponse };
},
onResolve: ({ data }) => {
onResolve: ({ sourceConfigurationResponse, resolvedSourceConfigurationResponse }) => {
if (!getIsMounted()) {
return;
}
setSourceConfiguration(data);
setSourceConfiguration(sourceConfigurationResponse);
setResolvedSourceConfiguration(resolvedSourceConfigurationResponse);
},
},
[sourceId, fetch]
[sourceId, fetch, indexPatternsService]
);
const [updateSourceConfigurationRequest, updateSourceConfiguration] = useTrackedPromise(
{
cancelPreviousOn: 'resolution',
createPromise: async (patchedProperties: LogSourceConfigurationPropertiesPatch) => {
return await callPatchLogSourceConfigurationAPI(sourceId, patchedProperties, fetch);
const { data: updatedSourceConfig } = await callPatchLogSourceConfigurationAPI(
sourceId,
patchedProperties,
fetch
);
const resolvedSourceConfig = await resolveLogSourceConfiguration(
updatedSourceConfig.configuration,
indexPatternsService
);
return { updatedSourceConfig, resolvedSourceConfig };
},
onResolve: ({ data }) => {
onResolve: ({ updatedSourceConfig, resolvedSourceConfig }) => {
if (!getIsMounted()) {
return;
}
setSourceConfiguration(data);
setSourceConfiguration(updatedSourceConfig);
setResolvedSourceConfiguration(resolvedSourceConfig);
loadSourceStatus();
},
},
[sourceId, fetch]
[sourceId, fetch, indexPatternsService]
);
const [loadSourceStatusRequest, loadSourceStatus] = useTrackedPromise(
@ -91,10 +127,10 @@ export const useLogSource = ({ sourceId, fetch }: { sourceId: string; fetch: Htt
const derivedIndexPattern = useMemo(
() => ({
fields: sourceStatus?.logIndexFields ?? [],
title: sourceConfiguration?.configuration.logAlias ?? 'unknown',
fields: resolvedSourceConfiguration?.fields ?? [],
title: resolvedSourceConfiguration?.indices ?? 'unknown',
}),
[sourceConfiguration, sourceStatus]
[resolvedSourceConfiguration]
);
const isLoadingSourceConfiguration = useMemo(
@ -153,22 +189,28 @@ export const useLogSource = ({ sourceId, fetch }: { sourceId: string; fetch: Htt
}, [isUninitialized, loadSource]);
return {
sourceId,
initialize,
isUninitialized,
derivedIndexPattern,
// Failure states
hasFailedLoadingSource,
hasFailedLoadingSourceStatus,
initialize,
loadSourceFailureMessage,
// Loading states
isLoading,
isLoadingSourceConfiguration,
isLoadingSourceStatus,
isUninitialized,
loadSource,
loadSourceFailureMessage,
loadSourceConfiguration,
loadSourceStatus,
sourceConfiguration,
sourceId,
// Source status (denotes the state of the indices, e.g. missing)
sourceStatus,
loadSourceStatus,
// Source configuration (represents the raw attributes of the source configuration)
loadSource,
loadSourceConfiguration,
sourceConfiguration,
updateSourceConfiguration,
// Resolved source configuration (represents a fully resolved state, you would use this for the vast majority of "read" scenarios)
resolvedSourceConfiguration,
};
};

View file

@ -9,7 +9,7 @@ import { useCallback } from 'react';
import { Observable } from 'rxjs';
import { exhaustMap } from 'rxjs/operators';
import { IKibanaSearchRequest } from '../../../../../../../src/plugins/data/public';
import { LogSourceColumnConfiguration } from '../../../../common/http_api/log_sources';
import { LogSourceColumnConfiguration } from '../../../../common/log_sources';
import { LogEntryAfterCursor } from '../../../../common/log_entry';
import { decodeOrThrow } from '../../../../common/runtime_types';
import {

View file

@ -8,7 +8,7 @@
import { useCallback } from 'react';
import { combineLatest, Observable, ReplaySubject } from 'rxjs';
import { last, map, startWith, switchMap } from 'rxjs/operators';
import { LogSourceColumnConfiguration } from '../../../../common/http_api/log_sources';
import { LogSourceColumnConfiguration } from '../../../../common/log_sources';
import { LogEntryCursor } from '../../../../common/log_entry';
import { LogEntriesSearchRequestQuery } from '../../../../common/search_strategies/log_entries/log_entries';
import { flattenDataSearchResponseDescriptor } from '../../../utils/data_search';

View file

@ -9,7 +9,7 @@ import { useCallback } from 'react';
import { Observable } from 'rxjs';
import { exhaustMap } from 'rxjs/operators';
import { IKibanaSearchRequest } from '../../../../../../../src/plugins/data/public';
import { LogSourceColumnConfiguration } from '../../../../common/http_api/log_sources';
import { LogSourceColumnConfiguration } from '../../../../common/log_sources';
import { LogEntryBeforeCursor } from '../../../../common/log_entry';
import { decodeOrThrow } from '../../../../common/runtime_types';
import {

View file

@ -25,6 +25,9 @@ const renderRoutes = (routes: React.ReactElement) => {
const history = createMemoryHistory();
const services = {
http: httpServiceMock.createStartContract(),
data: {
indexPatterns: {},
},
};
const renderResult = render(
<KibanaContextProvider services={services}>

View file

@ -12,8 +12,6 @@ import flowRight from 'lodash/flowRight';
import React from 'react';
import { Redirect, RouteComponentProps } from 'react-router-dom';
import useMount from 'react-use/lib/useMount';
import { HttpStart } from 'src/core/public';
import { useKibana } from '../../../../../../src/plugins/kibana_react/public';
import { findInventoryFields } from '../../../common/inventory_models';
import { InventoryItemType } from '../../../common/inventory_models/types';
import { LoadingPage } from '../../components/loading_page';
@ -23,6 +21,7 @@ import { useLogSource } from '../../containers/logs/log_source';
import { replaceSourceIdInQueryString } from '../../containers/source_id';
import { LinkDescriptor } from '../../hooks/use_link_props';
import { getFilterFromLocation, getTimeFromLocation } from './query_params';
import { useKibanaContextForPlugin } from '../../hooks/use_kibana';
type RedirectToNodeLogsType = RouteComponentProps<{
nodeId: string;
@ -36,10 +35,11 @@ export const RedirectToNodeLogs = ({
},
location,
}: RedirectToNodeLogsType) => {
const { services } = useKibana<{ http: HttpStart }>();
const { services } = useKibanaContextForPlugin();
const { isLoading, loadSourceConfiguration, sourceConfiguration } = useLogSource({
fetch: services.http.fetch,
sourceId,
indexPatternsService: services.data.indexPatterns,
});
const fields = sourceConfiguration?.configuration.fields;

View file

@ -12,22 +12,22 @@ import { useLogSourceContext } from '../../../containers/logs/log_source';
import { useActiveKibanaSpace } from '../../../hooks/use_kibana_space';
export const LogEntryCategoriesPageProviders: React.FunctionComponent = ({ children }) => {
const { sourceConfiguration, sourceId } = useLogSourceContext();
const { sourceId, resolvedSourceConfiguration } = useLogSourceContext();
const { space } = useActiveKibanaSpace();
// This is a rather crude way of guarding the dependent providers against
// arguments that are only made available asynchronously. Ideally, we'd use
// React concurrent mode and Suspense in order to handle that more gracefully.
if (sourceConfiguration?.configuration.logAlias == null || space == null) {
if (!resolvedSourceConfiguration || space == null) {
return null;
}
return (
<LogEntryCategoriesModuleProvider
indexPattern={sourceConfiguration.configuration.logAlias}
indexPattern={resolvedSourceConfiguration.indices}
sourceId={sourceId}
spaceId={space.id}
timestampField={sourceConfiguration.configuration.fields.timestamp}
timestampField={resolvedSourceConfiguration.timestampField}
>
<LogAnalysisSetupFlyoutStateProvider>{children}</LogAnalysisSetupFlyoutStateProvider>
</LogEntryCategoriesModuleProvider>

View file

@ -14,29 +14,29 @@ import { useActiveKibanaSpace } from '../../../hooks/use_kibana_space';
import { LogFlyout } from '../../../containers/logs/log_flyout';
export const LogEntryRatePageProviders: React.FunctionComponent = ({ children }) => {
const { sourceId, sourceConfiguration } = useLogSourceContext();
const { sourceId, resolvedSourceConfiguration } = useLogSourceContext();
const { space } = useActiveKibanaSpace();
// This is a rather crude way of guarding the dependent providers against
// arguments that are only made available asynchronously. Ideally, we'd use
// React concurrent mode and Suspense in order to handle that more gracefully.
if (sourceConfiguration?.configuration.logAlias == null || space == null) {
if (!resolvedSourceConfiguration || space == null) {
return null;
}
return (
<LogFlyout.Provider>
<LogEntryRateModuleProvider
indexPattern={sourceConfiguration?.configuration.logAlias ?? ''}
indexPattern={resolvedSourceConfiguration.indices ?? ''}
sourceId={sourceId}
spaceId={space.id}
timestampField={sourceConfiguration?.configuration.fields.timestamp ?? ''}
timestampField={resolvedSourceConfiguration.timestampField ?? ''}
>
<LogEntryCategoriesModuleProvider
indexPattern={sourceConfiguration?.configuration.logAlias ?? ''}
indexPattern={resolvedSourceConfiguration.indices ?? ''}
sourceId={sourceId}
spaceId={space.id}
timestampField={sourceConfiguration?.configuration.fields.timestamp ?? ''}
timestampField={resolvedSourceConfiguration.timestampField ?? ''}
>
<LogAnalysisSetupFlyoutStateProvider>{children}</LogAnalysisSetupFlyoutStateProvider>
</LogEntryCategoriesModuleProvider>

View file

@ -6,17 +6,20 @@
*/
import React from 'react';
import { HttpStart } from 'src/core/public';
import { useKibana } from '../../../../../../src/plugins/kibana_react/public';
import { useKibanaContextForPlugin } from '../../hooks/use_kibana';
import { LogAnalysisCapabilitiesProvider } from '../../containers/logs/log_analysis';
import { LogSourceProvider } from '../../containers/logs/log_source';
import { useSourceId } from '../../containers/source_id';
export const LogsPageProviders: React.FunctionComponent = ({ children }) => {
const [sourceId] = useSourceId();
const { services } = useKibana<{ http: HttpStart }>();
const { services } = useKibanaContextForPlugin();
return (
<LogSourceProvider sourceId={sourceId} fetch={services.http.fetch}>
<LogSourceProvider
sourceId={sourceId}
fetch={services.http.fetch}
indexPatternsService={services.data.indexPatterns}
>
<LogAnalysisCapabilitiesProvider>{children}</LogAnalysisCapabilitiesProvider>
</LogSourceProvider>
);

View file

@ -6,12 +6,12 @@
*/
import { useCallback, useMemo } from 'react';
import { LogSourceConfigurationProperties } from '../../../containers/logs/log_source';
import { ResolvedLogSourceConfiguration } from '../../../../common/log_sources';
import { useLogIndicesConfigurationFormState } from './indices_configuration_form_state';
import { useLogColumnsConfigurationFormState } from './log_columns_configuration_form_state';
export const useLogSourceConfigurationFormState = (
configuration?: LogSourceConfigurationProperties
configuration?: ResolvedLogSourceConfiguration
) => {
const indicesConfigurationFormState = useLogIndicesConfigurationFormState({
initialFormState: useMemo(
@ -20,9 +20,9 @@ export const useLogSourceConfigurationFormState = (
? {
name: configuration.name,
description: configuration.description,
logAlias: configuration.logAlias,
tiebreakerField: configuration.fields.tiebreaker,
timestampField: configuration.fields.timestamp,
logAlias: configuration.indices,
tiebreakerField: configuration.tiebreakerField,
timestampField: configuration.timestampField,
}
: undefined,
[configuration]
@ -34,7 +34,7 @@ export const useLogSourceConfigurationFormState = (
() =>
configuration
? {
logColumns: configuration.logColumns,
logColumns: configuration.columns,
}
: undefined,
[configuration]

View file

@ -28,6 +28,7 @@ import { useLogSourceConfigurationFormState } from './source_configuration_form_
import { useLogSourceContext } from '../../../containers/logs/log_source';
import { SourceLoadingPage } from '../../../components/source_loading_page';
import { Prompt } from '../../../utils/navigation_warning_prompt';
import { LogSourceConfigurationPropertiesPatch } from '../../../../common/http_api/log_sources';
export const LogsSettingsPage = () => {
const uiCapabilities = useKibana().services.application?.capabilities;
@ -35,15 +36,15 @@ export const LogsSettingsPage = () => {
const {
sourceConfiguration: source,
sourceStatus,
isLoading,
isUninitialized,
updateSourceConfiguration,
resolvedSourceConfiguration,
} = useLogSourceContext();
const availableFields = useMemo(
() => sourceStatus?.logIndexFields.map((field) => field.name) ?? [],
[sourceStatus]
() => resolvedSourceConfiguration?.fields.map((field) => field.name) ?? [],
[resolvedSourceConfiguration]
);
const {
@ -56,10 +57,24 @@ export const LogsSettingsPage = () => {
isFormDirty,
isFormValid,
formStateChanges,
} = useLogSourceConfigurationFormState(source?.configuration);
} = useLogSourceConfigurationFormState(resolvedSourceConfiguration);
const persistUpdates = useCallback(async () => {
await updateSourceConfiguration(formStateChanges);
// NOTE / TODO: This is just a temporary workaround until this work is merged with the corresponding UI branch.
// Otherwise we would be duplicating work changing the logAlias etc references twice.
const patchedProperties: LogSourceConfigurationPropertiesPatch & { logAlias?: string } = {
...formStateChanges,
...(formStateChanges.logAlias
? {
logIndices: {
type: 'index_name',
indexName: formStateChanges.logAlias,
},
}
: {}),
};
delete patchedProperties.logAlias;
await updateSourceConfiguration(patchedProperties);
resetForm();
}, [updateSourceConfiguration, resetForm, formStateChanges]);
@ -68,7 +83,7 @@ export const LogsSettingsPage = () => {
source,
]);
if ((isLoading || isUninitialized) && !source) {
if ((isLoading || isUninitialized) && !resolvedSourceConfiguration) {
return <SourceLoadingPage />;
}
if (!source?.configuration) {

View file

@ -35,7 +35,7 @@ import { datemathToEpochMillis, isValidDatemath } from '../../../utils/datemath'
const PAGE_THRESHOLD = 2;
export const LogsPageLogsContent: React.FunctionComponent = () => {
const { sourceConfiguration, sourceId } = useLogSourceContext();
const { resolvedSourceConfiguration, sourceConfiguration, sourceId } = useLogSourceContext();
const { textScale, textWrap } = useContext(LogViewConfiguration.Context);
const {
surroundingLogsId,
@ -218,7 +218,7 @@ export const LogsPageLogsContent: React.FunctionComponent = () => {
<PageContent key={`${sourceId}-${sourceConfiguration?.version}`}>
<ScrollableLogTextStreamView
columnConfigurations={
(sourceConfiguration && sourceConfiguration.configuration.logColumns) || []
(resolvedSourceConfiguration && resolvedSourceConfiguration.columns) || []
}
hasMoreAfterEnd={hasMoreAfterEnd}
hasMoreBeforeStart={hasMoreBeforeStart}

View file

@ -16,13 +16,17 @@ export const DEFAULT_SOURCE_CONFIGURATION: GetLogSourceConfigurationSuccessRespo
configuration: {
name: 'Default',
description: '',
logAlias: 'kibana_sample_data_logs*',
logIndices: {
type: 'index_pattern',
indexPatternId: 'some-test-id',
},
fields: {
container: 'container.id',
host: 'host.name',
pod: 'kubernetes.pod.uid',
tiebreaker: '_doc',
timestamp: '@timestamp',
message: ['message'],
},
logColumns: [
{

View file

@ -27,9 +27,7 @@ export const options: MetricsExplorerOptions = {
export const source = {
name: 'default',
description: '',
logAlias: 'filebeat-*',
metricAlias: 'metricbeat-*',
logColumns: [],
inventoryDefaultView: 'host',
metricsExplorerDefaultView: 'host',
fields: {

View file

@ -12,6 +12,7 @@ import { DEFAULT_SOURCE_ID } from '../../common/constants';
import { callFetchLogSourceConfigurationAPI } from '../containers/logs/log_source/api/fetch_log_source_configuration';
import { callFetchLogSourceStatusAPI } from '../containers/logs/log_source/api/fetch_log_source_status';
import { InfraClientCoreSetup, InfraClientStartDeps } from '../types';
import { resolveLogSourceConfiguration } from '../../common/log_sources';
interface StatsAggregation {
buckets: Array<{
@ -54,10 +55,15 @@ export function getLogsOverviewDataFetcher(
core.http.fetch
);
const resolvedLogSourceConfiguration = await resolveLogSourceConfiguration(
sourceConfiguration.data.configuration,
startPlugins.data.indexPatterns
);
const { stats, series } = await fetchLogsOverview(
{
index: sourceConfiguration.data.configuration.logAlias,
timestampField: sourceConfiguration.data.configuration.fields.timestamp,
index: resolvedLogSourceConfiguration.indices,
timestampField: resolvedLogSourceConfiguration.timestampField,
},
params,
data

View file

@ -68,7 +68,7 @@ describe('Logs UI Observability Homepage Functions', () => {
const { mockedGetStartServices } = setup();
mockedCallFetchLogSourceStatusAPI.mockResolvedValue({
data: { logIndexFields: [], logIndexStatus: 'available' },
data: { logIndexStatus: 'available' },
});
const hasData = getLogsHasDataFetcher(mockedGetStartServices);
@ -82,7 +82,7 @@ describe('Logs UI Observability Homepage Functions', () => {
const { mockedGetStartServices } = setup();
mockedCallFetchLogSourceStatusAPI.mockResolvedValue({
data: { logIndexFields: [], logIndexStatus: 'empty' },
data: { logIndexStatus: 'empty' },
});
const hasData = getLogsHasDataFetcher(mockedGetStartServices);
@ -96,7 +96,7 @@ describe('Logs UI Observability Homepage Functions', () => {
const { mockedGetStartServices } = setup();
mockedCallFetchLogSourceStatusAPI.mockResolvedValue({
data: { logIndexFields: [], logIndexStatus: 'missing' },
data: { logIndexStatus: 'missing' },
});
const hasData = getLogsHasDataFetcher(mockedGetStartServices);
@ -112,7 +112,10 @@ describe('Logs UI Observability Homepage Functions', () => {
mockedCallFetchLogSourceConfigurationAPI.mockResolvedValue({
data: {
configuration: {
logAlias: 'filebeat-*',
logIndices: {
type: 'index_pattern',
indexPatternId: 'some-test-id',
},
fields: { timestamp: '@timestamp', tiebreaker: '_doc' },
},
},

View file

@ -5,6 +5,7 @@
* 2.0.
*/
import { FieldSpec } from 'src/plugins/data/common';
import type { InfraPluginRequestHandlerContext } from '../../../types';
import { KibanaFramework } from '../framework/kibana_framework_adapter';
import { FieldsAdapter, IndexFieldDescriptor } from './adapter_types';
@ -20,11 +21,17 @@ export class FrameworkFieldsAdapter implements FieldsAdapter {
requestContext: InfraPluginRequestHandlerContext,
indices: string
): Promise<IndexFieldDescriptor[]> {
const indexPatternsService = this.framework.getIndexPatternsService(requestContext);
const indexPatternsService = await this.framework.getIndexPatternsServiceWithRequestContext(
requestContext
);
// NOTE: Unfortunately getFieldsForWildcard is typed to "any" here in the data plugin, FieldSpec is used below in the map.
const response = await indexPatternsService.getFieldsForWildcard({
pattern: indices,
allowNoIndex: true,
});
return response.map((field) => ({
return response.map((field: FieldSpec) => ({
...field,
displayable: true,
}));

View file

@ -12,6 +12,7 @@ import {
} from '@elastic/elasticsearch/api/requestParams';
import { TransportRequestParams } from '@elastic/elasticsearch/lib/Transport';
import { estypes } from '@elastic/elasticsearch';
import { SavedObjectsClientContract, ElasticsearchClient } from 'src/core/server';
import {
InfraRouteConfig,
InfraServerPluginSetupDeps,
@ -32,16 +33,23 @@ import {
import { RequestHandler } from '../../../../../../../src/core/server';
import { InfraConfig } from '../../../plugin';
import type { InfraPluginRequestHandlerContext } from '../../../types';
import { IndexPatternsFetcher, UI_SETTINGS } from '../../../../../../../src/plugins/data/server';
import { UI_SETTINGS } from '../../../../../../../src/plugins/data/server';
import { TimeseriesVisData } from '../../../../../../../src/plugins/vis_type_timeseries/server';
import { InfraServerPluginStartDeps } from './adapter_types';
export class KibanaFramework {
public router: IRouter<InfraPluginRequestHandlerContext>;
public plugins: InfraServerPluginSetupDeps;
private core: CoreSetup<InfraServerPluginStartDeps>;
constructor(core: CoreSetup, config: InfraConfig, plugins: InfraServerPluginSetupDeps) {
constructor(
core: CoreSetup<InfraServerPluginStartDeps>,
config: InfraConfig,
plugins: InfraServerPluginSetupDeps
) {
this.router = core.http.createRouter();
this.plugins = plugins;
this.core = core;
}
public registerRoute<Params = any, Query = any, Body = any, Method extends RouteMethod = any>(
@ -195,10 +203,31 @@ export class KibanaFramework {
return apiResult ? (await apiResult).body : undefined;
}
public getIndexPatternsService(
public async getIndexPatternsServiceWithRequestContext(
requestContext: InfraPluginRequestHandlerContext
): IndexPatternsFetcher {
return new IndexPatternsFetcher(requestContext.core.elasticsearch.client.asCurrentUser, true);
) {
return await this.createIndexPatternsService(
requestContext.core.savedObjects.client,
requestContext.core.elasticsearch.client.asCurrentUser
);
}
public async getIndexPatternsService(
savedObjectsClient: SavedObjectsClientContract,
elasticsearchClient: ElasticsearchClient
) {
return await this.createIndexPatternsService(savedObjectsClient, elasticsearchClient);
}
private async createIndexPatternsService(
savedObjectsClient: SavedObjectsClientContract,
elasticsearchClient: ElasticsearchClient
) {
const [, startPlugins] = await this.core.getStartServices();
return startPlugins.data.indexPatterns.indexPatternsServiceFactory(
savedObjectsClient,
elasticsearchClient
);
}
public getSpaceId(request: KibanaRequest): string {

View file

@ -21,9 +21,9 @@ import {
LogSummaryBucket,
LOG_ENTRIES_PAGE_SIZE,
} from '../../domains/log_entries_domain';
import { InfraSourceConfiguration } from '../../sources';
import { SortedSearchHit } from '../framework';
import { KibanaFramework } from '../framework/kibana_framework_adapter';
import { ResolvedLogSourceConfiguration } from '../../../../common/log_sources';
const TIMESTAMP_FORMAT = 'epoch_millis';
@ -32,7 +32,7 @@ export class InfraKibanaLogEntriesAdapter implements LogEntriesAdapter {
public async getLogEntries(
requestContext: InfraPluginRequestHandlerContext,
sourceConfiguration: InfraSourceConfiguration,
resolvedLogSourceConfiguration: ResolvedLogSourceConfiguration,
fields: string[],
params: LogEntriesParams
): Promise<{ documents: LogEntryDocument[]; hasMoreBefore?: boolean; hasMoreAfter?: boolean }> {
@ -64,13 +64,13 @@ export class InfraKibanaLogEntriesAdapter implements LogEntriesAdapter {
: {};
const sort = {
[sourceConfiguration.fields.timestamp]: sortDirection,
[sourceConfiguration.fields.tiebreaker]: sortDirection,
[resolvedLogSourceConfiguration.timestampField]: sortDirection,
[resolvedLogSourceConfiguration.tiebreakerField]: sortDirection,
};
const esQuery = {
allowNoIndices: true,
index: sourceConfiguration.logAlias,
index: resolvedLogSourceConfiguration.indices,
ignoreUnavailable: true,
body: {
size: size + 1, // Extra one to test if it has more before or after
@ -83,7 +83,7 @@ export class InfraKibanaLogEntriesAdapter implements LogEntriesAdapter {
...createFilterClauses(query, highlightQuery),
{
range: {
[sourceConfiguration.fields.timestamp]: {
[resolvedLogSourceConfiguration.timestampField]: {
gte: startTimestamp,
lte: endTimestamp,
format: TIMESTAMP_FORMAT,
@ -125,7 +125,7 @@ export class InfraKibanaLogEntriesAdapter implements LogEntriesAdapter {
public async getContainedLogSummaryBuckets(
requestContext: InfraPluginRequestHandlerContext,
sourceConfiguration: InfraSourceConfiguration,
resolvedLogSourceConfiguration: ResolvedLogSourceConfiguration,
startTimestamp: number,
endTimestamp: number,
bucketSize: number,
@ -139,13 +139,13 @@ export class InfraKibanaLogEntriesAdapter implements LogEntriesAdapter {
const query = {
allowNoIndices: true,
index: sourceConfiguration.logAlias,
index: resolvedLogSourceConfiguration.indices,
ignoreUnavailable: true,
body: {
aggregations: {
count_by_date: {
date_range: {
field: sourceConfiguration.fields.timestamp,
field: resolvedLogSourceConfiguration.timestampField,
format: TIMESTAMP_FORMAT,
ranges: bucketIntervalStarts.map((bucketIntervalStart) => ({
from: bucketIntervalStart.getTime(),
@ -157,8 +157,8 @@ export class InfraKibanaLogEntriesAdapter implements LogEntriesAdapter {
top_hits: {
size: 1,
sort: [
{ [sourceConfiguration.fields.timestamp]: 'asc' },
{ [sourceConfiguration.fields.tiebreaker]: 'asc' },
{ [resolvedLogSourceConfiguration.timestampField]: 'asc' },
{ [resolvedLogSourceConfiguration.tiebreakerField]: 'asc' },
],
_source: false,
},
@ -172,7 +172,7 @@ export class InfraKibanaLogEntriesAdapter implements LogEntriesAdapter {
...createQueryFilterClauses(filterQuery),
{
range: {
[sourceConfiguration.fields.timestamp]: {
[resolvedLogSourceConfiguration.timestampField]: {
gte: startTimestamp,
lte: endTimestamp,
format: TIMESTAMP_FORMAT,

View file

@ -70,7 +70,8 @@ export const createInventoryMetricThresholdExecutor = (libs: InfraBackendLibs) =
const logQueryFields = await libs.getLogQueryFields(
sourceId || 'default',
services.savedObjectsClient
services.savedObjectsClient,
services.scopedClusterClient.asCurrentUser
);
const compositeSize = libs.configuration.inventory.compositeSize;

View file

@ -7,7 +7,6 @@
import { i18n } from '@kbn/i18n';
import type { InfraPluginRequestHandlerContext } from '../../../types';
import { InfraSource } from '../../sources';
import { KibanaFramework } from '../../adapters/framework/kibana_framework_adapter';
import {
GetLogAlertsChartPreviewDataAlertParamsSubset,
@ -26,18 +25,19 @@ import {
GroupedSearchQueryResponseRT,
} from '../../../../common/alerting/logs/log_threshold/types';
import { decodeOrThrow } from '../../../../common/runtime_types';
import { ResolvedLogSourceConfiguration } from '../../../../common/log_sources';
const COMPOSITE_GROUP_SIZE = 40;
export async function getChartPreviewData(
requestContext: InfraPluginRequestHandlerContext,
sourceConfiguration: InfraSource,
resolvedLogSourceConfiguration: ResolvedLogSourceConfiguration,
callWithRequest: KibanaFramework['callWithRequest'],
alertParams: GetLogAlertsChartPreviewDataAlertParamsSubset,
buckets: number
) {
const indexPattern = sourceConfiguration.configuration.logAlias;
const timestampField = sourceConfiguration.configuration.fields.timestamp;
const indexPattern = resolvedLogSourceConfiguration.indices;
const timestampField = resolvedLogSourceConfiguration.timestampField;
const { groupBy, timeSize, timeUnit } = alertParams;
const isGrouped = groupBy && groupBy.length > 0 ? true : false;

View file

@ -40,6 +40,7 @@ import { InfraBackendLibs } from '../../infra_types';
import { getIntervalInSeconds } from '../../../utils/get_interval_in_seconds';
import { decodeOrThrow } from '../../../../common/runtime_types';
import { UNGROUPED_FACTORY_KEY } from '../common/utils';
import { resolveLogSourceConfiguration } from '../../../../common/log_sources';
type LogThresholdActionGroups = ActionGroupIdsOf<typeof FIRED_ACTIONS>;
type LogThresholdAlertServices = AlertServices<
@ -72,8 +73,15 @@ export const createLogThresholdExecutor = (libs: InfraBackendLibs) =>
const { sources } = libs;
const sourceConfiguration = await sources.getSourceConfiguration(savedObjectsClient, 'default');
const indexPattern = sourceConfiguration.configuration.logAlias;
const timestampField = sourceConfiguration.configuration.fields.timestamp;
const resolvedLogSourceConfiguration = await resolveLogSourceConfiguration(
sourceConfiguration.configuration,
await libs.framework.getIndexPatternsService(
savedObjectsClient,
scopedClusterClient.asCurrentUser
)
);
const indexPattern = resolvedLogSourceConfiguration.indices;
const timestampField = resolvedLogSourceConfiguration.timestampField;
try {
const validatedParams = decodeOrThrow(alertParamsRT)(params);

View file

@ -13,7 +13,10 @@ export const libsMock = {
return Promise.resolve({
id: sourceId,
configuration: {
logAlias: 'filebeat-*',
logIndices: {
type: 'index_pattern',
indexPatternId: 'some-id',
},
fields: { timestamp: '@timestamp' },
},
});

View file

@ -18,17 +18,14 @@ export class InfraFieldsDomain {
public async getFields(
requestContext: InfraPluginRequestHandlerContext,
sourceId: string,
indexType: 'LOGS' | 'METRICS'
indexType: 'METRICS'
): Promise<InfraSourceIndexField[]> {
const { configuration } = await this.libs.sources.getSourceConfiguration(
requestContext.core.savedObjects.client,
sourceId
);
const fields = await this.adapter.getIndexFields(
requestContext,
indexType === 'LOGS' ? configuration.logAlias : configuration.metricAlias
);
const fields = await this.adapter.getIndexFields(requestContext, configuration.metricAlias);
return fields;
}

View file

@ -12,7 +12,11 @@ import {
LogEntriesSummaryBucket,
LogEntriesSummaryHighlightsBucket,
} from '../../../../common/http_api';
import { LogSourceColumnConfiguration } from '../../../../common/http_api/log_sources';
import {
LogSourceColumnConfiguration,
ResolvedLogSourceConfiguration,
resolveLogSourceConfiguration,
} from '../../../../common/log_sources';
import { LogColumn, LogEntryCursor, LogEntry } from '../../../../common/log_entry';
import {
InfraSourceConfiguration,
@ -137,7 +141,10 @@ export class InfraLogEntriesDomain {
requestContext.core.savedObjects.client,
sourceId
);
const resolvedLogSourceConfiguration = await resolveLogSourceConfiguration(
configuration,
await this.libs.framework.getIndexPatternsServiceWithRequestContext(requestContext)
);
const columnDefinitions = columnOverrides ?? configuration.logColumns;
const messageFormattingRules = compileFormattingRules(
@ -148,7 +155,7 @@ export class InfraLogEntriesDomain {
const { documents, hasMoreBefore, hasMoreAfter } = await this.adapter.getLogEntries(
requestContext,
configuration,
resolvedLogSourceConfiguration,
requiredFields,
params
);
@ -199,9 +206,13 @@ export class InfraLogEntriesDomain {
requestContext.core.savedObjects.client,
sourceId
);
const resolvedLogSourceConfiguration = await resolveLogSourceConfiguration(
configuration,
await this.libs.framework.getIndexPatternsServiceWithRequestContext(requestContext)
);
const dateRangeBuckets = await this.adapter.getContainedLogSummaryBuckets(
requestContext,
configuration,
resolvedLogSourceConfiguration,
start,
end,
bucketSize,
@ -223,6 +234,10 @@ export class InfraLogEntriesDomain {
requestContext.core.savedObjects.client,
sourceId
);
const resolvedLogSourceConfiguration = await resolveLogSourceConfiguration(
configuration,
await this.libs.framework.getIndexPatternsServiceWithRequestContext(requestContext)
);
const messageFormattingRules = compileFormattingRules(
getBuiltinRules(configuration.fields.message)
);
@ -240,7 +255,7 @@ export class InfraLogEntriesDomain {
: highlightQuery;
const summaryBuckets = await this.adapter.getContainedLogSummaryBuckets(
requestContext,
configuration,
resolvedLogSourceConfiguration,
startTimestamp,
endTimestamp,
bucketSize,
@ -299,14 +314,14 @@ export class InfraLogEntriesDomain {
export interface LogEntriesAdapter {
getLogEntries(
requestContext: InfraPluginRequestHandlerContext,
sourceConfiguration: InfraSourceConfiguration,
resolvedLogSourceConfiguration: ResolvedLogSourceConfiguration,
fields: string[],
params: LogEntriesParams
): Promise<{ documents: LogEntryDocument[]; hasMoreBefore?: boolean; hasMoreAfter?: boolean }>;
getContainedLogSummaryBuckets(
requestContext: InfraPluginRequestHandlerContext,
sourceConfiguration: InfraSourceConfiguration,
resolvedLogSourceConfiguration: ResolvedLogSourceConfiguration,
startTimestamp: number,
endTimestamp: number,
bucketSize: number,

View file

@ -7,6 +7,7 @@
import type { InfraPluginRequestHandlerContext } from '../types';
import { InfraSources } from './sources';
import { ResolvedLogSourceConfiguration } from '../../common/log_sources';
export class InfraSourceStatus {
constructor(
@ -14,20 +15,6 @@ export class InfraSourceStatus {
private readonly libs: { sources: InfraSources }
) {}
public async getLogIndexNames(
requestContext: InfraPluginRequestHandlerContext,
sourceId: string
): Promise<string[]> {
const sourceConfiguration = await this.libs.sources.getSourceConfiguration(
requestContext.core.savedObjects.client,
sourceId
);
const indexNames = await this.adapter.getIndexNames(
requestContext,
sourceConfiguration.configuration.logAlias
);
return indexNames;
}
public async getMetricIndexNames(
requestContext: InfraPluginRequestHandlerContext,
sourceId: string
@ -42,20 +29,6 @@ export class InfraSourceStatus {
);
return indexNames;
}
public async hasLogAlias(
requestContext: InfraPluginRequestHandlerContext,
sourceId: string
): Promise<boolean> {
const sourceConfiguration = await this.libs.sources.getSourceConfiguration(
requestContext.core.savedObjects.client,
sourceId
);
const hasAlias = await this.adapter.hasAlias(
requestContext,
sourceConfiguration.configuration.logAlias
);
return hasAlias;
}
public async hasMetricAlias(
requestContext: InfraPluginRequestHandlerContext,
sourceId: string
@ -72,15 +45,11 @@ export class InfraSourceStatus {
}
public async getLogIndexStatus(
requestContext: InfraPluginRequestHandlerContext,
sourceId: string
resolvedLogSourceConfiguration: ResolvedLogSourceConfiguration
): Promise<SourceIndexStatus> {
const sourceConfiguration = await this.libs.sources.getSourceConfiguration(
requestContext.core.savedObjects.client,
sourceId
);
const indexStatus = await this.adapter.getIndexStatus(
requestContext,
sourceConfiguration.configuration.logAlias
resolvedLogSourceConfiguration.indices
);
return indexStatus;
}

View file

@ -16,7 +16,10 @@ export const defaultSourceConfiguration: InfraSourceConfiguration = {
name: 'Default',
description: '',
metricAlias: METRICS_INDEX_PATTERN,
logAlias: LOGS_INDEX_PATTERN,
logIndices: {
type: 'index_name',
indexName: LOGS_INDEX_PATTERN,
},
fields: {
container: 'container.id',
host: 'host.name',

View file

@ -0,0 +1,62 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { migrationMocks } from 'src/core/server/mocks';
import { convertLogAliasToLogIndices } from './7_13_0_convert_log_alias_to_log_indices';
import { infraSourceConfigurationSavedObjectName } from '../saved_object_type';
describe('infra source configuration migration function for 7.13.0', () => {
test('migrates the logAlias property to logIndices', () => {
const unmigratedConfiguration = createTestSourceConfiguration({
logAlias: 'filebeat-*',
});
const migratedConfiguration = convertLogAliasToLogIndices(
unmigratedConfiguration as any,
migrationMocks.createContext()
);
expect(migratedConfiguration).toStrictEqual(
createTestSourceConfiguration({
logIndices: {
type: 'index_name',
indexName: 'filebeat-*',
},
})
);
});
});
/**
 * Builds a minimal `infrastructure-ui-source` saved-object fixture for the
 * migration tests. Extra attributes (e.g. `logAlias` or `logIndices`) can be
 * merged in — and existing ones overridden — via `additionalProperties`.
 */
const createTestSourceConfiguration = (additionalProperties = {}) => {
  const baseAttributes = {
    name: 'TEST CONFIGURATION',
    description: '',
    fields: {
      pod: 'TEST POD FIELD',
      host: 'TEST HOST FIELD',
      message: ['TEST MESSAGE FIELD'],
      container: 'TEST CONTAINER FIELD',
      timestamp: 'TEST TIMESTAMP FIELD',
      tiebreaker: 'TEST TIEBREAKER FIELD',
    },
    inventoryDefaultView: '0',
    metricsExplorerDefaultView: '0',
    logColumns: [
      {
        fieldColumn: {
          id: 'TEST FIELD COLUMN ID',
          field: 'TEST FIELD COLUMN FIELD',
        },
      },
    ],
    metricAlias: 'metricbeat-*,metrics-*',
    anomalyThreshold: 20,
  };

  return {
    attributes: {
      ...baseAttributes,
      ...additionalProperties,
    },
    id: 'TEST_ID',
    type: infraSourceConfigurationSavedObjectName,
  };
};

View file

@ -0,0 +1,34 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { SavedObjectMigrationFn } from 'src/core/server';
import { InfraSourceConfiguration } from '../../../../common/source_configuration/source_configuration';
import { LOGS_INDEX_PATTERN } from '../../../../common/constants';
/**
 * Shape of a pre-7.13.0 source configuration: it carries the legacy `logAlias`
 * string instead of the structured `logIndices` reference. `logAlias` is
 * optional because older documents may never have had it persisted — the
 * migration falls back to the default logs index pattern in that case.
 */
type SevenTwelveZeroSourceConfig = Omit<InfraSourceConfiguration, 'logIndices'> & {
  logAlias?: string;
};

/**
 * 7.13.0 saved-object migration: replaces the legacy `logAlias` attribute with
 * an equivalent `logIndices` entry of type `index_name`, dropping `logAlias`
 * from the document. All other attributes are preserved unchanged.
 */
export const convertLogAliasToLogIndices: SavedObjectMigrationFn<
  SevenTwelveZeroSourceConfig,
  InfraSourceConfiguration
> = (sourceConfigurationDocument) => {
  const { logAlias, ...otherAttributes } = sourceConfigurationDocument.attributes;

  const newAttributes: InfraSourceConfiguration = {
    ...otherAttributes,
    logIndices: {
      type: 'index_name',
      // Documents without a stored alias fall back to the stack default.
      indexName: logAlias ?? LOGS_INDEX_PATTERN,
    },
  };

  return {
    ...sourceConfigurationDocument,
    attributes: newAttributes,
  };
};

View file

@ -8,9 +8,13 @@
import { SavedObjectMigrationFn } from 'src/core/server';
import { InfraSourceConfiguration } from '../../../../common/source_configuration/source_configuration';
type SevenNineZeroSourceConfig = Omit<InfraSourceConfiguration, 'logIndices'> & {
logAlias: string;
};
export const addNewIndexingStrategyIndexNames: SavedObjectMigrationFn<
InfraSourceConfiguration,
InfraSourceConfiguration
SevenNineZeroSourceConfig,
SevenNineZeroSourceConfig
> = (sourceConfigurationDocument) => {
const oldLogAliasSegments = sourceConfigurationDocument.attributes.logAlias.split(',');
const oldMetricAliasSegments = sourceConfigurationDocument.attributes.metricAlias.split(',');

View file

@ -7,6 +7,7 @@
import { SavedObjectsType } from 'src/core/server';
import { addNewIndexingStrategyIndexNames } from './migrations/7_9_0_add_new_indexing_strategy_index_names';
import { convertLogAliasToLogIndices } from './migrations/7_13_0_convert_log_alias_to_log_indices';
export const infraSourceConfigurationSavedObjectName = 'infrastructure-ui-source';
@ -23,5 +24,6 @@ export const infraSourceConfigurationSavedObjectType: SavedObjectsType = {
},
migrations: {
'7.9.0': addNewIndexingStrategyIndexNames,
'7.13.0': convertLogAliasToLogIndices,
},
};

View file

@ -20,7 +20,7 @@ describe('the InfraSources lib', () => {
updated_at: '2000-01-01T00:00:00.000Z',
attributes: {
metricAlias: 'METRIC_ALIAS',
logAlias: 'LOG_ALIAS',
logIndices: { type: 'index_pattern', indexPatternId: 'LOG_ALIAS' },
fields: {
container: 'CONTAINER',
host: 'HOST',
@ -39,7 +39,7 @@ describe('the InfraSources lib', () => {
updatedAt: 946684800000,
configuration: {
metricAlias: 'METRIC_ALIAS',
logAlias: 'LOG_ALIAS',
logIndices: { type: 'index_pattern', indexPatternId: 'LOG_ALIAS' },
fields: {
container: 'CONTAINER',
host: 'HOST',
@ -56,7 +56,7 @@ describe('the InfraSources lib', () => {
config: createMockStaticConfiguration({
default: {
metricAlias: 'METRIC_ALIAS',
logAlias: 'LOG_ALIAS',
logIndices: { type: 'index_pattern', indexPatternId: 'LOG_ALIAS' },
fields: {
host: 'HOST',
pod: 'POD',
@ -86,7 +86,7 @@ describe('the InfraSources lib', () => {
updatedAt: 946684800000,
configuration: {
metricAlias: 'METRIC_ALIAS',
logAlias: 'LOG_ALIAS',
logIndices: { type: 'index_pattern', indexPatternId: 'LOG_ALIAS' },
fields: {
container: 'CONTAINER',
host: 'HOST',
@ -118,7 +118,7 @@ describe('the InfraSources lib', () => {
updatedAt: 946684800000,
configuration: {
metricAlias: expect.any(String),
logAlias: expect.any(String),
logIndices: expect.any(Object),
fields: {
container: expect.any(String),
host: expect.any(String),

View file

@ -5,7 +5,6 @@
* 2.0.
*/
import * as runtimeTypes from 'io-ts';
import { failure } from 'io-ts/lib/PathReporter';
import { identity, constant } from 'fp-ts/lib/function';
import { pipe } from 'fp-ts/lib/pipeable';
@ -21,8 +20,9 @@ import {
InfraStaticSourceConfiguration,
pickSavedSourceConfiguration,
SourceConfigurationSavedObjectRuntimeType,
StaticSourceConfigurationRuntimeType,
InfraSource,
sourceConfigurationConfigFilePropertiesRT,
SourceConfigurationConfigFileProperties,
} from '../../../common/source_configuration/source_configuration';
import { InfraConfig } from '../../../server';
@ -199,19 +199,32 @@ export class InfraSources {
}
private async getStaticDefaultSourceConfiguration() {
const staticSourceConfiguration = pipe(
runtimeTypes
.type({
sources: runtimeTypes.type({
default: StaticSourceConfigurationRuntimeType,
}),
})
.decode(this.libs.config),
const staticSourceConfiguration: SourceConfigurationConfigFileProperties['sources']['default'] = pipe(
sourceConfigurationConfigFilePropertiesRT.decode(this.libs.config),
map(({ sources: { default: defaultConfiguration } }) => defaultConfiguration),
fold(constant({}), identity)
);
return mergeSourceConfiguration(defaultSourceConfiguration, staticSourceConfiguration);
// NOTE: Legacy logAlias needs converting to a logIndices reference until we can remove
// config file sources in 8.0.0.
if (staticSourceConfiguration && staticSourceConfiguration.logAlias) {
const convertedStaticSourceConfiguration: InfraStaticSourceConfiguration & {
logAlias?: string;
} = {
...staticSourceConfiguration,
logIndices: {
type: 'index_name',
indexName: staticSourceConfiguration.logAlias,
},
};
delete convertedStaticSourceConfiguration.logAlias;
return mergeSourceConfiguration(
defaultSourceConfiguration,
convertedStaticSourceConfiguration
);
} else {
return mergeSourceConfiguration(defaultSourceConfiguration, staticSourceConfiguration);
}
}
private async getSavedSourceConfiguration(

View file

@ -43,7 +43,7 @@ export const config = {
schema.object({
default: schema.maybe(
schema.object({
logAlias: schema.maybe(schema.string()),
logAlias: schema.maybe(schema.string()), // NOTE / TODO: Should be deprecated in 8.0.0
metricAlias: schema.maybe(schema.string()),
fields: schema.maybe(
schema.object({
@ -124,7 +124,7 @@ export class InfraServerPlugin implements Plugin<InfraPluginSetup> {
sources,
sourceStatus,
...domainLibs,
getLogQueryFields: createGetLogQueryFields(sources),
getLogQueryFields: createGetLogQueryFields(sources, framework),
handleEsError,
};

View file

@ -86,7 +86,8 @@ export const initAlertPreviewRoute = ({
case METRIC_INVENTORY_THRESHOLD_ALERT_TYPE_ID: {
const logQueryFields = await getLogQueryFields(
sourceId || 'default',
requestContext.core.savedObjects.client
requestContext.core.savedObjects.client,
requestContext.core.elasticsearch.client.asCurrentUser
);
const {
nodeType,

View file

@ -14,6 +14,7 @@ import {
} from '../../../common/http_api/log_alerts/chart_preview_data';
import { createValidationFunction } from '../../../common/runtime_types';
import { getChartPreviewData } from '../../lib/alerting/log_threshold/log_threshold_chart_preview';
import { resolveLogSourceConfiguration } from '../../../common/log_sources';
export const initGetLogAlertsChartPreviewDataRoute = ({ framework, sources }: InfraBackendLibs) => {
framework.registerRoute(
@ -29,15 +30,20 @@ export const initGetLogAlertsChartPreviewDataRoute = ({ framework, sources }: In
data: { sourceId, buckets, alertParams },
} = request.body;
const sourceConfiguration = await sources.getSourceConfiguration(
const { configuration } = await sources.getSourceConfiguration(
requestContext.core.savedObjects.client,
sourceId
);
const resolvedLogSourceConfiguration = await resolveLogSourceConfiguration(
configuration,
await framework.getIndexPatternsServiceWithRequestContext(requestContext)
);
try {
const { series } = await getChartPreviewData(
requestContext,
sourceConfiguration,
resolvedLogSourceConfiguration,
framework.callWithRequest,
alertParams,
buckets

View file

@ -13,11 +13,13 @@ import {
} from '../../../common/http_api/log_sources';
import { createValidationFunction } from '../../../common/runtime_types';
import { InfraBackendLibs } from '../../lib/infra_types';
import { resolveLogSourceConfiguration } from '../../../common/log_sources';
export const initLogSourceStatusRoutes = ({
framework,
sourceStatus,
fields,
sources,
}: InfraBackendLibs) => {
framework.registerRoute(
{
@ -31,16 +33,24 @@ export const initLogSourceStatusRoutes = ({
const { sourceId } = request.params;
try {
const logIndexStatus = await sourceStatus.getLogIndexStatus(requestContext, sourceId);
const logIndexFields =
logIndexStatus !== 'missing'
? await fields.getFields(requestContext, sourceId, 'LOGS')
: [];
const sourceConfiguration = await sources.getSourceConfiguration(
requestContext.core.savedObjects.client,
sourceId
);
const resolvedLogSourceConfiguration = await resolveLogSourceConfiguration(
sourceConfiguration.configuration,
await framework.getIndexPatternsServiceWithRequestContext(requestContext)
);
const logIndexStatus = await sourceStatus.getLogIndexStatus(
requestContext,
resolvedLogSourceConfiguration
);
return response.ok({
body: getLogSourceStatusSuccessResponsePayloadRT.encode({
data: {
logIndexFields,
logIndexStatus,
},
}),

View file

@ -43,7 +43,8 @@ export const initSnapshotRoute = (libs: InfraBackendLibs) => {
const compositeSize = libs.configuration.inventory.compositeSize;
const logQueryFields = await libs.getLogQueryFields(
snapshotRequest.sourceId,
requestContext.core.savedObjects.client
requestContext.core.savedObjects.client,
requestContext.core.elasticsearch.client.asCurrentUser
);
UsageCollector.countNode(snapshotRequest.nodeType);

View file

@ -42,7 +42,10 @@ const source: InfraSource = {
name: 'Default',
description: '',
metricAlias: 'metrics-*,metricbeat-*',
logAlias: 'logs-*,filebeat-*,kibana_sample_data_logs*',
logIndices: {
type: 'index_pattern',
indexPatternId: 'kibana_index_pattern',
},
fields: {
container: 'container.id',
host: 'host.name',

View file

@ -25,6 +25,7 @@ import {
logEntriesSearchRequestStateRT,
logEntriesSearchStrategyProvider,
} from './log_entries_search_strategy';
import { getIndexPatternsMock } from './mocks';
describe('LogEntries search strategy', () => {
it('handles initial search requests', async () => {
@ -102,7 +103,7 @@ describe('LogEntries search strategy', () => {
fields: {
'@timestamp': [1605116827143],
'event.dataset': ['HIT_DATASET'],
MESSAGE_FIELD: ['HIT_MESSAGE'],
message: ['HIT_MESSAGE'],
'container.id': ['HIT_CONTAINER_ID'],
},
sort: [1605116827143 as any, 1 as any], // incorrectly typed as string upstream
@ -167,7 +168,7 @@ describe('LogEntries search strategy', () => {
columnId: 'MESSAGE_COLUMN_ID',
message: [
{
field: 'MESSAGE_FIELD',
field: 'message',
value: ['HIT_MESSAGE'],
highlights: [],
},
@ -255,7 +256,10 @@ const createSourceConfigurationMock = (): InfraSource => ({
configuration: {
name: 'SOURCE_NAME',
description: 'SOURCE_DESCRIPTION',
logAlias: 'log-indices-*',
logIndices: {
type: 'index_pattern',
indexPatternId: 'some-test-id',
},
metricAlias: 'metric-indices-*',
inventoryDefaultView: 'DEFAULT_VIEW',
metricsExplorerDefaultView: 'DEFAULT_VIEW',
@ -319,4 +323,5 @@ const createDataPluginMock = (esSearchStrategyMock: ISearchStrategy): any => ({
search: {
getSearchStrategy: jest.fn().mockReturnValue(esSearchStrategyMock),
},
indexPatterns: getIndexPatternsMock(),
});

View file

@ -21,7 +21,7 @@ import type {
import {
LogSourceColumnConfiguration,
logSourceFieldColumnConfigurationRT,
} from '../../../common/http_api/log_sources';
} from '../../../common/log_sources';
import {
getLogEntryCursorFromHit,
LogColumn,
@ -56,6 +56,7 @@ import {
getSortDirection,
LogEntryHit,
} from './queries/log_entries';
import { resolveLogSourceConfiguration } from '../../../common/log_sources';
type LogEntriesSearchRequest = IKibanaSearchRequest<LogEntriesSearchRequestParams>;
type LogEntriesSearchResponse = IKibanaSearchResponse<LogEntriesSearchResponsePayload>;
@ -74,15 +75,26 @@ export const logEntriesSearchStrategyProvider = ({
defer(() => {
const request = decodeOrThrow(asyncRequestRT)(rawRequest);
const sourceConfiguration$ = defer(() =>
sources.getSourceConfiguration(dependencies.savedObjectsClient, request.params.sourceId)
const resolvedSourceConfiguration$ = defer(() =>
forkJoin([
sources.getSourceConfiguration(
dependencies.savedObjectsClient,
request.params.sourceId
),
data.indexPatterns.indexPatternsServiceFactory(
dependencies.savedObjectsClient,
dependencies.esClient.asCurrentUser
),
]).pipe(
concatMap(([sourceConfiguration, indexPatternsService]) =>
resolveLogSourceConfiguration(sourceConfiguration.configuration, indexPatternsService)
)
)
).pipe(take(1), shareReplay(1));
const messageFormattingRules$ = defer(() =>
sourceConfiguration$.pipe(
map(({ configuration }) =>
compileFormattingRules(getBuiltinRules(configuration.fields.message))
)
resolvedSourceConfiguration$.pipe(
map(({ messageField }) => compileFormattingRules(getBuiltinRules(messageField)))
)
).pipe(take(1), shareReplay(1));
@ -94,23 +106,23 @@ export const logEntriesSearchStrategyProvider = ({
const initialRequest$ = of(request).pipe(
filter(asyncInitialRequestRT.is),
concatMap(({ params }) =>
forkJoin([sourceConfiguration$, messageFormattingRules$]).pipe(
forkJoin([resolvedSourceConfiguration$, messageFormattingRules$]).pipe(
map(
([{ configuration }, messageFormattingRules]): IEsSearchRequest => {
([
{ indices, timestampField, tiebreakerField, columns },
messageFormattingRules,
]): IEsSearchRequest => {
return {
// @ts-expect-error @elastic/elasticsearch declares indices_boost as Record<string, number>
params: createGetLogEntriesQuery(
configuration.logAlias,
indices,
params.startTimestamp,
params.endTimestamp,
pickRequestCursor(params),
params.size + 1,
configuration.fields.timestamp,
configuration.fields.tiebreaker,
getRequiredFields(
params.columns ?? configuration.logColumns,
messageFormattingRules
),
timestampField,
tiebreakerField,
getRequiredFields(params.columns ?? columns, messageFormattingRules),
params.query,
params.highlightPhrase
),
@ -126,18 +138,17 @@ export const logEntriesSearchStrategyProvider = ({
concatMap((esRequest) => esSearchStrategy.search(esRequest, options, dependencies))
);
return combineLatest([searchResponse$, sourceConfiguration$, messageFormattingRules$]).pipe(
map(([esResponse, { configuration }, messageFormattingRules]) => {
return combineLatest([
searchResponse$,
resolvedSourceConfiguration$,
messageFormattingRules$,
]).pipe(
map(([esResponse, { columns }, messageFormattingRules]) => {
const rawResponse = decodeOrThrow(getLogEntriesResponseRT)(esResponse.rawResponse);
const entries = rawResponse.hits.hits
.slice(0, request.params.size)
.map(
getLogEntryFromHit(
request.params.columns ?? configuration.logColumns,
messageFormattingRules
)
);
.map(getLogEntryFromHit(request.params.columns ?? columns, messageFormattingRules));
const sortDirection = getSortDirection(pickRequestCursor(request.params));

View file

@ -18,12 +18,14 @@ import {
ISearchStrategy,
SearchStrategyDependencies,
} from 'src/plugins/data/server';
import { getIndexPatternsMock } from './mocks';
import { createInfraSourcesMock } from '../../lib/sources/mocks';
import {
logEntrySearchRequestStateRT,
logEntrySearchStrategyProvider,
} from './log_entry_search_strategy';
import { createSearchSessionsClientMock } from '../../../../../../src/plugins/data/server/search/mocks';
import { InfraSource } from '../../../common/source_configuration/source_configuration';
describe('LogEntry search strategy', () => {
it('handles initial search requests', async () => {
@ -197,13 +199,16 @@ describe('LogEntry search strategy', () => {
});
});
const createSourceConfigurationMock = () => ({
const createSourceConfigurationMock = (): InfraSource => ({
id: 'SOURCE_ID',
origin: 'stored' as const,
configuration: {
name: 'SOURCE_NAME',
description: 'SOURCE_DESCRIPTION',
logAlias: 'log-indices-*',
logIndices: {
type: 'index_pattern',
indexPatternId: 'some-test-id',
},
metricAlias: 'metric-indices-*',
inventoryDefaultView: 'DEFAULT_VIEW',
metricsExplorerDefaultView: 'DEFAULT_VIEW',
@ -256,4 +261,5 @@ const createDataPluginMock = (esSearchStrategyMock: ISearchStrategy): any => ({
search: {
getSearchStrategy: jest.fn().mockReturnValue(esSearchStrategyMock),
},
indexPatterns: getIndexPatternsMock(),
});

View file

@ -6,7 +6,7 @@
*/
import * as rt from 'io-ts';
import { concat, defer, of } from 'rxjs';
import { concat, defer, of, forkJoin } from 'rxjs';
import { concatMap, filter, map, shareReplay, take } from 'rxjs/operators';
import type {
IEsSearchRequest,
@ -32,6 +32,7 @@ import {
jsonFromBase64StringRT,
} from '../../utils/typed_search_strategy';
import { createGetLogEntryQuery, getLogEntryResponseRT, LogEntryHit } from './queries/log_entry';
import { resolveLogSourceConfiguration } from '../../../common/log_sources';
type LogEntrySearchRequest = IKibanaSearchRequest<LogEntrySearchRequestParams>;
type LogEntrySearchResponse = IKibanaSearchResponse<LogEntrySearchResponsePayload>;
@ -50,9 +51,22 @@ export const logEntrySearchStrategyProvider = ({
defer(() => {
const request = decodeOrThrow(asyncRequestRT)(rawRequest);
const sourceConfiguration$ = defer(() =>
sources.getSourceConfiguration(dependencies.savedObjectsClient, request.params.sourceId)
).pipe(shareReplay(1));
const resolvedSourceConfiguration$ = defer(() =>
forkJoin([
sources.getSourceConfiguration(
dependencies.savedObjectsClient,
request.params.sourceId
),
data.indexPatterns.indexPatternsServiceFactory(
dependencies.savedObjectsClient,
dependencies.esClient.asCurrentUser
),
]).pipe(
concatMap(([sourceConfiguration, indexPatternsService]) =>
resolveLogSourceConfiguration(sourceConfiguration.configuration, indexPatternsService)
)
)
).pipe(take(1), shareReplay(1));
const recoveredRequest$ = of(request).pipe(
filter(asyncRecoveredRequestRT.is),
@ -62,15 +76,15 @@ export const logEntrySearchStrategyProvider = ({
const initialRequest$ = of(request).pipe(
filter(asyncInitialRequestRT.is),
concatMap(({ params }) =>
sourceConfiguration$.pipe(
resolvedSourceConfiguration$.pipe(
map(
({ configuration }): IEsSearchRequest => ({
({ indices, timestampField, tiebreakerField }): IEsSearchRequest => ({
// @ts-expect-error @elastic/elasticsearch declares indices_boost as Record<string, number>
params: createGetLogEntryQuery(
configuration.logAlias,
indices,
params.logEntryId,
configuration.fields.timestamp,
configuration.fields.tiebreaker
timestampField,
tiebreakerField
),
})
)

View file

@ -0,0 +1,37 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { IIndexPattern, IFieldType, IndexPatternsContract } from 'src/plugins/data/common';
/**
 * Static fixture: the single field exposed by the mock index pattern.
 * Matches the `event.dataset` keyword field the search-strategy tests query.
 */
const indexPatternFields: IFieldType[] = [
  {
    name: 'event.dataset',
    type: 'string',
    esTypes: ['keyword'],
    aggregatable: true,
    filterable: true,
    searchable: true,
  },
];

/**
 * Static fixture: a minimal Kibana index pattern backed by the fields above.
 */
const indexPattern: IIndexPattern = {
  id: '1234',
  title: 'log-indices-*',
  timeFieldName: '@timestamp',
  fields: indexPatternFields,
};

/**
 * Builds a mock of the data plugin's `indexPatterns` contract, restricted to
 * the two methods the mocked service exposes (`get` and
 * `getFieldsForWildcard`). Both resolve to the static fixtures above
 * regardless of their arguments.
 *
 * NOTE: returns `any` on purpose — the mock is spliced into loosely-typed
 * test plumbing (a data-plugin mock typed as `any`), so a precise return
 * type would add friction without adding safety.
 */
export const getIndexPatternsMock = (): any => {
  return {
    indexPatternsServiceFactory: async () => {
      return {
        // Underscore-prefixed params signal the arguments are intentionally
        // ignored; the fixtures are static.
        get: async (_id) => indexPattern,
        getFieldsForWildcard: async (_options) => indexPatternFields,
      } as Pick<IndexPatternsContract, 'get' | 'getFieldsForWildcard'>;
    },
  };
};

View file

@ -5,26 +5,31 @@
* 2.0.
*/
import { SavedObjectsClientContract } from 'src/core/server';
import { SavedObjectsClientContract, ElasticsearchClient } from 'src/core/server';
import { InfraSources } from '../../lib/sources';
import { resolveLogSourceConfiguration } from '../../../common/log_sources';
import { KibanaFramework } from '../../lib/adapters/framework/kibana_framework_adapter';
// NOTE: TEMPORARY: This will become a subset of the new resolved KIP compatible log source configuration.
export interface LogQueryFields {
indexPattern: string;
timestamp: string;
}
// NOTE: TEMPORARY: This will become a subset of the new resolved KIP compatible log source configuration.
export const createGetLogQueryFields = (sources: InfraSources) => {
export const createGetLogQueryFields = (sources: InfraSources, framework: KibanaFramework) => {
return async (
sourceId: string,
savedObjectsClient: SavedObjectsClientContract
savedObjectsClient: SavedObjectsClientContract,
elasticsearchClient: ElasticsearchClient
): Promise<LogQueryFields> => {
const source = await sources.getSourceConfiguration(savedObjectsClient, sourceId);
const resolvedLogSourceConfiguration = await resolveLogSourceConfiguration(
source.configuration,
await framework.getIndexPatternsService(savedObjectsClient, elasticsearchClient)
);
return {
indexPattern: source.configuration.logAlias,
timestamp: source.configuration.fields.timestamp,
indexPattern: resolvedLogSourceConfiguration.indices,
timestamp: resolvedLogSourceConfiguration.timestampField,
};
};
};

View file

@ -16,7 +16,10 @@ export const initInfraSource = (config: MonitoringConfig, infraPlugin: InfraPlug
const filebeatIndexPattern = prefixIndexPattern(config, config.ui.logs.index, '*');
infraPlugin.defineInternalSourceConfiguration(INFRA_SOURCE_ID, {
name: 'Elastic Stack Logs',
logAlias: filebeatIndexPattern,
logIndices: {
type: 'index_name',
indexName: filebeatIndexPattern,
},
});
}
};

View file

@ -36,7 +36,10 @@ export default function ({ getService }: FtrProviderContext) {
expect(origin).to.be('fallback');
expect(configuration.name).to.be('Default');
expect(configuration.logAlias).to.be('logs-*,filebeat-*,kibana_sample_data_logs*');
expect(configuration.logIndices).to.eql({
type: 'index_name',
indexName: 'logs-*,filebeat-*,kibana_sample_data_logs*',
});
expect(configuration.fields.timestamp).to.be('@timestamp');
expect(configuration.fields.tiebreaker).to.be('_doc');
expect(configuration.logColumns[0]).to.have.key('timestampColumn');
@ -51,7 +54,10 @@ export default function ({ getService }: FtrProviderContext) {
.createUpdateLogSourceConfigurationAgent('default', {
name: 'NAME',
description: 'DESCRIPTION',
logAlias: 'filebeat-**',
logIndices: {
type: 'index_pattern',
indexPatternId: 'kip-id',
},
fields: {
tiebreaker: 'TIEBREAKER',
timestamp: 'TIMESTAMP',
@ -73,7 +79,10 @@ export default function ({ getService }: FtrProviderContext) {
expect(configuration.name).to.be('NAME');
expect(origin).to.be('stored');
expect(configuration.logAlias).to.be('filebeat-**');
expect(configuration.logIndices).to.eql({
type: 'index_pattern',
indexPatternId: 'kip-id',
});
expect(configuration.fields.timestamp).to.be('TIMESTAMP');
expect(configuration.fields.tiebreaker).to.be('TIEBREAKER');
expect(configuration.logColumns).to.have.length(1);
@ -98,7 +107,10 @@ export default function ({ getService }: FtrProviderContext) {
expect(configuration.name).to.be('Default');
expect(origin).to.be('stored');
expect(configuration.logAlias).to.be('logs-*,filebeat-*,kibana_sample_data_logs*');
expect(configuration.logIndices).eql({
type: 'index_name',
indexName: 'logs-*,filebeat-*,kibana_sample_data_logs*',
});
expect(configuration.fields.timestamp).to.be('@timestamp');
expect(configuration.fields.tiebreaker).to.be('_doc');
expect(configuration.logColumns).to.have.length(3);
@ -126,7 +138,10 @@ export default function ({ getService }: FtrProviderContext) {
.createUpdateLogSourceConfigurationAgent('default', {
name: 'NAME',
description: 'DESCRIPTION',
logAlias: 'filebeat-**',
logIndices: {
type: 'index_pattern',
indexPatternId: 'kip-id',
},
fields: {
tiebreaker: 'TIEBREAKER',
timestamp: 'TIMESTAMP',
@ -147,7 +162,10 @@ export default function ({ getService }: FtrProviderContext) {
expect(configuration.name).to.be('NAME');
expect(origin).to.be('stored');
expect(configuration.logAlias).to.be('filebeat-**');
expect(configuration.logIndices).to.eql({
type: 'index_pattern',
indexPatternId: 'kip-id',
});
expect(configuration.fields.timestamp).to.be('TIMESTAMP');
expect(configuration.fields.tiebreaker).to.be('TIEBREAKER');
expect(configuration.logColumns).to.have.length(1);
@ -167,7 +185,10 @@ export default function ({ getService }: FtrProviderContext) {
expect(configuration.name).to.be('NAME');
expect(origin).to.be('stored');
expect(configuration.logAlias).to.be('logs-*,filebeat-*,kibana_sample_data_logs*');
expect(configuration.logIndices).to.eql({
type: 'index_name',
indexName: 'logs-*,filebeat-*,kibana_sample_data_logs*',
});
expect(configuration.fields.timestamp).to.be('@timestamp');
expect(configuration.fields.tiebreaker).to.be('_doc');
expect(configuration.logColumns).to.have.length(3);