Mirror of https://github.com/elastic/kibana.git — synced 2025-04-24 17:59:23 -04:00
[Dataset quality] Enable page for metrics and traces (#190043)
Closes https://github.com/elastic/observability-dev/issues/3454. This PR enables the Dataset quality page for `traces` and `metrics` data streams in addition to `logs`.

### Changes

- Added a `KNOWN_TYPES` array containing the data stream types allowed in the Dataset quality page.
- `datasets`, `degradedDocs`, and `nonAggregatableDatasets` are now wrapped in a parent `stats` state. This allows us to retrigger the calls whenever we need to (e.g. when the timeframe changes) and, more importantly, whenever a new dataset type is selected.
- The `degradedDocs` `invoke` is created dynamically from the types present in `KNOWN_TYPES`.
- `GET /internal/dataset_quality/data_streams/stats` and `GET /internal/dataset_quality/data_streams/non_aggregatable` now accept an array of `DataStreamType`, so the information for multiple data stream types can be queried at once (see the sketch after the commit metadata below).
- `degradedDocs` are now stored locally as a nested, per-type structure. This lets us update only the affected portion instead of recomputing all datasets whenever something changes (e.g. a type is deselected); a simplified sketch of this structure follows this description.
- `redirectLink` now takes the data stream type into account: it only redirects to Logs Explorer when Logs Explorer is enabled and the data stream type is `logs`.

### 🎥 Demo

https://github.com/user-attachments/assets/082bd4e9-a8f8-4af9-a425-267adc3b30df

---------

Co-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com>
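To make the per-type `degradedDocs` bookkeeping concrete, here is a small, self-contained TypeScript sketch of the dictionary shape and the `flattenStats` helper this PR introduces. The `DataStreamType` union and the sample values are simplified stand-ins; the real definitions live in `common/types` and `DEFAULT_DICTIONARY_TYPE` in the dataset quality controller.

```ts
// Simplified stand-in for the DataStreamType union from common/types.
type DataStreamType = 'logs' | 'metrics' | 'traces' | 'synthetics' | 'profiling';

// Per-type dictionary: stats are kept in separate buckets so that changing the
// selection for one type only touches that bucket (mirrors DEFAULT_DICTIONARY_TYPE).
type DictionaryType<T> = Record<DataStreamType, T[]>;

interface DegradedDocsStat {
  dataset: string;
  percentage: number;
}

const emptyDictionary: DictionaryType<DegradedDocsStat> = {
  logs: [],
  metrics: [],
  traces: [],
  synthetics: [],
  profiling: [],
};

// Same idea as the flattenStats helper added in this PR: collapse the dictionary
// into a flat array, tagging each entry with the type bucket it came from.
function flattenStats<T>(
  stats: Record<DataStreamType, T[]>
): Array<T & { type: DataStreamType }> {
  return Object.entries(stats).flatMap(([type, dataStreams]) =>
    dataStreams.map((dataStream) => ({ ...dataStream, type: type as DataStreamType }))
  );
}

// Example: only the `logs` bucket is populated; the flat view is what feeds the table.
const degradedDocStats: DictionaryType<DegradedDocsStat> = {
  ...emptyDictionary,
  logs: [{ dataset: 'logs-synth-default', percentage: 11.32 }],
};

console.log(flattenStats(degradedDocStats));
// [{ dataset: 'logs-synth-default', percentage: 11.32, type: 'logs' }]
```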
Parent commit: 8e9b827181
Commit: 2441e9ac8f
39 changed files with 610 additions and 313 deletions
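Before the diff itself, a hedged sketch of how the new multi-type query parameter travels end to end. Everything here is a simplified, dependency-free stand-in: the actual client encodes the array with `rison.encodeArray` from `@kbn/rison`, and the actual server validation is the `typesRt` io-ts codec, both visible in the diff below.

```ts
// Approximation of the typesRt codec: accept a comma-separated string or an
// already-parsed array, and reject anything that is not a known data stream type.
const KNOWN_TYPES = ['logs', 'metrics', 'traces'] as const;
type DataStreamType = (typeof KNOWN_TYPES)[number];

function isDataStreamType(value: string): value is DataStreamType {
  return (KNOWN_TYPES as readonly string[]).includes(value);
}

function decodeTypes(input: unknown): DataStreamType[] {
  const values =
    typeof input === 'string' ? input.split(',') : Array.isArray(input) ? input : null;
  if (values === null || !values.every((v) => typeof v === 'string' && isDataStreamType(v))) {
    throw new Error(`Invalid types parameter: ${JSON.stringify(input)}`);
  }
  return values as DataStreamType[];
}

// Example round trip: the UI selects two types, the server decodes them and builds
// one index pattern per type (as getDataStreams / getNonAggregatableDataStreams do).
const types = decodeTypes('logs,metrics'); // ['logs', 'metrics']
const indexPatterns = types.map((type) => `${type}-*-*`).join(','); // 'logs-*-*,metrics-*-*'
console.log(types, indexPatterns);
```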
@@ -13,6 +13,7 @@ export const filtersRT = rt.exact(
inactive: rt.boolean,
fullNames: rt.boolean,
timeRange: timeRangeRT,
types: rt.array(rt.string),
integrations: rt.array(rt.string),
namespaces: rt.array(rt.string),
qualities: rt.array(rt.union([rt.literal('poor'), rt.literal('degraded'), rt.literal('good')])),
@@ -25,6 +25,20 @@ export class DataQualityPlugin implements Plugin<void, void, any, any> {
['logs-*-*']: ['read'],
},
},
{
ui: [],
requiredClusterPrivileges: [],
requiredIndexPrivileges: {
['traces-*-*']: ['read'],
},
},
{
ui: [],
requiredClusterPrivileges: [],
requiredIndexPrivileges: {
['metrics-*-*']: ['read'],
},
},
],
});
}
@@ -5,10 +5,10 @@
 * 2.0.
 */

import { QualityIndicators } from './types';
import { DataStreamType, QualityIndicators } from './types';

export const DATASET_QUALITY_APP_ID = 'dataset_quality';
export const DEFAULT_DATASET_TYPE = 'logs';
export const DEFAULT_DATASET_TYPE: DataStreamType = 'logs';
export const DEFAULT_LOGS_DATA_VIEW = 'logs-*-*';

export const POOR_QUALITY_MINIMUM_PERCENTAGE = 3;

@@ -41,3 +41,5 @@ export const MAX_DEGRADED_FIELDS = 1000;

export const MASKED_FIELD_PLACEHOLDER = '<custom field>';
export const UNKOWN_FIELD_PLACEHOLDER = '<unkwon>';

export const KNOWN_TYPES: DataStreamType[] = ['logs', 'metrics', 'traces'];
@@ -5,9 +5,6 @@
 * 2.0.
 */

import { DataStreamType } from '../types';

export interface GetDataStreamIntegrationParams {
type: DataStreamType;
integrationName: string;
}
@@ -15,9 +15,6 @@ export type GetDataStreamsStatsResponse =
export type DataStreamStatType = GetDataStreamsStatsResponse['dataStreamsStats'][0];
export type DataStreamStatServiceResponse = GetDataStreamsStatsResponse;

export type GetIntegrationsParams =
APIClientRequestParamsOf<`GET /internal/dataset_quality/integrations`>['params'];

export type GetDataStreamsDegradedDocsStatsParams =
APIClientRequestParamsOf<`GET /internal/dataset_quality/data_streams/degraded_docs`>['params'];
export type GetDataStreamsDegradedDocsStatsQuery = GetDataStreamsDegradedDocsStatsParams['query'];
@@ -10,12 +10,33 @@ import { EuiSuperDatePicker } from '@elastic/eui';
import { UI_SETTINGS } from '@kbn/data-service';
import { TimePickerQuickRange } from '@kbn/observability-shared-plugin/public/hooks/use_quick_time_ranges';
import React, { useMemo } from 'react';
import { i18n } from '@kbn/i18n';
import { useDatasetQualityFilters } from '../../../hooks/use_dataset_quality_filters';
import { useKibanaContextForPlugin } from '../../../utils/use_kibana';
import { FilterBar } from './filter_bar';
import { IntegrationsSelector } from './integrations_selector';
import { NamespacesSelector } from './namespaces_selector';
import { QualitiesSelector } from './qualities_selector';
import { Selector } from './selector';

const typesLabel = i18n.translate('xpack.datasetQuality.types.label', {
defaultMessage: 'Types',
});

const typesSearchPlaceholder = i18n.translate(
'xpack.datasetQuality.selector.types.search.placeholder',
{
defaultMessage: 'Filter types',
}
);

const typesNoneMatching = i18n.translate('xpack.datasetQuality.selector.types.noneMatching', {
defaultMessage: 'No types found',
});

const typesNoneAvailable = i18n.translate('xpack.datasetQuality.selector.types.noneAvailable', {
defaultMessage: 'No types available',
});

// Allow for lazy loading
// eslint-disable-next-line import/no-default-export

@@ -29,9 +50,11 @@ export default function Filters() {
integrations,
namespaces,
qualities,
types,
onIntegrationsChange,
onNamespacesChange,
onQualitiesChange,
onTypesChange,
selectedQuery,
onQueryChange,
} = useDatasetQualityFilters();

@@ -65,6 +88,14 @@ export default function Filters() {
integrations={integrations}
onIntegrationsChange={onIntegrationsChange}
/>
<Selector
label={typesLabel}
searchPlaceholder={typesSearchPlaceholder}
noneMatchingMessage={typesNoneMatching}
noneAvailableMessage={typesNoneAvailable}
options={types}
onOptionsChange={onTypesChange}
/>
<NamespacesSelector
isLoading={isLoading}
namespaces={namespaces}
@@ -0,0 +1,102 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { EuiFilterButton, EuiPopover, EuiPopoverTitle, EuiSelectable, EuiText } from '@elastic/eui';
import React, { useState } from 'react';
import type { EuiSelectableOptionCheckedType } from '@elastic/eui/src/components/selectable/selectable_option';
import { i18n } from '@kbn/i18n';

const selectorLoading = i18n.translate('xpack.datasetQuality.selector.loading', {
defaultMessage: 'Loading',
});

interface SelectorProps {
isLoading?: boolean;
options: Item[];
loadingMessage?: string;
label: string;
searchPlaceholder: string;
noneAvailableMessage: string;
noneMatchingMessage: string;
onOptionsChange: (options: Item[]) => void;
}

export interface Item {
label: string;
checked?: EuiSelectableOptionCheckedType;
}

export function Selector({
isLoading,
options,
loadingMessage,
label,
searchPlaceholder,
noneAvailableMessage,
noneMatchingMessage,
onOptionsChange,
}: SelectorProps) {
const [isPopoverOpen, setIsPopoverOpen] = useState(false);

const onButtonClick = () => {
setIsPopoverOpen(!isPopoverOpen);
};

const closePopover = () => {
setIsPopoverOpen(false);
};

const renderOption = (option: Item) => <EuiText size="s">{option.label}</EuiText>;

const button = (
<EuiFilterButton
data-test-subj="datasetQualitySelectableButton"
iconType="arrowDown"
badgeColor="success"
onClick={onButtonClick}
isSelected={isPopoverOpen}
numFilters={options.length}
hasActiveFilters={!!options.find((item) => item.checked === 'on')}
numActiveFilters={options.filter((item) => item.checked === 'on').length}
>
{label}
</EuiFilterButton>
);

return (
<EuiPopover
button={button}
isOpen={isPopoverOpen}
closePopover={closePopover}
panelPaddingSize="none"
>
<EuiSelectable
data-test-subj="datasetQualitySelectableOptions"
searchable
searchProps={{
placeholder: searchPlaceholder,
compressed: true,
}}
aria-label={label}
options={options}
onChange={onOptionsChange}
isLoading={isLoading}
loadingMessage={loadingMessage ?? selectorLoading}
emptyMessage={noneAvailableMessage}
noMatchesMessage={noneMatchingMessage}
renderOption={(option) => renderOption(option)}
>
{(list, search) => (
<div style={{ width: 300 }}>
<EuiPopoverTitle paddingSize="s">{search}</EuiPopoverTitle>
{list}
</div>
)}
</EuiSelectable>
</EuiPopover>
);
}
@@ -10,7 +10,7 @@ import { EuiBetaBadge, EuiLink, EuiPageHeader, EuiCode } from '@elastic/eui';
import { i18n } from '@kbn/i18n';
import { FormattedMessage } from '@kbn/i18n-react';

import { DEFAULT_LOGS_DATA_VIEW } from '../../../common/constants';
import { KNOWN_TYPES } from '../../../common/constants';
import { datasetQualityAppTitle } from '../../../common/translations';

// Allow for lazy loading

@@ -33,9 +33,16 @@ export default function Header() {
description={
<FormattedMessage
id="xpack.datasetQuality.appDescription"
defaultMessage="Monitor the data set quality for {logsPattern} data streams that follow the {dsNamingSchemeLink}."
defaultMessage="Monitor the data set quality for {types} data streams that follow the {dsNamingSchemeLink}."
values={{
logsPattern: <EuiCode>{DEFAULT_LOGS_DATA_VIEW}</EuiCode>,
types: KNOWN_TYPES.map((type, index) => {
return (
<>
{index > 0 && ', '}
<EuiCode>{type}</EuiCode>
</>
);
}),
dsNamingSchemeLink: (
<EuiLink
data-test-subj="datasetQualityAppDescriptionDsNamingSchemeLink"
@@ -46,6 +46,10 @@ const namespaceColumnName = i18n.translate('xpack.datasetQuality.namespaceColumn
defaultMessage: 'Namespace',
});

const typeColumnName = i18n.translate('xpack.datasetQuality.typeColumnName', {
defaultMessage: 'Type',
});

const sizeColumnName = i18n.translate('xpack.datasetQuality.sizeColumnName', {
defaultMessage: 'Size',
});

@@ -213,6 +217,15 @@ export const getDatasetQualityTableColumns = ({
),
width: '160px',
},
{
name: typeColumnName,
field: 'type',
sortable: true,
render: (_, dataStreamStat: DataStreamStat) => (
<EuiBadge color="hollow">{dataStreamStat.type}</EuiBadge>
),
width: '160px',
},
...(isSizeStatsAvailable && canUserMonitorDataset && canUserMonitorAnyDataStream
? [
{
@@ -6,6 +6,7 @@
 */

import React, { useCallback, useEffect, useState } from 'react';
import { i18n } from '@kbn/i18n';
import { FormattedMessage } from '@kbn/i18n-react';
import {
EuiAccordion,

@@ -133,7 +134,10 @@ export default function DegradedDocs({ lastReloadTime }: { lastReloadTime: numbe
<EuiButtonIcon
display="base"
iconType="discoverApp"
aria-label="Discover"
aria-label={i18n.translate(
'xpack.datasetQuality.degradedDocs.euiButtonIcon.discoverLabel',
{ defaultMessage: 'Discover' }
)}
size="s"
data-test-subj="datasetQualityDetailsLinkToDiscover"
{...degradedDocLinkLogsExplorer.linkProps}
@@ -8,12 +8,14 @@
import { OnRefreshChangeProps } from '@elastic/eui';
import { useSelector } from '@xstate/react';
import { useCallback, useMemo } from 'react';
import { QualityIndicators } from '../../common/types';
import { KNOWN_TYPES } from '../../common/constants';
import { DataStreamType, QualityIndicators } from '../../common/types';
import { Integration } from '../../common/data_streams_stats/integration';
import { useDatasetQualityContext } from '../components/dataset_quality/context';
import { IntegrationItem } from '../components/dataset_quality/filters/integrations_selector';
import { NamespaceItem } from '../components/dataset_quality/filters/namespaces_selector';
import { QualityItem } from '../components/dataset_quality/filters/qualities_selector';
import { Item } from '../components/dataset_quality/filters/selector';

export const useDatasetQualityFilters = () => {
const { service } = useDatasetQualityContext();

@@ -22,13 +24,14 @@ export const useDatasetQualityFilters = () => {
service,
(state) =>
state.matches('integrations.fetching') &&
(state.matches('datasets.fetching') || state.matches('degradedDocs.fetching'))
(state.matches('stats.datasets.fetching') || state.matches('stats.degradedDocs.fetching'))
);

const {
timeRange,
integrations: selectedIntegrations,
namespaces: selectedNamespaces,
types: selectedTypes,
qualities: selectedQualities,
query: selectedQuery,
} = useSelector(service, (state) => state.context.filters);

@@ -169,6 +172,25 @@ export const useDatasetQualityFilters = () => {
[service]
);

const typeItems: Item[] = useMemo(() => {
return KNOWN_TYPES.map((type) => ({
label: type,
checked: selectedTypes.includes(type) ? 'on' : undefined,
}));
}, [selectedTypes]);

const onTypesChange = useCallback(
(newTypeItems: Item[]) => {
service.send({
type: 'UPDATE_TYPES',
types: newTypeItems
.filter((quality) => quality.checked === 'on')
.map((type) => type.label as DataStreamType),
});
},
[service]
);

const onQueryChange = useCallback(
(query: string) => {
service.send({

@@ -187,9 +209,11 @@ export const useDatasetQualityFilters = () => {
integrations: integrationItems,
namespaces: namespaceItems,
qualities: qualityItems,
types: typeItems,
onIntegrationsChange,
onNamespacesChange,
onQualitiesChange,
onTypesChange,
isLoading,
selectedQuery,
onQueryChange,
@@ -48,8 +48,7 @@ export const useDatasetQualityTable = () => {
service,
(state) =>
!state.context.dataStreamStats ||
!state.context.dataStreamStats.length ||
state.context.dataStreamStats.some((s) => s.userPrivileges.canMonitor)
state.context.datasets.some((s) => s.userPrivileges?.canMonitor)
);

const {

@@ -66,15 +65,15 @@ export const useDatasetQualityTable = () => {
const loading = useSelector(
service,
(state) =>
state.matches('datasets.fetching') ||
state.matches('stats.datasets.fetching') ||
state.matches('integrations.fetching') ||
state.matches('degradedDocs.fetching')
state.matches('stats.degradedDocs.fetching')
);
const loadingDataStreamStats = useSelector(service, (state) =>
state.matches('datasets.fetching')
state.matches('stats.datasets.fetching')
);
const loadingDegradedStats = useSelector(service, (state) =>
state.matches('degradedDocs.fetching')
state.matches('stats.degradedDocs.fetching')
);

const datasets = useSelector(service, (state) => state.context.datasets);
@@ -16,8 +16,11 @@ export function useDatasetQualityWarnings() {
);

const isNonAggregatableDatasetsLoading = useSelector(service, (state) =>
state.matches('nonAggregatableDatasets.fetching')
state.matches('stats.nonAggregatableDatasets.fetching')
);

return { loading: isNonAggregatableDatasetsLoading, nonAggregatableDatasets };
return {
loading: isNonAggregatableDatasetsLoading,
nonAggregatableDatasets,
};
}
@@ -18,9 +18,9 @@ export function useEmptyState() {
const isDatasetEmpty = useSelector(
service,
(state) =>
!state.matches('datasets.fetching') &&
!state.matches('stats.datasets.fetching') &&
!state.matches('integrations.fetching') &&
!state.matches('degradedDocs.fetching') &&
!state.matches('stats.degradedDocs.fetching') &&
(state.context.datasets?.length ?? 0) === 0
);
@@ -47,7 +47,8 @@ export const useRedirectLink = <T extends BasicDataStream>({
navigate: () => void;
isLogsExplorerAvailable: boolean;
}>(() => {
const config = logsExplorerLocator
const isLogsExplorerAvailable = !!logsExplorerLocator && dataStreamStat.type === 'logs';
const config = isLogsExplorerAvailable
? buildLogsExplorerConfig({
locator: logsExplorerLocator,
dataStreamStat,

@@ -83,7 +84,7 @@ export const useRedirectLink = <T extends BasicDataStream>({
onClick: onClickWithTelemetry,
},
navigate: navigateWithTelemetry,
isLogsExplorerAvailable: !!logsExplorerLocator,
isLogsExplorerAvailable,
};
}, [
breakdownField,

@@ -181,7 +182,7 @@ const buildDiscoverConfig = <T extends BasicDataStream>({
dataViewId,
dataViewSpec: {
id: dataViewId,
title: dataViewTitle,
title: dataViewId,
},
query,
breakdownField,
@@ -19,6 +19,7 @@ const useSummaryPanel = () => {
isSizeStatsAvailable,
canUserMonitorDataset,
canUserMonitorAnyDataStream,
loading,
} = useDatasetQualityTable();

const { timeRange } = useSelector(service, (state) => state.context.filters);

@@ -32,7 +33,7 @@ const useSummaryPanel = () => {
};

const isDatasetsQualityLoading = useSelector(service, (state) =>
state.matches('degradedDocs.fetching')
state.matches('stats.degradedDocs.fetching')
);

/*

@@ -42,8 +43,9 @@ const useSummaryPanel = () => {
(item) => item.userPrivileges?.canMonitor ?? true
);

const isUserAuthorizedForDataset =
canUserMonitorDataset && canUserMonitorAnyDataStream && canUserMonitorAllFilteredDataStreams;
const isUserAuthorizedForDataset = !loading
? canUserMonitorDataset && canUserMonitorAnyDataStream && canUserMonitorAllFilteredDataStreams
: true;

/*
Datasets Activity

@@ -57,7 +59,7 @@ const useSummaryPanel = () => {
};

const isDatasetsActivityLoading = useSelector(service, (state) =>
state.matches('datasets.fetching')
state.matches('stats.datasets.fetching')
);

/*

@@ -70,7 +72,8 @@ const useSummaryPanel = () => {

const isEstimatedDataLoading = useSelector(
service,
(state) => state.matches('datasets.fetching') || state.matches('degradedDocs.fetching')
(state) =>
state.matches('stats.datasets.fetching') || state.matches('stats.degradedDocs.fetching')
);

return {
@@ -123,11 +123,9 @@ export class DataStreamDetailsClient implements IDataStreamDetailsClient {
public async getDataStreamIntegration(
params: GetDataStreamIntegrationParams
): Promise<Integration | undefined> {
const { type, integrationName } = params;
const { integrationName } = params;
const response = await this.http
.get<IntegrationResponse>('/internal/dataset_quality/integrations', {
query: { type },
})
.get<IntegrationResponse>('/internal/dataset_quality/integrations')
.catch((error) => {
throw new DatasetQualityError(`Failed to fetch integrations: ${error}`, error);
});
@@ -7,6 +7,8 @@

import { HttpStart } from '@kbn/core/public';
import { decodeOrThrow } from '@kbn/io-ts-utils';
import rison from '@kbn/rison';
import { KNOWN_TYPES } from '../../../common/constants';
import {
getDataStreamsDegradedDocsStatsResponseRt,
getDataStreamsStatsResponseRt,

@@ -15,14 +17,12 @@ import {
IntegrationResponse,
NonAggregatableDatasets,
} from '../../../common/api_types';
import { DEFAULT_DATASET_TYPE } from '../../../common/constants';
import {
DataStreamStatServiceResponse,
GetDataStreamsDegradedDocsStatsQuery,
GetDataStreamsDegradedDocsStatsResponse,
GetDataStreamsStatsQuery,
GetDataStreamsStatsResponse,
GetIntegrationsParams,
GetNonAggregatableDataStreamsParams,
} from '../../../common/data_streams_stats';
import { Integration } from '../../../common/data_streams_stats/integration';

@@ -33,11 +33,15 @@ export class DataStreamsStatsClient implements IDataStreamsStatsClient {
constructor(private readonly http: HttpStart) {}

public async getDataStreamsStats(
params: GetDataStreamsStatsQuery = { type: DEFAULT_DATASET_TYPE }
params: GetDataStreamsStatsQuery
): Promise<DataStreamStatServiceResponse> {
const types = params.types.length === 0 ? KNOWN_TYPES : params.types;
const response = await this.http
.get<GetDataStreamsStatsResponse>('/internal/dataset_quality/data_streams/stats', {
query: params,
query: {
...params,
types: rison.encodeArray(types),
},
})
.catch((error) => {
throw new DatasetQualityError(`Failed to fetch data streams stats: ${error}`, error);

@@ -59,7 +63,6 @@ export class DataStreamsStatsClient implements IDataStreamsStatsClient {
{
query: {
...params,
type: DEFAULT_DATASET_TYPE,
},
}
)

@@ -86,7 +89,7 @@ export class DataStreamsStatsClient implements IDataStreamsStatsClient {
.get<NonAggregatableDatasets>('/internal/dataset_quality/data_streams/non_aggregatable', {
query: {
...params,
type: DEFAULT_DATASET_TYPE,
types: rison.encodeArray(params.types),
},
})
.catch((error) => {

@@ -102,13 +105,9 @@ export class DataStreamsStatsClient implements IDataStreamsStatsClient {
return nonAggregatableDatasets;
}

public async getIntegrations(
params: GetIntegrationsParams['query'] = { type: DEFAULT_DATASET_TYPE }
): Promise<Integration[]> {
public async getIntegrations(): Promise<Integration[]> {
const response = await this.http
.get<IntegrationResponse>('/internal/dataset_quality/integrations', {
query: params,
})
.get<IntegrationResponse>('/internal/dataset_quality/integrations')
.catch((error) => {
throw new DatasetQualityError(`Failed to fetch integrations: ${error}`, error);
});
@@ -11,7 +11,6 @@ import {
DataStreamStatServiceResponse,
GetDataStreamsDegradedDocsStatsQuery,
GetDataStreamsStatsQuery,
GetIntegrationsParams,
GetNonAggregatableDataStreamsParams,
} from '../../../common/data_streams_stats';
import { Integration } from '../../../common/data_streams_stats/integration';

@@ -32,7 +31,7 @@ export interface IDataStreamsStatsClient {
getDataStreamsDegradedStats(
params?: GetDataStreamsDegradedDocsStatsQuery
): Promise<DataStreamDegradedDocsStatServiceResponse>;
getIntegrations(params: GetIntegrationsParams['query']): Promise<Integration[]>;
getIntegrations(): Promise<Integration[]>;
getNonAggregatableDatasets(
params: GetNonAggregatableDataStreamsParams
): Promise<NonAggregatableDatasets>;
@@ -14,8 +14,15 @@ import { DefaultDatasetQualityControllerState } from './types';

const ONE_MINUTE_IN_MS = 60000;

export const DEFAULT_DICTIONARY_TYPE = {
logs: [],
metrics: [],
traces: [],
synthetics: [],
profiling: [],
};

export const DEFAULT_CONTEXT: DefaultDatasetQualityControllerState = {
type: DEFAULT_DATASET_TYPE,
table: {
page: 0,
rowsPerPage: 10,

@@ -30,6 +37,7 @@ export const DEFAULT_CONTEXT: DefaultDatasetQualityControllerState = {
canViewIntegrations: true,
},
dataStreamStats: [],
degradedDocStats: DEFAULT_DICTIONARY_TYPE,
filters: {
inactive: true,
fullNames: false,

@@ -44,6 +52,7 @@ export const DEFAULT_CONTEXT: DefaultDatasetQualityControllerState = {
integrations: [],
namespaces: [],
qualities: [],
types: [DEFAULT_DATASET_TYPE],
},
datasets: [],
isSizeStatsAvailable: true,
File diff suppressed because one or more lines are too long
@@ -6,18 +6,23 @@
 */

import { DoneInvokeEvent } from 'xstate';
import { QualityIndicators, TableCriteria, TimeRangeConfig } from '../../../../common/types';
import { DatasetUserPrivileges, NonAggregatableDatasets } from '../../../../common/api_types';
import { Integration } from '../../../../common/data_streams_stats/integration';
import { DatasetTableSortField } from '../../../hooks';
import { DegradedDocsStat } from '../../../../common/data_streams_stats/malformed_docs_stat';
import {
DataStreamDegradedDocsStatServiceResponse,
DataStreamDetails,
DataStreamStatServiceResponse,
DataStreamStat,
DataStreamStatServiceResponse,
DataStreamStatType,
} from '../../../../common/data_streams_stats';
import { Integration } from '../../../../common/data_streams_stats/integration';
import { DegradedDocsStat } from '../../../../common/data_streams_stats/malformed_docs_stat';
import {
DataStreamType,
QualityIndicators,
TableCriteria,
TimeRangeConfig,
} from '../../../../common/types';
import { DatasetTableSortField } from '../../../hooks';

interface FiltersCriteria {
inactive: boolean;

@@ -26,6 +31,7 @@ interface FiltersCriteria {
integrations: string[];
namespaces: string[];
qualities: QualityIndicators[];
types: string[];
query?: string;
}

@@ -37,13 +43,15 @@ export interface WithFilters {
filters: FiltersCriteria;
}

export type DictionaryType<T> = Record<DataStreamType, T[]>;

export interface WithDataStreamStats {
datasetUserPrivileges: DatasetUserPrivileges;
dataStreamStats: DataStreamStatType[];
}

export interface WithDegradedDocs {
degradedDocStats: DegradedDocsStat[];
degradedDocStats: DictionaryType<DegradedDocsStat>;
}

export interface WithNonAggregatableDatasets {

@@ -59,9 +67,9 @@ export interface WithIntegrations {
integrations: Integration[];
}

export type DefaultDatasetQualityControllerState = { type: string } & WithTableOptions &
export type DefaultDatasetQualityControllerState = WithTableOptions &
WithDataStreamStats &
Partial<WithDegradedDocs> &
WithDegradedDocs &
WithDatasets &
WithFilters &
WithNonAggregatableDatasets &

@@ -71,19 +79,19 @@ type DefaultDatasetQualityStateContext = DefaultDatasetQualityControllerState;

export type DatasetQualityControllerTypeState =
| {
value: 'datasets.fetching';
value: 'stats.datasets.fetching';
context: DefaultDatasetQualityStateContext;
}
| {
value: 'datasets.loaded';
value: 'stats.datasets.loaded';
context: DefaultDatasetQualityStateContext;
}
| {
value: 'datasets.loaded.idle';
value: 'stats.degradedDocs.fetching';
context: DefaultDatasetQualityStateContext;
}
| {
value: 'degradedDocs.fetching';
value: 'stats.nonAggregatableDatasets.fetching';
context: DefaultDatasetQualityStateContext;
}
| {

@@ -135,6 +143,10 @@ export type DatasetQualityControllerEvent =
type: 'UPDATE_QUERY';
query: string;
}
| {
type: 'UPDATE_TYPES';
types: DataStreamType[];
}
| DoneInvokeEvent<DataStreamDegradedDocsStatServiceResponse>
| DoneInvokeEvent<NonAggregatableDatasets>
| DoneInvokeEvent<DataStreamDetails>
@@ -427,7 +427,7 @@ export const createDatasetQualityDetailsControllerStateMachine = ({
const { startDate: start, endDate: end } = getDateISORange(context.timeRange);

return dataStreamStatsClient.getNonAggregatableDatasets({
type,
types: [type],
start,
end,
dataStream: context.dataStream,

@@ -479,9 +479,7 @@ export const createDatasetQualityDetailsControllerStateMachine = ({
},
loadDataStreamIntegration: (context) => {
if ('dataStreamSettings' in context && context.dataStreamSettings?.integration) {
const { type } = indexNameToDataStreamParts(context.dataStream);
return dataStreamDetailsClient.getDataStreamIntegration({
type,
integrationName: context.dataStreamSettings.integration,
});
}
@@ -0,0 +1,16 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { DataStreamType } from '../../common/types';

export function flattenStats<T>(
stats: Record<DataStreamType, T[]>
): Array<T & { type: DataStreamType }> {
return Object.entries(stats).flatMap(([type, dataStreams]) =>
dataStreams.map((dataStream) => ({ ...dataStream, type: type as DataStreamType }))
);
}
@@ -8,7 +8,8 @@
import { indexNameToDataStreamParts } from '../../common/utils';
import { Integration } from '../../common/data_streams_stats/integration';
import { generateDatasets } from './generate_datasets';
import { DataStreamStatType } from '../../common/data_streams_stats/types';
import { DataStreamStatType } from '../../common/data_streams_stats';
import { DEFAULT_DICTIONARY_TYPE } from '../state_machines/dataset_quality_controller';

describe('generateDatasets', () => {
const integrations: Integration[] = [

@@ -56,25 +57,28 @@ describe('generateDatasets', () => {
},
];

const degradedDocs = [
{
dataset: 'logs-system.application-default',
percentage: 0,
count: 0,
docsCount: 0,
quality: 'good' as const,
},
{
dataset: 'logs-synth-default',
percentage: 11.320754716981131,
count: 6,
docsCount: 0,
quality: 'poor' as const,
},
];
const degradedDocs = {
...DEFAULT_DICTIONARY_TYPE,
logs: [
{
dataset: 'logs-system.application-default',
percentage: 0,
count: 0,
docsCount: 0,
quality: 'good' as const,
},
{
dataset: 'logs-synth-default',
percentage: 11.320754716981131,
count: 6,
docsCount: 0,
quality: 'poor' as const,
},
],
};

it('merges integrations information with dataStreamStats', () => {
const datasets = generateDatasets(dataStreamStats, undefined, integrations);
const datasets = generateDatasets(dataStreamStats, DEFAULT_DICTIONARY_TYPE, integrations);

expect(datasets).toEqual([
{

@@ -87,10 +91,10 @@ describe('generateDatasets', () => {
rawName: dataStreamStats[0].name,
integration: integrations[0],
degradedDocs: {
percentage: degradedDocs[0].percentage,
count: degradedDocs[0].count,
docsCount: degradedDocs[0].docsCount,
quality: degradedDocs[0].quality,
percentage: degradedDocs.logs[0].percentage,
count: degradedDocs.logs[0].count,
docsCount: degradedDocs.logs[0].docsCount,
quality: degradedDocs.logs[0].quality,
},
},
{

@@ -115,40 +119,42 @@ describe('generateDatasets', () => {

expect(datasets).toEqual([
{
rawName: degradedDocs[0].dataset,
name: indexNameToDataStreamParts(degradedDocs[0].dataset).dataset,
type: indexNameToDataStreamParts(degradedDocs[0].dataset).type,
rawName: degradedDocs.logs[0].dataset,
name: indexNameToDataStreamParts(degradedDocs.logs[0].dataset).dataset,
type: indexNameToDataStreamParts(degradedDocs.logs[0].dataset).type,
lastActivity: undefined,
size: undefined,
sizeBytes: undefined,
userPrivileges: undefined,
namespace: indexNameToDataStreamParts(degradedDocs[0].dataset).namespace,
namespace: indexNameToDataStreamParts(degradedDocs.logs[0].dataset).namespace,
title:
integrations[0].datasets[indexNameToDataStreamParts(degradedDocs[0].dataset).dataset],
integrations[0].datasets[
indexNameToDataStreamParts(degradedDocs.logs[0].dataset).dataset
],
integration: integrations[0],
degradedDocs: {
percentage: degradedDocs[0].percentage,
count: degradedDocs[0].count,
docsCount: degradedDocs[0].docsCount,
quality: degradedDocs[0].quality,
percentage: degradedDocs.logs[0].percentage,
count: degradedDocs.logs[0].count,
docsCount: degradedDocs.logs[0].docsCount,
quality: degradedDocs.logs[0].quality,
},
},
{
rawName: degradedDocs[1].dataset,
name: indexNameToDataStreamParts(degradedDocs[1].dataset).dataset,
type: indexNameToDataStreamParts(degradedDocs[1].dataset).type,
rawName: degradedDocs.logs[1].dataset,
name: indexNameToDataStreamParts(degradedDocs.logs[1].dataset).dataset,
type: indexNameToDataStreamParts(degradedDocs.logs[1].dataset).type,
lastActivity: undefined,
size: undefined,
sizeBytes: undefined,
userPrivileges: undefined,
namespace: indexNameToDataStreamParts(degradedDocs[1].dataset).namespace,
title: indexNameToDataStreamParts(degradedDocs[1].dataset).dataset,
namespace: indexNameToDataStreamParts(degradedDocs.logs[1].dataset).namespace,
title: indexNameToDataStreamParts(degradedDocs.logs[1].dataset).dataset,
integration: undefined,
degradedDocs: {
percentage: degradedDocs[1].percentage,
count: degradedDocs[1].count,
docsCount: degradedDocs[1].docsCount,
quality: degradedDocs[1].quality,
percentage: degradedDocs.logs[1].percentage,
count: degradedDocs.logs[1].count,
docsCount: degradedDocs.logs[1].docsCount,
quality: degradedDocs.logs[1].quality,
},
},
]);

@@ -168,10 +174,10 @@ describe('generateDatasets', () => {
rawName: dataStreamStats[0].name,
integration: integrations[0],
degradedDocs: {
percentage: degradedDocs[0].percentage,
count: degradedDocs[0].count,
docsCount: degradedDocs[0].docsCount,
quality: degradedDocs[0].quality,
percentage: degradedDocs.logs[0].percentage,
count: degradedDocs.logs[0].count,
docsCount: degradedDocs.logs[0].docsCount,
quality: degradedDocs.logs[0].quality,
},
},
{

@@ -182,10 +188,10 @@ describe('generateDatasets', () => {
type: indexNameToDataStreamParts(dataStreamStats[1].name).type,
rawName: dataStreamStats[1].name,
degradedDocs: {
percentage: degradedDocs[1].percentage,
count: degradedDocs[1].count,
docsCount: degradedDocs[1].docsCount,
quality: degradedDocs[1].quality,
percentage: degradedDocs.logs[1].percentage,
count: degradedDocs.logs[1].count,
docsCount: degradedDocs.logs[1].docsCount,
quality: degradedDocs.logs[1].quality,
},
},
]);

@@ -205,7 +211,7 @@ describe('generateDatasets', () => {
},
};

const datasets = generateDatasets([nonDefaultDataset], undefined, integrations);
const datasets = generateDatasets([nonDefaultDataset], DEFAULT_DICTIONARY_TYPE, integrations);

expect(datasets).toEqual([
{

@@ -225,8 +231,4 @@ describe('generateDatasets', () => {
},
]);
});

it('returns an empty array if no valid object is provided', () => {
expect(generateDatasets(undefined, undefined, integrations)).toEqual([]);
});
});
@@ -10,10 +10,12 @@ import { mapPercentageToQuality } from '../../common/utils';
import { Integration } from '../../common/data_streams_stats/integration';
import { DataStreamStat } from '../../common/data_streams_stats/data_stream_stat';
import { DegradedDocsStat } from '../../common/data_streams_stats/malformed_docs_stat';
import { DictionaryType } from '../state_machines/dataset_quality_controller/src/types';
import { flattenStats } from './flatten_stats';

export function generateDatasets(
dataStreamStats: DataStreamStatType[] = [],
degradedDocStats: DegradedDocsStat[] = [],
degradedDocStats: DictionaryType<DegradedDocsStat>,
integrations: Integration[]
): DataStreamStat[] {
if (!dataStreamStats.length && !integrations.length) {

@@ -48,8 +50,10 @@ export function generateDatasets(
{ datasetIntegrationMap: {}, integrationsMap: {} }
);

const degradedDocs = flattenStats(degradedDocStats);

if (!dataStreamStats.length) {
return degradedDocStats.map((degradedDocStat) =>
return degradedDocs.map((degradedDocStat) =>
DataStreamStat.fromDegradedDocStat({ degradedDocStat, datasetIntegrationMap })
);
}

@@ -62,8 +66,8 @@ export function generateDatasets(
docsCount: DegradedDocsStat['docsCount'];
quality: DegradedDocsStat['quality'];
}
> = degradedDocStats.reduce(
(degradedMapAcc, { dataset, percentage, count, docsCount, quality }) =>
> = degradedDocs.reduce(
(degradedMapAcc, { dataset, percentage, count, docsCount }) =>
Object.assign(degradedMapAcc, {
[dataset]: {
percentage,
@@ -40,13 +40,13 @@ describe('getDataStreams', () => {
const esClientMock = elasticsearchServiceMock.createElasticsearchClient();
const result = await getDataStreams({
esClient: esClientMock,
type: 'logs',
datasetQuery: 'nginx',
types: ['logs'],
datasetQuery: 'nginx-*',
uncategorisedOnly: true,
});
expect(dataStreamService.getMatchingDataStreams).toHaveBeenCalledWith(
expect.anything(),
'logs-*nginx*'
'logs-nginx-*'
);

expect(result.datasetUserPrivileges.canMonitor).toBe(true);

@@ -57,8 +57,8 @@ describe('getDataStreams', () => {
const esClientMock = elasticsearchServiceMock.createElasticsearchClient();
const results = await getDataStreams({
esClient: esClientMock,
type: 'logs',
datasetQuery: 'nginx',
types: ['logs'],
datasetQuery: 'nginx-*',
uncategorisedOnly: true,
});
expect(results.items.length).toBe(1);

@@ -67,8 +67,8 @@ describe('getDataStreams', () => {
const esClientMock = elasticsearchServiceMock.createElasticsearchClient();
const results = await getDataStreams({
esClient: esClientMock,
type: 'logs',
datasetQuery: 'nginx',
types: ['logs'],
datasetQuery: 'nginx-*',
uncategorisedOnly: false,
});
expect(results.items.length).toBe(5);
@@ -6,27 +6,28 @@
 */

import type { ElasticsearchClient } from '@kbn/core/server';
import { DEFAULT_DATASET_TYPE } from '../../../../common/constants';
import { streamPartsToIndexPattern } from '../../../../common/utils';
import { DataStreamType } from '../../../../common/types';
import { dataStreamService, datasetQualityPrivileges } from '../../../services';

export async function getDataStreams(options: {
esClient: ElasticsearchClient;
type?: DataStreamType;
types: DataStreamType[];
datasetQuery?: string;
uncategorisedOnly: boolean;
}) {
const { esClient, type = DEFAULT_DATASET_TYPE, datasetQuery, uncategorisedOnly } = options;
const { esClient, types, datasetQuery, uncategorisedOnly } = options;

const datasetName = streamPartsToIndexPattern({
typePattern: type,
datasetPattern: datasetQuery ? `*${datasetQuery}*` : '*-*',
});
const datasetNames = types.map((type) =>
streamPartsToIndexPattern({
typePattern: type,
datasetPattern: datasetQuery ? `${datasetQuery}` : '*-*',
})
);

const datasetUserPrivileges = await datasetQualityPrivileges.getDatasetPrivileges(
esClient,
datasetName
datasetNames.join(',')
);

if (!datasetUserPrivileges.canMonitor) {

@@ -36,7 +37,10 @@ export async function getDataStreams(options: {
};
}

const allDataStreams = await dataStreamService.getMatchingDataStreams(esClient, datasetName);
const allDataStreams = await dataStreamService.getMatchingDataStreams(
esClient,
datasetNames.join(',')
);

const filteredDataStreams = uncategorisedOnly
? allDataStreams.filter((stream) => {
@@ -8,28 +8,29 @@
import { ElasticsearchClient } from '@kbn/core-elasticsearch-server';
import { rangeQuery } from '@kbn/observability-plugin/server/utils/queries';
import { extractIndexNameFromBackingIndex } from '../../../common/utils';
import { DEFAULT_DATASET_TYPE } from '../../../common/constants';
import { _IGNORED } from '../../../common/es_fields';
import { DataStreamType } from '../../../common/types';
import { createDatasetQualityESClient } from '../../utils';

export async function getNonAggregatableDataStreams({
esClient,
type = DEFAULT_DATASET_TYPE,
types,
start,
end,
dataStream,
}: {
esClient: ElasticsearchClient;
type?: DataStreamType;
types: DataStreamType[];
start: number;
end: number;
dataStream?: string;
}) {
const datasetQualityESClient = createDatasetQualityESClient(esClient);

const dataStreamTypes = types.map((type) => `${type}-*-*`).join(',');

const response = await datasetQualityESClient.fieldCaps({
index: dataStream ?? `${type}-*-*`,
index: dataStream ?? dataStreamTypes,
fields: [_IGNORED],
index_filter: {
...rangeQuery(start, end)[0],
@@ -16,7 +16,7 @@ import {
DegradedFieldResponse,
DatasetUserPrivileges,
} from '../../../common/api_types';
import { rangeRt, typeRt } from '../../types/default_api_types';
import { rangeRt, typeRt, typesRt } from '../../types/default_api_types';
import { createDatasetQualityServerRoute } from '../create_datasets_quality_server_route';
import { datasetQualityPrivileges } from '../../services';
import { getDataStreamDetails, getDataStreamSettings } from './get_data_stream_details';

@@ -30,7 +30,7 @@ const statsRoute = createDatasetQualityServerRoute({
endpoint: 'GET /internal/dataset_quality/data_streams/stats',
params: t.type({
query: t.intersection([
typeRt,
t.type({ types: typesRt }),
t.partial({
datasetQuery: t.string,
}),

@@ -59,6 +59,7 @@ const statsRoute = createDatasetQualityServerRoute({
const privilegedDataStreams = items.filter((stream) => {
return stream.userPrivileges.canMonitor;
});

const dataStreamsStats = await getDataStreamsStats({
esClient,
dataStreams: privilegedDataStreams.map((stream) => stream.name),

@@ -116,7 +117,7 @@ const nonAggregatableDatasetsRoute = createDatasetQualityServerRoute({
params: t.type({
query: t.intersection([
rangeRt,
typeRt,
t.type({ types: typesRt }),
t.partial({
dataStream: t.string,
}),

@@ -131,11 +132,36 @@ const nonAggregatableDatasetsRoute = createDatasetQualityServerRoute({

const esClient = coreContext.elasticsearch.client.asCurrentUser;

return await getNonAggregatableDataStreams({
esClient,
...params.query,
});
},
});

const nonAggregatableDatasetRoute = createDatasetQualityServerRoute({
endpoint: 'GET /internal/dataset_quality/data_streams/{dataStream}/non_aggregatable',
params: t.type({
path: t.type({
dataStream: t.string,
}),
query: t.intersection([rangeRt, typeRt]),
}),
options: {
tags: [],
},
async handler(resources): Promise<NonAggregatableDatasets> {
const { context, params } = resources;
const coreContext = await context.core;

const esClient = coreContext.elasticsearch.client.asCurrentUser;

await datasetQualityPrivileges.throwIfCannotReadDataset(esClient, params.query.type);

return await getNonAggregatableDataStreams({
esClient,
...params.query,
types: [params.query.type],
});
},
});

@@ -230,6 +256,7 @@ export const dataStreamsRouteRepository = {
...statsRoute,
...degradedDocsRoute,
...nonAggregatableDatasetsRoute,
...nonAggregatableDatasetRoute,
...degradedFieldsRoute,
...dataStreamDetailsRoute,
...dataStreamSettingsRoute,
@@ -9,16 +9,13 @@ import { Logger } from '@kbn/core/server';
import { PackageClient } from '@kbn/fleet-plugin/server';
import { PackageNotFoundError } from '@kbn/fleet-plugin/server/errors';
import { PackageListItem, RegistryDataStream } from '@kbn/fleet-plugin/common';
import { DEFAULT_DATASET_TYPE } from '../../../common/constants';
import { DataStreamType } from '../../../common/types';
import { IntegrationType } from '../../../common/api_types';

export async function getIntegrations(options: {
packageClient: PackageClient;
logger: Logger;
type?: DataStreamType;
}): Promise<IntegrationType[]> {
const { packageClient, logger, type = DEFAULT_DATASET_TYPE } = options;
const { packageClient, logger } = options;

const packages = await packageClient.getPackages();
const installedPackages = packages.filter((p) => p.status === 'installed');

@@ -29,7 +26,7 @@ export async function getIntegrations(options: {
title: p.title,
version: p.version,
icons: p.icons,
datasets: await getDatasets({ packageClient, logger, pkg: p, type }),
datasets: await getDatasets({ packageClient, logger, pkg: p }),
}))
);

@@ -40,9 +37,8 @@ const getDatasets = async (options: {
packageClient: PackageClient;
logger: Logger;
pkg: PackageListItem;
type: DataStreamType;
}) => {
const { packageClient, logger, pkg, type } = options;
const { packageClient, logger, pkg } = options;

return (
(await fetchDatasets({

@@ -50,7 +46,6 @@ const getDatasets = async (options: {
logger,
name: pkg.name,
version: pkg.version,
type,
})) ?? getDatasetsReadableName(pkg.data_streams ?? [])
);
};

@@ -60,16 +55,13 @@ const fetchDatasets = async (options: {
logger: Logger;
name: string;
version: string;
type: DataStreamType;
}) => {
try {
const { packageClient, name, version, type } = options;
const { packageClient, name, version } = options;

const pkg = await packageClient.getPackage(name, version);

return getDatasetsReadableName(
(pkg.packageInfo.data_streams ?? []).filter((ds) => ds.type === type)
);
return getDatasetsReadableName(pkg.packageInfo.data_streams ?? []);
} catch (error) {
// Custom integration
if (error instanceof PackageNotFoundError) {
@@ -7,28 +7,24 @@

import * as t from 'io-ts';
import { IntegrationType, IntegrationDashboardsResponse } from '../../../common/api_types';
import { typeRt } from '../../types/default_api_types';
import { createDatasetQualityServerRoute } from '../create_datasets_quality_server_route';
import { getIntegrations } from './get_integrations';
import { getIntegrationDashboards } from './get_integration_dashboards';

const integrationsRoute = createDatasetQualityServerRoute({
endpoint: 'GET /internal/dataset_quality/integrations',
params: t.type({
query: typeRt,
}),
options: {
tags: [],
},
async handler(resources): Promise<{
integrations: IntegrationType[];
}> {
const { params, plugins, logger } = resources;
const { plugins, logger } = resources;

const fleetPluginStart = await plugins.fleet.start();
const packageClient = fleetPluginStart.packageService.asInternalUser;

const integrations = await getIntegrations({ packageClient, logger, ...params.query });
const integrations = await getIntegrations({ packageClient, logger });

return { integrations };
},
@@ -7,12 +7,24 @@

import { isoToEpochRt } from '@kbn/io-ts-utils';
import * as t from 'io-ts';
import { dataStreamTypesRt } from '../../common/types';
import { DataStreamType, dataStreamTypesRt } from '../../common/types';

export const typeRt = t.partial({
export const typeRt = t.type({
type: dataStreamTypesRt,
});

export const typesRt = new t.Type<DataStreamType[], DataStreamType[], unknown>(
'typesRt',
(input: unknown): input is DataStreamType[] =>
(typeof input === 'string' && input.split(',').every((value) => dataStreamTypesRt.is(value))) ||
(Array.isArray(input) && input.every((value) => dataStreamTypesRt.is(value))),
(input, context) =>
typeof input === 'string' && input.split(',').every((value) => dataStreamTypesRt.is(value))
? t.success(input.split(',') as DataStreamType[])
: t.failure(input, context),
t.identity
);

export const rangeRt = t.type({
start: isoToEpochRt,
end: isoToEpochRt,
@@ -55,7 +55,8 @@
"@kbn/server-route-repository-utils",
"@kbn/core-analytics-browser",
"@kbn/core-lifecycle-browser",
"@kbn/core-notifications-browser"
"@kbn/core-notifications-browser",
"@kbn/rison"
],
"exclude": [
"target/**/*"
@@ -14608,7 +14608,6 @@
"xpack.dataQuality.Initializing": "Page Initialisation de la qualité de l'ensemble de données",
"xpack.dataQuality.name": "Qualité de l’ensemble de données",
"xpack.datasetQuality.actionsColumnName": "Actions",
"xpack.datasetQuality.appDescription": "Surveillez la qualité du jeu de données pour les flux de données {logsPattern} suivant le {dsNamingSchemeLink}.",
"xpack.datasetQuality.appDescription.dsNamingSchemeLinkText": "Schéma de dénomination du flux de données",
"xpack.datasetQuality.appTitle": "Qualité de l’ensemble de données",
"xpack.datasetQuality.betaBadgeDescription": "Cette fonctionnalité est actuellement en version bêta. Nous aimerions beaucoup savoir si vous avez des commentaires ou si vous rencontrez des bugs. Veuillez ouvrir un dossier d'assistance et/ou consulter notre forum de discussion.",
@@ -14597,7 +14597,6 @@
"xpack.dataQuality.Initializing": "データセット品質ページを初期化中",
"xpack.dataQuality.name": "データセット品質",
"xpack.datasetQuality.actionsColumnName": "アクション",
"xpack.datasetQuality.appDescription": "{dsNamingSchemeLink}に従う{logsPattern}データストリームのデータセット品質を監視します。",
"xpack.datasetQuality.appDescription.dsNamingSchemeLinkText": "データストリーム命名スキーム",
"xpack.datasetQuality.appTitle": "データセット品質",
"xpack.datasetQuality.betaBadgeDescription": "現在、この機能はベータです。バグが発生した場合やフィードバックがある場合は、お問い合わせください。サポート問題をオープンするか、ディスカッションフォーラムをご覧ください。",
@@ -14620,7 +14620,6 @@
"xpack.dataQuality.Initializing": "正在初始化“数据集质量”页面",
"xpack.dataQuality.name": "数据集质量",
"xpack.datasetQuality.actionsColumnName": "操作",
"xpack.datasetQuality.appDescription": "监测跟随 {dsNamingSchemeLink} 的 {logsPattern} 数据流的数据集质量。",
"xpack.datasetQuality.appDescription.dsNamingSchemeLinkText": "数据流命名方案",
"xpack.datasetQuality.appTitle": "数据集质量",
"xpack.datasetQuality.betaBadgeDescription": "此功能当前为公测版。如果遇到任何错误或有任何反馈，我们乐于倾听您的意见。请报告支持问题和/或访问我们的讨论论坛。",
@@ -22,8 +22,7 @@ export default function ApiTest({ getService }: FtrProviderContext) {
endpoint: 'GET /internal/dataset_quality/data_streams/stats',
params: {
query: {
type: 'logs',
datasetQuery: '-',
types: ['logs'],
},
},
});
@@ -39,11 +39,6 @@ export default function ApiTest({ getService }: FtrProviderContext) {

return await datasetQualityApiClient[user]({
endpoint: 'GET /internal/dataset_quality/integrations',
params: {
query: {
type: 'logs',
},
},
});
}

@@ -53,12 +48,13 @@ export default function ApiTest({ getService }: FtrProviderContext) {
await Promise.all(integrationPackages.map((pkg) => installPackage({ supertest, pkg })));
});

it('returns only log based integrations and its datasets map', async () => {
it('returns all installed integrations and its datasets map', async () => {
const resp = await callApiAs();

expect(resp.body.integrations.map((integration) => integration.name)).to.eql([
'apm',
'endpoint',
'synthetics',
'system',
]);