mirror of
https://github.com/elastic/kibana.git
synced 2025-04-23 17:28:26 -04:00
[Dataset quality] Added malformed docs column to table (#172462)
Closes https://github.com/elastic/kibana/issues/170220.
### Changes
- Added a new endpoint, `GET /internal/dataset_quality/data_streams/malformed_docs`, to query malformed docs in Elasticsearch (a sketch of the request/response shape follows this list).
- Decoded the API responses in `data_streams_stats_client.ts`, as suggested by @tonyghiani in https://github.com/elastic/kibana/pull/171777.
- Added a new synthtrace scenario, malformed logs, which ingests documents that end up with `_ignored` properties.
- Added the Malformed Docs column to `columns.tsx`.
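
As a quick reference, here is a minimal sketch of the request/response shape of the new endpoint and of the row data the table consumes. The shapes mirror the `malformedDocsRt` / `getDataStreamsMalformedDocsStatsResponseRt` types added in this PR; the dataset names assume a `default` namespace and the percentages are illustrative, not real output.

```ts
// GET /internal/dataset_quality/data_streams/malformed_docs?type=logs&start=<ISO date>&end=<ISO date>
// responds with { malformedDocs: Array<{ dataset: string; percentage: number }> }.

// Shape mirrored from `malformedDocsRt` in x-pack/plugins/dataset_quality/common/api_types.ts.
interface MalformedDocs {
  dataset: string; // data stream name, e.g. 'logs-synth.2-default'
  percentage: number; // share of docs in the data stream carrying the `_ignored` property
}

// Hypothetical decoded result, as `getDataStreamsMalformedStats` would return it (sample values made up):
const example: MalformedDocs[] = [
  { dataset: 'logs-synth.1-default', percentage: 0 }, // no malformed docs
  { dataset: 'logs-synth.2-default', percentage: 2 }, // a small share of docs carry `_ignored`
  { dataset: 'logs-synth.3-default', percentage: 10 }, // a larger share of docs carry `_ignored`
];

// `useDatasetQualityTable` merges these percentages into the table rows by dataset name,
// and the new Malformed Docs column renders them next to a quality indicator dot.
console.log(example);
```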
#### Demo
07a76f13-a837-4621-9366-63053a51b489
### How to test?
1. Go to
https://yngrdyn-deploy-kiban-pr172462.kb.us-west2.gcp.elastic-cloud.com/app/observability-log-explorer/dataset-quality
2. The `Malformed docs` column should be present and sortable
---------
Co-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com>
parent c0c8439fe8
commit df0a21cac1
26 changed files with 724 additions and 33 deletions
packages/kbn-apm-synthtrace/src/scenarios/malformed_logs.ts (new file, 145 lines)

```ts
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import { LogDocument, log, generateShortId, generateLongId } from '@kbn/apm-synthtrace-client';
import { Scenario } from '../cli/scenario';
import { withClient } from '../lib/utils/with_client';

const MORE_THAN_1024_CHARS =
  'Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum. Sed ut perspiciatis unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam, eaque ipsa quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt explicabo. Nemo enim ipsam voluptatem quia voluptas sit aspernatur aut odit aut fugit, sed quia consequuntur magni dolores eos qui ratione voluptatem sequi nesciunt. Neque porro quisquam est, qui dolorem ipsum quia dolor sit amet, consectetur, adipisci velit, sed quia non numquam eius modi tempora incidunt ut labore et dolore magnam aliquam quaerat voluptatem. Ut enim ad minima veniam, quis nostrum exercitationem ullam corporis suscipit laboriosam, nisi ut aliquid ex ea commodi consequatur? Quis autem vel eum iure reprehenderit qui in ea voluptate velit esse quam nihil molestiae consequatur, vel illum qui dolorem eum fugiat quo voluptas nulla pariatur?';

const scenario: Scenario<LogDocument> = async (runOptions) => {
  return {
    generate: ({ range, clients: { logsEsClient } }) => {
      const { logger } = runOptions;

      // Logs Data logic
      const MESSAGE_LOG_LEVELS = [
        { message: 'A simple log', level: 'info' },
        {
          message: 'Another log message',
          level: 'debug',
        },
        { message: 'Error with certificate: "ca_trusted_fingerprint"', level: 'error' },
      ];
      const CLOUD_PROVIDERS = ['gcp', 'aws', 'azure'];
      const CLOUD_REGION = ['eu-central-1', 'us-east-1', 'area-51'];

      // "ignore_above": 1024 in mapping
      const MALFORMED_LOG_LEVEL = MORE_THAN_1024_CHARS;

      // "ignore_above": 1024 in mapping
      const MALFORMED_CLOUD_REGION = MORE_THAN_1024_CHARS;

      const CLUSTER = [
        { clusterId: generateShortId(), clusterName: 'synth-cluster-1' },
        { clusterId: generateShortId(), clusterName: 'synth-cluster-2' },
        { clusterId: generateShortId(), clusterName: 'synth-cluster-3' },
      ];

      const SERVICE_NAMES = Array(3)
        .fill(null)
        .map((_, idx) => `synth-service-${idx}`);

      const datasetSynth1Logs = (timestamp: number) => {
        const index = Math.floor(Math.random() * 3);
        return log
          .create()
          .dataset('synth.1')
          .message(MESSAGE_LOG_LEVELS[index].message as string)
          .logLevel(MESSAGE_LOG_LEVELS[index].level)
          .service(SERVICE_NAMES[index])
          .defaults({
            'trace.id': generateShortId(),
            'agent.name': 'synth-agent',
            'orchestrator.cluster.name': CLUSTER[index].clusterName,
            'orchestrator.cluster.id': CLUSTER[index].clusterId,
            'orchestrator.resource.id': generateShortId(),
            'cloud.provider': CLOUD_PROVIDERS[Math.floor(Math.random() * 3)],
            'cloud.region': CLOUD_REGION[index],
            'cloud.availability_zone': `${CLOUD_REGION[index]}a`,
            'cloud.project.id': generateShortId(),
            'cloud.instance.id': generateShortId(),
            'log.file.path': `/logs/${generateLongId()}/error.txt`,
          })
          .timestamp(timestamp);
      };

      const datasetSynth2Logs = (i: number, timestamp: number) => {
        const index = Math.floor(Math.random() * 3);
        const isMalformed = i % 60 === 0;
        return log
          .create()
          .dataset('synth.2')
          .message(MESSAGE_LOG_LEVELS[index].message as string)
          .logLevel(isMalformed ? MALFORMED_LOG_LEVEL : MESSAGE_LOG_LEVELS[index].level)
          .service(SERVICE_NAMES[index])
          .defaults({
            'trace.id': generateShortId(),
            'agent.name': 'synth-agent',
            'orchestrator.cluster.name': CLUSTER[index].clusterName,
            'orchestrator.cluster.id': CLUSTER[index].clusterId,
            'orchestrator.resource.id': generateShortId(),
            'cloud.provider': CLOUD_PROVIDERS[Math.floor(Math.random() * 3)],
            'cloud.region': CLOUD_REGION[index],
            'cloud.availability_zone': `${CLOUD_REGION[index]}a`,
            'cloud.project.id': generateShortId(),
            'cloud.instance.id': generateShortId(),
            'log.file.path': `/logs/${generateLongId()}/error.txt`,
          })
          .timestamp(timestamp);
      };

      const datasetSynth3Logs = (i: number, timestamp: number) => {
        const index = Math.floor(Math.random() * 3);
        const isMalformed = i % 10 === 0;
        return log
          .create()
          .dataset('synth.3')
          .message(MESSAGE_LOG_LEVELS[index].message as string)
          .logLevel(isMalformed ? MALFORMED_LOG_LEVEL : MESSAGE_LOG_LEVELS[index].level)
          .service(SERVICE_NAMES[index])
          .defaults({
            'trace.id': generateShortId(),
            'agent.name': 'synth-agent',
            'orchestrator.cluster.name': CLUSTER[index].clusterName,
            'orchestrator.cluster.id': CLUSTER[index].clusterId,
            'orchestrator.resource.id': generateShortId(),
            'cloud.provider': CLOUD_PROVIDERS[Math.floor(Math.random() * 3)],
            'cloud.region': CLOUD_REGION[index],
            'cloud.availability_zone': isMalformed
              ? MALFORMED_CLOUD_REGION
              : `${CLOUD_REGION[index]}a`,
            'cloud.project.id': generateShortId(),
            'cloud.instance.id': generateShortId(),
            'log.file.path': `/logs/${generateLongId()}/error.txt`,
          })
          .timestamp(timestamp);
      };

      const logs = range
        .interval('1m')
        .rate(1)
        .generator((timestamp) => {
          return Array(200)
            .fill(0)
            .flatMap((_, index) => [
              datasetSynth1Logs(timestamp),
              datasetSynth2Logs(index, timestamp),
              datasetSynth3Logs(index, timestamp),
            ]);
        });

      return withClient(
        logsEsClient,
        logger.perf('generating_logs', () => logs)
      );
    },
  };
};

export default scenario;
```
```diff
@@ -28,7 +28,7 @@ pageLoadAssetSize:
   dashboard: 82025
   dashboardEnhanced: 65646
   data: 454087
-  datasetQuality: 35000
+  datasetQuality: 50624
   dataViewEditor: 28082
   dataViewFieldEditor: 27000
   dataViewManagement: 5100
```
x-pack/plugins/dataset_quality/common/api_types.ts (new file, 67 lines)

```ts
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import * as rt from 'io-ts';

export const datasetStatRt = rt.intersection([
  rt.type({
    name: rt.string,
  }),
  rt.partial({
    size: rt.string,
    sizeBytes: rt.number,
    lastActivity: rt.number,
    integration: rt.string,
  }),
]);

export const integrationIconRt = rt.intersection([
  rt.type({
    path: rt.string,
    src: rt.string,
  }),
  rt.partial({
    title: rt.string,
    size: rt.string,
    type: rt.string,
  }),
]);

export const integrationRt = rt.intersection([
  rt.type({
    name: rt.string,
  }),
  rt.partial({
    title: rt.string,
    version: rt.string,
    icons: rt.array(integrationIconRt),
  }),
]);

export const malformedDocsRt = rt.type({
  dataset: rt.string,
  percentage: rt.number,
});

export type MalformedDocs = rt.TypeOf<typeof malformedDocsRt>;

export const getDataStreamsStatsResponseRt = rt.exact(
  rt.intersection([
    rt.type({
      dataStreamsStats: rt.array(datasetStatRt),
    }),
    rt.type({
      integrations: rt.array(integrationRt),
    }),
  ])
);

export const getDataStreamsMalformedDocsStatsResponseRt = rt.exact(
  rt.type({
    malformedDocs: rt.array(malformedDocsRt),
  })
);
```
```diff
@@ -6,4 +6,6 @@
  */

 export const DATASET_QUALITY_APP_ID = 'dataset_quality';
 export const DATA_STREAMS_STATS_URL = '/internal/dataset_quality/data_streams/stats';
+
+export const POOR_QUALITY_MINIMUM_PERCENTAGE = 3;
+export const DEGRADED_QUALITY_MINIMUM_PERCENTAGE = 0;
```
```diff
@@ -15,6 +15,7 @@ export class DataStreamStat {
   sizeBytes?: DataStreamStatType['size_bytes'];
   lastActivity?: DataStreamStatType['last_activity'];
   integration?: IntegrationType;
+  malformedDocs?: number;

   private constructor(dataStreamStat: DataStreamStat) {
     this.name = dataStreamStat.name;
@@ -23,6 +24,7 @@ export class DataStreamStat {
     this.sizeBytes = dataStreamStat.sizeBytes;
     this.lastActivity = dataStreamStat.lastActivity;
     this.integration = dataStreamStat.integration;
+    this.malformedDocs = dataStreamStat.malformedDocs;
   }

   public static create(dataStreamStat: DataStreamStatType) {
```
@@ -0,0 +1,22 @@ (new file)

```ts
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { MalformedDocsStatType } from './types';

export class MalformedDocsStat {
  dataset: MalformedDocsStatType['dataset'];
  percentage: MalformedDocsStatType['percentage'];

  private constructor(malformedDocsStat: MalformedDocsStat) {
    this.dataset = malformedDocsStat.dataset;
    this.percentage = malformedDocsStat.percentage;
  }

  public static create(malformedDocsStat: MalformedDocsStatType) {
    return new MalformedDocsStat(malformedDocsStat);
  }
}
```
```diff
@@ -14,5 +14,15 @@ export type GetDataStreamsStatsQuery = GetDataStreamsStatsParams['query'];
 export type GetDataStreamsStatsResponse =
   APIReturnType<`GET /internal/dataset_quality/data_streams/stats`>;
 export type DataStreamStatServiceResponse = DataStreamStat[];
-export type DataStreamStatType = GetDataStreamsStatsResponse['dataStreamsStats'][0];
 export type IntegrationType = GetDataStreamsStatsResponse['integrations'][0];
+export type DataStreamStatType = GetDataStreamsStatsResponse['dataStreamsStats'][0] & {
+  integration?: IntegrationType;
+};
+
+export type GetDataStreamsMalformedDocsStatsParams =
+  APIClientRequestParamsOf<`GET /internal/dataset_quality/data_streams/malformed_docs`>['params'];
+export type GetDataStreamsMalformedDocsStatsQuery = GetDataStreamsMalformedDocsStatsParams['query'];
+export type GetDataStreamsMalformedDocsStatsResponse =
+  APIReturnType<`GET /internal/dataset_quality/data_streams/malformed_docs`>;
+export type DataStreamMalformedDocsStatServiceResponse = MalformedDocsStatType[];
+export type MalformedDocsStatType = GetDataStreamsMalformedDocsStatsResponse['malformedDocs'][0];
```
x-pack/plugins/dataset_quality/common/es_fields/index.ts (new file, 12 lines)

```ts
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

export const _IGNORED = '_ignored';

export const DATA_STREAM_DATASET = 'data_stream.dataset';
export const DATA_STREAM_NAMESPACE = 'data_stream.namespace';
export const DATA_STREAM_TYPE = 'data_stream.type';
```
```diff
@@ -6,13 +6,28 @@
  */

 import React from 'react';
-import { EuiBasicTableColumn, EuiFlexGroup, EuiFlexItem, EuiIcon } from '@elastic/eui';
+import {
+  EuiBasicTableColumn,
+  EuiCode,
+  EuiFlexGroup,
+  EuiFlexItem,
+  EuiIcon,
+  EuiSkeletonRectangle,
+  EuiText,
+  EuiToolTip,
+} from '@elastic/eui';
 import { i18n } from '@kbn/i18n';
 import { PackageIcon } from '@kbn/fleet-plugin/public';
 import { ES_FIELD_TYPES, KBN_FIELD_TYPES } from '@kbn/field-types';
 import { FieldFormatsStart } from '@kbn/field-formats-plugin/public';
+import { FormattedMessage } from '@kbn/i18n-react';
+import {
+  DEGRADED_QUALITY_MINIMUM_PERCENTAGE,
+  POOR_QUALITY_MINIMUM_PERCENTAGE,
+} from '../../../common/constants';
 import { DataStreamStat } from '../../../common/data_streams_stats/data_stream_stat';
 import loggingIcon from '../../icons/logging.svg';
+import { QualityIndicator, QualityPercentageIndicator } from '../quality_indicator';

 const nameColumnName = i18n.translate('xpack.datasetQuality.nameColumnName', {
   defaultMessage: 'Dataset Name',
@@ -22,14 +37,62 @@ const sizeColumnName = i18n.translate('xpack.datasetQuality.sizeColumnName', {
   defaultMessage: 'Size',
 });

+const malformedDocsColumnName = i18n.translate('xpack.datasetQuality.malformedDocsColumnName', {
+  defaultMessage: 'Malformed Docs',
+});
+
+const malformedDocsDescription = (minimimPercentage: number) =>
+  i18n.translate('xpack.datasetQuality.malformedDocsQualityDescription', {
+    defaultMessage: 'greater than {minimimPercentage}%',
+    values: { minimimPercentage },
+  });
+
+const malformedDocsColumnTooltip = (
+  <FormattedMessage
+    id="xpack.datasetQuality.malformedDocsColumnTooltip"
+    defaultMessage="The percentage of malformed documents -documents with the {ignoredProperty} property- in your dataset. {visualQueue}"
+    values={{
+      ignoredProperty: (
+        <EuiCode language="json" transparentBackground>
+          _ignored
+        </EuiCode>
+      ),
+      visualQueue: (
+        <EuiFlexGroup direction="column" gutterSize="xs">
+          <EuiFlexItem>
+            <EuiText>
+              <QualityIndicator quality="poor" />
+              {` ${malformedDocsDescription(POOR_QUALITY_MINIMUM_PERCENTAGE)}`}
+            </EuiText>
+          </EuiFlexItem>
+          <EuiFlexItem>
+            <EuiText>
+              <QualityIndicator quality="degraded" />
+              {` ${malformedDocsDescription(DEGRADED_QUALITY_MINIMUM_PERCENTAGE)}`}
+            </EuiText>
+          </EuiFlexItem>
+          <EuiFlexItem>
+            <EuiText>
+              <QualityIndicator quality="good" />
+              {' 0%'}
+            </EuiText>
+          </EuiFlexItem>
+        </EuiFlexGroup>
+      ),
+    }}
+  />
+);
+
 const lastActivityColumnName = i18n.translate('xpack.datasetQuality.lastActivityColumnName', {
   defaultMessage: 'Last Activity',
 });

 export const getDatasetQualitTableColumns = ({
   fieldFormats,
+  loadingMalformedStats,
 }: {
   fieldFormats: FieldFormatsStart;
+  loadingMalformedStats?: boolean;
 }): Array<EuiBasicTableColumn<DataStreamStat>> => {
   return [
     {
@@ -64,6 +127,34 @@ export const getDatasetQualitTableColumns = ({
       field: 'size',
       sortable: true,
     },
+    {
+      name: (
+        <EuiToolTip content={malformedDocsColumnTooltip}>
+          <span>
+            {`${malformedDocsColumnName} `}
+            <EuiIcon size="s" color="subdued" type="questionInCircle" className="eui-alignTop" />
+          </span>
+        </EuiToolTip>
+      ),
+      field: 'malformedDocs',
+      sortable: true,
+      render: (_, dataStreamStat: DataStreamStat) => (
+        <EuiSkeletonRectangle
+          width="50px"
+          height="20px"
+          borderRadius="m"
+          isLoading={loadingMalformedStats}
+          contentAriaLabel="Example description"
+        >
+          <EuiFlexGroup alignItems="center" gutterSize="s">
+            <EuiFlexItem grow={false}>
+              <QualityPercentageIndicator percentage={dataStreamStat.malformedDocs} />
+            </EuiFlexItem>
+            <EuiFlexItem grow={false}>{`${dataStreamStat.malformedDocs}%`}</EuiFlexItem>
+          </EuiFlexGroup>
+        </EuiSkeletonRectangle>
+      ),
+    },
     {
       name: lastActivityColumnName,
       field: 'lastActivity',
```
```diff
@@ -5,4 +5,5 @@
  * 2.0.
  */

 export * from './types';
 export * from './indicator';
+export * from './percentage_indicator';
```
@@ -0,0 +1,21 @@ (new file)

```tsx
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { EuiIcon, useEuiTheme } from '@elastic/eui';
import React from 'react';

export function QualityIndicator({ quality }: { quality: 'good' | 'degraded' | 'poor' }) {
  const { euiTheme } = useEuiTheme();

  const qualityColors = {
    poor: euiTheme.colors.dangerText,
    degraded: euiTheme.colors.warningText,
    good: euiTheme.colors.successText,
  };

  return <EuiIcon type="dot" color={qualityColors[quality]} />;
}
```
@@ -0,0 +1,29 @@ (new file)

```tsx
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { isNil } from 'lodash';
import React from 'react';
import {
  DEGRADED_QUALITY_MINIMUM_PERCENTAGE,
  POOR_QUALITY_MINIMUM_PERCENTAGE,
} from '../../../common/constants';
import { QualityIndicator } from './indicator';

export function QualityPercentageIndicator({ percentage }: { percentage?: number }) {
  if (isNil(percentage)) {
    return <></>;
  }

  const quality =
    percentage > POOR_QUALITY_MINIMUM_PERCENTAGE
      ? 'poor'
      : percentage > DEGRADED_QUALITY_MINIMUM_PERCENTAGE
      ? 'degraded'
      : 'good';

  return <QualityIndicator quality={quality} />;
}
```
```diff
@@ -5,14 +5,14 @@
  * 2.0.
  */

-import { orderBy } from 'lodash';
-import React, { useState, useMemo, useCallback } from 'react';
 import { useFetcher } from '@kbn/observability-shared-plugin/public';
-import { tableSummaryAllText, tableSummaryOfText } from '../../common/translations';
+import { find, orderBy } from 'lodash';
+import React, { useCallback, useMemo, useState } from 'react';
 import { DataStreamStat } from '../../common/data_streams_stats/data_stream_stat';
+import { tableSummaryAllText, tableSummaryOfText } from '../../common/translations';
 import { getDatasetQualitTableColumns } from '../components/dataset_quality/columns';
 import { useDatasetQualityContext } from '../components/dataset_quality/context';
-import { useKibanaContextForPlugin } from '../utils';
+import { getDefaultTimeRange, useKibanaContextForPlugin } from '../utils';

 const DEFAULT_SORT_FIELD = 'title';
 const DEFAULT_SORT_DIRECTION = 'desc';
@@ -32,10 +32,23 @@ export const useDatasetQualityTable = () => {
   const [sortField, setSortField] = useState<SORT_FIELD>(DEFAULT_SORT_FIELD);
   const [sortDirection, setSortDirection] = useState<DIRECTION>(DEFAULT_SORT_DIRECTION);

+  const defaultTimeRange = getDefaultTimeRange();
+
   const { dataStreamsStatsServiceClient: client } = useDatasetQualityContext();
   const { data = [], loading } = useFetcher(async () => client.getDataStreamsStats(), []);
+  const { data: malformedStats = [], loading: loadingMalformedStats } = useFetcher(
+    async () =>
+      client.getDataStreamsMalformedStats({
+        start: defaultTimeRange.from,
+        end: defaultTimeRange.to,
+      }),
+    []
+  );

-  const columns = useMemo(() => getDatasetQualitTableColumns({ fieldFormats }), [fieldFormats]);
+  const columns = useMemo(
+    () => getDatasetQualitTableColumns({ fieldFormats, loadingMalformedStats }),
+    [fieldFormats, loadingMalformedStats]
+  );

   const pagination = {
     pageIndex,
@@ -63,10 +76,19 @@ export const useDatasetQualityTable = () => {

   const renderedItems = useMemo(() => {
     const overridenSortingField = sortingOverrides[sortField] || sortField;
-    const sortedItems = orderBy(data, overridenSortingField, sortDirection);
+    const mergedData = data.map((dataStream) => {
+      const malformedDocs = find(malformedStats, { dataset: dataStream.name });
+
+      return {
+        ...dataStream,
+        malformedDocs: malformedDocs?.percentage,
+      };
+    });
+
+    const sortedItems = orderBy(mergedData, overridenSortingField, sortDirection);

     return sortedItems.slice(pageIndex * pageSize, (pageIndex + 1) * pageSize);
-  }, [data, sortField, sortDirection, pageIndex, pageSize]);
+  }, [data, malformedStats, sortField, sortDirection, pageIndex, pageSize]);

   const resultsCount = useMemo(() => {
     const startNumberItemsOnPage = pageSize * pageIndex + (renderedItems.length ? 1 : 0);
@@ -84,5 +106,13 @@ export const useDatasetQualityTable = () => {
     );
   }, [data.length, pageIndex, pageSize, renderedItems.length]);

-  return { sort, onTableChange, pagination, renderedItems, columns, loading, resultsCount };
+  return {
+    sort,
+    onTableChange,
+    pagination,
+    renderedItems,
+    columns,
+    loading,
+    resultsCount,
+  };
 };
```
```diff
@@ -5,16 +5,22 @@
  * 2.0.
  */

-import { find, merge } from 'lodash';
 import { HttpStart } from '@kbn/core/public';
-import { DataStreamStat } from '../../../common/data_streams_stats/data_stream_stat';
-import { DATA_STREAMS_STATS_URL } from '../../../common/constants';
+import { decodeOrThrow } from '@kbn/io-ts-utils';
+import { find, merge } from 'lodash';
+import {
+  getDataStreamsMalformedDocsStatsResponseRt,
+  getDataStreamsStatsResponseRt,
+} from '../../../common/api_types';
 import {
-  GetDataStreamsStatsError,
-  GetDataStreamsStatsResponse,
-  GetDataStreamsStatsQuery,
   DataStreamStatServiceResponse,
+  GetDataStreamsMalformedDocsStatsQuery,
+  GetDataStreamsMalformedDocsStatsResponse,
+  GetDataStreamsStatsError,
+  GetDataStreamsStatsQuery,
+  GetDataStreamsStatsResponse,
 } from '../../../common/data_streams_stats';
+import { DataStreamStat } from '../../../common/data_streams_stats/data_stream_stat';
 import { IDataStreamsStatsClient } from './types';

 export class DataStreamsStatsClient implements IDataStreamsStatsClient {
@@ -23,20 +29,54 @@ export class DataStreamsStatsClient implements IDataStreamsStatsClient {
   public async getDataStreamsStats(
     params: GetDataStreamsStatsQuery = { type: 'logs' }
   ): Promise<DataStreamStatServiceResponse> {
-    const { dataStreamsStats, integrations } = await this.http
-      .get<GetDataStreamsStatsResponse>(DATA_STREAMS_STATS_URL, {
+    const response = await this.http
+      .get<GetDataStreamsStatsResponse>('/internal/dataset_quality/data_streams/stats', {
         query: params,
       })
       .catch((error) => {
         throw new GetDataStreamsStatsError(`Failed to fetch data streams stats": ${error}`);
       });

+    const { dataStreamsStats, integrations } = decodeOrThrow(
+      getDataStreamsStatsResponseRt,
+      (message: string) =>
+        new GetDataStreamsStatsError(`Failed to decode data streams stats response: ${message}"`)
+    )(response);
+
     const mergedDataStreamsStats = dataStreamsStats.map((statsItem) => {
       const integration = find(integrations, { name: statsItem.integration });

-      return integration ? merge({}, statsItem, { integration }) : statsItem;
+      return merge({}, statsItem, { integration });
     });

     return mergedDataStreamsStats.map(DataStreamStat.create);
   }
+
+  public async getDataStreamsMalformedStats(params: GetDataStreamsMalformedDocsStatsQuery) {
+    const response = await this.http
+      .get<GetDataStreamsMalformedDocsStatsResponse>(
+        '/internal/dataset_quality/data_streams/malformed_docs',
+        {
+          query: {
+            ...params,
+            type: 'logs',
+          },
+        }
+      )
+      .catch((error) => {
+        throw new GetDataStreamsStatsError(
+          `Failed to fetch data streams malformed stats": ${error}`
+        );
+      });
+
+    const { malformedDocs } = decodeOrThrow(
+      getDataStreamsMalformedDocsStatsResponseRt,
+      (message: string) =>
+        new GetDataStreamsStatsError(
+          `Failed to decode data streams malformed docs stats response: ${message}"`
+        )
+    )(response);
+
+    return malformedDocs;
+  }
 }
```
```diff
@@ -7,7 +7,9 @@

 import { HttpStart } from '@kbn/core/public';
 import {
+  DataStreamMalformedDocsStatServiceResponse,
   DataStreamStatServiceResponse,
+  GetDataStreamsMalformedDocsStatsQuery,
   GetDataStreamsStatsQuery,
 } from '../../../common/data_streams_stats';

@@ -23,4 +25,7 @@ export interface DataStreamsStatsServiceStartDeps {

 export interface IDataStreamsStatsClient {
   getDataStreamsStats(params?: GetDataStreamsStatsQuery): Promise<DataStreamStatServiceResponse>;
+  getDataStreamsMalformedStats(
+    params?: GetDataStreamsMalformedDocsStatsQuery
+  ): Promise<DataStreamMalformedDocsStatServiceResponse>;
 }
```
@@ -0,0 +1,17 @@ (new file)

```ts
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

const ONE_DAY_IN_MILLISECONDS = 24 * 3600000;

export const getDefaultTimeRange = () => {
  const now = Date.now();

  return {
    from: new Date(now - ONE_DAY_IN_MILLISECONDS).toISOString(),
    to: new Date(now).toISOString(),
  };
};
```
```diff
@@ -6,3 +6,4 @@
  */

 export * from './use_kibana';
+export * from './default_timerange';
```
@@ -0,0 +1,100 @@ (new file)

```ts
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import type { ElasticsearchClient } from '@kbn/core/server';
import { rangeQuery, termQuery } from '@kbn/observability-plugin/server';
import { MalformedDocs } from '../../../common/api_types';
import {
  DATA_STREAM_DATASET,
  DATA_STREAM_NAMESPACE,
  DATA_STREAM_TYPE,
  _IGNORED,
} from '../../../common/es_fields';
import { DataStreamTypes } from '../../types/data_stream';
import { createDatasetQualityESClient, wildcardQuery } from '../../utils';

export async function getMalformedDocsPaginated(options: {
  esClient: ElasticsearchClient;
  type?: DataStreamTypes;
  start: number;
  end: number;
  datasetQuery?: string;
  after?: {
    dataset: string;
    namespace: string;
  };
  prevResults?: MalformedDocs[];
}): Promise<MalformedDocs[]> {
  const { esClient, type = 'logs', datasetQuery, start, end, after, prevResults = [] } = options;

  const datasetQualityESClient = createDatasetQualityESClient(esClient);

  const response = await datasetQualityESClient.search({
    index: '*',
    size: 0,
    query: {
      bool: {
        ...(datasetQuery
          ? {
              should: [
                ...wildcardQuery(DATA_STREAM_DATASET, datasetQuery),
                ...wildcardQuery(DATA_STREAM_NAMESPACE, datasetQuery),
              ],
              minimum_should_match: 1,
            }
          : {}),
        filter: [...rangeQuery(start, end), ...termQuery(DATA_STREAM_TYPE, type)],
      },
    },
    aggs: {
      datasets: {
        composite: {
          ...(after ? { after } : {}),
          size: 10000,
          sources: [
            { dataset: { terms: { field: DATA_STREAM_DATASET } } },
            { namespace: { terms: { field: DATA_STREAM_NAMESPACE } } },
          ],
        },
        aggs: {
          malformed: {
            filter: {
              exists: {
                field: _IGNORED,
              },
            },
          },
        },
      },
    },
  });

  const currMalformedDocs =
    response.aggregations?.datasets.buckets.map((bucket) => ({
      dataset: `${type}-${bucket.key.dataset}-${bucket.key.namespace}`,
      percentage: (bucket.malformed.doc_count * 100) / bucket.doc_count,
    })) ?? [];

  const malformedDocs = [...prevResults, ...currMalformedDocs];

  if (response.aggregations?.datasets.after_key) {
    return getMalformedDocsPaginated({
      esClient,
      type,
      start,
      end,
      datasetQuery,
      after: {
        dataset: response.aggregations?.datasets.after_key.dataset as string,
        namespace: response.aggregations?.datasets.after_key.namespace as string,
      },
      prevResults: malformedDocs,
    });
  }

  return malformedDocs;
}
```
```diff
@@ -7,11 +7,14 @@

 import * as t from 'io-ts';
 import { keyBy, merge, values } from 'lodash';
-import { dataStreamTypesRt } from '../../../common/data_streams';
-import { DataStreamsStatResponse } from '../../types/data_stream';
+import { DataStreamStat } from '../../types/data_stream';
+import { dataStreamTypesRt, rangeRt } from '../../types/default_api_types';
+import { Integration } from '../../types/integration';
 import { createDatasetQualityServerRoute } from '../create_datasets_quality_server_route';
 import { getDataStreams } from './get_data_streams';
 import { getDataStreamsStats } from './get_data_streams_stats';
+import { getMalformedDocsPaginated } from './get_malformed_docs';
+import { MalformedDocs } from '../../../common/api_types';

 const statsRoute = createDatasetQualityServerRoute({
   endpoint: 'GET /internal/dataset_quality/data_streams/stats',
@@ -26,7 +29,10 @@ const statsRoute = createDatasetQualityServerRoute({
   options: {
     tags: [],
   },
-  async handler(resources): Promise<DataStreamsStatResponse> {
+  async handler(resources): Promise<{
+    dataStreamsStats: DataStreamStat[];
+    integrations: Integration[];
+  }> {
     const { context, params, plugins } = resources;
     const coreContext = await context.core;

@@ -66,6 +72,40 @@
   },
 });

+const malformedDocsRoute = createDatasetQualityServerRoute({
+  endpoint: 'GET /internal/dataset_quality/data_streams/malformed_docs',
+  params: t.type({
+    query: t.intersection([
+      rangeRt,
+      dataStreamTypesRt,
+      t.partial({
+        datasetQuery: t.string,
+      }),
+    ]),
+  }),
+  options: {
+    tags: [],
+  },
+  async handler(resources): Promise<{
+    malformedDocs: MalformedDocs[];
+  }> {
+    const { context, params } = resources;
+    const coreContext = await context.core;
+
+    const esClient = coreContext.elasticsearch.client.asCurrentUser;
+
+    const malformedDocs = await getMalformedDocsPaginated({
+      esClient,
+      ...params.query,
+    });
+
+    return {
+      malformedDocs,
+    };
+  },
+});
+
 export const dataStreamsRouteRepository = {
   ...statsRoute,
+  ...malformedDocsRoute,
 };
```
```diff
@@ -7,12 +7,6 @@
 import { ByteSize } from '@elastic/elasticsearch/lib/api/types';
 import { Integration } from './integration';

-export interface DataStreamsStatResponse {
-  dataStreamsStats: DataStreamStat[];
-  integrations: Integration[];
-}
-
 export interface DataStreamStat {
   name: string;
   size?: ByteSize;
```
```diff
@@ -6,6 +6,7 @@
  */

 import * as t from 'io-ts';
+import { isoToEpochRt } from '@kbn/io-ts-utils';

 export const dataStreamTypesRt = t.partial({
   type: t.union([
@@ -16,3 +17,8 @@ export const dataStreamTypesRt = t.partial({
     t.literal('profiling'),
   ]),
 });
+
+export const rangeRt = t.type({
+  start: isoToEpochRt,
+  end: isoToEpochRt,
+});
```
@@ -0,0 +1,25 @@ (new file)

```ts
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { ESSearchRequest, InferSearchResponseOf } from '@kbn/es-types';
import { ElasticsearchClient } from '@kbn/core/server';

type DatasetQualityESSearchParams = ESSearchRequest & {
  size: number;
};

export type DatasetQualityESClient = ReturnType<typeof createDatasetQualityESClient>;

export function createDatasetQualityESClient(esClient: ElasticsearchClient) {
  return {
    async search<TDocument, TParams extends DatasetQualityESSearchParams>(
      searchParams: TParams
    ): Promise<InferSearchResponseOf<TDocument, TParams>> {
      return esClient.search<TDocument>(searchParams) as Promise<any>;
    },
  };
}
```
x-pack/plugins/dataset_quality/server/utils/index.ts (new file, 9 lines)

```ts
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

export { createDatasetQualityESClient } from './create_dataset_quality_es_client';
export { wildcardQuery } from './queries';
```
x-pack/plugins/dataset_quality/server/utils/queries.ts (new file, 19 lines)

```ts
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
import { QueryDslQueryContainer } from '@elastic/elasticsearch/lib/api/types';
import { isUndefinedOrNull } from '@kbn/observability-plugin/server/utils/queries';

export function wildcardQuery<T extends string>(
  field: T,
  value: string | undefined | null
): QueryDslQueryContainer[] {
  if (isUndefinedOrNull(value) || value === '') {
    return [];
  }

  return [{ wildcard: { [field]: `*${value}*` } }];
}
```
```diff
@@ -23,7 +23,10 @@
     "@kbn/kibana-react-plugin",
     "@kbn/i18n-react",
     "@kbn/field-formats-plugin",
-    "@kbn/field-types"
+    "@kbn/field-types",
+    "@kbn/io-ts-utils",
+    "@kbn/observability-plugin",
+    "@kbn/es-types"
   ],
   "exclude": [
     "target/**/*",
```
```diff
@@ -9,7 +9,7 @@ import { QueryDslQueryContainer } from '@elastic/elasticsearch/lib/api/types';
 import type * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
 import { fromKueryExpression, toElasticsearchQuery } from '@kbn/es-query';

-function isUndefinedOrNull(value: any): value is undefined | null {
+export function isUndefinedOrNull(value: any): value is undefined | null {
   return value === undefined || value === null;
 }
```