[Dataset quality] 🐞 Quality issues is no longer stuck + non-Aggregatable fixes (#209716)
Closes https://github.com/elastic/kibana/issues/209308.

### Notes
- The stuck loading state was caused by the changes introduced in https://github.com/elastic/kibana/pull/206758.
- The non-aggregatable bugs were long-running; since this functionality is tricky to test, I believe they have always been there.

### 🎥 Demo
For the demo I went through the upgrade scenario: I first created a cluster on 7.27.x and then upgraded to the latest 8.18.x. Then I performed a manual rollover for `logs-synth.3-default`. What you can see in the video is:
1. The loading state is no longer stuck on the dataset details page (e.g. `logs-synth.2-default`).
2. Non-aggregatability is calculated properly for `logs-synth.3-default`.

https://github.com/user-attachments/assets/fa097445-7f0a-4dcb-adae-27688e99bf3c
parent dc9ecd1684
commit 931c34e219
6 changed files with 210 additions and 75 deletions
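At a glance, the non-aggregatable fix groups the backing indices returned by field caps per data stream and only flags a dataset when every one of its backing indices is non-aggregatable. A minimal sketch of that idea, assuming a simplified `extractDataset` stand-in for the real `extractIndexNameFromBackingIndex` helper (the actual implementation is in the diff below):

```ts
// Sketch only: report a dataset as non-aggregatable when *all* of its backing
// indices are non-aggregatable. `extractDataset` is a simplified stand-in for
// extractIndexNameFromBackingIndex.
const extractDataset = (backingIndex: string): string =>
  backingIndex.replace(/^\.ds-/, '').replace(/-\d{4}\.\d{2}\.\d{2}-\d+$/, '');

const groupByDataset = (indices: string[]): Record<string, string[]> =>
  indices.reduce<Record<string, string[]>>((acc, index) => {
    const dataset = extractDataset(index);
    return { ...acc, [dataset]: [...(acc[dataset] ?? []), index] };
  }, {});

export const nonAggregatableDatasets = (
  allIndices: string[],
  nonAggregatableIndices: string[]
): string[] => {
  const all = groupByDataset(allIndices);
  const nonAggregatable = groupByDataset(nonAggregatableIndices);

  // A dataset is reported only when none of its backing indices is aggregatable.
  return Object.entries(nonAggregatable)
    .filter(([dataset, indices]) => (all[dataset]?.length ?? 0) <= indices.length)
    .map(([dataset]) => dataset);
};
```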
@@ -18,7 +18,6 @@ import {
  FailedDocsDetails,
  FailedDocsErrorsResponse,
  NonAggregatableDatasets,
  QualityIssue,
  UpdateFieldLimitResponse,
} from '../../../common/api_types';
import { indexNameToDataStreamParts } from '../../../common/utils';

@@ -30,7 +29,6 @@ import {
  DatasetQualityDetailsControllerContext,
  DatasetQualityDetailsControllerEvent,
  DatasetQualityDetailsControllerTypeState,
  QualityIssueType,
} from './types';
import { IntegrationType } from '../../../common/data_stream_details';

@@ -43,6 +41,11 @@ import {
   rolloverDataStreamFailedNotifier,
   updateFieldLimitFailedNotifier,
 } from './notifications';
+import {
+  filterIssues,
+  mapDegradedFieldsIssues,
+  mapFailedDocsIssues,
+} from '../../utils/quality_issues';

 export const createPureDatasetQualityDetailsControllerStateMachine = (
   initialContext: DatasetQualityDetailsControllerContext

@@ -566,18 +569,8 @@ export const createPureDatasetQualityDetailsControllerStateMachine = (
           qualityIssues: {
             ...context.qualityIssues,
             data: [
-              ...(context.qualityIssues.data ?? []).filter(
-                (field) => field.type !== 'failed'
-              ),
-              ...(event.data.timeSeries.length > 0
-                ? [
-                    {
-                      ...event.data,
-                      name: 'failedDocs',
-                      type: 'failed' as QualityIssueType,
-                    },
-                  ]
-                : []),
+              ...filterIssues(context.qualityIssues.data, 'failed'),
+              ...mapFailedDocsIssues(event.data),
             ],
           },
         }

@@ -601,13 +594,8 @@ export const createPureDatasetQualityDetailsControllerStateMachine = (
           qualityIssues: {
             ...context.qualityIssues,
             data: [
-              ...(context.qualityIssues.data ?? []).filter(
-                (field) => field.type !== 'degraded'
-              ),
-              ...(event.data.degradedFields.map((field) => ({
-                ...field,
-                type: 'degraded',
-              })) as QualityIssue[]),
+              ...filterIssues(context.qualityIssues.data, 'degraded'),
+              ...mapDegradedFieldsIssues(event.data?.degradedFields),
             ],
           },
         }
@@ -0,0 +1,30 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { DegradedField, FailedDocsDetails, QualityIssue } from '../../common/api_types';
import { QualityIssueType } from '../state_machines/dataset_quality_details_controller';

export function filterIssues(data: QualityIssue[] = [], type: QualityIssueType): QualityIssue[] {
  return data.filter((field) => field.type !== type);
}

export function mapDegradedFieldsIssues(degradedFields: DegradedField[] = []): QualityIssue[] {
  return degradedFields.map((field) => ({
    ...field,
    type: 'degraded',
  })) as QualityIssue[];
}

export function mapFailedDocsIssues(failedDocsDetails: FailedDocsDetails): QualityIssue[] {
  return [
    {
      ...failedDocsDetails,
      name: 'failedDocs',
      type: 'failed',
    },
  ];
}
@@ -1,54 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { ElasticsearchClient } from '@kbn/core-elasticsearch-server';
import { _IGNORED } from '../../../common/es_fields';
import { DataStreamType } from '../../../common/types';
import { extractIndexNameFromBackingIndex } from '../../../common/utils';
import { createDatasetQualityESClient } from '../../utils';
import { rangeQuery } from '../../utils/queries';

export async function getNonAggregatableDataStreams({
  esClient,
  types,
  start,
  end,
  dataStream,
}: {
  esClient: ElasticsearchClient;
  types: DataStreamType[];
  start: number;
  end: number;
  dataStream?: string;
}) {
  const datasetQualityESClient = createDatasetQualityESClient(esClient);

  const dataStreamTypes = types.map((type) => `${type}-*-*`).join(',');

  const response = await datasetQualityESClient.fieldCaps({
    index: dataStream ?? dataStreamTypes,
    fields: [_IGNORED],
    index_filter: {
      ...rangeQuery(start, end)[0],
    },
  });

  const ignoredField = response.fields._ignored?._ignored;

  const nonAggregatableIndices = ignoredField?.non_aggregatable_indices ?? [];

  const nonAggregatableDatasets = new Set(
    (Array.isArray(nonAggregatableIndices) ? nonAggregatableIndices : [nonAggregatableIndices]).map(
      extractIndexNameFromBackingIndex
    )
  );

  return {
    aggregatable: ignoredField?.aggregatable ?? true,
    datasets: Array.from(nonAggregatableDatasets),
  };
}
@@ -0,0 +1,76 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { extractNonAggregatableDatasets } from './extract_non_aggregatable_datasets';
import { Indices } from '@elastic/elasticsearch/lib/api/types';

describe('extractNonAggregatableDatasets', () => {
  it('Indices and nonAggregatableIndices are empty', async () => {
    const indices: Indices = [];
    const nonAggregatableIndices: Indices = [];
    const result = extractNonAggregatableDatasets(indices, nonAggregatableIndices);

    expect(result).toEqual([]);
  });

  it('nonAggregatableIndices is empty', async () => {
    const indices: Indices = ['.ds-logs-synth.2-default-2025.02.05-000001'];
    const nonAggregatableIndices: Indices = [];
    const result = extractNonAggregatableDatasets(indices, nonAggregatableIndices);

    expect(result).toEqual([]);
  });

  it('Indices is string', async () => {
    const indices: Indices = '.ds-logs-synth.2-default-2025.02.05-000001';
    const nonAggregatableIndices: Indices = ['.ds-logs-synth.2-default-2025.02.05-000001'];
    const result = extractNonAggregatableDatasets(indices, nonAggregatableIndices);

    expect(result).toEqual(['logs-synth.2-default']);
  });

  it('nonAggregatableIndices is string', async () => {
    const indices: Indices = ['.ds-logs-synth.2-default-2025.02.05-000001'];
    const nonAggregatableIndices: Indices = '.ds-logs-synth.2-default-2025.02.05-000001';
    const result = extractNonAggregatableDatasets(indices, nonAggregatableIndices);

    expect(result).toEqual(['logs-synth.2-default']);
  });

  it('Dataset is aggregatable', async () => {
    const indices: Indices = [
      '.ds-logs-synth.2-default-2025.02.05-000001',
      '.ds-logs-synth.2-default-2025.02.05-000002',
    ];
    const nonAggregatableIndices: Indices = ['.ds-logs-synth.2-default-2025.02.05-000001'];
    const result = extractNonAggregatableDatasets(indices, nonAggregatableIndices);

    expect(result).toEqual([]);
  });

  it('Some datasets are non-aggregatable', async () => {
    const indices: Indices = [
      '.ds-logs-synth.1-default-2025.02.05-000001',
      '.ds-logs-synth.2-default-2025.02.05-000001',
      '.ds-logs-synth.2-default-2025.02.05-000002',
      '.ds-logs-synth.3-default-2025.02.05-000001',
      '.ds-logs-synth.3-default-2025.02.05-000002',
      '.ds-logs-synth.3-default-2025.02.05-000003',
      '.ds-logs-synth.4-default-2025.02.05-000001',
    ];
    const nonAggregatableIndices: Indices = [
      '.ds-logs-synth.1-default-2025.02.05-000001',
      '.ds-logs-synth.2-default-2025.02.05-000001',
      '.ds-logs-synth.2-default-2025.02.05-000002',
      '.ds-logs-synth.3-default-2025.02.05-000001',
      '.ds-logs-synth.3-default-2025.02.05-000002',
    ];
    const result = extractNonAggregatableDatasets(indices, nonAggregatableIndices);

    expect(result).toEqual(['logs-synth.1-default', 'logs-synth.2-default']);
  });
});
@@ -0,0 +1,41 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { Indices } from '@elastic/elasticsearch/lib/api/types';
import { extractIndexNameFromBackingIndex } from '../../../../common/utils';

export const extractNonAggregatableDatasets = (
  indices: Indices,
  nonAggregatableIndices: Indices
) => {
  const groupedDatasets = (Array.isArray(indices) ? indices : [indices]).reduce((acc, index) => {
    const dataset = extractIndexNameFromBackingIndex(index);

    return {
      ...acc,
      [dataset]: [...(acc[dataset] ?? []), index],
    };
  }, {} as Record<string, string[]>);

  const groupedNonAggregatableIndices = (
    Array.isArray(nonAggregatableIndices) ? nonAggregatableIndices : [nonAggregatableIndices]
  ).reduce((acc, index) => {
    const dataset = extractIndexNameFromBackingIndex(index);

    return {
      ...acc,
      [dataset]: [...(acc[dataset] ?? []), index],
    };
  }, {} as Record<string, string[]>);

  return Object.entries(groupedNonAggregatableIndices)
    .filter(
      ([dataset, datasetIndices]) => groupedDatasets[dataset]?.length <= datasetIndices.length
    )
    .map(([dataset]) => dataset)
    .flat();
};
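For reference, a quick usage sketch of `extractNonAggregatableDatasets` (data modelled on the test fixtures above): a dataset is only reported once every one of its backing indices is non-aggregatable.

```ts
import { extractNonAggregatableDatasets } from './extract_non_aggregatable_datasets';

// logs-synth.1-default: its only backing index is non-aggregatable -> reported.
// logs-synth.2-default: one backing index is still aggregatable    -> not reported.
const datasets = extractNonAggregatableDatasets(
  [
    '.ds-logs-synth.1-default-2025.02.05-000001',
    '.ds-logs-synth.2-default-2025.02.05-000001',
    '.ds-logs-synth.2-default-2025.02.05-000002',
  ],
  ['.ds-logs-synth.1-default-2025.02.05-000001', '.ds-logs-synth.2-default-2025.02.05-000001']
);
// datasets === ['logs-synth.1-default']
```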
@@ -0,0 +1,54 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { ElasticsearchClient } from '@kbn/core-elasticsearch-server';
import { _IGNORED } from '../../../../common/es_fields';
import { DataStreamType } from '../../../../common/types';
import { createDatasetQualityESClient } from '../../../utils';
import { rangeQuery } from '../../../utils/queries';
import { extractNonAggregatableDatasets } from './extract_non_aggregatable_datasets';

export async function getNonAggregatableDataStreams({
  esClient,
  types,
  start,
  end,
  dataStream,
}: {
  esClient: ElasticsearchClient;
  types: DataStreamType[];
  start: number;
  end: number;
  dataStream?: string;
}) {
  const datasetQualityESClient = createDatasetQualityESClient(esClient);

  const dataStreamTypes = types.map((type) => `${type}-*-*`).join(',');

  const response = await datasetQualityESClient.fieldCaps({
    index: dataStream ?? dataStreamTypes,
    fields: [_IGNORED],
    index_filter: {
      ...rangeQuery(start, end)[0],
    },
  });

  const indices = response?.indices ?? [];
  const nonAggregatableIndices = response.fields._ignored?._ignored?.non_aggregatable_indices ?? [];

  const datasets = extractNonAggregatableDatasets(indices, nonAggregatableIndices);

  // If non_aggregatable_indices is absent, all indices are either aggregatable or
  // non-aggregatable, so we fall back to the aggregatable field to decide which.
  const aggregatable = response.fields._ignored?._ignored?.non_aggregatable_indices
    ? datasets.length === 0
    : Boolean(response.fields._ignored?._ignored?.aggregatable);

  return {
    aggregatable,
    datasets,
  };
}