[8.x] [Security Solution][Data Quality Dashboard] fix pattern state reset on ilm phase filter change (#198549) (#198806)
# Backport

This will backport the following commits from `main` to `8.x`:

- [[Security Solution][Data Quality Dashboard] fix pattern state reset on ilm phase filter change (#198549)](https://github.com/elastic/kibana/pull/198549)

### Questions?

Please refer to the [Backport tool documentation](https://github.com/sqren/backport) (Backport version: 9.4.3).

The source commit (`ddf55ea3a79ad7439dc02cb1f93291b1bc95c3b9`, merged to `main` on 2024-11-04) addresses #196523:

- Fixes ilm phase change propagation on patterns.
- Adds missing tests for useResultsRollup functionality.

UI changes:

- Before: https://github.com/user-attachments/assets/78a1d809-6a9a-4bfc-88a9-079f829a2017
- After: https://github.com/user-attachments/assets/f689fcc9-e1c6-4ccf-a7ca-8f13e9507ba4

Co-authored-by: Karen Grigoryan <karen.grigoryan@elastic.co>
This commit is contained in:

- parent 51c3d765dd
- commit fa0bddaac1

8 changed files with 986 additions and 71 deletions
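Context for the diff below: the change extracts the stored-results loading that previously lived inline in `useResultsRollup` into a dedicated `useStoredPatternResults` hook (added in the first hunks), and adds tests covering how `useResultsRollup` seeds its per-pattern state from those stored results. The sketch below is a hypothetical, simplified illustration of that wiring, not the actual Kibana source; only the `useStoredPatternResults(patterns, toasts, httpFetch)` call signature and the seeding behavior are taken from the diff and its tests.

```ts
// Hypothetical simplification of the wiring exercised by the tests below;
// not the exact Kibana source.
import { useEffect, useState } from 'react';
import type { HttpHandler } from '@kbn/core-http-browser';
import type { IToasts } from '@kbn/core-notifications-browser';

import { useStoredPatternResults } from './hooks/use_stored_pattern_results';
import type { DataQualityCheckResult } from '../../types';

type StoredPatternResult = { pattern: string; results: Record<string, DataQualityCheckResult> };

export const useSeededPatternState = (
  patterns: string[],
  toasts: IToasts,
  httpFetch: HttpHandler
) => {
  const [patternResults, setPatternResults] = useState<Record<string, StoredPatternResult>>({});

  // New hook extracted by this PR: fetches the latest stored results per pattern.
  const storedPatternResults = useStoredPatternResults(patterns, toasts, httpFetch);

  useEffect(() => {
    // Seed state once stored results arrive, keyed by pattern, so previously
    // persisted checks are reflected in the dashboard state.
    if (storedPatternResults.length > 0) {
      setPatternResults((current) =>
        storedPatternResults.reduce((acc, stored) => ({ ...acc, [stored.pattern]: stored }), current)
      );
    }
  }, [storedPatternResults]);

  return patternResults;
};
```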
@@ -0,0 +1,108 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { renderHook } from '@testing-library/react-hooks';
import { notificationServiceMock } from '@kbn/core-notifications-browser-mocks';

import { getHistoricalResultStub } from '../../../../stub/get_historical_result_stub';
import { useStoredPatternResults } from '.';

describe('useStoredPatternResults', () => {
  const httpFetch = jest.fn();
  const mockToasts = notificationServiceMock.createStartContract().toasts;

  beforeEach(() => {
    jest.clearAllMocks();
  });

  describe('when patterns are empty', () => {
    it('should return an empty array and not call getStorageResults', () => {
      const { result } = renderHook(() => useStoredPatternResults([], mockToasts, httpFetch));

      expect(result.current).toEqual([]);
      expect(httpFetch).not.toHaveBeenCalled();
    });
  });

  describe('when patterns are provided', () => {
    it('should fetch and return stored pattern results correctly', async () => {
      const patterns = ['pattern1-*', 'pattern2-*'];

      httpFetch.mockImplementation((path: string) => {
        if (path === '/internal/ecs_data_quality_dashboard/results_latest/pattern1-*') {
          return Promise.resolve([getHistoricalResultStub('pattern1-index1')]);
        }

        if (path === '/internal/ecs_data_quality_dashboard/results_latest/pattern2-*') {
          return Promise.resolve([getHistoricalResultStub('pattern2-index1')]);
        }

        return Promise.reject(new Error('Invalid path'));
      });

      const { result, waitFor } = renderHook(() =>
        useStoredPatternResults(patterns, mockToasts, httpFetch)
      );

      await waitFor(() => result.current.length > 0);

      expect(httpFetch).toHaveBeenCalledTimes(2);

      expect(httpFetch).toHaveBeenCalledWith(
        '/internal/ecs_data_quality_dashboard/results_latest/pattern1-*',
        {
          method: 'GET',
          signal: expect.any(AbortSignal),
          version: '1',
        }
      );
      expect(httpFetch).toHaveBeenCalledWith(
        '/internal/ecs_data_quality_dashboard/results_latest/pattern2-*',
        {
          method: 'GET',
          signal: expect.any(AbortSignal),
          version: '1',
        }
      );

      expect(result.current).toEqual([
        {
          pattern: 'pattern1-*',
          results: {
            'pattern1-index1': {
              docsCount: expect.any(Number),
              error: null,
              ilmPhase: expect.any(String),
              incompatible: expect.any(Number),
              indexName: 'pattern1-index1',
              pattern: 'pattern1-*',
              markdownComments: expect.any(Array),
              sameFamily: expect.any(Number),
              checkedAt: expect.any(Number),
            },
          },
        },
        {
          pattern: 'pattern2-*',
          results: {
            'pattern2-index1': {
              docsCount: expect.any(Number),
              error: null,
              ilmPhase: expect.any(String),
              incompatible: expect.any(Number),
              indexName: 'pattern2-index1',
              pattern: 'pattern2-*',
              markdownComments: expect.any(Array),
              sameFamily: expect.any(Number),
              checkedAt: expect.any(Number),
            },
          },
        },
      ]);
    });
  });
});
@@ -0,0 +1,53 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
import { useEffect, useState } from 'react';
import { IToasts } from '@kbn/core-notifications-browser';
import { HttpHandler } from '@kbn/core-http-browser';
import { isEmpty } from 'lodash/fp';

import { DataQualityCheckResult } from '../../../../types';
import { formatResultFromStorage, getStorageResults } from '../../utils/storage';

export const useStoredPatternResults = (
  patterns: string[],
  toasts: IToasts,
  httpFetch: HttpHandler
) => {
  const [storedPatternResults, setStoredPatternResults] = useState<
    Array<{ pattern: string; results: Record<string, DataQualityCheckResult> }>
  >([]);

  useEffect(() => {
    if (isEmpty(patterns)) {
      return;
    }

    const abortController = new AbortController();
    const fetchStoredPatternResults = async () => {
      const requests = patterns.map((pattern) =>
        getStorageResults({ pattern, httpFetch, abortController, toasts }).then((results = []) => ({
          pattern,
          results: Object.fromEntries(
            results.map((storageResult) => [
              storageResult.indexName,
              formatResultFromStorage({ storageResult, pattern }),
            ])
          ),
        }))
      );

      const patternResults = await Promise.all(requests);
      if (patternResults?.length) {
        setStoredPatternResults(patternResults);
      }
    };

    fetchStoredPatternResults();
  }, [httpFetch, patterns, toasts]);

  return storedPatternResults;
};
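For reference, a minimal usage sketch of the hook added above. The component name and rendering are hypothetical; the hook call and the shape of its return value (`Array<{ pattern, results }>`) come from the file above.

```tsx
// Hypothetical consumer component; only the hook call mirrors the real API above.
import React from 'react';
import type { HttpHandler } from '@kbn/core-http-browser';
import type { IToasts } from '@kbn/core-notifications-browser';

import { useStoredPatternResults } from '.';

interface StoredResultsSummaryProps {
  patterns: string[];
  toasts: IToasts;
  httpFetch: HttpHandler;
}

export const StoredResultsSummary: React.FC<StoredResultsSummaryProps> = ({
  patterns,
  toasts,
  httpFetch,
}) => {
  // Resolves to [{ pattern, results }] once the latest stored results are fetched.
  const storedPatternResults = useStoredPatternResults(patterns, toasts, httpFetch);

  return (
    <ul>
      {storedPatternResults.map(({ pattern, results }) => (
        <li key={pattern}>
          {pattern}: {Object.keys(results).length} indices with stored results
        </li>
      ))}
    </ul>
  );
};
```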
@@ -0,0 +1,685 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

// fixing timezone for Date
// so when tests are run in different timezones, the results are consistent
process.env.TZ = 'UTC';

import { renderHook, act } from '@testing-library/react-hooks';
import { notificationServiceMock } from '@kbn/core-notifications-browser-mocks';

import type { TelemetryEvents } from '../../types';
import { useStoredPatternResults } from './hooks/use_stored_pattern_results';
import { mockPartitionedFieldMetadata } from '../../mock/partitioned_field_metadata/mock_partitioned_field_metadata';
import { useResultsRollup } from '.';
import { getPatternRollupStub } from '../../stub/get_pattern_rollup_stub';
import { formatBytes, formatNumber } from '../../mock/test_providers/utils/format';

jest.mock('./hooks/use_stored_pattern_results', () => ({
  ...jest.requireActual('./hooks/use_stored_pattern_results'),
  useStoredPatternResults: jest.fn().mockReturnValue([]),
}));

describe('useResultsRollup', () => {
  const httpFetch = jest.fn();
  const toasts = notificationServiceMock.createStartContract().toasts;

  const mockTelemetryEvents: TelemetryEvents = {
    reportDataQualityIndexChecked: jest.fn(),
    reportDataQualityCheckAllCompleted: jest.fn(),
  };

  const patterns = ['auditbeat-*', 'packetbeat-*'];
  const isILMAvailable = true;

  const useStoredPatternResultsMock = useStoredPatternResults as jest.Mock;

  beforeEach(() => {
    jest.clearAllMocks();
    useStoredPatternResultsMock.mockReturnValue([]);
  });

  describe('initialization', () => {
    it('should initialize with default values', () => {
      const { result } = renderHook(() =>
        useResultsRollup({
          httpFetch,
          toasts,
          patterns,
          isILMAvailable,
          telemetryEvents: mockTelemetryEvents,
        })
      );

      expect(result.current.patternIndexNames).toEqual({});
      expect(result.current.patternRollups).toEqual({});
      expect(result.current.totalDocsCount).toBe(0);
      expect(result.current.totalIncompatible).toBeUndefined();
      expect(result.current.totalIndices).toBe(0);
      expect(result.current.totalIndicesChecked).toBe(0);
      expect(result.current.totalSameFamily).toBeUndefined();
      expect(result.current.totalSizeInBytes).toBe(0);
    });

    it('should fetch stored pattern results and update patternRollups from it', () => {
      const mockStoredResults = [
        {
          pattern: 'auditbeat-*',
          results: {
            'auditbeat-7.11.0-2021.01.01': {
              indexName: 'auditbeat-7.11.0-2021.01.01',
              pattern: 'auditbeat-*',
              docsCount: 500,
              incompatible: 0,
              error: null,
              ilmPhase: 'hot',
              sameFamily: 0,
              markdownComments: [],
              checkedAt: Date.now(),
            },
          },
        },
      ];

      useStoredPatternResultsMock.mockReturnValue(mockStoredResults);

      const { result } = renderHook(() =>
        useResultsRollup({
          httpFetch,
          toasts,
          patterns: ['auditbeat-*'],
          isILMAvailable,
          telemetryEvents: mockTelemetryEvents,
        })
      );

      expect(useStoredPatternResultsMock).toHaveBeenCalledWith(['auditbeat-*'], toasts, httpFetch);

      expect(result.current.patternRollups).toEqual({
        'auditbeat-*': {
          pattern: 'auditbeat-*',
          results: {
            'auditbeat-7.11.0-2021.01.01': expect.any(Object),
          },
        },
      });
    });
  });

  describe('updatePatternIndexNames', () => {
    it('should update pattern index names', () => {
      const { result } = renderHook(() =>
        useResultsRollup({
          httpFetch,
          toasts,
          patterns,
          isILMAvailable,
          telemetryEvents: mockTelemetryEvents,
        })
      );

      act(() => {
        result.current.updatePatternIndexNames({
          pattern: 'packetbeat-*',
          indexNames: ['packetbeat-7.10.0-2021.01.01'],
        });
      });

      expect(result.current.patternIndexNames).toEqual({
        'packetbeat-*': ['packetbeat-7.10.0-2021.01.01'],
      });
    });
  });

  describe('updatePatternRollup', () => {
    it('should update pattern rollup when called', () => {
      const { result } = renderHook(() =>
        useResultsRollup({
          httpFetch,
          toasts,
          patterns,
          isILMAvailable,
          telemetryEvents: mockTelemetryEvents,
        })
      );

      const patternRollup = getPatternRollupStub('packetbeat-*', 1);

      expect(result.current.patternRollups).toEqual({});

      act(() => {
        result.current.updatePatternRollup(patternRollup);
      });

      expect(result.current.patternRollups).toEqual({
        'packetbeat-*': patternRollup,
      });
    });
  });

  describe('onCheckCompleted', () => {
    describe('when invoked with successful check data', () => {
      beforeEach(() => {
        jest.useFakeTimers();
        jest.setSystemTime(new Date('2021-10-07T00:00:00Z').getTime());
      });

      afterEach(() => {
        jest.useRealTimers();
      });

      it('should update patternRollup with said data, report to telemetry and persist it in storage', () => {
        const { result } = renderHook(() =>
          useResultsRollup({
            httpFetch,
            toasts,
            patterns,
            isILMAvailable,
            telemetryEvents: mockTelemetryEvents,
          })
        );

        const patternRollup = getPatternRollupStub('packetbeat-*', 1);

        act(() => {
          result.current.updatePatternRollup(patternRollup);
        });

        expect(result.current.patternRollups['packetbeat-*'].results?.['.ds-packetbeat-1']).toEqual(
          {
            checkedAt: new Date('2021-10-07T00:00:00Z').getTime(),
            docsCount: 1000000,
            error: null,
            ilmPhase: 'hot',
            incompatible: 0,
            indexName: '.ds-packetbeat-1',
            markdownComments: ['foo', 'bar', 'baz'],
            pattern: 'packetbeat-*',
            sameFamily: 0,
          }
        );

        jest.advanceTimersByTime(1000);

        const mockOnCheckCompletedOpts = {
          batchId: 'test-batch',
          checkAllStartTime: Date.now(),
          error: null,
          formatBytes,
          formatNumber,
          indexName: '.ds-packetbeat-1',
          partitionedFieldMetadata: mockPartitionedFieldMetadata,
          pattern: 'packetbeat-*',
          requestTime: 1500,
          isLastCheck: true,
          isCheckAll: true,
        };

        jest.advanceTimersByTime(1000);

        act(() => {
          result.current.onCheckCompleted(mockOnCheckCompletedOpts);
        });

        expect(result.current.patternRollups['packetbeat-*'].results?.['.ds-packetbeat-1']).toEqual(
          {
            checkedAt: new Date('2021-10-07T00:00:02Z').getTime(),
            docsCount: 1000000,
            error: null,
            ilmPhase: 'hot',
            incompatible: 3,
            indexName: '.ds-packetbeat-1',
            markdownComments: expect.any(Array),
            pattern: 'packetbeat-*',
            sameFamily: 0,
          }
        );

        expect(mockTelemetryEvents.reportDataQualityIndexChecked).toHaveBeenCalledWith({
          batchId: 'test-batch',
          ecsVersion: '8.11.0',
          errorCount: 0,
          ilmPhase: 'hot',
          indexId: 'uuid-1',
          indexName: '.ds-packetbeat-1',
          isCheckAll: true,
          numberOfCustomFields: 4,
          numberOfDocuments: 1000000,
          numberOfEcsFields: 2,
          numberOfFields: 9,
          numberOfIncompatibleFields: 3,
          numberOfIndices: 1,
          numberOfIndicesChecked: 1,
          numberOfSameFamily: 0,
          sameFamilyFields: [],
          sizeInBytes: 500000000,
          timeConsumedMs: 1500,
          unallowedMappingFields: ['host.name', 'source.ip'],
          unallowedValueFields: ['event.category'],
        });
        expect(mockTelemetryEvents.reportDataQualityCheckAllCompleted).toHaveBeenCalledWith({
          batchId: 'test-batch',
          ecsVersion: '8.11.0',
          isCheckAll: true,
          numberOfDocuments: 1000000,
          numberOfIncompatibleFields: 3,
          numberOfIndices: 1,
          numberOfIndicesChecked: 1,
          numberOfSameFamily: 0,
          sizeInBytes: 500000000,
          timeConsumedMs: 1000,
        });

        expect(httpFetch).toHaveBeenCalledWith('/internal/ecs_data_quality_dashboard/results', {
          method: 'POST',
          version: '1',
          signal: expect.any(AbortSignal),
          body: expect.any(String),
        });

        const body = JSON.parse(httpFetch.mock.calls[0][1].body);

        expect(body).toEqual({
          batchId: 'test-batch',
          indexName: '.ds-packetbeat-1',
          indexPattern: 'packetbeat-*',
          isCheckAll: true,
          checkedAt: new Date('2021-10-07T00:00:02Z').getTime(),
          docsCount: 1000000,
          totalFieldCount: 9,
          ecsFieldCount: 2,
          customFieldCount: 4,
          incompatibleFieldCount: 3,
          incompatibleFieldMappingItems: [
            {
              fieldName: 'host.name',
              expectedValue: 'keyword',
              actualValue: 'text',
              description:
                'Name of the host.\nIt can contain what `hostname` returns on Unix systems, the fully qualified domain name, or a name specified by the user. The sender decides which value to use.',
            },
            {
              fieldName: 'source.ip',
              expectedValue: 'ip',
              actualValue: 'text',
              description: 'IP address of the source (IPv4 or IPv6).',
            },
          ],
          incompatibleFieldValueItems: [
            {
              fieldName: 'event.category',
              expectedValues: [
                'authentication',
                'configuration',
                'database',
                'driver',
                'email',
                'file',
                'host',
                'iam',
                'intrusion_detection',
                'malware',
                'network',
                'package',
                'process',
                'registry',
                'session',
                'threat',
                'vulnerability',
                'web',
              ],
              actualValues: [
                { name: 'an_invalid_category', count: 2 },
                { name: 'theory', count: 1 },
              ],
              description:
                'This is one of four ECS Categorization Fields, and indicates the second level in the ECS category hierarchy.\n`event.category` represents the "big buckets" of ECS categories. For example, filtering on `event.category:process` yields all events relating to process activity. This field is closely related to `event.type`, which is used as a subcategory.\nThis field is an array. This will allow proper categorization of some events that fall in multiple categories.',
            },
          ],
          sameFamilyFieldCount: 0,
          sameFamilyFields: [],
          sameFamilyFieldItems: [],
          unallowedMappingFields: ['host.name', 'source.ip'],
          unallowedValueFields: ['event.category'],
          sizeInBytes: 500000000,
          ilmPhase: 'hot',
          markdownComments: [
            '### .ds-packetbeat-1\n',
            '| Result | Index | Docs | Incompatible fields | ILM Phase | Size |\n|--------|-------|------|---------------------|-----------|------|\n| ❌ | .ds-packetbeat-1 | 1,000,000 (100.0%) | 3 | `hot` | 476.8MB |\n\n',
            '### **Incompatible fields** `3` **Same family** `0` **Custom fields** `4` **ECS compliant fields** `2` **All fields** `9`\n',
            "#### 3 incompatible fields\n\nFields are incompatible with ECS when index mappings, or the values of the fields in the index, don't conform to the Elastic Common Schema (ECS), version 8.11.0.\n\n❌ Detection engine rules referencing these fields may not match them correctly\n❌ Pages may not display some events or fields due to unexpected field mappings or values\n❌ Mappings or field values that don't comply with ECS are not supported\n",
            '\n#### Incompatible field mappings - .ds-packetbeat-1\n\n\n| Field | ECS mapping type (expected) | Index mapping type (actual) | \n|-------|-----------------------------|-----------------------------|\n| host.name | `keyword` | `text` |\n| source.ip | `ip` | `text` |\n\n#### Incompatible field values - .ds-packetbeat-1\n\n\n| Field | ECS values (expected) | Document values (actual) | \n|-------|-----------------------|--------------------------|\n| event.category | `authentication`, `configuration`, `database`, `driver`, `email`, `file`, `host`, `iam`, `intrusion_detection`, `malware`, `network`, `package`, `process`, `registry`, `session`, `threat`, `vulnerability`, `web` | `an_invalid_category` (2), `theory` (1) |\n\n',
          ],
          ecsVersion: '8.11.0',
          indexId: 'uuid-1',
          error: null,
        });
      });

      describe('when isILMAvailable is false', () => {
        it('should omit ilmPhase and nullify sizeInBytes when storing payload', () => {
          const { result } = renderHook(() =>
            useResultsRollup({
              httpFetch,
              toasts,
              patterns,
              isILMAvailable: false,
              telemetryEvents: mockTelemetryEvents,
            })
          );

          const patternRollup = getPatternRollupStub('packetbeat-*', 1, false);

          act(() => {
            result.current.updatePatternRollup(patternRollup);
          });

          jest.advanceTimersByTime(1000);

          const mockOnCheckCompletedOpts = {
            batchId: 'test-batch',
            checkAllStartTime: Date.now(),
            error: null,
            formatBytes,
            formatNumber,
            indexName: '.ds-packetbeat-1',
            partitionedFieldMetadata: mockPartitionedFieldMetadata,
            pattern: 'packetbeat-*',
            requestTime: 1500,
            isLastCheck: true,
            isCheckAll: true,
          };

          jest.advanceTimersByTime(1000);

          act(() => {
            result.current.onCheckCompleted(mockOnCheckCompletedOpts);
          });

          expect(mockTelemetryEvents.reportDataQualityIndexChecked).toHaveBeenCalledWith({
            batchId: 'test-batch',
            ecsVersion: '8.11.0',
            errorCount: 0,
            ilmPhase: undefined,
            indexId: 'uuid-1',
            indexName: '.ds-packetbeat-1',
            isCheckAll: true,
            numberOfCustomFields: 4,
            numberOfDocuments: 1000000,
            numberOfEcsFields: 2,
            numberOfFields: 9,
            numberOfIncompatibleFields: 3,
            numberOfIndices: 1,
            numberOfIndicesChecked: 1,
            numberOfSameFamily: 0,
            sameFamilyFields: [],
            sizeInBytes: undefined,
            timeConsumedMs: 1500,
            unallowedMappingFields: ['host.name', 'source.ip'],
            unallowedValueFields: ['event.category'],
          });
          expect(mockTelemetryEvents.reportDataQualityCheckAllCompleted).toHaveBeenCalledWith({
            batchId: 'test-batch',
            ecsVersion: '8.11.0',
            isCheckAll: true,
            numberOfDocuments: 1000000,
            numberOfIncompatibleFields: 3,
            numberOfIndices: 1,
            numberOfIndicesChecked: 1,
            numberOfSameFamily: 0,
            sizeInBytes: undefined,
            timeConsumedMs: 1000,
          });

          expect(httpFetch).toHaveBeenCalledWith('/internal/ecs_data_quality_dashboard/results', {
            method: 'POST',
            version: '1',
            signal: expect.any(AbortSignal),
            body: expect.any(String),
          });

          const body = JSON.parse(httpFetch.mock.calls[0][1].body);

          expect(body).toEqual({
            batchId: 'test-batch',
            indexName: '.ds-packetbeat-1',
            indexPattern: 'packetbeat-*',
            isCheckAll: true,
            checkedAt: new Date('2021-10-07T00:00:02Z').getTime(),
            docsCount: 1000000,
            totalFieldCount: 9,
            ecsFieldCount: 2,
            customFieldCount: 4,
            incompatibleFieldCount: 3,
            incompatibleFieldMappingItems: [
              {
                fieldName: 'host.name',
                expectedValue: 'keyword',
                actualValue: 'text',
                description:
                  'Name of the host.\nIt can contain what `hostname` returns on Unix systems, the fully qualified domain name, or a name specified by the user. The sender decides which value to use.',
              },
              {
                fieldName: 'source.ip',
                expectedValue: 'ip',
                actualValue: 'text',
                description: 'IP address of the source (IPv4 or IPv6).',
              },
            ],
            incompatibleFieldValueItems: [
              {
                fieldName: 'event.category',
                expectedValues: [
                  'authentication',
                  'configuration',
                  'database',
                  'driver',
                  'email',
                  'file',
                  'host',
                  'iam',
                  'intrusion_detection',
                  'malware',
                  'network',
                  'package',
                  'process',
                  'registry',
                  'session',
                  'threat',
                  'vulnerability',
                  'web',
                ],
                actualValues: [
                  { name: 'an_invalid_category', count: 2 },
                  { name: 'theory', count: 1 },
                ],
                description:
                  'This is one of four ECS Categorization Fields, and indicates the second level in the ECS category hierarchy.\n`event.category` represents the "big buckets" of ECS categories. For example, filtering on `event.category:process` yields all events relating to process activity. This field is closely related to `event.type`, which is used as a subcategory.\nThis field is an array. This will allow proper categorization of some events that fall in multiple categories.',
              },
            ],
            sameFamilyFieldCount: 0,
            sameFamilyFields: [],
            sameFamilyFieldItems: [],
            unallowedMappingFields: ['host.name', 'source.ip'],
            unallowedValueFields: ['event.category'],
            ilmPhase: undefined,
            sizeInBytes: 0,
            markdownComments: [
              '### .ds-packetbeat-1\n',
              '| Result | Index | Docs | Incompatible fields |\n|--------|-------|------|---------------------|\n| ❌ | .ds-packetbeat-1 | 1,000,000 (100.0%) | 3 |\n\n',
              '### **Incompatible fields** `3` **Same family** `0` **Custom fields** `4` **ECS compliant fields** `2` **All fields** `9`\n',
              "#### 3 incompatible fields\n\nFields are incompatible with ECS when index mappings, or the values of the fields in the index, don't conform to the Elastic Common Schema (ECS), version 8.11.0.\n\n❌ Detection engine rules referencing these fields may not match them correctly\n❌ Pages may not display some events or fields due to unexpected field mappings or values\n❌ Mappings or field values that don't comply with ECS are not supported\n",
              '\n#### Incompatible field mappings - .ds-packetbeat-1\n\n\n| Field | ECS mapping type (expected) | Index mapping type (actual) | \n|-------|-----------------------------|-----------------------------|\n| host.name | `keyword` | `text` |\n| source.ip | `ip` | `text` |\n\n#### Incompatible field values - .ds-packetbeat-1\n\n\n| Field | ECS values (expected) | Document values (actual) | \n|-------|-----------------------|--------------------------|\n| event.category | `authentication`, `configuration`, `database`, `driver`, `email`, `file`, `host`, `iam`, `intrusion_detection`, `malware`, `network`, `package`, `process`, `registry`, `session`, `threat`, `vulnerability`, `web` | `an_invalid_category` (2), `theory` (1) |\n\n',
            ],
            ecsVersion: '8.11.0',
            indexId: 'uuid-1',
            error: null,
          });
        });
      });
    });

    describe('when check fails with error message and no partitionedFieldMetadata', () => {
      it('should update patternRollup with error message, reset state without persisting in storage', () => {
        const { result } = renderHook(() =>
          useResultsRollup({
            httpFetch,
            toasts,
            patterns,
            isILMAvailable,
            telemetryEvents: mockTelemetryEvents,
          })
        );

        const patternRollup = getPatternRollupStub('packetbeat-*', 1);

        act(() => {
          result.current.updatePatternRollup(patternRollup);
        });

        const mockOnCheckCompletedOpts = {
          batchId: 'test-batch',
          checkAllStartTime: Date.now(),
          error: 'Something went wrong',
          formatBytes,
          formatNumber,
          indexName: '.ds-packetbeat-1',
          partitionedFieldMetadata: null,
          pattern: 'packetbeat-*',
          requestTime: 1500,
          isLastCheck: true,
          isCheckAll: true,
        };

        act(() => {
          result.current.onCheckCompleted(mockOnCheckCompletedOpts);
        });

        expect(result.current.patternRollups['packetbeat-*'].results?.['.ds-packetbeat-1']).toEqual(
          {
            checkedAt: undefined,
            docsCount: 1000000,
            error: 'Something went wrong',
            ilmPhase: 'hot',
            incompatible: undefined,
            indexName: '.ds-packetbeat-1',
            markdownComments: expect.any(Array),
            pattern: 'packetbeat-*',
            sameFamily: undefined,
          }
        );

        expect(mockTelemetryEvents.reportDataQualityIndexChecked).not.toHaveBeenCalled();

        expect(httpFetch).not.toHaveBeenCalledWith(
          '/internal/ecs_data_quality_dashboard/results',
          expect.any(Object)
        );
      });
    });

    describe('edge cases', () => {
      describe('given no error nor partitionedFieldMetadata', () => {
        it('should reset result state accordingly and not invoke telemetry report nor persist in storage', () => {
          const { result } = renderHook(() =>
            useResultsRollup({
              httpFetch,
              toasts,
              patterns,
              isILMAvailable,
              telemetryEvents: mockTelemetryEvents,
            })
          );

          const patternRollup = getPatternRollupStub('packetbeat-*', 1);

          act(() => {
            result.current.updatePatternRollup(patternRollup);
          });

          const mockOnCheckCompletedOpts = {
            batchId: 'test-batch',
            checkAllStartTime: Date.now(),
            error: null,
            formatBytes,
            formatNumber,
            indexName: '.ds-packetbeat-1',
            partitionedFieldMetadata: null,
            pattern: 'packetbeat-*',
            requestTime: 1500,
            isLastCheck: true,
            isCheckAll: true,
          };

          act(() => {
            result.current.onCheckCompleted(mockOnCheckCompletedOpts);
          });

          expect(
            result.current.patternRollups['packetbeat-*'].results?.['.ds-packetbeat-1']
          ).toEqual({
            checkedAt: undefined,
            docsCount: 1000000,
            error: null,
            ilmPhase: 'hot',
            incompatible: undefined,
            indexName: '.ds-packetbeat-1',
            markdownComments: expect.any(Array),
            pattern: 'packetbeat-*',
            sameFamily: undefined,
          });

          expect(mockTelemetryEvents.reportDataQualityIndexChecked).not.toHaveBeenCalled();

          expect(httpFetch).not.toHaveBeenCalledWith(
            '/internal/ecs_data_quality_dashboard/results',
            expect.any(Object)
          );
        });
      });
    });
  });

  describe('calculating totals', () => {
    describe('when patternRollups change', () => {
      it('should update totals', () => {
        const { result } = renderHook(() =>
          useResultsRollup({
            httpFetch,
            toasts,
            patterns: ['packetbeat-*', 'auditbeat-*'],
            isILMAvailable,
            telemetryEvents: mockTelemetryEvents,
          })
        );

        const patternRollup1 = getPatternRollupStub('packetbeat-*', 1);
        const patternRollup2 = getPatternRollupStub('auditbeat-*', 1);

        expect(result.current.totalIndices).toBe(0);
        expect(result.current.totalDocsCount).toBe(0);
        expect(result.current.totalSizeInBytes).toBe(0);

        act(() => {
          result.current.updatePatternRollup(patternRollup1);
        });

        expect(result.current.totalIndices).toEqual(1);
        expect(result.current.totalDocsCount).toEqual(1000000);
        expect(result.current.totalSizeInBytes).toEqual(500000000);

        act(() => {
          result.current.updatePatternRollup(patternRollup2);
        });

        expect(result.current.totalIndices).toEqual(2);
        expect(result.current.totalDocsCount).toEqual(2000000);
        expect(result.current.totalSizeInBytes).toEqual(1000000000);
      });
    });
  });
});
@@ -21,83 +21,29 @@ import {
  getTotalPatternSameFamily,
  getIndexId,
} from './utils/stats';
import {
  getStorageResults,
  postStorageResult,
  formatStorageResult,
  formatResultFromStorage,
} from './utils/storage';
import { postStorageResult, formatStorageResult } from './utils/storage';
import { getPatternRollupsWithLatestCheckResult } from './utils/get_pattern_rollups_with_latest_check_result';
import type {
  DataQualityCheckResult,
  OnCheckCompleted,
  PatternRollup,
  TelemetryEvents,
} from '../../types';
import type { OnCheckCompleted, PatternRollup, TelemetryEvents } from '../../types';
import {
  getEscapedIncompatibleMappingsFields,
  getEscapedIncompatibleValuesFields,
  getEscapedSameFamilyFields,
} from './utils/metadata';
import { UseResultsRollupReturnValue } from './types';
import { useIsMountedRef } from '../use_is_mounted_ref';
import { getDocsCount, getIndexIncompatible, getSizeInBytes } from '../../utils/stats';
import { getIlmPhase } from '../../utils/get_ilm_phase';
import { useStoredPatternResults } from './hooks/use_stored_pattern_results';

interface Props {
  ilmPhases: string[];
  patterns: string[];
  toasts: IToasts;
  httpFetch: HttpHandler;
  telemetryEvents: TelemetryEvents;
  isILMAvailable: boolean;
}
const useStoredPatternResults = (patterns: string[], toasts: IToasts, httpFetch: HttpHandler) => {
  const { isMountedRef } = useIsMountedRef();
  const [storedPatternResults, setStoredPatternResults] = useState<
    Array<{ pattern: string; results: Record<string, DataQualityCheckResult> }>
  >([]);

  useEffect(() => {
    if (isEmpty(patterns)) {
      return;
    }

    let ignore = false;
    const abortController = new AbortController();
    const fetchStoredPatternResults = async () => {
      const requests = patterns.map((pattern) =>
        getStorageResults({ pattern, httpFetch, abortController, toasts }).then((results = []) => ({
          pattern,
          results: Object.fromEntries(
            results.map((storageResult) => [
              storageResult.indexName,
              formatResultFromStorage({ storageResult, pattern }),
            ])
          ),
        }))
      );
      const patternResults = await Promise.all(requests);
      if (patternResults?.length && !ignore) {
        if (isMountedRef.current) {
          setStoredPatternResults(patternResults);
        }
      }
    };

    fetchStoredPatternResults();
    return () => {
      ignore = true;
    };
  }, [httpFetch, isMountedRef, patterns, toasts]);

  return storedPatternResults;
};

export const useResultsRollup = ({
  httpFetch,
  toasts,
  ilmPhases,
  patterns,
  isILMAvailable,
  telemetryEvents,
@@ -247,12 +193,6 @@ export const useResultsRollup = ({
    [httpFetch, isILMAvailable, telemetryEvents, toasts]
  );

  useEffect(() => {
    // reset all state
    setPatternRollups({});
    setPatternIndexNames({});
  }, [ilmPhases, patterns]);

  const useResultsRollupReturnValue = useMemo(
    () => ({
      onCheckCompleted,
@@ -104,7 +104,6 @@ const DataQualityPanelComponent: React.FC<Props> = ({
  );

  const resultsRollupHookReturnValue = useResultsRollup({
    ilmPhases,
    patterns,
    httpFetch,
    toasts,
@@ -0,0 +1,17 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
import numeral from '@elastic/numeral';

import { EMPTY_STAT } from '../../../constants';

const defaultBytesFormat = '0,0.[0]b';
export const formatBytes = (value: number | undefined) =>
  value != null ? numeral(value).format(defaultBytesFormat) : EMPTY_STAT;

const defaultNumberFormat = '0,0.[000]';
export const formatNumber = (value: number | undefined) =>
  value != null ? numeral(value).format(defaultNumberFormat) : EMPTY_STAT;
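A brief illustration of what these helpers produce. The expected outputs are inferred from the numeral formats above and the `476.8MB` / `1,000,000` strings asserted elsewhere in this diff; `EMPTY_STAT` is the shared fallback constant imported above.

```ts
// Illustrative only; values mirror assertions elsewhere in this diff.
import { formatBytes, formatNumber } from './format';

formatBytes(500000000); // '476.8MB' with the '0,0.[0]b' format
formatNumber(1000000); // '1,000,000' with the '0,0.[000]' format
formatBytes(undefined); // returns EMPTY_STAT when the value is missing
```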
@@ -5,10 +5,9 @@
 * 2.0.
 */

import numeral from '@elastic/numeral';

import { DataQualityProviderProps } from '../../../data_quality_context';
import { EMPTY_STAT } from '../../../constants';

import { formatBytes as formatBytesMock, formatNumber as formatNumberMock } from './format';

export const getMergedDataQualityContextProps = (
  dataQualityContextProps?: Partial<DataQualityProviderProps>
@@ -36,10 +35,8 @@ export const getMergedDataQualityContextProps = (
    addSuccessToast: jest.fn(),
    canUserCreateAndReadCases: jest.fn(() => true),
    endDate: null,
    formatBytes: (value: number | undefined) =>
      value != null ? numeral(value).format('0,0.[0]b') : EMPTY_STAT,
    formatNumber: (value: number | undefined) =>
      value != null ? numeral(value).format('0,0.[000]') : EMPTY_STAT,
    formatBytes: formatBytesMock,
    formatNumber: formatNumberMock,
    isAssistantEnabled: true,
    lastChecked: '2023-03-28T22:27:28.159Z',
    openCreateCaseFlyout: jest.fn(),
@@ -0,0 +1,116 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { PatternRollup } from '../../types';

const phases = ['hot', 'warm', 'cold', 'frozen'] as const;

/**
 *
 * This function derives ilmExplain, results, stats and ilmExplainPhaseCounts
 * from the provided pattern and indicesCount for the purpose of simplifying
 * stubbing of resultsRollup in tests.
 *
 * @param pattern - The index pattern to simulate. Defaults to `'packetbeat-*'`.
 * @param indicesCount - The number of indices to generate. Defaults to `2`.
 * @param isILMAvailable - Whether ILM is available. Defaults to `true`.
 * @returns An object containing stubbed pattern rollup data
 */
export const getPatternRollupStub = (
  pattern = 'packetbeat-*',
  indicesCount = 2,
  isILMAvailable = true
): PatternRollup => {
  // Derive ilmExplain from isILMAvailable, pattern and indicesCount
  const ilmExplain = isILMAvailable
    ? Object.fromEntries(
        Array.from({ length: indicesCount }).map((_, i) => {
          const indexName = pattern.replace('*', `${i + 1}`);
          const dsIndexName = `.ds-${indexName}`;
          // Cycle through phases
          const phase = phases[i % phases.length];
          return [
            dsIndexName,
            {
              index: dsIndexName,
              managed: true,
              policy: pattern,
              phase,
            },
          ];
        })
      )
    : null;

  // Derive ilmExplainPhaseCounts from ilmExplain
  const ilmExplainPhaseCounts = ilmExplain
    ? phases.reduce(
        (counts, phase) => ({
          ...counts,
          [phase]: Object.values(ilmExplain).filter((explain) => explain.phase === phase).length,
        }),
        { hot: 0, warm: 0, cold: 0, frozen: 0, unmanaged: 0 }
      )
    : undefined;

  // Derive results from pattern and indicesCount
  const results = Object.fromEntries(
    Array.from({ length: indicesCount }, (_, i) => {
      const indexName = pattern.replace('*', `${i + 1}`);
      const dsIndexName = `.ds-${indexName}`;
      return [
        dsIndexName,
        {
          docsCount: 1000000 + i * 100000, // Example doc count
          error: null,
          ilmPhase: ilmExplain?.[dsIndexName].phase,
          incompatible: i,
          indexName: dsIndexName,
          markdownComments: ['foo', 'bar', 'baz'],
          pattern,
          sameFamily: i,
          checkedAt: Date.now(),
        },
      ];
    })
  );

  // Derive stats from isILMAvailable, pattern and indicesCount
  const stats = Object.fromEntries(
    Array.from({ length: indicesCount }, (_, i) => {
      const indexName = pattern.replace('*', `${i + 1}`);
      const dsIndexName = `.ds-${indexName}`;
      return [
        dsIndexName,
        {
          uuid: `uuid-${i + 1}`,
          size_in_bytes: isILMAvailable ? 500000000 + i * 10000000 : null,
          name: dsIndexName,
          num_docs: results[dsIndexName].docsCount,
        },
      ];
    })
  );

  // Derive total docsCount and sizeInBytes from stats
  const totalDocsCount = Object.values(stats).reduce((sum, stat) => sum + stat.num_docs, 0);
  const totalSizeInBytes = isILMAvailable
    ? Object.values(stats).reduce((sum, stat) => sum + (stat.size_in_bytes ?? 0), 0)
    : undefined;

  return {
    docsCount: totalDocsCount,
    error: null,
    pattern,
    ilmExplain,
    ilmExplainPhaseCounts,
    indices: indicesCount,
    results,
    sizeInBytes: totalSizeInBytes,
    stats,
  };
};
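For reference, how the stub is exercised by the `useResultsRollup` tests earlier in this diff (a usage sketch, not new test code):

```ts
import { getPatternRollupStub } from '../../stub/get_pattern_rollup_stub';

// One simulated index for 'packetbeat-*': results and stats are keyed by
// '.ds-packetbeat-1' with docsCount 1000000, size_in_bytes 500000000,
// ilm phase 'hot', and uuid 'uuid-1'.
const patternRollup = getPatternRollupStub('packetbeat-*', 1);

// With isILMAvailable = false, ilmExplain is null, ilmExplainPhaseCounts is
// undefined, size_in_bytes is null, and the rollup's sizeInBytes is undefined.
const rollupWithoutIlm = getPatternRollupStub('packetbeat-*', 1, false);
```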