[Dataset Quality] Add fix it flow for field limit (#195561)

## Summary

Closes - https://github.com/elastic/kibana/issues/190330

This PR implements the logic to support:

- One-click increase of the field limit for field limit issues (applicable
only to integrations). For non-integrations, only text describing how to
do it manually is displayed.
- The one-click increase updates the linked custom component template as
well as the last backing index.
- If the last backing index update fails for any reason, the user is
offered an option to trigger a rollover manually (a sketch of the
underlying requests follows the list).
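
For reference, a rough sketch of the two internal Kibana endpoints this PR adds. The paths and the `newFieldLimit` body field are taken from this PR's route and client code; these are internal APIs, so exact headers and behaviour may differ:

```
// Update the field limit on the linked custom component template and the last backing index
PUT /internal/dataset_quality/data_streams/logs-nginx.access-default/update_field_limit
{
  "newFieldLimit": 53
}

// If the last backing index could not be updated, the UI offers a manual rollover
POST /internal/dataset_quality/data_streams/logs-nginx.access-default/rollover
```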

## Demo

Not possible, too many things to display 😆 

## What's Pending?

Tests

- [x] API tests
    - [x] Settings API
    - [x] Rollover API
    - [x] Apply New limit API
- [x] FTR tests
- [x] Displaying of various issues for integrations and non-integrations
    - [x] Fix it Flow Good case, without Rollover
    - [x] Fix it Flow Good case, with Rollover
- [x] Manual Mitigation - Clicking on Component Template should navigate
correctly based on Integration / Non-Integration
    - [x] Manual Mitigation - Ingest Pipeline
    - [x] Link for official Documentation
    
## How to set up a local environment

We will set up 2 different data streams, one with an integration and one
without. Please follow the steps in the exact order.
 
1. Start local ES and local Kibana
2. Install the Nginx integration first
3. Ingest data as per the synthtrace script here (a minimal manual alternative is sketched below) -
https://gist.github.com/achyutjhunjhunwala/03ea29190c6594544f584d2f0efa71e5
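
If you only want to sanity-check the document shape without running synthtrace, a single manually indexed document with the relevant cloud fields (hypothetical values) looks roughly like this. The full script is still needed to ingest enough distinct fields to actually hit the limits configured below:

```
POST logs-synth.3-default/_doc
{
  "@timestamp": "2024-10-10T10:00:00.000Z",
  "message": "synthetic log line",
  "cloud": {
    "availability_zone": "us-east-1a",
    "project": { "id": "project-1" }
  }
}
```
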
4. Set the field limit for both data streams

```
PUT logs-synth.3-default/_settings
{
    "mapping.total_fields.limit": 36
}

// Set the limit for Nginx
PUT logs-nginx.access-default/_settings
{
    "mapping.total_fields.limit": 52
}
```

5. Now uncomment line 59 of the synthtrace script to enable the
cloud.project.id field and run the scenario again
6. Do a Rollover

```
POST logs-synth.3-default/_rollover
POST logs-nginx.access-default/_rollover
```

7. Get the last backing index for both data streams

```
GET _data_stream/logs-synth.3-default/
GET _data_stream/logs-nginx.access-default
```

8. Increase the limit by 1, but only on the last backing index

```
PUT .ds-logs-synth.3-default-2024.10.10-000002/_settings
{
    "mapping.total_fields.limit": 37
}

PUT .ds-logs-nginx.access-default-2024.10.10-000002/_settings
{
    "mapping.total_fields.limit": 53
}
```

9. Run the same Synthtrace scenario again.

This setup will give you 3 fields for testing:

1. cloud.availability_zone - shows the character limit issue
2. cloud.project - shows an obsolete field limit error that happened in
the past and no longer exists
3. cloud.project.id - a current field limit issue
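
To confirm that documents with ignored fields actually landed (assuming the default data stream names used in this setup), an exists query on the `_ignored` metadata field works:

```
GET logs-synth.3-default/_search
{
  "query": {
    "exists": { "field": "_ignored" }
  }
}
```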

---------

Co-authored-by: Marco Antonio Ghiani <marcoantonio.ghiani01@gmail.com>
Co-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com>
Achyut Jhunjhunwala 2024-10-25 11:20:26 +02:00 committed by GitHub
parent dc2d8e4634
commit 3ece950156
50 changed files with 3724 additions and 736 deletions

View file

@ -17,10 +17,21 @@ import type { LocatorPublic } from '@kbn/share-plugin/public';
import { ExtensionsSetup } from './services/extensions_service';
import { PublicApiServiceSetup } from './services/public_api_service';
export interface IndexManagementLocatorParams extends SerializableRecord {
page: 'data_streams_details';
dataStreamName?: string;
}
export type IndexManagementLocatorParams = SerializableRecord &
(
| {
page: 'data_streams_details';
dataStreamName?: string;
}
| {
page: 'index_template';
indexTemplate: string;
}
| {
page: 'component_template';
componentTemplate: string;
}
);
export type IndexManagementLocator = LocatorPublic<IndexManagementLocatorParams>;

View file

@ -11,6 +11,7 @@ import { Section } from '../../../common/constants';
import type { IndexDetailsTabId } from '../../../common/constants';
import { ExtensionsService } from '../../services/extensions_service';
import { IndexDetailsSection } from '../../../common/constants';
export const getTemplateListLink = () => `/templates`;
export const getTemplateDetailsLink = (name: string, isLegacy?: boolean) => {
@ -81,6 +82,11 @@ export const getComponentTemplatesLink = (usedByTemplateName?: string) => {
}
return url;
};
export const getComponentTemplateDetailLink = (name: string) => {
return `/component_templates/${encodeURIComponent(name)}`;
};
export const navigateToIndexDetailsPage = (
indexName: string,
indicesListURLParams: string,

View file

@ -34,4 +34,26 @@ describe('Index Management URL locator', () => {
});
expect(path).toBe('/data/index_management/data_streams/test');
});
test('locator returns the correct url for index_template', async () => {
const indexTemplateName = 'test@custom';
const { path } = await locator.getLocation({
page: 'index_template',
indexTemplate: indexTemplateName,
});
expect(path).toBe(
encodeURI(`/data/index_management/templates/${encodeURIComponent(indexTemplateName)}`)
);
});
test('locator returns the correct url for component_template', async () => {
const componentTemplateName = 'log@custom';
const { path } = await locator.getLocation({
page: 'component_template',
componentTemplate: componentTemplateName,
});
expect(path).toBe(
`/data/index_management/component_templates/${encodeURIComponent(componentTemplateName)}`
);
});
});

View file

@ -8,7 +8,11 @@
import { ManagementAppLocator } from '@kbn/management-plugin/common';
import { LocatorDefinition } from '@kbn/share-plugin/public';
import { IndexManagementLocatorParams } from '@kbn/index-management-shared-types';
import { getDataStreamDetailsLink } from './application/services/routing';
import {
getComponentTemplateDetailLink,
getDataStreamDetailsLink,
getTemplateDetailsLink,
} from './application/services/routing';
import { PLUGIN } from '../common/constants';
export const INDEX_MANAGEMENT_LOCATOR_ID = 'INDEX_MANAGEMENT_LOCATOR_ID';
@ -37,6 +41,18 @@ export class IndexManagementLocatorDefinition
path: location.path + getDataStreamDetailsLink(params.dataStreamName!),
};
}
case 'index_template': {
return {
...location,
path: location.path + getTemplateDetailsLink(params.indexTemplate),
};
}
case 'component_template': {
return {
...location,
path: location.path + getComponentTemplateDetailLink(params.componentTemplate),
};
}
}
};
}

View file

@ -134,22 +134,39 @@ export const degradedFieldAnalysisRt = rt.intersection([
type: rt.string,
ignore_above: rt.number,
}),
defaultPipeline: rt.string,
}),
]);
export type DegradedFieldAnalysis = rt.TypeOf<typeof degradedFieldAnalysisRt>;
export const dataStreamSettingsRt = rt.intersection([
export const updateFieldLimitResponseRt = rt.intersection([
rt.type({
lastBackingIndexName: rt.string,
isComponentTemplateUpdated: rt.union([rt.boolean, rt.undefined]),
isLatestBackingIndexUpdated: rt.union([rt.boolean, rt.undefined]),
customComponentTemplateName: rt.string,
}),
rt.partial({
createdOn: rt.union([rt.null, rt.number]), // rt.null is needed because `createdOn` is not available on Serverless
integration: rt.string,
datasetUserPrivileges: datasetUserPrivilegesRt,
error: rt.string,
}),
]);
export type UpdateFieldLimitResponse = rt.TypeOf<typeof updateFieldLimitResponseRt>;
export const dataStreamRolloverResponseRt = rt.type({
acknowledged: rt.boolean,
});
export type DataStreamRolloverResponse = rt.TypeOf<typeof dataStreamRolloverResponseRt>;
export const dataStreamSettingsRt = rt.partial({
lastBackingIndexName: rt.string,
indexTemplate: rt.string,
createdOn: rt.union([rt.null, rt.number]), // rt.null is needed because `createdOn` is not available on Serverless
integration: rt.string,
datasetUserPrivileges: datasetUserPrivilegesRt,
});
export type DataStreamSettings = rt.TypeOf<typeof dataStreamSettingsRt>;
export const dataStreamDetailsRt = rt.partial({

View file

@ -14,3 +14,8 @@ export interface AnalyzeDegradedFieldsParams {
lastBackingIndex: string;
degradedField: string;
}
export interface UpdateFieldLimitParams {
dataStream: string;
newFieldLimit: number;
}

View file

@ -500,3 +500,182 @@ export const degradedFieldMessageIssueDoesNotExistInLatestIndex = i18n.translate
'This issue was detected in an older version of the dataset, but not in the most recent version.',
}
);
export const possibleMitigationTitle = i18n.translate(
'xpack.datasetQuality.details.degradedField.possibleMitigationTitle',
{
defaultMessage: 'Possible mitigation',
}
);
export const increaseFieldMappingLimitTitle = i18n.translate(
'xpack.datasetQuality.details.degradedField.possibleMitigation.increaseFieldMappingLimitTitle',
{
defaultMessage: 'Increase field mapping limit',
}
);
export const fieldLimitMitigationDescriptionText = i18n.translate(
'xpack.datasetQuality.details.degradedField.possibleMitigation.fieldLimitMitigationDescription',
{
defaultMessage:
'The field mapping limit sets the maximum number of fields in an index. When exceeded, additional fields are ignored. To prevent this, increase your field mapping limit.',
}
);
export const fieldLimitMitigationConsiderationText = i18n.translate(
'xpack.datasetQuality.details.degradedField.possibleMitigation.fieldLimitMitigationConsiderations',
{
defaultMessage: 'Before changing the field limit, consider the following:',
}
);
export const fieldLimitMitigationConsiderationText1 = i18n.translate(
'xpack.datasetQuality.details.degradedField.possibleMitigation.fieldLimitMitigationConsiderations1',
{
defaultMessage: 'Increasing the field limit could slow cluster performance.',
}
);
export const fieldLimitMitigationConsiderationText2 = i18n.translate(
'xpack.datasetQuality.details.degradedField.possibleMitigation.fieldLimitMitigationConsiderations2',
{
defaultMessage: 'Increasing the field limit also resolves field limit issues for other fields.',
}
);
export const fieldLimitMitigationConsiderationText3 = i18n.translate(
'xpack.datasetQuality.details.degradedField.possibleMitigation.fieldLimitMitigationConsiderations3',
{
defaultMessage:
'This change applies to the [name] component template and affects all namespaces in the template.',
}
);
export const fieldLimitMitigationConsiderationText4 = i18n.translate(
'xpack.datasetQuality.details.degradedField.possibleMitigation.fieldLimitMitigationConsiderations4',
{
defaultMessage:
'You need to roll over affected data streams to apply mapping changes to component templates.',
}
);
export const fieldLimitMitigationCurrentLimitLabelText = i18n.translate(
'xpack.datasetQuality.details.degradedField.possibleMitigation.fieldLimitMitigationCurrentLimitLabelText',
{
defaultMessage: 'Current limit',
}
);
export const fieldLimitMitigationNewLimitButtonText = i18n.translate(
'xpack.datasetQuality.details.degradedField.possibleMitigation.fieldLimitMitigationNewLimitButtonText',
{
defaultMessage: 'New limit',
}
);
export const fieldLimitMitigationNewLimitPlaceholderText = i18n.translate(
'xpack.datasetQuality.details.degradedField.possibleMitigation.fieldLimitMitigationNewLimitPlaceholderText',
{
defaultMessage: 'New field limit',
}
);
export const fieldLimitMitigationApplyButtonText = i18n.translate(
'xpack.datasetQuality.details.degradedField.possibleMitigation.fieldLimitMitigationApplyButtonText',
{
defaultMessage: 'Apply',
}
);
export const otherMitigationsLoadingAriaText = i18n.translate(
'xpack.datasetQuality.details.degradedField.possibleMitigation.otherMitigationsLoadingText',
{
defaultMessage: 'Loading possible mitigations',
}
);
export const otherMitigationsCustomComponentTemplate = i18n.translate(
'xpack.datasetQuality.details.degradedField.possibleMitigation.otherMitigationsCustomComponentTemplate',
{
defaultMessage: 'Add or edit custom component template',
}
);
export const otherMitigationsCustomIngestPipeline = i18n.translate(
'xpack.datasetQuality.details.degradedField.possibleMitigation.otherMitigationsCustomIngestPipeline',
{
defaultMessage: 'Add or edit custom ingest pipeline',
}
);
export const fieldLimitMitigationOfficialDocumentation = i18n.translate(
'xpack.datasetQuality.details.degradedField.possibleMitigation.fieldLimitMitigationOfficialDocumentation',
{
defaultMessage: 'Documentation',
}
);
export const fieldLimitMitigationSuccessMessage = i18n.translate(
'xpack.datasetQuality.details.degradedField.possibleMitigation.fieldLimitMitigationSuccessMessage',
{
defaultMessage: 'New limit set!',
}
);
export const fieldLimitMitigationSuccessComponentTemplateLinkText = i18n.translate(
'xpack.datasetQuality.details.degradedField.possibleMitigation.fieldLimitMitigationSuccessComponentTemplateLinkText',
{
defaultMessage: 'See component template',
}
);
export const fieldLimitMitigationPartiallyFailedMessage = i18n.translate(
'xpack.datasetQuality.details.degradedField.possibleMitigation.fieldLimitMitigationPartiallyFailedMessage',
{
defaultMessage: 'Changes not applied to new data',
}
);
export const fieldLimitMitigationFailedMessage = i18n.translate(
'xpack.datasetQuality.details.degradedField.possibleMitigation.fieldLimitMitigationFailedMessage',
{
defaultMessage: 'Changes not applied',
}
);
export const fieldLimitMitigationFailedMessageDescription = i18n.translate(
'xpack.datasetQuality.details.degradedField.possibleMitigation.fieldLimitMitigationFailedMessageDescription',
{
defaultMessage: 'Failed to set new limit',
}
);
export const fieldLimitMitigationPartiallyFailedMessageDescription = i18n.translate(
'xpack.datasetQuality.details.degradedField.possibleMitigation.fieldLimitMitigationPartiallyFailedMessageDescription',
{
defaultMessage:
'The component template was successfully updated with the new field limit, but the changes were not applied to the most recent backing index. Perform a rollover to apply your changes to new data.',
}
);
export const fieldLimitMitigationRolloverButton = i18n.translate(
'xpack.datasetQuality.details.degradedField.possibleMitigation.fieldLimitMitigationRolloverButton',
{
defaultMessage: 'Rollover',
}
);
export const manualMitigationCustomPipelineCopyPipelineNameAriaText = i18n.translate(
'xpack.datasetQuality.details.degradedField.possibleMitigation.copyPipelineNameAriaText',
{
defaultMessage: 'Copy pipeline name',
}
);
export const manualMitigationCustomPipelineCreateEditPipelineLink = i18n.translate(
'xpack.datasetQuality.details.degradedField.possibleMitigation.createEditPipelineLink',
{
defaultMessage: 'create or edit the pipeline',
}
);

View file

@ -0,0 +1,18 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
/*
* There are index templates, like metrics-apm.service_transaction.10m@template, whose names already contain an `@`.
* Hence the part after the `@` needs to be removed to derive the custom component template name.
*/
export function getComponentTemplatePrefixFromIndexTemplate(indexTemplate: string) {
if (indexTemplate.includes('@')) {
return indexTemplate.split('@')[0];
}
return indexTemplate;
}

View file

@ -38,7 +38,7 @@ export const DegradedFieldInfo = ({ fieldList }: { fieldList?: DegradedField })
degradedFieldValues,
isDegradedFieldsLoading,
isAnalysisInProgress,
degradedFieldAnalysisResult,
degradedFieldAnalysisFormattedResult,
degradedFieldAnalysis,
} = useDegradedFields();
@ -94,9 +94,12 @@ export const DegradedFieldInfo = ({ fieldList }: { fieldList?: DegradedField })
grow={2}
>
<div>
<EuiToolTip position="top" content={degradedFieldAnalysisResult?.tooltipContent}>
<EuiToolTip
position="top"
content={degradedFieldAnalysisFormattedResult?.tooltipContent}
>
<EuiBadge color="hollow">
<strong>{degradedFieldAnalysisResult?.potentialCause}</strong>
<strong>{degradedFieldAnalysisFormattedResult?.potentialCause}</strong>
</EuiBadge>
</EuiToolTip>
</div>
@ -125,52 +128,53 @@ export const DegradedFieldInfo = ({ fieldList }: { fieldList?: DegradedField })
</>
)}
{!isAnalysisInProgress && degradedFieldAnalysisResult?.shouldDisplayValues && (
<>
<EuiFlexGroup
data-test-subj={'datasetQualityDetailsDegradedFieldFlyoutFieldsList-characterLimit'}
>
<EuiFlexItem grow={1}>
<EuiTitle size="xxs">
<span>{degradedFieldMaximumCharacterLimitColumnName}</span>
</EuiTitle>
</EuiFlexItem>
<EuiFlexItem
data-test-subj="datasetQualityDetailsDegradedFieldFlyoutFieldValue-characterLimit"
css={{ maxWidth: '64%' }}
grow={2}
{!isAnalysisInProgress &&
degradedFieldAnalysisFormattedResult?.shouldDisplayIgnoredValuesAndLimit && (
<>
<EuiFlexGroup
data-test-subj={'datasetQualityDetailsDegradedFieldFlyoutFieldsList-characterLimit'}
>
<span>{degradedFieldAnalysis?.fieldMapping?.ignore_above}</span>
</EuiFlexItem>
</EuiFlexGroup>
<EuiHorizontalRule margin="s" />
<EuiFlexGroup
data-test-subj={`datasetQualityDetailsDegradedFieldFlyoutFieldsList-values`}
>
<EuiFlexItem grow={1}>
<EuiTitle size="xxs">
<span>{degradedFieldValuesColumnName}</span>
</EuiTitle>
</EuiFlexItem>
<EuiFlexItem
data-test-subj="datasetQualityDetailsDegradedFieldFlyoutFieldValue-values"
css={{ maxWidth: '64%' }}
grow={2}
<EuiFlexItem grow={1}>
<EuiTitle size="xxs">
<span>{degradedFieldMaximumCharacterLimitColumnName}</span>
</EuiTitle>
</EuiFlexItem>
<EuiFlexItem
data-test-subj="datasetQualityDetailsDegradedFieldFlyoutFieldValue-characterLimit"
css={{ maxWidth: '64%' }}
grow={2}
>
<span>{degradedFieldAnalysis?.fieldMapping?.ignore_above}</span>
</EuiFlexItem>
</EuiFlexGroup>
<EuiHorizontalRule margin="s" />
<EuiFlexGroup
data-test-subj={`datasetQualityDetailsDegradedFieldFlyoutFieldsList-values`}
>
<EuiBadgeGroup gutterSize="s">
{degradedFieldValues?.values.map((value, idx) => (
<EuiBadge color="hollow" key={idx}>
<EuiTextColor color="#765B96">
<strong>{value}</strong>
</EuiTextColor>
</EuiBadge>
))}
</EuiBadgeGroup>
</EuiFlexItem>
</EuiFlexGroup>
<EuiHorizontalRule margin="s" />
</>
)}
<EuiFlexItem grow={1}>
<EuiTitle size="xxs">
<span>{degradedFieldValuesColumnName}</span>
</EuiTitle>
</EuiFlexItem>
<EuiFlexItem
data-test-subj="datasetQualityDetailsDegradedFieldFlyoutFieldValue-values"
css={{ maxWidth: '64%' }}
grow={2}
>
<EuiBadgeGroup gutterSize="s">
{degradedFieldValues?.values.map((value, idx) => (
<EuiBadge color="hollow" key={idx}>
<EuiTextColor color="#765B96">
<strong>{value}</strong>
</EuiTextColor>
</EuiBadge>
))}
</EuiBadgeGroup>
</EuiFlexItem>
</EuiFlexGroup>
<EuiHorizontalRule margin="s" />
</>
)}
</EuiFlexGroup>
);
};

View file

@ -6,6 +6,7 @@
*/
import React, { useMemo } from 'react';
import { i18n } from '@kbn/i18n';
import {
EuiBadge,
EuiFlyout,
@ -20,6 +21,7 @@ import {
EuiButtonIcon,
EuiToolTip,
} from '@elastic/eui';
import { FormattedMessage } from '@kbn/i18n-react';
import { NavigationSource } from '../../../services/telemetry';
import {
useDatasetDetailsRedirectLinkTelemetry,
@ -38,11 +40,18 @@ import {
} from '../../../../common/translations';
import { DegradedFieldInfo } from './field_info';
import { _IGNORED } from '../../../../common/es_fields';
import { PossibleMitigations } from './possible_mitigations';
// Allow for lazy loading
// eslint-disable-next-line import/no-default-export
export default function DegradedFieldFlyout() {
const { closeDegradedFieldFlyout, expandedDegradedField, renderedItems } = useDegradedFields();
const {
closeDegradedFieldFlyout,
expandedDegradedField,
renderedItems,
isAnalysisInProgress,
degradedFieldAnalysisFormattedResult,
} = useDegradedFields();
const { dataStreamSettings, datasetDetails, timeRange } = useDatasetQualityDetailsState();
const pushedFlyoutTitleId = useGeneratedHtmlId({
prefix: 'pushedFlyoutTitle',
@ -118,9 +127,42 @@ export default function DegradedFieldFlyout() {
</EuiTextColor>
</>
)}
{isUserViewingTheIssueOnLatestBackingIndex &&
!isAnalysisInProgress &&
degradedFieldAnalysisFormattedResult &&
!degradedFieldAnalysisFormattedResult.identifiedUsingHeuristics && (
<>
<EuiSpacer size="s" />
<EuiTextColor
color="danger"
data-test-subj="datasetQualityDetailsDegradedFieldFlyoutIssueDoesNotExist"
>
<FormattedMessage
id="xpack.datasetQuality.details.degradedField.potentialCause.ignoreMalformedWarning"
defaultMessage="If you've recently updated your {field_limit} settings, this quality issue may not be relevant. Rollover the data stream to verify."
values={{
field_limit: (
<strong>
{i18n.translate(
'xpack.datasetQuality.degradedFieldFlyout.strong.fieldLimitLabel',
{ defaultMessage: 'field limit' }
)}
</strong>
),
}}
/>
</EuiTextColor>
</>
)}
</EuiFlyoutHeader>
<EuiFlyoutBody>
<DegradedFieldInfo fieldList={fieldList} />
{isUserViewingTheIssueOnLatestBackingIndex && (
<>
<EuiSpacer size="s" />
<PossibleMitigations />
</>
)}
</EuiFlyoutBody>
</EuiFlyout>
);

View file

@ -0,0 +1,28 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import React from 'react';
import { EuiLink } from '@elastic/eui';
import { useKibanaContextForPlugin } from '../../../../../utils';
import { fieldLimitMitigationOfficialDocumentation } from '../../../../../../common/translations';
export function FieldLimitDocLink() {
const {
services: { docLinks },
} = useKibanaContextForPlugin();
return (
<EuiLink
data-test-subj="datasetQualityManualMitigationsPipelineOfficialDocumentationLink"
href={docLinks.links.elasticsearch.mappingSettingsLimit}
target="_blank"
className="eui-displayBlock eui-textRight"
>
{fieldLimitMitigationOfficialDocumentation}
</EuiLink>
);
}

View file

@ -0,0 +1,83 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import React from 'react';
import {
EuiAccordion,
EuiHorizontalRule,
EuiPanel,
EuiSpacer,
EuiText,
EuiTitle,
useGeneratedHtmlId,
} from '@elastic/eui';
import {
fieldLimitMitigationConsiderationText,
fieldLimitMitigationConsiderationText1,
fieldLimitMitigationConsiderationText2,
fieldLimitMitigationConsiderationText3,
fieldLimitMitigationConsiderationText4,
fieldLimitMitigationDescriptionText,
increaseFieldMappingLimitTitle,
} from '../../../../../../common/translations';
import { useDegradedFields } from '../../../../../hooks';
import { IncreaseFieldMappingLimit } from './increase_field_mapping_limit';
import { FieldLimitDocLink } from './field_limit_documentation_link';
import { MessageCallout } from './message_callout';
export function FieldMappingLimit({ isIntegration }: { isIntegration: boolean }) {
const accordionId = useGeneratedHtmlId({
prefix: increaseFieldMappingLimitTitle,
});
const { degradedFieldAnalysis } = useDegradedFields();
const accordionTitle = (
<EuiTitle size="xxs">
<h6>{increaseFieldMappingLimitTitle}</h6>
</EuiTitle>
);
return (
<EuiPanel hasBorder grow={false}>
<EuiAccordion
id={accordionId}
buttonContent={accordionTitle}
initialIsOpen={true}
data-test-subj="datasetQualityDetailsDegradedFieldFlyoutFieldLimitMitigationAccordion"
paddingSize="s"
>
<EuiText size="xs" component="p">
{fieldLimitMitigationDescriptionText}
</EuiText>
<EuiHorizontalRule margin="s" />
<EuiText size="xs">
<p>{fieldLimitMitigationConsiderationText}</p>
<ul>
<li>{fieldLimitMitigationConsiderationText1}</li>
<li>{fieldLimitMitigationConsiderationText2}</li>
<li>{fieldLimitMitigationConsiderationText3}</li>
<li>{fieldLimitMitigationConsiderationText4}</li>
</ul>
</EuiText>
<EuiHorizontalRule margin="s" />
{isIntegration && (
<>
<IncreaseFieldMappingLimit
totalFieldLimit={degradedFieldAnalysis?.totalFieldLimit ?? 0}
/>
<EuiSpacer size="s" />
<MessageCallout />
<EuiHorizontalRule margin="s" />
</>
)}
<FieldLimitDocLink />
</EuiAccordion>
</EuiPanel>
);
}

View file

@ -0,0 +1,83 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import React, { useState } from 'react';
import {
EuiFlexGroup,
EuiFlexItem,
EuiFieldText,
EuiFormRow,
EuiButton,
EuiFieldNumber,
} from '@elastic/eui';
import {
fieldLimitMitigationApplyButtonText,
fieldLimitMitigationCurrentLimitLabelText,
fieldLimitMitigationNewLimitButtonText,
fieldLimitMitigationNewLimitPlaceholderText,
} from '../../../../../../common/translations';
import { useDegradedFields } from '../../../../../hooks';
export function IncreaseFieldMappingLimit({ totalFieldLimit }: { totalFieldLimit: number }) {
// Propose the user a 30% increase over the current limit
const proposedNewLimit = Math.round(totalFieldLimit * 1.3);
const [newFieldLimit, setNewFieldLimit] = useState<number>(proposedNewLimit);
const [isInvalid, setIsInvalid] = useState(false);
const { updateNewFieldLimit, isMitigationInProgress } = useDegradedFields();
const validateNewLimit = (newLimit: string) => {
const parsedLimit = parseInt(newLimit, 10);
setNewFieldLimit(parsedLimit);
if (totalFieldLimit > parsedLimit) {
setIsInvalid(true);
} else {
setIsInvalid(false);
}
};
return (
<EuiFlexGroup
gutterSize="s"
data-test-subj="datasetQualityDetailsDegradedFieldFlyoutIncreaseFieldLimitPanel"
>
<EuiFlexItem>
<EuiFormRow label={fieldLimitMitigationCurrentLimitLabelText}>
<EuiFieldText
data-test-subj="datasetQualityIncreaseFieldMappingCurrentLimitFieldText"
disabled
value={totalFieldLimit}
/>
</EuiFormRow>
</EuiFlexItem>
<EuiFlexItem>
<EuiFormRow label={fieldLimitMitigationNewLimitButtonText}>
<EuiFieldNumber
data-test-subj="datasetQualityIncreaseFieldMappingProposedLimitFieldText"
placeholder={fieldLimitMitigationNewLimitPlaceholderText}
value={newFieldLimit}
onChange={(e) => validateNewLimit(e.target.value)}
aria-label={fieldLimitMitigationNewLimitPlaceholderText}
isInvalid={isInvalid}
min={totalFieldLimit + 1}
/>
</EuiFormRow>
</EuiFlexItem>
<EuiFlexItem grow={false}>
<EuiFormRow hasEmptyLabelSpace>
<EuiButton
data-test-subj="datasetQualityIncreaseFieldMappingLimitButtonButton"
disabled={isInvalid}
onClick={() => updateNewFieldLimit(newFieldLimit)}
isLoading={isMitigationInProgress}
>
{fieldLimitMitigationApplyButtonText}
</EuiButton>
</EuiFormRow>
</EuiFlexItem>
</EuiFlexGroup>
);
}

View file

@ -0,0 +1,119 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import React from 'react';
import { EuiButton, EuiCallOut, EuiLink } from '@elastic/eui';
import {
fieldLimitMitigationFailedMessage,
fieldLimitMitigationFailedMessageDescription,
fieldLimitMitigationPartiallyFailedMessage,
fieldLimitMitigationPartiallyFailedMessageDescription,
fieldLimitMitigationRolloverButton,
fieldLimitMitigationSuccessComponentTemplateLinkText,
fieldLimitMitigationSuccessMessage,
} from '../../../../../../common/translations';
import { useDatasetQualityDetailsState, useDegradedFields } from '../../../../../hooks';
import { getComponentTemplatePrefixFromIndexTemplate } from '../../../../../../common/utils/component_template_name';
import { useKibanaContextForPlugin } from '../../../../../utils';
export function MessageCallout() {
const {
isMitigationInProgress,
newFieldLimitData,
isRolloverRequired,
isMitigationAppliedSuccessfully,
} = useDegradedFields();
const { error: serverError } = newFieldLimitData ?? {};
if (serverError) {
return <ErrorCallout />;
}
if (!isMitigationInProgress && isRolloverRequired) {
return <ManualRolloverCallout />;
}
if (!isMitigationInProgress && isMitigationAppliedSuccessfully) {
return <SuccessCallout />;
}
return null;
}
export function SuccessCallout() {
const {
services: {
share: {
url: { locators },
},
},
} = useKibanaContextForPlugin();
const { dataStreamSettings, datasetDetails } = useDatasetQualityDetailsState();
const { name } = datasetDetails;
const componentTemplateUrl = locators.get('INDEX_MANAGEMENT_LOCATOR_ID')?.useUrl({
page: 'component_template',
componentTemplate: `${getComponentTemplatePrefixFromIndexTemplate(
dataStreamSettings?.indexTemplate ?? name
)}@custom`,
});
return (
<EuiCallOut
title={fieldLimitMitigationSuccessMessage}
color="success"
iconType="checkInCircleFilled"
data-test-subj="datasetQualityDetailsDegradedFlyoutNewLimitSetSuccessCallout"
>
<EuiLink
data-test-subj="datasetQualityDetailsDegradedFlyoutNewLimitSetCheckComponentTemplate"
href={componentTemplateUrl}
target="_blank"
color="success"
>
{fieldLimitMitigationSuccessComponentTemplateLinkText}
</EuiLink>
</EuiCallOut>
);
}
export function ManualRolloverCallout() {
const { triggerRollover, isRolloverInProgress } = useDegradedFields();
return (
<EuiCallOut
title={fieldLimitMitigationPartiallyFailedMessage}
color="danger"
iconType="checkInCircleFilled"
>
<p>{fieldLimitMitigationPartiallyFailedMessageDescription}</p>
<EuiButton
data-test-subj="datasetQualityNewLimitSetManualRollover"
onClick={triggerRollover}
iconType="popout"
size="s"
title={fieldLimitMitigationRolloverButton}
color="danger"
isLoading={isRolloverInProgress}
>
{fieldLimitMitigationRolloverButton}
</EuiButton>
</EuiCallOut>
);
}
export function ErrorCallout() {
return (
<EuiCallOut
title={fieldLimitMitigationFailedMessage}
color="danger"
iconType="error"
data-test-subj="datasetQualityDetailsNewFieldLimitErrorCallout"
>
<p>{fieldLimitMitigationFailedMessageDescription}</p>
</EuiCallOut>
);
}

View file

@ -0,0 +1,35 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import React from 'react';
import { EuiSpacer } from '@elastic/eui';
import { ManualMitigations } from './manual';
import { FieldMappingLimit } from './field_limit/field_mapping_limit';
import { useDatasetQualityDetailsState, useDegradedFields } from '../../../../hooks';
import { PossibleMitigationTitle } from './title';
export function PossibleMitigations() {
const { degradedFieldAnalysis, isAnalysisInProgress } = useDegradedFields();
const { integrationDetails } = useDatasetQualityDetailsState();
const isIntegration = Boolean(integrationDetails?.integration);
return (
!isAnalysisInProgress && (
<div>
<PossibleMitigationTitle />
<EuiSpacer size="m" />
{degradedFieldAnalysis?.isFieldLimitIssue && (
<>
<FieldMappingLimit isIntegration={isIntegration} />
<EuiSpacer size="m" />
</>
)}
<ManualMitigations />
</div>
)
);
}

View file

@ -0,0 +1,86 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import React, { useCallback, useEffect, useState } from 'react';
import { MANAGEMENT_APP_ID } from '@kbn/deeplinks-management/constants';
import { EuiFlexGroup, EuiIcon, EuiLink, EuiPanel, EuiTitle } from '@elastic/eui';
import { useKibanaContextForPlugin } from '../../../../../utils';
import { useDatasetQualityDetailsState } from '../../../../../hooks';
import { getComponentTemplatePrefixFromIndexTemplate } from '../../../../../../common/utils/component_template_name';
import { otherMitigationsCustomComponentTemplate } from '../../../../../../common/translations';
export function CreateEditComponentTemplateLink({ isIntegration }: { isIntegration: boolean }) {
const {
services: {
application,
share: {
url: { locators },
},
},
} = useKibanaContextForPlugin();
const [indexTemplatePath, setIndexTemplatePath] = useState<string | null>(null);
const [componentTemplatePath, setComponentTemplatePath] = useState<string | null>(null);
const { dataStreamSettings, datasetDetails } = useDatasetQualityDetailsState();
const { name } = datasetDetails;
const indexManagementLocator = locators.get('INDEX_MANAGEMENT_LOCATOR_ID');
useEffect(() => {
indexManagementLocator
?.getLocation({
page: 'index_template',
indexTemplate: dataStreamSettings?.indexTemplate ?? '',
})
.then(({ path }) => setIndexTemplatePath(path));
indexManagementLocator
?.getLocation({
page: 'component_template',
componentTemplate: `${getComponentTemplatePrefixFromIndexTemplate(
dataStreamSettings?.indexTemplate ?? name
)}@custom`,
})
.then(({ path }) => setComponentTemplatePath(path));
}, [
locators,
setIndexTemplatePath,
dataStreamSettings?.indexTemplate,
indexManagementLocator,
name,
]);
const templateUrl = isIntegration ? componentTemplatePath : indexTemplatePath;
const onClickHandler = useCallback(async () => {
const options = {
openInNewTab: true,
...(templateUrl && { path: templateUrl }),
};
await application.navigateToApp(MANAGEMENT_APP_ID, options);
}, [application, templateUrl]);
return (
<EuiPanel hasBorder grow={false}>
<EuiLink
data-test-subj="datasetQualityManualMitigationsCustomComponentTemplateLink"
data-test-url={templateUrl}
onClick={onClickHandler}
target="_blank"
css={{ width: '100%' }}
>
<EuiFlexGroup alignItems="center" gutterSize="s">
<EuiIcon type="popout" />
<EuiTitle size="xxs">
<p>{otherMitigationsCustomComponentTemplate}</p>
</EuiTitle>
</EuiFlexGroup>
</EuiLink>
</EuiPanel>
);
}

View file

@ -0,0 +1,39 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import React from 'react';
import { EuiSkeletonRectangle, EuiSpacer } from '@elastic/eui';
import { useDatasetQualityDetailsState } from '../../../../../hooks';
import { CreateEditComponentTemplateLink } from './component_template_link';
import { CreateEditPipelineLink } from './pipeline_link';
import { otherMitigationsLoadingAriaText } from '../../../../../../common/translations';
export function ManualMitigations() {
const { integrationDetails, loadingState, dataStreamSettings } = useDatasetQualityDetailsState();
const isIntegrationPresentInSettings = dataStreamSettings?.integration;
const isIntegration = !!integrationDetails?.integration;
const { dataStreamSettingsLoading, integrationDetailsLoadings } = loadingState;
const hasIntegrationCheckCompleted =
!dataStreamSettingsLoading &&
((isIntegrationPresentInSettings && !integrationDetailsLoadings) ||
!isIntegrationPresentInSettings);
return (
<EuiSkeletonRectangle
isLoading={!hasIntegrationCheckCompleted}
contentAriaLabel={otherMitigationsLoadingAriaText}
width="100%"
height={300}
borderRadius="none"
>
<CreateEditComponentTemplateLink isIntegration={isIntegration} />
<EuiSpacer size="s" />
<CreateEditPipelineLink isIntegration={isIntegration} />
</EuiSkeletonRectangle>
);
}

View file

@ -0,0 +1,136 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import React, { useCallback, useMemo } from 'react';
import { FormattedMessage } from '@kbn/i18n-react';
import { i18n } from '@kbn/i18n';
import {
copyToClipboard,
EuiAccordion,
EuiButtonIcon,
EuiFieldText,
EuiHorizontalRule,
EuiLink,
EuiPanel,
EuiSpacer,
EuiTitle,
useGeneratedHtmlId,
} from '@elastic/eui';
import {
manualMitigationCustomPipelineCopyPipelineNameAriaText,
manualMitigationCustomPipelineCreateEditPipelineLink,
otherMitigationsCustomIngestPipeline,
} from '../../../../../../common/translations';
import { useKibanaContextForPlugin } from '../../../../../utils';
import { useDatasetQualityDetailsState } from '../../../../../hooks';
const AccordionTitle = () => (
<EuiTitle size="xxs">
<h6>{otherMitigationsCustomIngestPipeline}</h6>
</EuiTitle>
);
export function CreateEditPipelineLink({ isIntegration }: { isIntegration: boolean }) {
const {
services: {
share: {
url: { locators },
},
},
} = useKibanaContextForPlugin();
const accordionId = useGeneratedHtmlId({
prefix: otherMitigationsCustomIngestPipeline,
});
const { datasetDetails } = useDatasetQualityDetailsState();
const { type, name } = datasetDetails;
const pipelineName = useMemo(
() => (isIntegration ? `${type}-${name}@custom` : `${type}@custom`),
[isIntegration, type, name]
);
const ingestPipelineLocator = locators.get('INGEST_PIPELINES_APP_LOCATOR');
const pipelineUrl = ingestPipelineLocator?.useUrl(
{ pipelineId: pipelineName, page: 'pipelines_list' },
{},
[pipelineName]
);
const onClickHandler = useCallback(() => {
copyToClipboard(pipelineName);
}, [pipelineName]);
return (
<EuiPanel hasBorder grow={false}>
<EuiAccordion
id={accordionId}
buttonContent={<AccordionTitle />}
paddingSize="none"
initialIsOpen={true}
data-test-subj="datasetQualityManualMitigationsPipelineAccordion"
>
<EuiHorizontalRule margin="s" />
<FormattedMessage
id="xpack.datasetQuality.details.degradedField.possibleMitigation.otherMitigationsCustomPipelineText1"
defaultMessage="{lineNumber} Copy the following pipeline name"
values={{
lineNumber: (
<strong>
{i18n.translate('xpack.datasetQuality.editPipeline.strong.Label', {
defaultMessage: '1.',
})}
</strong>
),
}}
/>
<EuiSpacer size="m" />
<EuiFieldText
append={
<EuiButtonIcon
iconType="copy"
data-test-subj="datasetQualityManualMitigationsPipelineNameCopyButton"
onClick={onClickHandler}
/>
}
readOnly={true}
aria-label={manualMitigationCustomPipelineCopyPipelineNameAriaText}
value={pipelineName}
data-test-subj="datasetQualityManualMitigationsPipelineName"
fullWidth
/>
<EuiSpacer size="m" />
<FormattedMessage
id="xpack.datasetQuality.details.degradedField.possibleMitigation.otherMitigationsCustomPipelineText2"
defaultMessage="{lineNumber} Using the name you copied, {createEditPipelineLink}"
values={{
lineNumber: (
<strong>
{i18n.translate('xpack.datasetQuality.editPipeline.strong.Label', {
defaultMessage: '2.',
})}
</strong>
),
createEditPipelineLink: (
<EuiLink
data-test-subj="datasetQualityManualMitigationsPipelineLink"
data-test-url={pipelineUrl}
href={pipelineUrl}
target="_blank"
>
{manualMitigationCustomPipelineCreateEditPipelineLink}
</EuiLink>
),
}}
/>
<EuiSpacer size="m" />
</EuiAccordion>
</EuiPanel>
);
}

View file

@ -0,0 +1,35 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import React from 'react';
import { EuiBetaBadge, EuiFlexGroup, EuiIcon, EuiTitle } from '@elastic/eui';
import {
overviewQualityIssuesAccordionTechPreviewBadge,
possibleMitigationTitle,
} from '../../../../../common/translations';
export function PossibleMitigationTitle() {
return (
<EuiFlexGroup alignItems="center" gutterSize="s">
<EuiIcon type="wrench" />
<EuiTitle
size="xs"
data-test-subj="datasetQualityDetailsDegradedFieldFlyoutPossibleMitigationTitle"
>
<p>{possibleMitigationTitle}</p>
</EuiTitle>
<EuiBetaBadge
alignment="middle"
color="hollow"
data-test-subj="datasetQualityDetailsDegradedFieldFlyoutPossibleMitigationTechPreviewBadge"
label={overviewQualityIssuesAccordionTechPreviewBadge}
size="s"
/>
</EuiFlexGroup>
);
}

View file

@ -45,6 +45,7 @@ export function DegradedFields() {
aria-describedby={toggleTextSwitchId}
compressed
data-test-subj="datasetQualityDetailsOverviewDegradedFieldToggleSwitch"
css={{ marginRight: '5px' }}
/>
<EuiIconTip content={overviewDegradedFieldToggleSwitchTooltip} position="top" />
</>

View file

@ -104,20 +104,25 @@ export function useDegradedFields() {
}, [service]);
const degradedFieldValues = useSelector(service, (state) =>
state.matches('initializing.degradedFieldFlyout.open.ignoredValues.done')
state.matches('initializing.degradedFieldFlyout.open.initialized.ignoredValues.done')
? state.context.degradedFieldValues
: undefined
);
const degradedFieldAnalysis = useSelector(service, (state) =>
state.matches('initializing.degradedFieldFlyout.open.analyze.done')
state.matches('initializing.degradedFieldFlyout.open.initialized.mitigation.analyzed') ||
state.matches('initializing.degradedFieldFlyout.open.initialized.mitigation.mitigating') ||
state.matches(
'initializing.degradedFieldFlyout.open.initialized.mitigation.askingForRollover'
) ||
state.matches('initializing.degradedFieldFlyout.open.initialized.mitigation.rollingOver') ||
state.matches('initializing.degradedFieldFlyout.open.initialized.mitigation.success') ||
state.matches('initializing.degradedFieldFlyout.open.initialized.mitigation.error')
? state.context.degradedFieldAnalysis
: undefined
);
// This piece only caters to the field limit issue at the moment.
// In future this will cater to the other 2 reasons as well
const degradedFieldAnalysisResult = useMemo(() => {
const degradedFieldAnalysisFormattedResult = useMemo(() => {
if (!degradedFieldAnalysis) {
return undefined;
}
@ -127,8 +132,8 @@ export function useDegradedFields() {
return {
potentialCause: degradedFieldCauseFieldLimitExceeded,
tooltipContent: degradedFieldCauseFieldLimitExceededTooltip,
shouldDisplayMitigation: true,
shouldDisplayValues: false,
shouldDisplayIgnoredValuesAndLimit: false,
identifiedUsingHeuristics: true,
};
}
@ -143,8 +148,8 @@ export function useDegradedFields() {
return {
potentialCause: degradedFieldCauseFieldIgnored,
tooltipContent: degradedFieldCauseFieldIgnoredTooltip,
shouldDisplayMitigation: false,
shouldDisplayValues: true,
shouldDisplayIgnoredValuesAndLimit: true,
identifiedUsingHeuristics: true,
};
}
}
@ -153,19 +158,59 @@ export function useDegradedFields() {
return {
potentialCause: degradedFieldCauseFieldMalformed,
tooltipContent: degradedFieldCauseFieldMalformedTooltip,
shouldDisplayMitigation: false,
shouldDisplayValues: false,
shouldDisplayIgnoredValuesAndLimit: false,
identifiedUsingHeuristics: false, // TODO: Add heuristics to identify ignore_malformed issues
};
}, [degradedFieldAnalysis, degradedFieldValues]);
const isDegradedFieldsValueLoading = useSelector(service, (state) => {
return state.matches('initializing.degradedFieldFlyout.open.ignoredValues.fetching');
return state.matches(
'initializing.degradedFieldFlyout.open.initialized.ignoredValues.fetching'
);
});
const isRolloverRequired = useSelector(service, (state) => {
return state.matches(
'initializing.degradedFieldFlyout.open.initialized.mitigation.askingForRollover'
);
});
const isMitigationAppliedSuccessfully = useSelector(service, (state) => {
return state.matches('initializing.degradedFieldFlyout.open.initialized.mitigation.success');
});
const isAnalysisInProgress = useSelector(service, (state) => {
return state.matches('initializing.degradedFieldFlyout.open.analyze.fetching');
return state.matches('initializing.degradedFieldFlyout.open.initialized.mitigation.analyzing');
});
const isRolloverInProgress = useSelector(service, (state) => {
return state.matches(
'initializing.degradedFieldFlyout.open.initialized.mitigation.rollingOver'
);
});
const updateNewFieldLimit = useCallback(
(newFieldLimit: number) => {
service.send({ type: 'SET_NEW_FIELD_LIMIT', newFieldLimit });
},
[service]
);
const isMitigationInProgress = useSelector(service, (state) => {
return state.matches('initializing.degradedFieldFlyout.open.initialized.mitigation.mitigating');
});
const newFieldLimitData = useSelector(service, (state) =>
state.matches('initializing.degradedFieldFlyout.open.initialized.mitigation.success') ||
state.matches('initializing.degradedFieldFlyout.open.initialized.mitigation.error')
? state.context.fieldLimit
: undefined
);
const triggerRollover = useCallback(() => {
service.send('ROLLOVER_DATA_STREAM');
}, [service]);
return {
isDegradedFieldsLoading,
pagination,
@ -181,9 +226,16 @@ export function useDegradedFields() {
isDegradedFieldsValueLoading,
isAnalysisInProgress,
degradedFieldAnalysis,
degradedFieldAnalysisResult,
degradedFieldAnalysisFormattedResult,
toggleCurrentQualityIssues,
showCurrentQualityIssues,
expandedRenderedItem,
updateNewFieldLimit,
isMitigationInProgress,
isRolloverInProgress,
newFieldLimitData,
isRolloverRequired,
isMitigationAppliedSuccessfully,
triggerRollover,
};
}

View file

@ -8,6 +8,8 @@
import { HttpStart } from '@kbn/core/public';
import { decodeOrThrow } from '@kbn/io-ts-utils';
import {
DataStreamRolloverResponse,
dataStreamRolloverResponseRt,
DegradedFieldAnalysis,
degradedFieldAnalysisRt,
DegradedFieldValues,
@ -19,6 +21,8 @@ import {
IntegrationDashboardsResponse,
integrationDashboardsRT,
IntegrationResponse,
UpdateFieldLimitResponse,
updateFieldLimitResponseRt,
} from '../../../common/api_types';
import {
DataStreamDetails,
@ -37,6 +41,7 @@ import { Integration } from '../../../common/data_streams_stats/integration';
import {
AnalyzeDegradedFieldsParams,
GetDataStreamIntegrationParams,
UpdateFieldLimitParams,
} from '../../../common/data_stream_details/types';
import { DatasetQualityError } from '../../../common/errors';
@ -196,4 +201,46 @@ export class DataStreamDetailsClient implements IDataStreamDetailsClient {
new DatasetQualityError(`Failed to decode the analysis response: ${message}`)
)(response);
}
public async setNewFieldLimit({
dataStream,
newFieldLimit,
}: UpdateFieldLimitParams): Promise<UpdateFieldLimitResponse> {
const response = await this.http
.put<UpdateFieldLimitResponse>(
`/internal/dataset_quality/data_streams/${dataStream}/update_field_limit`,
{ body: JSON.stringify({ newFieldLimit }) }
)
.catch((error) => {
throw new DatasetQualityError(`Failed to set new Limit: ${error.message}`, error);
});
const decodedResponse = decodeOrThrow(
updateFieldLimitResponseRt,
(message: string) =>
new DatasetQualityError(`Failed to decode setting of new limit response: ${message}"`)
)(response);
return decodedResponse;
}
public async rolloverDataStream({
dataStream,
}: {
dataStream: string;
}): Promise<DataStreamRolloverResponse> {
const response = await this.http
.post<DataStreamRolloverResponse>(
`/internal/dataset_quality/data_streams/${dataStream}/rollover`
)
.catch((error) => {
throw new DatasetQualityError(`Failed to rollover datastream": ${error}`, error);
});
return decodeOrThrow(
dataStreamRolloverResponseRt,
(message: string) =>
new DatasetQualityError(`Failed to decode rollover response: ${message}"`)
)(response);
}
}

View file

@ -20,8 +20,15 @@ import {
import {
AnalyzeDegradedFieldsParams,
GetDataStreamIntegrationParams,
UpdateFieldLimitParams,
} from '../../../common/data_stream_details/types';
import { Dashboard, DegradedFieldAnalysis, DegradedFieldValues } from '../../../common/api_types';
import {
Dashboard,
DataStreamRolloverResponse,
DegradedFieldAnalysis,
DegradedFieldValues,
UpdateFieldLimitResponse,
} from '../../../common/api_types';
export type DataStreamDetailsServiceSetup = void;
@ -47,4 +54,6 @@ export interface IDataStreamDetailsClient {
params: GetDataStreamIntegrationParams
): Promise<Integration | undefined>;
analyzeDegradedField(params: AnalyzeDegradedFieldsParams): Promise<DegradedFieldAnalysis>;
setNewFieldLimit(params: UpdateFieldLimitParams): Promise<UpdateFieldLimitResponse>;
rolloverDataStream(params: { dataStream: string }): Promise<DataStreamRolloverResponse>;
}

View file

@ -59,3 +59,28 @@ export const fetchDataStreamIntegrationFailedNotifier = (
text: error.message,
});
};
export const updateFieldLimitFailedNotifier = (toasts: IToasts, error: Error) => {
toasts.addDanger({
title: i18n.translate('xpack.datasetQuality.details.updateFieldLimitFailed', {
defaultMessage: "We couldn't update the field limit.",
}),
text: error.message,
});
};
export const rolloverDataStreamFailedNotifier = (
toasts: IToasts,
error: Error,
dataStream: string
) => {
toasts.addDanger({
title: i18n.translate('xpack.datasetQuality.details.rolloverDataStreamFailed', {
defaultMessage: "We couldn't rollover the data stream: {dataStream}.",
values: {
dataStream,
},
}),
text: error.message,
});
};

View file

@ -25,6 +25,7 @@ import {
DegradedFieldResponse,
DegradedFieldValues,
NonAggregatableDatasets,
UpdateFieldLimitResponse,
} from '../../../common/api_types';
import { fetchNonAggregatableDatasetsFailedNotifier } from '../common/notifications';
import {
@ -33,6 +34,8 @@ import {
fetchDataStreamSettingsFailedNotifier,
fetchDataStreamIntegrationFailedNotifier,
fetchIntegrationDashboardsFailedNotifier,
updateFieldLimitFailedNotifier,
rolloverDataStreamFailedNotifier,
} from './notifications';
import { Integration } from '../../../common/data_streams_stats/integration';
@ -189,10 +192,6 @@ export const createPureDatasetQualityDetailsControllerStateMachine = (
},
done: {
on: {
UPDATE_TIME_RANGE: {
target: 'fetching',
actions: ['resetDegradedFieldPageAndRowsPerPage'],
},
UPDATE_DEGRADED_FIELDS_TABLE_CRITERIA: {
target: 'done',
actions: ['storeDegradedFieldTableOptions'],
@ -200,7 +199,10 @@ export const createPureDatasetQualityDetailsControllerStateMachine = (
OPEN_DEGRADED_FIELD_FLYOUT: {
target:
'#DatasetQualityDetailsController.initializing.degradedFieldFlyout.open',
actions: ['storeExpandedDegradedField'],
actions: [
'storeExpandedDegradedField',
'resetFieldLimitServerResponse',
],
},
TOGGLE_CURRENT_QUALITY_ISSUES: {
target: 'fetching',
@ -282,48 +284,105 @@ export const createPureDatasetQualityDetailsControllerStateMachine = (
],
},
open: {
type: 'parallel',
initial: 'initialized',
states: {
ignoredValues: {
initial: 'fetching',
initialized: {
type: 'parallel',
states: {
fetching: {
invoke: {
src: 'loadDegradedFieldValues',
onDone: {
target: 'done',
actions: ['storeDegradedFieldValues'],
ignoredValues: {
initial: 'fetching',
states: {
fetching: {
invoke: {
src: 'loadDegradedFieldValues',
onDone: {
target: 'done',
actions: ['storeDegradedFieldValues'],
},
onError: [
{
target: '#DatasetQualityDetailsController.indexNotFound',
cond: 'isIndexNotFoundError',
},
{
target: 'done',
},
],
},
},
onError: [
{
target: '#DatasetQualityDetailsController.indexNotFound',
cond: 'isIndexNotFoundError',
},
{
target: 'done',
},
],
done: {},
},
},
done: {},
},
},
analyze: {
initial: 'fetching',
states: {
fetching: {
invoke: {
src: 'analyzeDegradedField',
onDone: {
target: 'done',
actions: ['storeDegradedFieldAnalysis'],
mitigation: {
initial: 'analyzing',
states: {
analyzing: {
invoke: {
src: 'analyzeDegradedField',
onDone: {
target: 'analyzed',
actions: ['storeDegradedFieldAnalysis'],
},
onError: {
target: 'analyzed',
},
},
},
onError: {
target: 'done',
analyzed: {
on: {
SET_NEW_FIELD_LIMIT: {
target: 'mitigating',
actions: 'storeNewFieldLimit',
},
},
},
mitigating: {
invoke: {
src: 'saveNewFieldLimit',
onDone: [
{
target: 'askingForRollover',
actions: 'storeNewFieldLimitResponse',
cond: 'hasFailedToUpdateLastBackingIndex',
},
{
target: 'success',
actions: 'storeNewFieldLimitResponse',
},
],
onError: {
target: 'error',
actions: [
'storeNewFieldLimitErrorResponse',
'notifySaveNewFieldLimitError',
],
},
},
},
askingForRollover: {
on: {
ROLLOVER_DATA_STREAM: {
target: 'rollingOver',
},
},
},
rollingOver: {
invoke: {
src: 'rolloverDataStream',
onDone: {
target: 'success',
actions: ['raiseForceTimeRangeRefresh'],
},
onError: {
target: 'error',
actions: 'notifySaveNewFieldLimitError',
},
},
},
success: {},
error: {},
},
},
done: {},
},
},
},
@ -482,9 +541,28 @@ export const createPureDatasetQualityDetailsControllerStateMachine = (
isIndexNotFoundError: true,
};
}),
storeNewFieldLimit: assign((_, event) => {
return 'newFieldLimit' in event
? { fieldLimit: { newFieldLimit: event.newFieldLimit } }
: {};
}),
storeNewFieldLimitResponse: assign(
(context, event: DoneInvokeEvent<UpdateFieldLimitResponse>) => {
return 'data' in event
? { fieldLimit: { ...context.fieldLimit, result: event.data, error: false } }
: {};
}
),
storeNewFieldLimitErrorResponse: assign((context) => {
return { fieldLimit: { ...context.fieldLimit, error: true } };
}),
resetFieldLimitServerResponse: assign(() => ({
fieldLimit: undefined,
})),
raiseForceTimeRangeRefresh: raise('UPDATE_TIME_RANGE'),
},
guards: {
checkIfActionForbidden: (context, event) => {
checkIfActionForbidden: (_, event) => {
return (
'data' in event &&
typeof event.data === 'object' &&
@ -516,6 +594,14 @@ export const createPureDatasetQualityDetailsControllerStateMachine = (
hasNoDegradedFieldsSelected: (context) => {
return !Boolean(context.expandedDegradedField);
},
hasFailedToUpdateLastBackingIndex: (_, event) => {
return (
'data' in event &&
typeof event.data === 'object' &&
'isLatestBackingIndexUpdated' in event.data &&
!event.data.isLatestBackingIndexUpdated
);
},
},
}
);
@ -552,6 +638,10 @@ export const createDatasetQualityDetailsControllerStateMachine = ({
'dataStreamSettings' in context ? context.dataStreamSettings?.integration : undefined;
return fetchDataStreamIntegrationFailedNotifier(toasts, event.data, integrationName);
},
notifySaveNewFieldLimitError: (_context, event: DoneInvokeEvent<Error>) =>
updateFieldLimitFailedNotifier(toasts, event.data),
notifyRolloverDataStreamError: (context, event: DoneInvokeEvent<Error>) =>
rolloverDataStreamFailedNotifier(toasts, event.data, context.dataStream),
},
services: {
checkDatasetIsAggregatable: (context) => {
@ -603,7 +693,8 @@ export const createDatasetQualityDetailsControllerStateMachine = ({
dataStream:
context.showCurrentQualityIssues &&
'dataStreamSettings' in context &&
context.dataStreamSettings
context.dataStreamSettings &&
context.dataStreamSettings.lastBackingIndexName
? context.dataStreamSettings.lastBackingIndexName
: context.dataStream,
start,
@ -661,6 +752,21 @@ export const createDatasetQualityDetailsControllerStateMachine = ({
return Promise.resolve();
},
saveNewFieldLimit: (context) => {
if ('fieldLimit' in context && context.fieldLimit && context.fieldLimit.newFieldLimit) {
return dataStreamDetailsClient.setNewFieldLimit({
dataStream: context.dataStream,
newFieldLimit: context.fieldLimit.newFieldLimit,
});
}
return Promise.resolve();
},
rolloverDataStream: (context) => {
return dataStreamDetailsClient.rolloverDataStream({
dataStream: context.dataStream,
});
},
},
});

View file

@ -10,12 +10,14 @@ import type { DegradedFieldSortField } from '../../hooks';
import {
Dashboard,
DataStreamDetails,
DataStreamRolloverResponse,
DataStreamSettings,
DegradedField,
DegradedFieldAnalysis,
DegradedFieldResponse,
DegradedFieldValues,
NonAggregatableDatasets,
UpdateFieldLimitResponse,
} from '../../../common/api_types';
import { TableCriteria, TimeRangeConfig } from '../../../common/types';
import { Integration } from '../../../common/data_streams_stats/integration';
@ -37,6 +39,12 @@ export interface DegradedFieldsWithData {
data: DegradedField[];
}
export interface FieldLimit {
newFieldLimit?: number;
result?: UpdateFieldLimitResponse;
error?: boolean;
}
export interface WithDefaultControllerState {
dataStream: string;
degradedFields: DegradedFieldsTableConfig;
@ -48,6 +56,7 @@ export interface WithDefaultControllerState {
integration?: Integration;
expandedDegradedField?: string;
isNonAggregatable?: boolean;
fieldLimit?: FieldLimit;
}
export interface WithDataStreamDetails {
@ -87,6 +96,16 @@ export interface WithDegradeFieldAnalysis {
degradedFieldAnalysis: DegradedFieldAnalysis;
}
export interface WithNewFieldLimit {
fieldLimit?: FieldLimit & {
newFieldLimit: number;
};
}
export interface WithNewFieldLimitResponse {
fieldLimit: FieldLimit;
}
export type DefaultDatasetQualityDetailsContext = Pick<
WithDefaultControllerState,
'degradedFields' | 'timeRange' | 'isIndexNotFoundError' | 'showCurrentQualityIssues'
@ -126,27 +145,6 @@ export type DatasetQualityDetailsControllerTypeState =
WithNonAggregatableDatasetStatus &
WithDegradedFieldsData;
}
| {
value:
| 'initializing.degradedFieldFlyout.open.ignoredValues.fetching'
| 'initializing.degradedFieldFlyout.open.analyze.fetching';
context: WithDefaultControllerState & WithDegradedFieldsData;
}
| {
value: 'initializing.degradedFieldFlyout.open.ignoredValues.done';
context: WithDefaultControllerState & WithDegradedFieldsData & WithDegradedFieldValues;
}
| {
value: 'initializing.degradedFieldFlyout.open.analyze.done';
context: WithDefaultControllerState & WithDegradedFieldsData & WithDegradeFieldAnalysis;
}
| {
value: 'initializing.degradedFieldFlyout.open';
context: WithDefaultControllerState &
WithDegradedFieldsData &
WithDegradedFieldValues &
WithDegradeFieldAnalysis;
}
| {
value:
| 'initializing.dataStreamSettings.loadingIntegrationsAndDegradedFields'
@ -160,6 +158,39 @@ export type DatasetQualityDetailsControllerTypeState =
| 'initializing.dataStreamSettings.loadingIntegrationsAndDegradedFields.integrationDetails.done'
| 'initializing.dataStreamSettings.loadingIntegrationsAndDegradedFields.integrationDashboards.done';
context: WithDefaultControllerState & WithDataStreamSettings & WithIntegration;
}
| {
value: 'initializing.degradedFieldFlyout.open';
context: WithDefaultControllerState;
}
| {
value:
| 'initializing.degradedFieldFlyout.open.initialized.ignoredValues.fetching'
| 'initializing.degradedFieldFlyout.open.initialized.mitigation.analyzing';
context: WithDefaultControllerState & WithDegradedFieldsData;
}
| {
value: 'initializing.degradedFieldFlyout.open.initialized.ignoredValues.done';
context: WithDefaultControllerState & WithDegradedFieldsData & WithDegradedFieldValues;
}
| {
value:
| 'initializing.degradedFieldFlyout.open.initialized.mitigation.analyzed'
| 'initializing.degradedFieldFlyout.open.initialized.mitigation.mitigating'
| 'initializing.degradedFieldFlyout.open.initialized.mitigation.askingForRollover'
| 'initializing.degradedFieldFlyout.open.initialized.mitigation.rollingOver'
| 'initializing.degradedFieldFlyout.open.initialized.mitigation.success'
| 'initializing.degradedFieldFlyout.open.initialized.mitigation.error';
context: WithDefaultControllerState & WithDegradedFieldsData & WithDegradeFieldAnalysis;
}
| {
value: 'initializing.degradedFieldFlyout.open.initialized.mitigation.success';
context: WithDefaultControllerState &
WithDegradedFieldsData &
WithDegradedFieldValues &
WithDegradeFieldAnalysis &
WithNewFieldLimit &
WithNewFieldLimitResponse;
};
export type DatasetQualityDetailsControllerContext =
@ -188,6 +219,13 @@ export type DatasetQualityDetailsControllerEvent =
type: 'UPDATE_DEGRADED_FIELDS_TABLE_CRITERIA';
degraded_field_criteria: TableCriteria<DegradedFieldSortField>;
}
| {
type: 'SET_NEW_FIELD_LIMIT';
newFieldLimit: number;
}
| {
type: 'ROLLOVER_DATA_STREAM';
}
| DoneInvokeEvent<NonAggregatableDatasets>
| DoneInvokeEvent<DataStreamDetails>
| DoneInvokeEvent<Error>
@ -197,4 +235,6 @@ export type DatasetQualityDetailsControllerEvent =
| DoneInvokeEvent<DataStreamSettings>
| DoneInvokeEvent<Integration>
| DoneInvokeEvent<Dashboard[]>
| DoneInvokeEvent<DegradedFieldAnalysis>;
| DoneInvokeEvent<DegradedFieldAnalysis>
| DoneInvokeEvent<UpdateFieldLimitResponse>
| DoneInvokeEvent<DataStreamRolloverResponse>;
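For orientation, here is a minimal sketch (not part of this diff) of how a consumer would drive the two new events against the interpreted controller state machine; the `controllerService` name is assumed:

```
// `controllerService` is assumed to be the interpreted dataset quality
// details controller state machine defined above.
controllerService.send({ type: 'SET_NEW_FIELD_LIMIT', newFieldLimit: 53 });

// If updating the last backing index fails, the user can retry via a rollover:
controllerService.send({ type: 'ROLLOVER_DATA_STREAM' });
```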

View file

@ -29,8 +29,6 @@ export async function getDataStreamSettings({
esClient: ElasticsearchClient;
dataStream: string;
}): Promise<DataStreamSettings> {
throwIfInvalidDataStreamParams(dataStream);
const [createdOn, [dataStreamInfo], datasetUserPrivileges] = await Promise.all([
getDataStreamCreatedOn(esClient, dataStream),
dataStreamService.getMatchingDataStreams(esClient, dataStream),
@ -39,12 +37,14 @@ export async function getDataStreamSettings({
const integration = dataStreamInfo?._meta?.package?.name;
const lastBackingIndex = dataStreamInfo?.indices?.slice(-1)[0];
const indexTemplate = dataStreamInfo?.template;
return {
createdOn,
integration,
datasetUserPrivileges,
lastBackingIndexName: lastBackingIndex?.index_name,
indexTemplate,
};
}

View file

@ -13,6 +13,7 @@ export interface DataStreamSettingResponse {
totalFieldLimit: number;
ignoreDynamicBeyondLimit?: boolean;
ignoreMalformed?: boolean;
defaultPipeline?: string;
}
const DEFAULT_FIELD_LIMIT = 1000;
@ -28,16 +29,20 @@ export async function getDataStreamSettings({
lastBackingIndex: string;
}): Promise<DataStreamSettingResponse> {
const settings = await datasetQualityESClient.settings({ index: dataStream });
const indexSettings = settings[lastBackingIndex]?.settings?.index?.mapping;
const setting = settings[lastBackingIndex]?.settings;
const mappingsInsideSettings = setting?.index?.mapping;
return {
nestedFieldLimit: indexSettings?.nested_fields?.limit
? Number(indexSettings?.nested_fields?.limit)
nestedFieldLimit: mappingsInsideSettings?.nested_fields?.limit
? Number(mappingsInsideSettings?.nested_fields?.limit)
: DEFAULT_NESTED_FIELD_LIMIT,
totalFieldLimit: indexSettings?.total_fields?.limit
? Number(indexSettings?.total_fields?.limit)
totalFieldLimit: mappingsInsideSettings?.total_fields?.limit
? Number(mappingsInsideSettings?.total_fields?.limit)
: DEFAULT_FIELD_LIMIT,
ignoreDynamicBeyondLimit: toBoolean(indexSettings?.total_fields?.ignore_dynamic_beyond_limit),
ignoreMalformed: toBoolean(indexSettings?.ignore_malformed),
ignoreDynamicBeyondLimit: toBoolean(
mappingsInsideSettings?.total_fields?.ignore_dynamic_beyond_limit
),
ignoreMalformed: toBoolean(mappingsInsideSettings?.ignore_malformed),
defaultPipeline: setting?.index?.default_pipeline,
};
}
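For context, this helper reads the raw `indices.getSettings` response keyed by backing index name; a minimal sketch of the access pattern it relies on (the index name and values below are illustrative):

```
// Illustrative only: the backing index name is assumed.
const settings = await datasetQualityESClient.settings({ index: 'logs-synth-default' });
const indexSettings = settings['.ds-logs-synth-default-2024.10.10-000002']?.settings?.index;

const totalFieldLimit = Number(indexSettings?.mapping?.total_fields?.limit ?? 1000);
const defaultPipeline = indexSettings?.default_pipeline;
```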

View file

@ -28,7 +28,13 @@ export async function analyzeDegradedField({
const [
{ fieldCount, fieldPresent, fieldMapping },
{ nestedFieldLimit, totalFieldLimit, ignoreDynamicBeyondLimit, ignoreMalformed },
{
nestedFieldLimit,
totalFieldLimit,
ignoreDynamicBeyondLimit,
ignoreMalformed,
defaultPipeline,
},
] = await Promise.all([
getDataStreamMapping({
datasetQualityESClient,
@ -48,5 +54,6 @@ export async function analyzeDegradedField({
totalFieldLimit,
ignoreMalformed,
nestedFieldLimit,
defaultPipeline,
};
}

View file

@ -16,6 +16,8 @@ import {
DatasetUserPrivileges,
DegradedFieldValues,
DegradedFieldAnalysis,
UpdateFieldLimitResponse,
DataStreamRolloverResponse,
} from '../../../common/api_types';
import { rangeRt, typeRt, typesRt } from '../../types/default_api_types';
import { createDatasetQualityServerRoute } from '../create_datasets_quality_server_route';
@ -29,6 +31,8 @@ import { getDegradedFields } from './get_degraded_fields';
import { getDegradedFieldValues } from './get_degraded_field_values';
import { analyzeDegradedField } from './get_degraded_field_analysis';
import { getDataStreamsMeteringStats } from './get_data_streams_metering_stats';
import { updateFieldLimit } from './update_field_limit';
import { createDatasetQualityESClient } from '../../utils';
const statsRoute = createDatasetQualityServerRoute({
endpoint: 'GET /internal/dataset_quality/data_streams/stats',
@ -324,6 +328,58 @@ const analyzeDegradedFieldRoute = createDatasetQualityServerRoute({
},
});
const updateFieldLimitRoute = createDatasetQualityServerRoute({
endpoint: 'PUT /internal/dataset_quality/data_streams/{dataStream}/update_field_limit',
params: t.type({
path: t.type({
dataStream: t.string,
}),
body: t.type({
newFieldLimit: t.number,
}),
}),
options: {
tags: [],
},
async handler(resources): Promise<UpdateFieldLimitResponse> {
const { context, params } = resources;
const coreContext = await context.core;
const esClient = coreContext.elasticsearch.client.asCurrentUser;
const updatedLimitResponse = await updateFieldLimit({
esClient,
newFieldLimit: params.body.newFieldLimit,
dataStream: params.path.dataStream,
});
return updatedLimitResponse;
},
});
const rolloverDataStream = createDatasetQualityServerRoute({
endpoint: 'POST /internal/dataset_quality/data_streams/{dataStream}/rollover',
params: t.type({
path: t.type({
dataStream: t.string,
}),
}),
options: {
tags: [],
},
async handler(resources): Promise<DataStreamRolloverResponse> {
const { context, params } = resources;
const coreContext = await context.core;
const esClient = coreContext.elasticsearch.client.asCurrentUser;
const datasetQualityESClient = createDatasetQualityESClient(esClient);
const { acknowledged } = await datasetQualityESClient.rollover({
alias: params.path.dataStream,
});
return { acknowledged };
},
});
export const dataStreamsRouteRepository = {
...statsRoute,
...degradedDocsRoute,
@ -334,4 +390,6 @@ export const dataStreamsRouteRepository = {
...dataStreamDetailsRoute,
...dataStreamSettingsRoute,
...analyzeDegradedFieldRoute,
...updateFieldLimitRoute,
...rolloverDataStream,
};
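As a quick reference, the two new internal endpoints can be exercised the same way the deployment-agnostic API tests below do; this is only a sketch and assumes a role-scoped `supertest` instance and an existing `dataStreamName`:

```
await supertest
  .put(`/internal/dataset_quality/data_streams/${dataStreamName}/update_field_limit`)
  .send({ newFieldLimit: 53 });

await supertest.post(`/internal/dataset_quality/data_streams/${dataStreamName}/rollover`);
```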

View file

@ -0,0 +1,59 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { ElasticsearchClient } from '@kbn/core-elasticsearch-server';
import { badRequest } from '@hapi/boom';
import { createDatasetQualityESClient } from '../../../utils';
import { updateComponentTemplate } from './update_component_template';
import { updateLastBackingIndexSettings } from './update_settings_last_backing_index';
import { UpdateFieldLimitResponse } from '../../../../common/api_types';
import { getDataStreamSettings } from '../get_data_stream_details';
export async function updateFieldLimit({
esClient,
newFieldLimit,
dataStream,
}: {
esClient: ElasticsearchClient;
newFieldLimit: number;
dataStream: string;
}): Promise<UpdateFieldLimitResponse> {
const datasetQualityESClient = createDatasetQualityESClient(esClient);
const { lastBackingIndexName, indexTemplate } = await getDataStreamSettings({
esClient,
dataStream,
});
if (!lastBackingIndexName || !indexTemplate) {
throw badRequest(`Data stream does not exist. Received value "${dataStream}"`);
}
const {
acknowledged: isComponentTemplateUpdated,
componentTemplateName,
error: errorUpdatingComponentTemplate,
} = await updateComponentTemplate({ datasetQualityESClient, indexTemplate, newFieldLimit });
if (errorUpdatingComponentTemplate) {
throw badRequest(errorUpdatingComponentTemplate);
}
const { acknowledged: isLatestBackingIndexUpdated, error: errorUpdatingBackingIndex } =
await updateLastBackingIndexSettings({
datasetQualityESClient,
lastBackingIndex: lastBackingIndexName,
newFieldLimit,
});
return {
isComponentTemplateUpdated,
isLatestBackingIndexUpdated,
customComponentTemplateName: componentTemplateName,
error: errorUpdatingBackingIndex,
};
}
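Note that a failure while updating the custom component template is treated as fatal (a 400 is thrown), whereas a failure on the last backing index is returned in the response so the caller can offer a manual rollover. A minimal sketch of consuming that response (names assumed):

```
const result = await updateFieldLimit({ esClient, dataStream, newFieldLimit: 53 });

if (!result.isLatestBackingIndexUpdated && result.error) {
  // The component template was updated but the last backing index was not;
  // surface the error and offer the manual rollover action.
}
```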

View file

@ -0,0 +1,53 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { DatasetQualityESClient } from '../../../utils/create_dataset_quality_es_client';
import { getComponentTemplatePrefixFromIndexTemplate } from '../../../../common/utils/component_template_name';
interface UpdateComponentTemplateResponse {
acknowledged: boolean | undefined;
componentTemplateName: string;
error?: string;
}
export async function updateComponentTemplate({
datasetQualityESClient,
indexTemplate,
newFieldLimit,
}: {
datasetQualityESClient: DatasetQualityESClient;
indexTemplate: string;
newFieldLimit: number;
}): Promise<UpdateComponentTemplateResponse> {
const newSettings = {
settings: {
'index.mapping.total_fields.limit': newFieldLimit,
},
};
const customComponentTemplateName = `${getComponentTemplatePrefixFromIndexTemplate(
indexTemplate
)}@custom`;
try {
const { acknowledged } = await datasetQualityESClient.updateComponentTemplate({
name: customComponentTemplateName,
template: newSettings,
});
return {
acknowledged,
componentTemplateName: customComponentTemplateName,
};
} catch (error) {
return {
acknowledged: undefined, // acknowledged is undefined when the request fails
componentTemplateName: customComponentTemplateName,
error: error.message,
};
}
}

View file

@ -0,0 +1,41 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { DatasetQualityESClient } from '../../../utils/create_dataset_quality_es_client';
interface UpdateLastBackingIndexSettingsResponse {
acknowledged: boolean | undefined;
error?: string;
}
export async function updateLastBackingIndexSettings({
datasetQualityESClient,
lastBackingIndex,
newFieldLimit,
}: {
datasetQualityESClient: DatasetQualityESClient;
lastBackingIndex: string;
newFieldLimit: number;
}): Promise<UpdateLastBackingIndexSettingsResponse> {
const newSettings = {
'index.mapping.total_fields.limit': newFieldLimit,
};
try {
const { acknowledged } = await datasetQualityESClient.updateSettings({
index: lastBackingIndex,
settings: newSettings,
});
return { acknowledged };
} catch (error) {
return {
acknowledged: undefined, // acknowledged is undefined when the request fails
error: error.message,
};
}
}

View file

@ -8,11 +8,16 @@
import { ESSearchRequest, InferSearchResponseOf } from '@kbn/es-types';
import { ElasticsearchClient } from '@kbn/core/server';
import {
ClusterPutComponentTemplateRequest,
ClusterPutComponentTemplateResponse,
FieldCapsRequest,
FieldCapsResponse,
Indices,
IndicesGetMappingResponse,
IndicesGetSettingsResponse,
IndicesPutSettingsRequest,
IndicesPutSettingsResponse,
IndicesRolloverResponse,
} from '@elastic/elasticsearch/lib/api/types';
type DatasetQualityESSearchParams = ESSearchRequest & {
@ -23,12 +28,12 @@ export type DatasetQualityESClient = ReturnType<typeof createDatasetQualityESCli
export function createDatasetQualityESClient(esClient: ElasticsearchClient) {
return {
async search<TDocument, TParams extends DatasetQualityESSearchParams>(
search<TDocument, TParams extends DatasetQualityESSearchParams>(
searchParams: TParams
): Promise<InferSearchResponseOf<TDocument, TParams>> {
return esClient.search<TDocument>(searchParams) as Promise<any>;
},
async msearch<TDocument, TParams extends DatasetQualityESSearchParams>(
msearch<TDocument, TParams extends DatasetQualityESSearchParams>(
index = {} as { index?: Indices },
searches: TParams[]
): Promise<{
@ -38,14 +43,25 @@ export function createDatasetQualityESClient(esClient: ElasticsearchClient) {
searches: searches.map((search) => [index, search]).flat(),
}) as Promise<any>;
},
async fieldCaps(params: FieldCapsRequest): Promise<FieldCapsResponse> {
return esClient.fieldCaps(params) as Promise<any>;
fieldCaps(params: FieldCapsRequest): Promise<FieldCapsResponse> {
return esClient.fieldCaps(params);
},
async mappings(params: { index: string }): Promise<IndicesGetMappingResponse> {
mappings(params: { index: string }): Promise<IndicesGetMappingResponse> {
return esClient.indices.getMapping(params);
},
async settings(params: { index: string }): Promise<IndicesGetSettingsResponse> {
settings(params: { index: string }): Promise<IndicesGetSettingsResponse> {
return esClient.indices.getSettings(params);
},
updateComponentTemplate(
params: ClusterPutComponentTemplateRequest
): Promise<ClusterPutComponentTemplateResponse> {
return esClient.cluster.putComponentTemplate(params);
},
updateSettings(params: IndicesPutSettingsRequest): Promise<IndicesPutSettingsResponse> {
return esClient.indices.putSettings(params);
},
rollover(params: { alias: string }): Promise<IndicesRolloverResponse> {
return esClient.indices.rollover(params);
},
};
}
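The newly added helpers wrap the corresponding Elasticsearch client calls one-to-one; a hedged usage sketch (all names and index values below are illustrative):

```
const client = createDatasetQualityESClient(esClient);

await client.updateComponentTemplate({
  name: 'logs-nginx.access@custom',
  template: { settings: { 'index.mapping.total_fields.limit': 53 } },
});

await client.updateSettings({
  index: '.ds-logs-nginx.access-default-2024.10.10-000002',
  settings: { 'index.mapping.total_fields.limit': 53 },
});

await client.rollover({ alias: 'logs-nginx.access-default' });
```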

View file

@ -0,0 +1,86 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { log, timerange } from '@kbn/apm-synthtrace-client';
import expect from '@kbn/expect';
import { DeploymentAgnosticFtrProviderContext } from '../../../ftr_provider_context';
import { SupertestWithRoleScopeType } from '../../../services';
export default function ({ getService }: DeploymentAgnosticFtrProviderContext) {
const roleScopedSupertest = getService('roleScopedSupertest');
const synthtrace = getService('logsSynthtraceEsClient');
const start = '2024-10-17T11:00:00.000Z';
const end = '2024-10-17T11:01:00.000Z';
const type = 'logs';
const dataset = 'synth';
const namespace = 'default';
const serviceName = 'my-service';
const hostName = 'synth-host';
const dataStreamName = `${type}-${dataset}-${namespace}`;
async function callApiAs({
roleScopedSupertestWithCookieCredentials,
apiParams: { dataStream },
}: {
roleScopedSupertestWithCookieCredentials: SupertestWithRoleScopeType;
apiParams: {
dataStream: string;
};
}) {
return roleScopedSupertestWithCookieCredentials.post(
`/internal/dataset_quality/data_streams/${dataStream}/rollover`
);
}
describe('Datastream Rollover', function () {
let supertestAdminWithCookieCredentials: SupertestWithRoleScopeType;
before(async () => {
supertestAdminWithCookieCredentials = await roleScopedSupertest.getSupertestWithRoleScope(
'admin',
{
useCookieHeader: true,
withInternalHeaders: true,
}
);
await synthtrace.index([
timerange(start, end)
.interval('1m')
.rate(1)
.generator((timestamp) =>
log
.create()
.message('This is a log message')
.timestamp(timestamp)
.dataset(dataset)
.namespace(namespace)
.defaults({
'log.file.path': '/my-service.log',
'service.name': serviceName,
'host.name': hostName,
})
),
]);
});
after(async () => {
await synthtrace.clean();
});
it('returns acknowledged when rollover is successful', async () => {
const resp = await callApiAs({
roleScopedSupertestWithCookieCredentials: supertestAdminWithCookieCredentials,
apiParams: {
dataStream: dataStreamName,
},
});
expect(resp.body.acknowledged).to.be(true);
});
});
}

View file

@ -0,0 +1,251 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { log, timerange } from '@kbn/apm-synthtrace-client';
import expect from '@kbn/expect';
import {
createBackingIndexNameWithoutVersion,
getDataStreamSettingsOfEarliestIndex,
rolloverDataStream,
} from './utils/es_utils';
import { DeploymentAgnosticFtrProviderContext } from '../../../ftr_provider_context';
import { RoleCredentials, SupertestWithRoleScopeType } from '../../../services';
export default function ({ getService }: DeploymentAgnosticFtrProviderContext) {
const samlAuth = getService('samlAuth');
const roleScopedSupertest = getService('roleScopedSupertest');
const synthtrace = getService('logsSynthtraceEsClient');
const esClient = getService('es');
const packageApi = getService('packageApi');
const config = getService('config');
const isServerless = !!config.get('serverless');
const start = '2024-09-20T11:00:00.000Z';
const end = '2024-09-20T11:01:00.000Z';
const type = 'logs';
const dataset = 'synth';
const syntheticsDataset = 'synthetics';
const namespace = 'default';
const serviceName = 'my-service';
const hostName = 'synth-host';
const dataStreamName = `${type}-${dataset}-${namespace}`;
const syntheticsDataStreamName = `${type}-${syntheticsDataset}-${namespace}`;
const defaultDataStreamPrivileges = {
datasetUserPrivileges: { canRead: true, canMonitor: true, canViewIntegrations: true },
};
async function callApiAs({
roleScopedSupertestWithCookieCredentials,
apiParams: { dataStream },
}: {
roleScopedSupertestWithCookieCredentials: SupertestWithRoleScopeType;
apiParams: {
dataStream: string;
};
}) {
return roleScopedSupertestWithCookieCredentials.get(
`/internal/dataset_quality/data_streams/${dataStream}/settings`
);
}
describe('Dataset quality settings', function () {
let adminRoleAuthc: RoleCredentials;
let supertestAdminWithCookieCredentials: SupertestWithRoleScopeType;
before(async () => {
adminRoleAuthc = await samlAuth.createM2mApiKeyWithRoleScope('admin');
supertestAdminWithCookieCredentials = await roleScopedSupertest.getSupertestWithRoleScope(
'admin',
{
useCookieHeader: true,
withInternalHeaders: true,
}
);
});
after(async () => {
await samlAuth.invalidateM2mApiKeyWithRoleScope(adminRoleAuthc);
});
it('returns only privileges if matching data stream is not available', async () => {
const nonExistentDataSet = 'Non-existent';
const nonExistentDataStream = `${type}-${nonExistentDataSet}-${namespace}`;
const resp = await callApiAs({
roleScopedSupertestWithCookieCredentials: supertestAdminWithCookieCredentials,
apiParams: {
dataStream: nonExistentDataStream,
},
});
expect(resp.body).eql(defaultDataStreamPrivileges);
});
describe('gets the data stream settings for non-integrations', () => {
before(async () => {
await synthtrace.index([
timerange(start, end)
.interval('1m')
.rate(1)
.generator((timestamp) =>
log
.create()
.message('This is a log message')
.timestamp(timestamp)
.dataset(dataset)
.namespace(namespace)
.defaults({
'log.file.path': '/my-service.log',
'service.name': serviceName,
'host.name': hostName,
})
),
]);
});
after(async () => {
await synthtrace.clean();
});
it('returns "createdOn", "indexTemplate" and "lastBackingIndexName" correctly when available for non integration', async () => {
const dataStreamSettings = await getDataStreamSettingsOfEarliestIndex(
esClient,
dataStreamName
);
const resp = await callApiAs({
roleScopedSupertestWithCookieCredentials: supertestAdminWithCookieCredentials,
apiParams: {
dataStream: dataStreamName,
},
});
if (!isServerless) {
expect(resp.body.createdOn).to.be(Number(dataStreamSettings?.index?.creation_date));
}
expect(resp.body.indexTemplate).to.be('logs');
expect(resp.body.lastBackingIndexName).to.be(
`${createBackingIndexNameWithoutVersion({
type,
dataset,
namespace,
})}-000001`
);
expect(resp.body.datasetUserPrivileges).to.eql(
defaultDataStreamPrivileges.datasetUserPrivileges
);
});
it('returns "createdOn", "indexTemplate" and "lastBackingIndexName" correctly for rolled over dataStream', async () => {
await rolloverDataStream(esClient, dataStreamName);
const dataStreamSettings = await getDataStreamSettingsOfEarliestIndex(
esClient,
dataStreamName
);
const resp = await callApiAs({
roleScopedSupertestWithCookieCredentials: supertestAdminWithCookieCredentials,
apiParams: {
dataStream: dataStreamName,
},
});
if (!isServerless) {
expect(resp.body.createdOn).to.be(Number(dataStreamSettings?.index?.creation_date));
}
expect(resp.body.lastBackingIndexName).to.be(
`${createBackingIndexNameWithoutVersion({ type, dataset, namespace })}-000002`
);
expect(resp.body.indexTemplate).to.be('logs');
});
});
describe('gets the data stream settings for integrations', () => {
before(async () => {
await packageApi.installPackage({
roleAuthc: adminRoleAuthc,
pkg: syntheticsDataset,
});
await synthtrace.index([
timerange(start, end)
.interval('1m')
.rate(1)
.generator((timestamp) =>
log
.create()
.message('This is a log message')
.timestamp(timestamp)
.dataset(syntheticsDataset)
.namespace(namespace)
.defaults({
'log.file.path': '/my-service.log',
'service.name': serviceName,
'host.name': hostName,
})
),
]);
});
after(async () => {
await synthtrace.clean();
await packageApi.uninstallPackage({
roleAuthc: adminRoleAuthc,
pkg: syntheticsDataset,
});
});
it('returns "createdOn", "integration", "indexTemplate" and "lastBackingIndexName" correctly when available for integration', async () => {
const dataStreamSettings = await getDataStreamSettingsOfEarliestIndex(
esClient,
syntheticsDataStreamName
);
const resp = await callApiAs({
roleScopedSupertestWithCookieCredentials: supertestAdminWithCookieCredentials,
apiParams: {
dataStream: syntheticsDataStreamName,
},
});
if (!isServerless) {
expect(resp.body.createdOn).to.be(Number(dataStreamSettings?.index?.creation_date));
}
expect(resp.body.indexTemplate).to.be('logs');
expect(resp.body.lastBackingIndexName).to.be(
`${createBackingIndexNameWithoutVersion({
type,
dataset: syntheticsDataset,
namespace,
})}-000001`
);
expect(resp.body.datasetUserPrivileges).to.eql(
defaultDataStreamPrivileges.datasetUserPrivileges
);
});
it('returns "createdOn", "integration", "indexTemplate" and "lastBackingIndexName" correctly for rolled over dataStream', async () => {
await rolloverDataStream(esClient, syntheticsDataStreamName);
const dataStreamSettings = await getDataStreamSettingsOfEarliestIndex(
esClient,
syntheticsDataStreamName
);
const resp = await callApiAs({
roleScopedSupertestWithCookieCredentials: supertestAdminWithCookieCredentials,
apiParams: {
dataStream: syntheticsDataStreamName,
},
});
if (!isServerless) {
expect(resp.body.createdOn).to.be(Number(dataStreamSettings?.index?.creation_date));
}
expect(resp.body.lastBackingIndexName).to.be(
`${createBackingIndexNameWithoutVersion({
type,
dataset: syntheticsDataset,
namespace,
})}-000002`
);
expect(resp.body.indexTemplate).to.be('logs');
});
});
});
}

View file

@ -9,7 +9,7 @@ import expect from '@kbn/expect';
import { log, timerange } from '@kbn/apm-synthtrace-client';
import { SupertestWithRoleScopeType } from '../../../services';
import { DeploymentAgnosticFtrProviderContext } from '../../../ftr_provider_context';
import { createBackingIndexNameWithoutVersion, setDataStreamSettings } from './es_utils';
import { createBackingIndexNameWithoutVersion, setDataStreamSettings } from './utils/es_utils';
import { logsSynthMappings } from './custom_mappings/custom_synth_mappings';
const MORE_THAN_1024_CHARS =

View file

@ -11,5 +11,8 @@ export default function ({ loadTestFile }: DeploymentAgnosticFtrProviderContext)
describe('Dataset quality', () => {
loadTestFile(require.resolve('./integrations'));
loadTestFile(require.resolve('./degraded_field_analyze'));
loadTestFile(require.resolve('./data_stream_settings'));
loadTestFile(require.resolve('./data_stream_rollover'));
loadTestFile(require.resolve('./update_field_limit'));
});
}

View file

@ -5,15 +5,15 @@
* 2.0.
*/
import { RoleCredentials, InternalRequestHeader } from '@kbn/ftr-common-functional-services';
import expect from '@kbn/expect';
import { APIReturnType } from '@kbn/dataset-quality-plugin/common/rest';
import { CustomIntegration } from '../../../services/package_api';
import { DeploymentAgnosticFtrProviderContext } from '../../../ftr_provider_context';
import { RoleCredentials, SupertestWithRoleScopeType } from '../../../services';
export default function ({ getService }: DeploymentAgnosticFtrProviderContext) {
const samlAuth = getService('samlAuth');
const supertestWithoutAuth = getService('supertestWithoutAuth');
const roleScopedSupertest = getService('roleScopedSupertest');
const packageApi = getService('packageApi');
const endpoint = 'GET /internal/dataset_quality/integrations';
@ -33,27 +33,30 @@ export default function ({ getService }: DeploymentAgnosticFtrProviderContext) {
];
async function callApiAs({
roleAuthc,
headers,
roleScopedSupertestWithCookieCredentials,
}: {
roleAuthc: RoleCredentials;
headers: InternalRequestHeader;
roleScopedSupertestWithCookieCredentials: SupertestWithRoleScopeType;
}): Promise<any> {
const { body } = await supertestWithoutAuth
.get('/internal/dataset_quality/integrations')
.set(roleAuthc.apiKeyHeader)
.set(headers);
const { body } = await roleScopedSupertestWithCookieCredentials.get(
'/internal/dataset_quality/integrations'
);
return body;
}
describe('Integrations', () => {
let adminRoleAuthc: RoleCredentials;
let internalHeaders: InternalRequestHeader;
let supertestAdminWithCookieCredentials: SupertestWithRoleScopeType;
before(async () => {
adminRoleAuthc = await samlAuth.createM2mApiKeyWithRoleScope('admin');
internalHeaders = samlAuth.getInternalRequestHeader();
supertestAdminWithCookieCredentials = await roleScopedSupertest.getSupertestWithRoleScope(
'admin',
{
useCookieHeader: true,
withInternalHeaders: true,
}
);
});
after(async () => {
@ -74,8 +77,7 @@ export default function ({ getService }: DeploymentAgnosticFtrProviderContext) {
it('returns all installed integrations and its datasets map', async () => {
const body = await callApiAs({
roleAuthc: adminRoleAuthc,
headers: internalHeaders,
roleScopedSupertestWithCookieCredentials: supertestAdminWithCookieCredentials,
});
expect(body.integrations.map((integration: Integration) => integration.name)).to.eql([
@ -108,8 +110,7 @@ export default function ({ getService }: DeploymentAgnosticFtrProviderContext) {
it('returns custom integrations and its datasets map', async () => {
const body = await callApiAs({
roleAuthc: adminRoleAuthc,
headers: internalHeaders,
roleScopedSupertestWithCookieCredentials: supertestAdminWithCookieCredentials,
});
expect(body.integrations.map((integration: Integration) => integration.name)).to.eql([

View file

@ -0,0 +1,176 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import expect from '@kbn/expect';
import { log, timerange } from '@kbn/apm-synthtrace-client';
import { DeploymentAgnosticFtrProviderContext } from '../../../ftr_provider_context';
import { RoleCredentials, SupertestWithRoleScopeType } from '../../../services';
import { createBackingIndexNameWithoutVersion, rolloverDataStream } from './utils/es_utils';
export default function ({ getService }: DeploymentAgnosticFtrProviderContext) {
const samlAuth = getService('samlAuth');
const roleScopedSupertest = getService('roleScopedSupertest');
const synthtrace = getService('logsSynthtraceEsClient');
const esClient = getService('es');
const packageApi = getService('packageApi');
const start = '2024-10-17T11:00:00.000Z';
const end = '2024-10-17T11:01:00.000Z';
const type = 'logs';
const invalidDataset = 'invalid';
const integrationsDataset = 'nginx.access';
const pkg = 'nginx';
const namespace = 'default';
const serviceName = 'my-service';
const hostName = 'synth-host';
const invalidDataStreamName = `${type}-${invalidDataset}-${namespace}`;
const integrationsDataStreamName = `${type}-${integrationsDataset}-${namespace}`;
async function callApiAs({
roleScopedSupertestWithCookieCredentials,
apiParams: { dataStream, fieldLimit },
}: {
roleScopedSupertestWithCookieCredentials: SupertestWithRoleScopeType;
apiParams: {
dataStream: string;
fieldLimit: number;
};
}) {
return roleScopedSupertestWithCookieCredentials
.put(`/internal/dataset_quality/data_streams/${dataStream}/update_field_limit`)
.send({
newFieldLimit: fieldLimit,
});
}
describe('Update field limit', function () {
let adminRoleAuthc: RoleCredentials;
let supertestAdminWithCookieCredentials: SupertestWithRoleScopeType;
before(async () => {
adminRoleAuthc = await samlAuth.createM2mApiKeyWithRoleScope('admin');
supertestAdminWithCookieCredentials = await roleScopedSupertest.getSupertestWithRoleScope(
'admin',
{
useCookieHeader: true,
withInternalHeaders: true,
}
);
await packageApi.installPackage({
roleAuthc: adminRoleAuthc,
pkg,
});
await synthtrace.index([
timerange(start, end)
.interval('1m')
.rate(1)
.generator((timestamp) =>
log
.create()
.message('This is a log message')
.timestamp(timestamp)
.dataset(integrationsDataset)
.namespace(namespace)
.defaults({
'log.file.path': '/my-service.log',
'service.name': serviceName,
'host.name': hostName,
})
),
]);
});
after(async () => {
await synthtrace.clean();
await packageApi.uninstallPackage({
roleAuthc: adminRoleAuthc,
pkg,
});
await samlAuth.invalidateM2mApiKeyWithRoleScope(adminRoleAuthc);
});
it('should handle failure gracefully when an invalid data stream is provided', async () => {
const resp = await callApiAs({
roleScopedSupertestWithCookieCredentials: supertestAdminWithCookieCredentials,
apiParams: {
dataStream: invalidDataStreamName,
fieldLimit: 10,
},
});
expect(resp.body.statusCode).to.be(400);
expect(resp.body.message).to.be(
`Data stream does not exist. Received value "${invalidDataStreamName}"`
);
});
it('should update last backing index and custom component template', async () => {
// We roll over the data stream to create a new backing index
await rolloverDataStream(esClient, integrationsDataStreamName);
const resp = await callApiAs({
roleScopedSupertestWithCookieCredentials: supertestAdminWithCookieCredentials,
apiParams: {
dataStream: integrationsDataStreamName,
fieldLimit: 50,
},
});
expect(resp.body.isComponentTemplateUpdated).to.be(true);
expect(resp.body.isLatestBackingIndexUpdated).to.be(true);
expect(resp.body.customComponentTemplateName).to.be(`${type}-${integrationsDataset}@custom`);
expect(resp.body.error).to.be(undefined);
const { component_templates: componentTemplates } =
await esClient.cluster.getComponentTemplate({
name: `${type}-${integrationsDataset}@custom`,
});
const customTemplate = componentTemplates.filter(
(tmp) => tmp.name === `${type}-${integrationsDataset}@custom`
);
expect(customTemplate).to.have.length(1);
expect(
customTemplate[0].component_template.template.settings?.index?.mapping?.total_fields?.limit
).to.be('50');
const settingsForAllIndices = await esClient.indices.getSettings({
index: integrationsDataStreamName,
});
const backingIndexWithoutVersion = createBackingIndexNameWithoutVersion({
type,
dataset: integrationsDataset,
namespace,
});
const settingsForLastBackingIndex =
settingsForAllIndices[backingIndexWithoutVersion + '-000002'].settings;
const settingsForPreviousBackingIndex =
settingsForAllIndices[backingIndexWithoutVersion + '-000001'].settings;
// Only the last backing index should have the updated limit, not the previous one
expect(settingsForLastBackingIndex?.index?.mapping?.total_fields?.limit).to.be('50');
// The previous one should have the default limit of 1000
expect(settingsForPreviousBackingIndex?.index?.mapping?.total_fields?.limit).to.be('1000');
// Rollover to test custom component template
await rolloverDataStream(esClient, integrationsDataStreamName);
const settingsForLatestBackingIndex = await esClient.indices.getSettings({
index: backingIndexWithoutVersion + '-000003',
});
// The new backing index should read its settings from the custom component template
expect(
settingsForLatestBackingIndex[backingIndexWithoutVersion + '-000003'].settings?.index
?.mapping?.total_fields?.limit
).to.be('50');
});
});
}

View file

@ -39,3 +39,21 @@ export async function setDataStreamSettings(
settings,
});
}
export async function rolloverDataStream(es: Client, name: string) {
return es.indices.rollover({ alias: name });
}
export async function getDataStreamSettingsOfEarliestIndex(es: Client, name: string) {
const matchingIndexesObj = await es.indices.getSettings({ index: name });
const matchingIndexes = Object.keys(matchingIndexesObj ?? {});
matchingIndexes.sort((a, b) => {
return (
Number(matchingIndexesObj[a].settings?.index?.creation_date) -
Number(matchingIndexesObj[b].settings?.index?.creation_date)
);
});
return matchingIndexesObj[matchingIndexes[0]].settings;
}
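A brief sketch of how the new test utilities fit together (the data stream name is illustrative):

```
await rolloverDataStream(esClient, 'logs-synth-default');

// The earliest backing index's creation date backs the "createdOn" assertions above.
const earliestSettings = await getDataStreamSettingsOfEarliestIndex(esClient, 'logs-synth-default');
const createdOn = Number(earliestSettings?.index?.creation_date);
```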

View file

@ -1,159 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { log, timerange } from '@kbn/apm-synthtrace-client';
import expect from '@kbn/expect';
import { DatasetQualityApiClientKey } from '../../common/config';
import { DatasetQualityApiError } from '../../common/dataset_quality_api_supertest';
import { FtrProviderContext } from '../../common/ftr_provider_context';
import {
expectToReject,
getDataStreamSettingsOfEarliestIndex,
rolloverDataStream,
} from '../../utils';
import { createBackingIndexNameWithoutVersion } from './es_utils';
export default function ApiTest({ getService }: FtrProviderContext) {
const registry = getService('registry');
const synthtrace = getService('logSynthtraceEsClient');
const esClient = getService('es');
const datasetQualityApiClient = getService('datasetQualityApiClient');
const pkgService = getService('packageService');
const start = '2023-12-11T18:00:00.000Z';
const end = '2023-12-11T18:01:00.000Z';
const type = 'logs';
const dataset = 'synth.1';
const integrationDataset = 'apache.access';
const namespace = 'default';
const serviceName = 'my-service';
const hostName = 'synth-host';
const pkg = {
name: 'apache',
version: '1.14.0',
};
const defaultDataStreamPrivileges = {
datasetUserPrivileges: { canRead: true, canMonitor: true, canViewIntegrations: true },
};
async function callApiAs(user: DatasetQualityApiClientKey, dataStream: string) {
return await datasetQualityApiClient[user]({
endpoint: 'GET /internal/dataset_quality/data_streams/{dataStream}/settings',
params: {
path: {
dataStream,
},
},
});
}
registry.when('DataStream Settings', { config: 'basic' }, () => {
describe('gets the data stream settings', () => {
before(async () => {
// Install Integration and ingest logs for it
await pkgService.installPackage(pkg);
await synthtrace.index([
timerange(start, end)
.interval('1m')
.rate(1)
.generator((timestamp) =>
log
.create()
.message('This is a log message')
.timestamp(timestamp)
.dataset(integrationDataset)
.namespace(namespace)
.defaults({
'log.file.path': '/my-service.log',
'service.name': serviceName,
'host.name': hostName,
})
),
]);
// Ingest basic logs
await synthtrace.index([
timerange(start, end)
.interval('1m')
.rate(1)
.generator((timestamp) =>
log
.create()
.message('This is a log message')
.timestamp(timestamp)
.dataset(dataset)
.namespace(namespace)
.defaults({
'log.file.path': '/my-service.log',
'service.name': serviceName,
'host.name': hostName,
})
),
]);
});
it('returns error when dataStream param is not provided', async () => {
const expectedMessage = 'Data Stream name cannot be empty';
const err = await expectToReject<DatasetQualityApiError>(() =>
callApiAs('datasetQualityMonitorUser', encodeURIComponent(' '))
);
expect(err.res.status).to.be(400);
expect(err.res.body.message.indexOf(expectedMessage)).to.greaterThan(-1);
});
it('returns only privileges if matching data stream is not available', async () => {
const nonExistentDataSet = 'Non-existent';
const nonExistentDataStream = `${type}-${nonExistentDataSet}-${namespace}`;
const resp = await callApiAs('datasetQualityMonitorUser', nonExistentDataStream);
expect(resp.body).eql(defaultDataStreamPrivileges);
});
it('returns "createdOn", "integration" and "lastBackingIndexName" correctly when available', async () => {
const dataStreamSettings = await getDataStreamSettingsOfEarliestIndex(
esClient,
`${type}-${integrationDataset}-${namespace}`
);
const resp = await callApiAs(
'datasetQualityMonitorUser',
`${type}-${integrationDataset}-${namespace}`
);
expect(resp.body.createdOn).to.be(Number(dataStreamSettings?.index?.creation_date));
expect(resp.body.integration).to.be('apache');
expect(resp.body.lastBackingIndexName).to.be(
`${createBackingIndexNameWithoutVersion({
type,
dataset: integrationDataset,
namespace,
})}-000001`
);
expect(resp.body.datasetUserPrivileges).to.eql(
defaultDataStreamPrivileges.datasetUserPrivileges
);
});
it('returns "createdOn" and "lastBackingIndexName" for rolled over dataStream', async () => {
await rolloverDataStream(esClient, `${type}-${dataset}-${namespace}`);
const dataStreamSettings = await getDataStreamSettingsOfEarliestIndex(
esClient,
`${type}-${dataset}-${namespace}`
);
const resp = await callApiAs(
'datasetQualityMonitorUser',
`${type}-${dataset}-${namespace}`
);
expect(resp.body.createdOn).to.be(Number(dataStreamSettings?.index?.creation_date));
expect(resp.body.lastBackingIndexName).to.be(
`${createBackingIndexNameWithoutVersion({ type, dataset, namespace })}-000002`
);
});
after(async () => {
await synthtrace.clean();
await pkgService.uninstallPackage(pkg);
});
});
});
}

View file

@ -0,0 +1,177 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { MappingTypeMapping } from '@elastic/elasticsearch/lib/api/types';
export const logsNginxMappings = (dataset: string): MappingTypeMapping => ({
properties: {
'@timestamp': {
type: 'date',
ignore_malformed: false,
},
cloud: {
properties: {
image: {
properties: {
id: {
type: 'keyword',
ignore_above: 1024,
},
},
},
},
},
data_stream: {
properties: {
dataset: {
type: 'constant_keyword',
value: dataset,
},
namespace: {
type: 'constant_keyword',
value: 'default',
},
type: {
type: 'constant_keyword',
value: 'logs',
},
},
},
ecs: {
properties: {
version: {
type: 'keyword',
ignore_above: 1024,
},
},
},
error: {
properties: {
message: {
type: 'match_only_text',
},
},
},
event: {
properties: {
agent_id_status: {
type: 'keyword',
ignore_above: 1024,
},
dataset: {
type: 'constant_keyword',
value: 'nginx.access',
},
ingested: {
type: 'date',
format: 'strict_date_time_no_millis||strict_date_optional_time||epoch_millis',
ignore_malformed: false,
},
module: {
type: 'constant_keyword',
value: 'nginx',
},
},
},
host: {
properties: {
containerized: {
type: 'boolean',
},
name: {
type: 'keyword',
fields: {
text: {
type: 'match_only_text',
},
},
},
os: {
properties: {
build: {
type: 'keyword',
ignore_above: 1024,
},
codename: {
type: 'keyword',
ignore_above: 1024,
},
},
},
},
},
input: {
properties: {
type: {
type: 'keyword',
ignore_above: 1024,
},
},
},
log: {
properties: {
level: {
type: 'keyword',
ignore_above: 1024,
},
offset: {
type: 'long',
},
},
},
network: {
properties: {
bytes: {
type: 'long',
},
},
},
nginx: {
properties: {
access: {
properties: {
remote_ip_list: {
type: 'keyword',
ignore_above: 1024,
},
},
},
},
},
service: {
properties: {
name: {
type: 'keyword',
fields: {
text: {
type: 'match_only_text',
},
},
},
},
},
test_field: {
type: 'keyword',
ignore_above: 1024,
},
tls: {
properties: {
established: {
type: 'boolean',
},
},
},
trace: {
properties: {
id: {
type: 'keyword',
ignore_above: 1024,
},
},
},
},
});

View file

@ -17,6 +17,7 @@ import {
MORE_THAN_1024_CHARS,
} from './data';
import { logsSynthMappings } from './custom_mappings/custom_synth_mappings';
import { logsNginxMappings } from './custom_mappings/custom_integration_mappings';
export default function ({ getService, getPageObjects }: DatasetQualityFtrProviderContext) {
const PageObjects = getPageObjects([
@ -30,34 +31,42 @@ export default function ({ getService, getPageObjects }: DatasetQualityFtrProvid
const esClient = getService('es');
const retry = getService('retry');
const to = new Date().toISOString();
const type = 'logs';
const degradedDatasetName = 'synth.degraded';
const degradedDataStreamName = `logs-${degradedDatasetName}-${defaultNamespace}`;
const degradedDataStreamName = `${type}-${degradedDatasetName}-${defaultNamespace}`;
const degradedDatasetWithLimitsName = 'synth.degraded.rca';
const degradedDatasetWithLimitDataStreamName = `logs-${degradedDatasetWithLimitsName}-${defaultNamespace}`;
const degradedDatasetWithLimitDataStreamName = `${type}-${degradedDatasetWithLimitsName}-${defaultNamespace}`;
const serviceName = 'test_service';
const count = 5;
const customComponentTemplateName = 'logs-synth@mappings';
const nginxAccessDatasetName = 'nginx.access';
const customComponentTemplateNameNginx = 'logs-nginx.access@custom';
const nginxAccessDataStreamName = `${type}-${nginxAccessDatasetName}-${defaultNamespace}`;
const nginxPkg = {
name: 'nginx',
version: '1.23.0',
};
describe('Degraded fields flyout', () => {
before(async () => {
await synthtrace.index([
// Ingest basic logs
getInitialTestLogs({ to, count: 4 }),
// Ingest Degraded Logs
createDegradedFieldsRecord({
to: new Date().toISOString(),
count: 2,
dataset: degradedDatasetName,
}),
]);
});
after(async () => {
await synthtrace.clean();
});
describe('degraded field flyout open-close', () => {
before(async () => {
await synthtrace.index([
// Ingest basic logs
getInitialTestLogs({ to, count: 4 }),
// Ingest Degraded Logs
createDegradedFieldsRecord({
to: new Date().toISOString(),
count: 2,
dataset: degradedDatasetName,
}),
]);
});
after(async () => {
await synthtrace.clean();
});
it('should open and close the flyout when user clicks on the expand button', async () => {
await PageObjects.datasetQuality.navigateToDetails({
dataStream: degradedDataStreamName,
@ -90,31 +99,7 @@ export default function ({ getService, getPageObjects }: DatasetQualityFtrProvid
});
});
describe('values exist', () => {
it('should display the degraded field values', async () => {
await PageObjects.datasetQuality.navigateToDetails({
dataStream: degradedDataStreamName,
expandedDegradedField: 'test_field',
});
await retry.tryForTime(5000, async () => {
const cloudAvailabilityZoneValueExists = await PageObjects.datasetQuality.doesTextExist(
'datasetQualityDetailsDegradedFieldFlyoutFieldValue-values',
ANOTHER_1024_CHARS
);
const cloudAvailabilityZoneValue2Exists = await PageObjects.datasetQuality.doesTextExist(
'datasetQualityDetailsDegradedFieldFlyoutFieldValue-values',
MORE_THAN_1024_CHARS
);
expect(cloudAvailabilityZoneValueExists).to.be(true);
expect(cloudAvailabilityZoneValue2Exists).to.be(true);
});
await PageObjects.datasetQuality.closeFlyout();
});
});
describe('testing root cause for ignored fields', () => {
describe('detecting root cause for ignored fields', () => {
before(async () => {
// Create custom component template
await synthtrace.createComponentTemplate(
@ -142,8 +127,18 @@ export default function ({ getService, getPageObjects }: DatasetQualityFtrProvid
hidden: false,
},
});
// Ingest Degraded Logs with 25 fields
// Install Nginx Integration and ingest logs for it
await PageObjects.observabilityLogsExplorer.installPackage(nginxPkg);
// Create custom component template to avoid issues with LogsDB
await synthtrace.createComponentTemplate(
customComponentTemplateNameNginx,
logsNginxMappings(nginxAccessDatasetName)
);
await synthtrace.index([
// Ingest Degraded Logs with 25 fields in degraded DataSet
timerange(moment(to).subtract(count, 'minute'), moment(to))
.interval('1m')
.rate(1)
@ -161,7 +156,30 @@ export default function ({ getService, getPageObjects }: DatasetQualityFtrProvid
.defaults({
'service.name': serviceName,
'trace.id': generateShortId(),
test_field: [MORE_THAN_1024_CHARS, 'hello world'],
test_field: [MORE_THAN_1024_CHARS, ANOTHER_1024_CHARS],
})
.timestamp(timestamp)
);
}),
// Ingest Degraded Logs with 42 fields in Nginx DataSet
timerange(moment(to).subtract(count, 'minute'), moment(to))
.interval('1m')
.rate(1)
.generator((timestamp) => {
return Array(1)
.fill(0)
.flatMap(() =>
log
.create()
.dataset(nginxAccessDatasetName)
.message('a log message')
.logLevel(MORE_THAN_1024_CHARS)
.service(serviceName)
.namespace(defaultNamespace)
.defaults({
'service.name': serviceName,
'trace.id': generateShortId(),
test_field: [MORE_THAN_1024_CHARS, ANOTHER_1024_CHARS],
})
.timestamp(timestamp)
);
@ -176,8 +194,13 @@ export default function ({ getService, getPageObjects }: DatasetQualityFtrProvid
}
);
// Ingest Degraded Logs with 26 fields
// Set Limit of 42
await PageObjects.datasetQuality.setDataStreamSettings(nginxAccessDataStreamName, {
'mapping.total_fields.limit': 42,
});
await synthtrace.index([
// Ingest Degraded Logs with 26 fields
timerange(moment(to).subtract(count, 'minute'), moment(to))
.interval('1m')
.rate(1)
@ -196,7 +219,31 @@ export default function ({ getService, getPageObjects }: DatasetQualityFtrProvid
'service.name': serviceName,
'trace.id': generateShortId(),
test_field: [MORE_THAN_1024_CHARS, 'hello world'],
'cloud.region': 'us-east-1',
'cloud.project.id': generateShortId(),
})
.timestamp(timestamp)
);
}),
// Ingest Degraded Logs with 43 fields in Nginx DataSet
timerange(moment(to).subtract(count, 'minute'), moment(to))
.interval('1m')
.rate(1)
.generator((timestamp) => {
return Array(1)
.fill(0)
.flatMap(() =>
log
.create()
.dataset(nginxAccessDatasetName)
.message('a log message')
.logLevel(MORE_THAN_1024_CHARS)
.service(serviceName)
.namespace(defaultNamespace)
.defaults({
'service.name': serviceName,
'trace.id': generateShortId(),
test_field: [MORE_THAN_1024_CHARS, ANOTHER_1024_CHARS],
'cloud.project.id': generateShortId(),
})
.timestamp(timestamp)
);
@ -205,9 +252,30 @@ export default function ({ getService, getPageObjects }: DatasetQualityFtrProvid
// Roll over the data stream to reset the limit to the default of 1000
await PageObjects.datasetQuality.rolloverDataStream(degradedDatasetWithLimitDataStreamName);
await PageObjects.datasetQuality.rolloverDataStream(nginxAccessDataStreamName);
// Set Limit of 26
await PageObjects.datasetQuality.setDataStreamSettings(
PageObjects.datasetQuality.generateBackingIndexNameWithoutVersion({
dataset: degradedDatasetWithLimitsName,
}) + '-000002',
{
'mapping.total_fields.limit': 26,
}
);
// Set Limit of 43
await PageObjects.datasetQuality.setDataStreamSettings(
PageObjects.datasetQuality.generateBackingIndexNameWithoutVersion({
dataset: nginxAccessDatasetName,
}) + '-000002',
{
'mapping.total_fields.limit': 43,
}
);
// Ingest docs with 26 fields again
await synthtrace.index([
// Ingest Degraded Logs with 26 fields
timerange(moment(to).subtract(count, 'minute'), moment(to))
.interval('1m')
.rate(1)
@ -223,11 +291,34 @@ export default function ({ getService, getPageObjects }: DatasetQualityFtrProvid
.service(serviceName)
.namespace(defaultNamespace)
.defaults({
'log.file.path': '/my-service.log',
'service.name': serviceName,
'trace.id': generateShortId(),
test_field: [MORE_THAN_1024_CHARS, 'hello world'],
'cloud.region': 'us-east-1',
'cloud.project.id': generateShortId(),
})
.timestamp(timestamp)
);
}),
// Ingest Degraded Logs with 43 fields in Nginx DataSet
timerange(moment(to).subtract(count, 'minute'), moment(to))
.interval('1m')
.rate(1)
.generator((timestamp) => {
return Array(1)
.fill(0)
.flatMap(() =>
log
.create()
.dataset(nginxAccessDatasetName)
.message('a log message')
.logLevel(MORE_THAN_1024_CHARS)
.service(serviceName)
.namespace(defaultNamespace)
.defaults({
'service.name': serviceName,
'trace.id': generateShortId(),
test_field: [MORE_THAN_1024_CHARS, ANOTHER_1024_CHARS],
'cloud.project.id': generateShortId(),
})
.timestamp(timestamp)
);
@ -235,90 +326,6 @@ export default function ({ getService, getPageObjects }: DatasetQualityFtrProvid
]);
});
describe('field character limit exceeded', () => {
it('should display cause as "field ignored" when a field is ignored due to field above issue', async () => {
await PageObjects.datasetQuality.navigateToDetails({
dataStream: degradedDatasetWithLimitDataStreamName,
expandedDegradedField: 'test_field',
});
await retry.tryForTime(5000, async () => {
const fieldIgnoredMessageExists = await PageObjects.datasetQuality.doesTextExist(
'datasetQualityDetailsDegradedFieldFlyoutFieldValue-cause',
'field character limit exceeded'
);
expect(fieldIgnoredMessageExists).to.be(true);
});
await PageObjects.datasetQuality.closeFlyout();
});
it('should display values when cause is "field ignored"', async () => {
await PageObjects.datasetQuality.navigateToDetails({
dataStream: degradedDatasetWithLimitDataStreamName,
expandedDegradedField: 'test_field',
});
await retry.tryForTime(5000, async () => {
const testFieldValueExists = await PageObjects.datasetQuality.doesTextExist(
'datasetQualityDetailsDegradedFieldFlyoutFieldValue-values',
MORE_THAN_1024_CHARS
);
expect(testFieldValueExists).to.be(true);
});
await PageObjects.datasetQuality.closeFlyout();
});
});
describe('field limit exceeded', () => {
it('should display cause as "field limit exceeded" when a field is ignored due to field limit issue', async () => {
await PageObjects.datasetQuality.navigateToDetails({
dataStream: degradedDatasetWithLimitDataStreamName,
expandedDegradedField: 'cloud',
});
await retry.tryForTime(5000, async () => {
const fieldLimitMessageExists = await PageObjects.datasetQuality.doesTextExist(
'datasetQualityDetailsDegradedFieldFlyoutFieldValue-cause',
'field limit exceeded'
);
expect(fieldLimitMessageExists).to.be(true);
});
await PageObjects.datasetQuality.closeFlyout();
});
it('should display the limit when the cause is "field limit exceeded"', async () => {
await PageObjects.datasetQuality.navigateToDetails({
dataStream: degradedDatasetWithLimitDataStreamName,
expandedDegradedField: 'cloud',
});
await retry.tryForTime(5000, async () => {
const limitExists = await PageObjects.datasetQuality.doesTextExist(
'datasetQualityDetailsDegradedFieldFlyoutFieldValue-mappingLimit',
'25'
);
expect(limitExists).to.be(true);
});
await PageObjects.datasetQuality.closeFlyout();
});
it('should warn users about the issue not present in latest backing index', async () => {
await PageObjects.datasetQuality.navigateToDetails({
dataStream: degradedDatasetWithLimitDataStreamName,
expandedDegradedField: 'cloud',
});
await testSubjects.existOrFail(
PageObjects.datasetQuality.testSubjectSelectors
.datasetQualityDetailsDegradedFieldFlyoutIssueDoesNotExist
);
});
});
describe('current quality issues', () => {
it('should display issues only from latest backing index when current issues toggle is on', async () => {
await PageObjects.datasetQuality.navigateToDetails({
@ -333,7 +340,7 @@ export default function ({ getService, getPageObjects }: DatasetQualityFtrProvid
const rows =
await PageObjects.datasetQuality.getDatasetQualityDetailsDegradedFieldTableRows();
expect(rows.length).to.eql(3);
expect(rows.length).to.eql(4);
await testSubjects.click(
PageObjects.datasetQuality.testSubjectSelectors
@ -348,7 +355,7 @@ export default function ({ getService, getPageObjects }: DatasetQualityFtrProvid
const newRows =
await PageObjects.datasetQuality.getDatasetQualityDetailsDegradedFieldTableRows();
expect(newRows.length).to.eql(2);
expect(newRows.length).to.eql(3);
});
it('should keep the toggle on when url state says so', async () => {
@ -374,7 +381,7 @@ export default function ({ getService, getPageObjects }: DatasetQualityFtrProvid
// Check value in Table
const table = await PageObjects.datasetQuality.parseDegradedFieldTable();
const countColumn = table['Docs count'];
expect(await countColumn.getCellTexts()).to.eql(['5', '5']);
expect(await countColumn.getCellTexts()).to.eql(['5', '5', '5']);
// Check value in Flyout
await retry.tryForTime(5000, async () => {
@ -394,7 +401,7 @@ export default function ({ getService, getPageObjects }: DatasetQualityFtrProvid
// Check value in Table
const newTable = await PageObjects.datasetQuality.parseDegradedFieldTable();
const newCountColumn = newTable['Docs count'];
expect(await newCountColumn.getCellTexts()).to.eql(['15', '15', '5']);
expect(await newCountColumn.getCellTexts()).to.eql(['15', '15', '5', '5']);
// Check value in Flyout
await retry.tryForTime(5000, async () => {
@ -439,12 +446,432 @@ export default function ({ getService, getPageObjects }: DatasetQualityFtrProvid
});
});
describe('character limit exceeded', () => {
it('should display cause as "field character limit exceeded" when a field is ignored due to character limit issue', async () => {
await PageObjects.datasetQuality.navigateToDetails({
dataStream: degradedDatasetWithLimitDataStreamName,
expandedDegradedField: 'test_field',
});
await retry.tryForTime(5000, async () => {
const fieldIgnoredMessageExists = await PageObjects.datasetQuality.doesTextExist(
'datasetQualityDetailsDegradedFieldFlyoutFieldValue-cause',
'field character limit exceeded'
);
expect(fieldIgnoredMessageExists).to.be(true);
});
await PageObjects.datasetQuality.closeFlyout();
});
it('should display values when cause is "field character limit exceeded"', async () => {
await PageObjects.datasetQuality.navigateToDetails({
dataStream: degradedDatasetWithLimitDataStreamName,
expandedDegradedField: 'test_field',
});
await retry.tryForTime(5000, async () => {
const testFieldValue1Exists = await PageObjects.datasetQuality.doesTextExist(
'datasetQualityDetailsDegradedFieldFlyoutFieldValue-values',
MORE_THAN_1024_CHARS
);
const testFieldValue2Exists = await PageObjects.datasetQuality.doesTextExist(
'datasetQualityDetailsDegradedFieldFlyoutFieldValue-values',
ANOTHER_1024_CHARS
);
expect(testFieldValue1Exists).to.be(true);
expect(testFieldValue2Exists).to.be(true);
});
await PageObjects.datasetQuality.closeFlyout();
});
it('should display the maximum character limit when cause is "field character limit exceeded"', async () => {
await PageObjects.datasetQuality.navigateToDetails({
dataStream: degradedDatasetWithLimitDataStreamName,
expandedDegradedField: 'test_field',
});
await retry.tryForTime(5000, async () => {
const limitValueExists = await PageObjects.datasetQuality.doesTextExist(
'datasetQualityDetailsDegradedFieldFlyoutFieldValue-characterLimit',
'1024'
);
expect(limitValueExists).to.be(true);
});
await PageObjects.datasetQuality.closeFlyout();
});
it('should show possible mitigation section with manual options for non-integrations', async () => {
await PageObjects.datasetQuality.navigateToDetails({
dataStream: degradedDatasetWithLimitDataStreamName,
expandedDegradedField: 'test_field',
});
// Possible Mitigation Section should exist
await testSubjects.existOrFail(
'datasetQualityDetailsDegradedFieldFlyoutPossibleMitigationTitle'
);
// It's a technical preview
await testSubjects.existOrFail(
'datasetQualityDetailsDegradedFieldFlyoutPossibleMitigationTechPreviewBadge'
);
// Should display Edit/Create Component Template Link option
await testSubjects.existOrFail(
'datasetQualityManualMitigationsCustomComponentTemplateLink'
);
// Should display Edit/Create Ingest Pipeline Link option
await testSubjects.existOrFail('datasetQualityManualMitigationsPipelineAccordion');
// Check the component template URL
const button = await testSubjects.find(
'datasetQualityManualMitigationsCustomComponentTemplateLink'
);
const componentTemplateUrl = await button.getAttribute('data-test-url');
// Should point to the index template page for this data stream
expect(componentTemplateUrl).to.be(
`/data/index_management/templates/${degradedDatasetWithLimitDataStreamName}`
);
const nonIntegrationCustomName = `${type}@custom`;
const pipelineInputBox = await testSubjects.find(
'datasetQualityManualMitigationsPipelineName'
);
const pipelineValue = await pipelineInputBox.getAttribute('value');
// Expect the pipeline name to default to logs@custom for non-integrations
expect(pipelineValue).to.be(nonIntegrationCustomName);
const pipelineLink = await testSubjects.find(
'datasetQualityManualMitigationsPipelineLink'
);
const pipelineLinkURL = await pipelineLink.getAttribute('data-test-url');
// Expect the pipeline link to point to the ingest pipelines page, pre-filtered by the custom pipeline name
expect(pipelineLinkURL).to.be(
`/app/management/ingest/ingest_pipelines/?pipeline=${encodeURIComponent(
nonIntegrationCustomName
)}`
);
});
it('should show possible mitigation section with different manual options for integrations', async () => {
// Navigate to Integration Dataset
await PageObjects.datasetQuality.navigateToDetails({
dataStream: nginxAccessDataStreamName,
expandedDegradedField: 'test_field',
});
// Possible Mitigation Section should exist
await testSubjects.existOrFail(
'datasetQualityDetailsDegradedFieldFlyoutPossibleMitigationTitle'
);
// It's a technical preview
await testSubjects.existOrFail(
'datasetQualityDetailsDegradedFieldFlyoutPossibleMitigationTechPreviewBadge'
);
// Should display Edit/Create Component Template Link option
await testSubjects.existOrFail(
'datasetQualityManualMitigationsCustomComponentTemplateLink'
);
// Should display Edit/Create Ingest Pipeline Link option
await testSubjects.existOrFail('datasetQualityManualMitigationsPipelineAccordion');
// Check the Component Template URL
const button = await testSubjects.find(
'datasetQualityManualMitigationsCustomComponentTemplateLink'
);
const componentTemplateUrl = await button.getAttribute('data-test-url');
const integrationSpecificCustomName = `${type}-${nginxAccessDatasetName}@custom`;
// Should point to the integration's @custom component template page
expect(componentTemplateUrl).to.be(
`/data/index_management/component_templates/${encodeURIComponent(
integrationSpecificCustomName
)}`
);
const pipelineInputBox = await testSubjects.find(
'datasetQualityManualMitigationsPipelineName'
);
const pipelineValue = await pipelineInputBox.getAttribute('value');
// Expect the pipeline name to be the integration-specific @custom pipeline
expect(pipelineValue).to.be(integrationSpecificCustomName);
const pipelineLink = await testSubjects.find(
'datasetQualityManualMitigationsPipelineLink'
);
const pipelineLinkURL = await pipelineLink.getAttribute('data-test-url');
// Expect the pipeline link to point to the ingest pipelines page, pre-filtered by the integration's custom pipeline name
expect(pipelineLinkURL).to.be(
`/app/management/ingest/ingest_pipelines/?pipeline=${encodeURIComponent(
integrationSpecificCustomName
)}`
);
});
});
describe('past field limit exceeded', () => {
it('should display cause as "field limit exceeded" when a field is ignored due to field limit issue', async () => {
await PageObjects.datasetQuality.navigateToDetails({
dataStream: degradedDatasetWithLimitDataStreamName,
expandedDegradedField: 'cloud',
});
await retry.tryForTime(5000, async () => {
const fieldLimitMessageExists = await PageObjects.datasetQuality.doesTextExist(
'datasetQualityDetailsDegradedFieldFlyoutFieldValue-cause',
'field limit exceeded'
);
expect(fieldLimitMessageExists).to.be(true);
});
await PageObjects.datasetQuality.closeFlyout();
});
it('should display the current field limit when the cause is "field limit exceeded"', async () => {
await PageObjects.datasetQuality.navigateToDetails({
dataStream: degradedDatasetWithLimitDataStreamName,
expandedDegradedField: 'cloud',
});
await retry.tryForTime(5000, async () => {
const limitExists = await PageObjects.datasetQuality.doesTextExist(
'datasetQualityDetailsDegradedFieldFlyoutFieldValue-mappingLimit',
'25'
);
expect(limitExists).to.be(true);
});
await PageObjects.datasetQuality.closeFlyout();
});
it('should warn users about the issue not present in latest backing index', async () => {
await PageObjects.datasetQuality.navigateToDetails({
dataStream: degradedDatasetWithLimitDataStreamName,
expandedDegradedField: 'cloud',
});
await testSubjects.existOrFail(
PageObjects.datasetQuality.testSubjectSelectors
.datasetQualityDetailsDegradedFieldFlyoutIssueDoesNotExist
);
});
});
describe('current field limit issues', () => {
it('should display increase field limit as a possible mitigation for integrations', async () => {
await PageObjects.datasetQuality.navigateToDetails({
dataStream: nginxAccessDataStreamName,
expandedDegradedField: 'cloud.project.id',
});
// Field Limit Mitigation Section should exist
await testSubjects.existOrFail(
'datasetQualityDetailsDegradedFieldFlyoutFieldLimitMitigationAccordion'
);
// Should display the panel to increase field limit
await testSubjects.existOrFail(
'datasetQualityDetailsDegradedFieldFlyoutIncreaseFieldLimitPanel'
);
// Should display official online documentation link
await testSubjects.existOrFail(
'datasetQualityManualMitigationsPipelineOfficialDocumentationLink'
);
const linkButton = await testSubjects.find(
'datasetQualityManualMitigationsPipelineOfficialDocumentationLink'
);
const linkURL = await linkButton.getAttribute('href');
expect(linkURL).to.be(
'https://www.elastic.co/guide/en/elasticsearch/reference/master/mapping-settings-limit.html'
);
});
it('should display increase field limit as a possible mitigation for non integration', async () => {
await PageObjects.datasetQuality.navigateToDetails({
dataStream: degradedDatasetWithLimitDataStreamName,
expandedDegradedField: 'cloud.project',
});
// Field Limit Mitigation Section should exist
await testSubjects.existOrFail(
'datasetQualityDetailsDegradedFieldFlyoutFieldLimitMitigationAccordion'
);
// Should not display the panel to increase field limit
await testSubjects.missingOrFail(
'datasetQualityDetailsDegradedFieldFlyoutIncreaseFieldLimitPanel'
);
// Should display official online documentation link
await testSubjects.existOrFail(
'datasetQualityManualMitigationsPipelineOfficialDocumentationLink'
);
});
it('should display additional input fields and button increasing the limit for integrations', async () => {
await PageObjects.datasetQuality.navigateToDetails({
dataStream: nginxAccessDataStreamName,
expandedDegradedField: 'cloud.project.id',
});
// Should display current field limit
await testSubjects.existOrFail('datasetQualityIncreaseFieldMappingCurrentLimitFieldText');
const currentFieldLimitInput = await testSubjects.find(
'datasetQualityIncreaseFieldMappingCurrentLimitFieldText'
);
const currentFieldLimitValue = await currentFieldLimitInput.getAttribute('value');
const currentFieldLimit = parseInt(currentFieldLimitValue as string, 10);
const currentFieldLimitDisabledStatus = await currentFieldLimitInput.getAttribute(
'disabled'
);
expect(currentFieldLimit).to.be(43);
expect(currentFieldLimitDisabledStatus).to.be('true');
// Should display new field limit
await testSubjects.existOrFail(
'datasetQualityIncreaseFieldMappingProposedLimitFieldText'
);
const newFieldLimitInput = await testSubjects.find(
'datasetQualityIncreaseFieldMappingProposedLimitFieldText'
);
const newFieldLimitValue = await newFieldLimitInput.getAttribute('value');
const newFieldLimit = parseInt(newFieldLimitValue as string, 10);
// Should be 30% more than the current limit
const newLimit = Math.round(currentFieldLimit * 1.3);
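// e.g. with the current limit of 43 used in this suite, Math.round(43 * 1.3) === 56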
expect(newFieldLimit).to.be(newLimit);
// Should display the apply button
await testSubjects.existOrFail('datasetQualityIncreaseFieldMappingLimitButtonButton');
const applyButton = await testSubjects.find(
'datasetQualityIncreaseFieldMappingLimitButtonButton'
);
const applyButtonDisabledStatus = await applyButton.getAttribute('disabled');
// The apply button should be active
expect(applyButtonDisabledStatus).to.be(null);
});
it('should validate input for new field limit', async () => {
await PageObjects.datasetQuality.navigateToDetails({
dataStream: nginxAccessDataStreamName,
expandedDegradedField: 'cloud.project.id',
});
// Should not allow values less than current limit of 43
await testSubjects.setValue(
'datasetQualityIncreaseFieldMappingProposedLimitFieldText',
'42',
{
clearWithKeyboard: true,
typeCharByChar: true,
}
);
const applyButton = await testSubjects.find(
'datasetQualityIncreaseFieldMappingLimitButtonButton'
);
const applyButtonDisabledStatus = await applyButton.getAttribute('disabled');
// The apply button should be disabled
expect(applyButtonDisabledStatus).to.be('true');
const newFieldLimitInput = await testSubjects.find(
'datasetQualityIncreaseFieldMappingProposedLimitFieldText'
);
const invalidStatus = await newFieldLimitInput.getAttribute('aria-invalid');
expect(invalidStatus).to.be('true');
});
it('should let user increase the field limit for integrations', async () => {
await PageObjects.datasetQuality.navigateToDetails({
dataStream: nginxAccessDataStreamName,
expandedDegradedField: 'cloud.project.id',
});
const applyButton = await testSubjects.find(
'datasetQualityIncreaseFieldMappingLimitButtonButton'
);
await applyButton.click();
await retry.tryForTime(5000, async () => {
// Should display the success callout
await testSubjects.existOrFail(
'datasetQualityDetailsDegradedFlyoutNewLimitSetSuccessCallout'
);
// Should display link to component template edited
await testSubjects.existOrFail(
'datasetQualityDetailsDegradedFlyoutNewLimitSetCheckComponentTemplate'
);
const ctLink = await testSubjects.find(
'datasetQualityDetailsDegradedFlyoutNewLimitSetCheckComponentTemplate'
);
const ctLinkURL = await ctLink.getAttribute('href');
const componentTemplateName = `${type}-${nginxAccessDatasetName}@custom`;
// Should point to the component template page
expect(
ctLinkURL?.endsWith(
`/data/index_management/component_templates/${encodeURIComponent(
componentTemplateName
)}`
)
).to.be(true);
});
// Refresh the time range to get the latest data
await PageObjects.datasetQuality.refreshDetailsPageData();
// The field limit issue should no longer exist in the latest backing index, so the flyout should show a warning
await testSubjects.existOrFail(
'datasetQualityDetailsDegradedFieldFlyoutIssueDoesNotExist'
);
// Should not display the panel to increase field limit
await testSubjects.missingOrFail(
'datasetQualityDetailsDegradedFieldFlyoutIncreaseFieldLimitPanel'
);
});
});
after(async () => {
await synthtrace.clean();
await esClient.indices.deleteIndexTemplate({
name: degradedDatasetWithLimitDataStreamName,
});
await synthtrace.deleteComponentTemplate(customComponentTemplateName);
await PageObjects.observabilityLogsExplorer.uninstallPackage(nginxPkg);
await synthtrace.deleteComponentTemplate(customComponentTemplateNameNginx);
});
});
});

View file

@ -204,6 +204,10 @@ export function DatasetQualityPageObject({ getPageObjects, getService }: FtrProv
}
},
async waitUntilPossibleMitigationsLoaded() {
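// Wait until the flyout's skeleton placeholders are gone, i.e. the possible-mitigations panels have finished loading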
await find.waitForDeletedByCssSelector('.euiFlyoutBody .euiSkeletonRectangle', 20 * 1000);
},
async waitUntilDegradedFieldFlyoutLoaded() {
await testSubjects.existOrFail(testSubjectSelectors.datasetQualityDetailsDegradedFieldFlyout);
},
@ -239,6 +243,18 @@ export function DatasetQualityPageObject({ getPageObjects, getService }: FtrProv
);
},
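// Builds the current backing index name minus the version suffix, e.g. '.ds-logs-nginx.access-default-2024.10.25'
// (with today's date); callers append the version suffix themselves, e.g. '-000002'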
generateBackingIndexNameWithoutVersion({
type = 'logs',
dataset,
namespace = 'default',
}: {
type?: string;
dataset: string;
namespace?: string;
}) {
return `.ds-${type}-${dataset}-${namespace}-${getCurrentDateFormatted()}`;
},
getDatasetsTable(): Promise<WebElementWrapper> {
return testSubjects.find(testSubjectSelectors.datasetQualityTable);
},
@ -554,3 +570,12 @@ async function getDatasetTableHeaderTexts(tableWrapper: WebElementWrapper) {
headerElementWrappers.map((headerElementWrapper) => headerElementWrapper.getVisibleText())
);
}
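// Formats today's date as YYYY.MM.DD, matching the date segment in data stream backing index names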
function getCurrentDateFormatted() {
const date = new Date();
const year = date.getFullYear();
const month = String(date.getMonth() + 1).padStart(2, '0');
const day = String(date.getDate()).padStart(2, '0');
return `${year}.${month}.${day}`;
}

View file

@ -1,130 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { log, timerange } from '@kbn/apm-synthtrace-client';
import expect from '@kbn/expect';
import type { InternalRequestHeader, RoleCredentials } from '../../../../shared/services';
import { expectToReject, getDataStreamSettingsOfEarliestIndex, rolloverDataStream } from './utils';
import {
DatasetQualityApiClient,
DatasetQualityApiError,
} from './common/dataset_quality_api_supertest';
import { DatasetQualityFtrContextProvider } from './common/services';
import { createBackingIndexNameWithoutVersion } from './utils';
export default function ({ getService }: DatasetQualityFtrContextProvider) {
const datasetQualityApiClient: DatasetQualityApiClient = getService('datasetQualityApiClient');
const synthtrace = getService('logSynthtraceEsClient');
const svlCommonApi = getService('svlCommonApi');
const svlUserManager = getService('svlUserManager');
const esClient = getService('es');
const start = '2023-12-11T18:00:00.000Z';
const end = '2023-12-11T18:01:00.000Z';
const type = 'logs';
const dataset = 'nginx.access';
const namespace = 'default';
const serviceName = 'my-service';
const hostName = 'synth-host';
const defaultDataStreamPrivileges = {
datasetUserPrivileges: { canRead: true, canMonitor: true, canViewIntegrations: true },
};
async function callApi(
dataStream: string,
roleAuthc: RoleCredentials,
internalReqHeader: InternalRequestHeader
) {
return await datasetQualityApiClient.slsUser({
endpoint: 'GET /internal/dataset_quality/data_streams/{dataStream}/settings',
params: {
path: {
dataStream,
},
},
roleAuthc,
internalReqHeader,
});
}
describe('gets the data stream settings', () => {
let roleAuthc: RoleCredentials;
let internalReqHeader: InternalRequestHeader;
before(async () => {
roleAuthc = await svlUserManager.createM2mApiKeyWithRoleScope('admin');
internalReqHeader = svlCommonApi.getInternalRequestHeader();
await synthtrace.index([
timerange(start, end)
.interval('1m')
.rate(1)
.generator((timestamp) =>
log
.create()
.message('This is a log message')
.timestamp(timestamp)
.dataset(dataset)
.namespace(namespace)
.defaults({
'log.file.path': '/my-service.log',
'service.name': serviceName,
'host.name': hostName,
})
),
]);
});
after(async () => {
await synthtrace.clean();
await svlUserManager.invalidateM2mApiKeyWithRoleScope(roleAuthc);
});
it('returns error when dataStream param is not provided', async () => {
const expectedMessage = 'Data Stream name cannot be empty';
const err = await expectToReject<DatasetQualityApiError>(() =>
callApi(encodeURIComponent(' '), roleAuthc, internalReqHeader)
);
expect(err.res.status).to.be(400);
expect(err.res.body.message.indexOf(expectedMessage)).to.greaterThan(-1);
});
it('returns only privileges if matching data stream is not available', async () => {
const nonExistentDataSet = 'Non-existent';
const nonExistentDataStream = `${type}-${nonExistentDataSet}-${namespace}`;
const resp = await callApi(nonExistentDataStream, roleAuthc, internalReqHeader);
expect(resp.body).eql(defaultDataStreamPrivileges);
});
it('returns "createdOn" and "lastBackingIndexName" correctly', async () => {
const dataStreamSettings = await getDataStreamSettingsOfEarliestIndex(
esClient,
`${type}-${dataset}-${namespace}`
);
const resp = await callApi(`${type}-${dataset}-${namespace}`, roleAuthc, internalReqHeader);
expect(resp.body.createdOn).to.be(Number(dataStreamSettings?.index?.creation_date));
expect(resp.body.lastBackingIndexName).to.be(
`${createBackingIndexNameWithoutVersion({
type,
dataset,
namespace,
})}-000001`
);
});
it('returns "createdOn" and "lastBackingIndexName" correctly for rolled over dataStream', async () => {
await rolloverDataStream(esClient, `${type}-${dataset}-${namespace}`);
const dataStreamSettings = await getDataStreamSettingsOfEarliestIndex(
esClient,
`${type}-${dataset}-${namespace}`
);
const resp = await callApi(`${type}-${dataset}-${namespace}`, roleAuthc, internalReqHeader);
expect(resp.body.createdOn).to.be(Number(dataStreamSettings?.index?.creation_date));
expect(resp.body.lastBackingIndexName).to.be(
`${createBackingIndexNameWithoutVersion({ type, dataset, namespace })}-000002`
);
});
});
}

View file

@ -9,7 +9,6 @@ import { FtrProviderContext } from '../../../ftr_provider_context';
export default function ({ loadTestFile }: FtrProviderContext) {
describe('Dataset Quality', function () {
loadTestFile(require.resolve('./data_stream_details'));
loadTestFile(require.resolve('./data_stream_settings'));
loadTestFile(require.resolve('./degraded_field_values'));
});
}

View file

@ -0,0 +1,177 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { MappingTypeMapping } from '@elastic/elasticsearch/lib/api/types';
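// Explicit mappings for the synthetic logs-nginx.access data stream; the degraded-fields FTR tests install these
// via a custom component template to avoid issues with LogsDB (see the flyout test setup)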
export const logsNginxMappings = (dataset: string): MappingTypeMapping => ({
properties: {
'@timestamp': {
type: 'date',
ignore_malformed: false,
},
cloud: {
properties: {
image: {
properties: {
id: {
type: 'keyword',
ignore_above: 1024,
},
},
},
},
},
data_stream: {
properties: {
dataset: {
type: 'constant_keyword',
value: dataset,
},
namespace: {
type: 'constant_keyword',
value: 'default',
},
type: {
type: 'constant_keyword',
value: 'logs',
},
},
},
ecs: {
properties: {
version: {
type: 'keyword',
ignore_above: 1024,
},
},
},
error: {
properties: {
message: {
type: 'match_only_text',
},
},
},
event: {
properties: {
agent_id_status: {
type: 'keyword',
ignore_above: 1024,
},
dataset: {
type: 'constant_keyword',
value: 'nginx.access',
},
ingested: {
type: 'date',
format: 'strict_date_time_no_millis||strict_date_optional_time||epoch_millis',
ignore_malformed: false,
},
module: {
type: 'constant_keyword',
value: 'nginx',
},
},
},
host: {
properties: {
containerized: {
type: 'boolean',
},
name: {
type: 'keyword',
fields: {
text: {
type: 'match_only_text',
},
},
},
os: {
properties: {
build: {
type: 'keyword',
ignore_above: 1024,
},
codename: {
type: 'keyword',
ignore_above: 1024,
},
},
},
},
},
input: {
properties: {
type: {
type: 'keyword',
ignore_above: 1024,
},
},
},
log: {
properties: {
level: {
type: 'keyword',
ignore_above: 1024,
},
offset: {
type: 'long',
},
},
},
network: {
properties: {
bytes: {
type: 'long',
},
},
},
nginx: {
properties: {
access: {
properties: {
remote_ip_list: {
type: 'keyword',
ignore_above: 1024,
},
},
},
},
},
service: {
properties: {
name: {
type: 'keyword',
fields: {
text: {
type: 'match_only_text',
},
},
},
},
},
test_field: {
type: 'keyword',
ignore_above: 1024,
},
tls: {
properties: {
established: {
type: 'boolean',
},
},
},
trace: {
properties: {
id: {
type: 'keyword',
ignore_above: 1024,
},
},
},
},
});

View file

@ -385,6 +385,8 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
dataStream: degradedDataStreamName,
});
await PageObjects.datasetQuality.waitUntilTableLoaded();
const rows =
await PageObjects.datasetQuality.getDatasetQualityDetailsDegradedFieldTableRows();

View file

@ -17,6 +17,7 @@ import {
} from './data';
import { FtrProviderContext } from '../../../ftr_provider_context';
import { logsSynthMappings } from './custom_mappings/custom_synth_mappings';
import { logsNginxMappings } from './custom_mappings/custom_integration_mappings';
export default function ({ getService, getPageObjects }: FtrProviderContext) {
const PageObjects = getPageObjects([
@ -31,35 +32,43 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
const esClient = getService('es');
const retry = getService('retry');
const to = new Date().toISOString();
const type = 'logs';
const degradedDatasetName = 'synth.degraded';
const degradedDataStreamName = `logs-${degradedDatasetName}-${defaultNamespace}`;
const degradedDataStreamName = `${type}-${degradedDatasetName}-${defaultNamespace}`;
const degradedDatasetWithLimitsName = 'synth.degraded.rca';
const degradedDatasetWithLimitDataStreamName = `logs-${degradedDatasetWithLimitsName}-${defaultNamespace}`;
const degradedDatasetWithLimitDataStreamName = `${type}-${degradedDatasetWithLimitsName}-${defaultNamespace}`;
const serviceName = 'test_service';
const count = 5;
const customComponentTemplateName = 'logs-synth@mappings';
describe('Degraded fields flyout', function () {
before(async () => {
await synthtrace.index([
// Ingest basic logs
getInitialTestLogs({ to, count: 4 }),
// Ingest Degraded Logs
createDegradedFieldsRecord({
to: new Date().toISOString(),
count: 2,
dataset: degradedDatasetName,
}),
]);
await PageObjects.svlCommonPage.loginWithPrivilegedRole();
});
after(async () => {
await synthtrace.clean();
});
const nginxAccessDatasetName = 'nginx.access';
const customComponentTemplateNameNginx = 'logs-nginx.access@custom';
const nginxAccessDataStreamName = `${type}-${nginxAccessDatasetName}-${defaultNamespace}`;
const nginxPkg = {
name: 'nginx',
version: '1.23.0',
};
describe('Degraded fields flyout', () => {
describe('degraded field flyout open-close', () => {
before(async () => {
await synthtrace.index([
// Ingest basic logs
getInitialTestLogs({ to, count: 4 }),
// Ingest Degraded Logs
createDegradedFieldsRecord({
to: new Date().toISOString(),
count: 2,
dataset: degradedDatasetName,
}),
]);
await PageObjects.svlCommonPage.loginAsAdmin();
});
after(async () => {
await synthtrace.clean();
});
it('should open and close the flyout when user clicks on the expand button', async () => {
await PageObjects.datasetQuality.navigateToDetails({
dataStream: degradedDataStreamName,
@ -88,30 +97,6 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
});
});
describe('values exist', () => {
it('should display the degraded field values', async () => {
await PageObjects.datasetQuality.navigateToDetails({
dataStream: degradedDataStreamName,
expandedDegradedField: 'test_field',
});
await retry.tryForTime(5000, async () => {
const cloudAvailabilityZoneValueExists = await PageObjects.datasetQuality.doesTextExist(
'datasetQualityDetailsDegradedFieldFlyoutFieldValue-values',
ANOTHER_1024_CHARS
);
const cloudAvailabilityZoneValue2Exists = await PageObjects.datasetQuality.doesTextExist(
'datasetQualityDetailsDegradedFieldFlyoutFieldValue-values',
MORE_THAN_1024_CHARS
);
expect(cloudAvailabilityZoneValueExists).to.be(true);
expect(cloudAvailabilityZoneValue2Exists).to.be(true);
});
await PageObjects.datasetQuality.closeFlyout();
});
});
describe('testing root cause for ignored fields', () => {
before(async () => {
// Create custom component template
@ -140,8 +125,18 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
hidden: false,
},
});
// Ingest Degraded Logs with 25 fields
// Install Nginx Integration and ingest logs for it
await PageObjects.observabilityLogsExplorer.installPackage(nginxPkg);
// Create custom component template to avoid issues with LogsDB
await synthtrace.createComponentTemplate(
customComponentTemplateNameNginx,
logsNginxMappings(nginxAccessDatasetName)
);
await synthtrace.index([
// Ingest Degraded Logs with 25 fields
timerange(moment(to).subtract(count, 'minute'), moment(to))
.interval('1m')
.rate(1)
@ -159,7 +154,30 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
.defaults({
'service.name': serviceName,
'trace.id': generateShortId(),
test_field: [MORE_THAN_1024_CHARS, 'hello world'],
test_field: [MORE_THAN_1024_CHARS, ANOTHER_1024_CHARS],
})
.timestamp(timestamp)
);
}),
// Ingest Degraded Logs with 43 fields in Nginx DataSet
timerange(moment(to).subtract(count, 'minute'), moment(to))
.interval('1m')
.rate(1)
.generator((timestamp) => {
return Array(1)
.fill(0)
.flatMap(() =>
log
.create()
.dataset(nginxAccessDatasetName)
.message('a log message')
.logLevel(MORE_THAN_1024_CHARS)
.service(serviceName)
.namespace(defaultNamespace)
.defaults({
'service.name': serviceName,
'trace.id': generateShortId(),
test_field: [MORE_THAN_1024_CHARS, ANOTHER_1024_CHARS],
})
.timestamp(timestamp)
);
@ -174,8 +192,13 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
}
);
// Ingest Degraded Logs with 26 fields
// Set a field limit of 43 on the Nginx data stream
await PageObjects.datasetQuality.setDataStreamSettings(nginxAccessDataStreamName, {
'mapping.total_fields.limit': 43,
});
await synthtrace.index([
// Ingest Degraded Logs with 26 fields
timerange(moment(to).subtract(count, 'minute'), moment(to))
.interval('1m')
.rate(1)
@ -194,7 +217,31 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
'service.name': serviceName,
'trace.id': generateShortId(),
test_field: [MORE_THAN_1024_CHARS, 'hello world'],
'cloud.region': 'us-east-1',
'cloud.project.id': generateShortId(),
})
.timestamp(timestamp)
);
}),
// Ingest Degraded Logs with 44 fields in Nginx DataSet
timerange(moment(to).subtract(count, 'minute'), moment(to))
.interval('1m')
.rate(1)
.generator((timestamp) => {
return Array(1)
.fill(0)
.flatMap(() =>
log
.create()
.dataset(nginxAccessDatasetName)
.message('a log message')
.logLevel(MORE_THAN_1024_CHARS)
.service(serviceName)
.namespace(defaultNamespace)
.defaults({
'service.name': serviceName,
'trace.id': generateShortId(),
test_field: [MORE_THAN_1024_CHARS, ANOTHER_1024_CHARS],
'cloud.project.id': generateShortId(),
})
.timestamp(timestamp)
);
@ -203,9 +250,30 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
// Rollover Datastream to reset the limit to default which is 1000
await PageObjects.datasetQuality.rolloverDataStream(degradedDatasetWithLimitDataStreamName);
await PageObjects.datasetQuality.rolloverDataStream(nginxAccessDataStreamName);
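// After rollover, the write index of each data stream is its -000002 backing index; the limits below are applied to those backing indices only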
// Set Limit of 26
await PageObjects.datasetQuality.setDataStreamSettings(
PageObjects.datasetQuality.generateBackingIndexNameWithoutVersion({
dataset: degradedDatasetWithLimitsName,
}) + '-000002',
{
'mapping.total_fields.limit': 26,
}
);
// Set Limit of 44
await PageObjects.datasetQuality.setDataStreamSettings(
PageObjects.datasetQuality.generateBackingIndexNameWithoutVersion({
dataset: nginxAccessDatasetName,
}) + '-000002',
{
'mapping.total_fields.limit': 44,
}
);
// Ingest docs with 26 fields again
await synthtrace.index([
// Ingest Degraded Logs with 26 fields
timerange(moment(to).subtract(count, 'minute'), moment(to))
.interval('1m')
.rate(1)
@ -221,100 +289,40 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
.service(serviceName)
.namespace(defaultNamespace)
.defaults({
'log.file.path': '/my-service.log',
'service.name': serviceName,
'trace.id': generateShortId(),
test_field: [MORE_THAN_1024_CHARS, 'hello world'],
'cloud.region': 'us-east-1',
'cloud.project.id': generateShortId(),
})
.timestamp(timestamp)
);
}),
// Ingest Degraded Logs with 43 fields in Nginx DataSet
timerange(moment(to).subtract(count, 'minute'), moment(to))
.interval('1m')
.rate(1)
.generator((timestamp) => {
return Array(1)
.fill(0)
.flatMap(() =>
log
.create()
.dataset(nginxAccessDatasetName)
.message('a log message')
.logLevel(MORE_THAN_1024_CHARS)
.service(serviceName)
.namespace(defaultNamespace)
.defaults({
'service.name': serviceName,
'trace.id': generateShortId(),
test_field: [MORE_THAN_1024_CHARS, ANOTHER_1024_CHARS],
'cloud.project.id': generateShortId(),
})
.timestamp(timestamp)
);
}),
]);
});
describe('field character limit exceeded', () => {
it('should display cause as "field ignored" when a field is ignored due to field above issue', async () => {
await PageObjects.datasetQuality.navigateToDetails({
dataStream: degradedDatasetWithLimitDataStreamName,
expandedDegradedField: 'test_field',
});
await retry.tryForTime(5000, async () => {
const fieldIgnoredMessageExists = await PageObjects.datasetQuality.doesTextExist(
'datasetQualityDetailsDegradedFieldFlyoutFieldValue-cause',
'field character limit exceeded'
);
expect(fieldIgnoredMessageExists).to.be(true);
});
await PageObjects.datasetQuality.closeFlyout();
});
it('should display values when cause is "field ignored"', async () => {
await PageObjects.datasetQuality.navigateToDetails({
dataStream: degradedDatasetWithLimitDataStreamName,
expandedDegradedField: 'test_field',
});
await retry.tryForTime(5000, async () => {
const testFieldValueExists = await PageObjects.datasetQuality.doesTextExist(
'datasetQualityDetailsDegradedFieldFlyoutFieldValue-values',
MORE_THAN_1024_CHARS
);
expect(testFieldValueExists).to.be(true);
});
await PageObjects.datasetQuality.closeFlyout();
});
});
describe('field limit exceeded', () => {
it('should display cause as "field limit exceeded" when a field is ignored due to field limit issue', async () => {
await PageObjects.datasetQuality.navigateToDetails({
dataStream: degradedDatasetWithLimitDataStreamName,
expandedDegradedField: 'cloud',
});
await retry.tryForTime(5000, async () => {
const fieldLimitMessageExists = await PageObjects.datasetQuality.doesTextExist(
'datasetQualityDetailsDegradedFieldFlyoutFieldValue-cause',
'field limit exceeded'
);
expect(fieldLimitMessageExists).to.be(true);
});
await PageObjects.datasetQuality.closeFlyout();
});
it('should display the limit when the cause is "field limit exceeded"', async () => {
await PageObjects.datasetQuality.navigateToDetails({
dataStream: degradedDatasetWithLimitDataStreamName,
expandedDegradedField: 'cloud',
});
await retry.tryForTime(5000, async () => {
const limitExists = await PageObjects.datasetQuality.doesTextExist(
'datasetQualityDetailsDegradedFieldFlyoutFieldValue-mappingLimit',
'25'
);
expect(limitExists).to.be(true);
});
await PageObjects.datasetQuality.closeFlyout();
});
it('should warn users about the issue not present in latest backing index', async () => {
await PageObjects.datasetQuality.navigateToDetails({
dataStream: degradedDatasetWithLimitDataStreamName,
expandedDegradedField: 'cloud',
});
await testSubjects.existOrFail(
PageObjects.datasetQuality.testSubjectSelectors
.datasetQualityDetailsDegradedFieldFlyoutIssueDoesNotExist
);
});
await PageObjects.svlCommonPage.loginAsAdmin();
});
describe('current quality issues', () => {
@ -331,7 +339,7 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
const rows =
await PageObjects.datasetQuality.getDatasetQualityDetailsDegradedFieldTableRows();
expect(rows.length).to.eql(3);
expect(rows.length).to.eql(4);
await testSubjects.click(
PageObjects.datasetQuality.testSubjectSelectors
@ -346,7 +354,7 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
const newRows =
await PageObjects.datasetQuality.getDatasetQualityDetailsDegradedFieldTableRows();
expect(newRows.length).to.eql(2);
expect(newRows.length).to.eql(3);
});
it('should keep the toggle on when url state says so', async () => {
@ -372,7 +380,7 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
// Check value in Table
const table = await PageObjects.datasetQuality.parseDegradedFieldTable();
const countColumn = table['Docs count'];
expect(await countColumn.getCellTexts()).to.eql(['5', '5']);
expect(await countColumn.getCellTexts()).to.eql(['5', '5', '5']);
// Check value in Flyout
await retry.tryForTime(5000, async () => {
@ -392,7 +400,7 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
// Check value in Table
const newTable = await PageObjects.datasetQuality.parseDegradedFieldTable();
const newCountColumn = newTable['Docs count'];
expect(await newCountColumn.getCellTexts()).to.eql(['15', '15', '5']);
expect(await newCountColumn.getCellTexts()).to.eql(['15', '15', '5', '5']);
// Check value in Flyout
await retry.tryForTime(5000, async () => {
@ -437,12 +445,456 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
});
});
describe('character limit exceeded', () => {
it('should display cause as "field character limit exceeded" when a field is ignored due to character limit issue', async () => {
await PageObjects.datasetQuality.navigateToDetails({
dataStream: degradedDatasetWithLimitDataStreamName,
expandedDegradedField: 'test_field',
});
await retry.tryForTime(5000, async () => {
const fieldIgnoredMessageExists = await PageObjects.datasetQuality.doesTextExist(
'datasetQualityDetailsDegradedFieldFlyoutFieldValue-cause',
'field character limit exceeded'
);
expect(fieldIgnoredMessageExists).to.be(true);
});
await PageObjects.datasetQuality.closeFlyout();
});
it('should display values when cause is "field character limit exceeded"', async () => {
await PageObjects.datasetQuality.navigateToDetails({
dataStream: degradedDatasetWithLimitDataStreamName,
expandedDegradedField: 'test_field',
});
await retry.tryForTime(5000, async () => {
const testFieldValue1Exists = await PageObjects.datasetQuality.doesTextExist(
'datasetQualityDetailsDegradedFieldFlyoutFieldValue-values',
MORE_THAN_1024_CHARS
);
const testFieldValue2Exists = await PageObjects.datasetQuality.doesTextExist(
'datasetQualityDetailsDegradedFieldFlyoutFieldValue-values',
ANOTHER_1024_CHARS
);
expect(testFieldValue1Exists).to.be(true);
expect(testFieldValue2Exists).to.be(true);
});
await PageObjects.datasetQuality.closeFlyout();
});
it('should display the maximum character limit when cause is "field character limit exceeded"', async () => {
await PageObjects.datasetQuality.navigateToDetails({
dataStream: degradedDatasetWithLimitDataStreamName,
expandedDegradedField: 'test_field',
});
await retry.tryForTime(5000, async () => {
const limitValueExists = await PageObjects.datasetQuality.doesTextExist(
'datasetQualityDetailsDegradedFieldFlyoutFieldValue-characterLimit',
'1024'
);
expect(limitValueExists).to.be(true);
});
await PageObjects.datasetQuality.closeFlyout();
});
it('should show possible mitigation section with manual options for non integrations', async () => {
await PageObjects.datasetQuality.navigateToDetails({
dataStream: degradedDatasetWithLimitDataStreamName,
expandedDegradedField: 'test_field',
});
// Possible Mitigation Section should exist
await testSubjects.existOrFail(
'datasetQualityDetailsDegradedFieldFlyoutPossibleMitigationTitle'
);
// It's a technical preview
await testSubjects.existOrFail(
'datasetQualityDetailsDegradedFieldFlyoutPossibleMitigationTechPreviewBadge'
);
// Should display Edit/Create Component Template Link option
await testSubjects.existOrFail(
'datasetQualityManualMitigationsCustomComponentTemplateLink'
);
// Should display Edit/Create Ingest Pipeline Link option
await testSubjects.existOrFail('datasetQualityManualMitigationsPipelineAccordion');
// Check the Component Template URL
const button = await testSubjects.find(
'datasetQualityManualMitigationsCustomComponentTemplateLink'
);
const componentTemplateUrl = await button.getAttribute('data-test-url');
// Should point to the index template page for the data stream
expect(componentTemplateUrl).to.be(
`/data/index_management/templates/${degradedDatasetWithLimitDataStreamName}`
);
const nonIntegrationCustomName = `${type}@custom`;
const pipelineInputBox = await testSubjects.find(
'datasetQualityManualMitigationsPipelineName'
);
const pipelineValue = await pipelineInputBox.getAttribute('value');
// Expect the pipeline name to default to logs@custom for non-integration datasets
expect(pipelineValue).to.be(nonIntegrationCustomName);
const pipelineLink = await testSubjects.find(
'datasetQualityManualMitigationsPipelineLink'
);
const pipelineLinkURL = await pipelineLink.getAttribute('data-test-url');
// Expect the pipeline link to point to the ingest pipelines page, pre-filtered by the custom pipeline name
expect(pipelineLinkURL).to.be(
`/app/management/ingest/ingest_pipelines/?pipeline=${encodeURIComponent(
nonIntegrationCustomName
)}`
);
});
it('should show possible mitigation section with different manual options for integrations', async () => {
// Navigate to Integration Dataset
await PageObjects.datasetQuality.navigateToDetails({
dataStream: nginxAccessDataStreamName,
expandedDegradedField: 'test_field',
});
await PageObjects.datasetQuality.waitUntilPossibleMitigationsLoaded();
// Possible Mitigation Section should exist
await testSubjects.existOrFail(
'datasetQualityDetailsDegradedFieldFlyoutPossibleMitigationTitle'
);
// It's a technical preview
await testSubjects.existOrFail(
'datasetQualityDetailsDegradedFieldFlyoutPossibleMitigationTechPreviewBadge'
);
// Should display Edit/Create Component Template Link option
await testSubjects.existOrFail(
'datasetQualityManualMitigationsCustomComponentTemplateLink'
);
// Should display Edit/Create Ingest Pipeline Link option
await testSubjects.existOrFail('datasetQualityManualMitigationsPipelineAccordion');
// Check the Component Template URL
const button = await testSubjects.find(
'datasetQualityManualMitigationsCustomComponentTemplateLink'
);
const componentTemplateUrl = await button.getAttribute('data-test-url');
const integrationSpecificCustomName = `${type}-${nginxAccessDatasetName}@custom`;
// Should point to the integration's @custom component template page
expect(componentTemplateUrl).to.be(
`/data/index_management/component_templates/${encodeURIComponent(
integrationSpecificCustomName
)}`
);
const pipelineInputBox = await testSubjects.find(
'datasetQualityManualMitigationsPipelineName'
);
const pipelineValue = await pipelineInputBox.getAttribute('value');
// Expect the pipeline name to be the integration-specific @custom pipeline
expect(pipelineValue).to.be(integrationSpecificCustomName);
const pipelineLink = await testSubjects.find(
'datasetQualityManualMitigationsPipelineLink'
);
const pipelineLinkURL = await pipelineLink.getAttribute('data-test-url');
// Expect the pipeline link to point to the ingest pipelines page, pre-filtered by the integration's custom pipeline name
expect(pipelineLinkURL).to.be(
`/app/management/ingest/ingest_pipelines/?pipeline=${encodeURIComponent(
integrationSpecificCustomName
)}`
);
});
});
describe('past field limit exceeded', () => {
it('should display cause as "field limit exceeded" when a field is ignored due to field limit issue', async () => {
await PageObjects.datasetQuality.navigateToDetails({
dataStream: degradedDatasetWithLimitDataStreamName,
expandedDegradedField: 'cloud',
});
await retry.tryForTime(5000, async () => {
const fieldLimitMessageExists = await PageObjects.datasetQuality.doesTextExist(
'datasetQualityDetailsDegradedFieldFlyoutFieldValue-cause',
'field limit exceeded'
);
expect(fieldLimitMessageExists).to.be(true);
});
await PageObjects.datasetQuality.closeFlyout();
});
it('should display the current field limit when the cause is "field limit exceeded"', async () => {
await PageObjects.datasetQuality.navigateToDetails({
dataStream: degradedDatasetWithLimitDataStreamName,
expandedDegradedField: 'cloud',
});
await retry.tryForTime(5000, async () => {
const limitExists = await PageObjects.datasetQuality.doesTextExist(
'datasetQualityDetailsDegradedFieldFlyoutFieldValue-mappingLimit',
'25'
);
expect(limitExists).to.be(true);
});
await PageObjects.datasetQuality.closeFlyout();
});
it('should warn users about the issue not present in latest backing index', async () => {
await PageObjects.datasetQuality.navigateToDetails({
dataStream: degradedDatasetWithLimitDataStreamName,
expandedDegradedField: 'cloud',
});
await testSubjects.existOrFail(
PageObjects.datasetQuality.testSubjectSelectors
.datasetQualityDetailsDegradedFieldFlyoutIssueDoesNotExist
);
});
});
describe('current field limit issues', () => {
it('should display increase field limit as a possible mitigation for integrations', async () => {
await PageObjects.datasetQuality.navigateToDetails({
dataStream: nginxAccessDataStreamName,
expandedDegradedField: 'cloud.project.id',
});
// Field Limit Mitigation Section should exist
await testSubjects.existOrFail(
'datasetQualityDetailsDegradedFieldFlyoutFieldLimitMitigationAccordion'
);
// Should display the panel to increase field limit
await testSubjects.existOrFail(
'datasetQualityDetailsDegradedFieldFlyoutIncreaseFieldLimitPanel'
);
// Should display official online documentation link
await testSubjects.existOrFail(
'datasetQualityManualMitigationsPipelineOfficialDocumentationLink'
);
const linkButton = await testSubjects.find(
'datasetQualityManualMitigationsPipelineOfficialDocumentationLink'
);
const linkURL = await linkButton.getAttribute('href');
expect(linkURL).to.be(
'https://www.elastic.co/guide/en/elasticsearch/reference/master/mapping-settings-limit.html'
);
});
it('should display increase field limit as a possible mitigation for non integration', async () => {
await PageObjects.datasetQuality.navigateToDetails({
dataStream: degradedDatasetWithLimitDataStreamName,
expandedDegradedField: 'cloud.project',
});
// Field Limit Mitigation Section should exist
await testSubjects.existOrFail(
'datasetQualityDetailsDegradedFieldFlyoutFieldLimitMitigationAccordion'
);
// Should not display the panel to increase field limit
await testSubjects.missingOrFail(
'datasetQualityDetailsDegradedFieldFlyoutIncreaseFieldLimitPanel'
);
// Should display official online documentation link
await testSubjects.existOrFail(
'datasetQualityManualMitigationsPipelineOfficialDocumentationLink'
);
});
it('should display additional input fields and button increasing the limit for integrations', async () => {
await PageObjects.datasetQuality.navigateToDetails({
dataStream: nginxAccessDataStreamName,
expandedDegradedField: 'cloud.project.id',
});
// Should display current field limit
await testSubjects.existOrFail('datasetQualityIncreaseFieldMappingCurrentLimitFieldText');
const currentFieldLimitInput = await testSubjects.find(
'datasetQualityIncreaseFieldMappingCurrentLimitFieldText'
);
const currentFieldLimitValue = await currentFieldLimitInput.getAttribute('value');
const currentFieldLimit = parseInt(currentFieldLimitValue as string, 10);
const currentFieldLimitDisabledStatus = await currentFieldLimitInput.getAttribute(
'disabled'
);
expect(currentFieldLimit).to.be(44);
expect(currentFieldLimitDisabledStatus).to.be('true');
// Should display new field limit
await testSubjects.existOrFail(
'datasetQualityIncreaseFieldMappingProposedLimitFieldText'
);
const newFieldLimitInput = await testSubjects.find(
'datasetQualityIncreaseFieldMappingProposedLimitFieldText'
);
const newFieldLimitValue = await newFieldLimitInput.getAttribute('value');
const newFieldLimit = parseInt(newFieldLimitValue as string, 10);
// Should be 30% more than the current limit
const newLimit = Math.round(currentFieldLimit * 1.3);
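// e.g. with the current limit of 44 used in this suite, Math.round(44 * 1.3) === 57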
expect(newFieldLimit).to.be(newLimit);
// Should display the apply button
await testSubjects.existOrFail('datasetQualityIncreaseFieldMappingLimitButtonButton');
const applyButton = await testSubjects.find(
'datasetQualityIncreaseFieldMappingLimitButtonButton'
);
const applyButtonDisabledStatus = await applyButton.getAttribute('disabled');
// The apply button should be active
expect(applyButtonDisabledStatus).to.be(null);
});
it('should validate input for new field limit', async () => {
await PageObjects.datasetQuality.navigateToDetails({
dataStream: nginxAccessDataStreamName,
expandedDegradedField: 'cloud.project.id',
});
// Should not allow values less than current limit of 44
await testSubjects.setValue(
'datasetQualityIncreaseFieldMappingProposedLimitFieldText',
'42',
{
clearWithKeyboard: true,
typeCharByChar: true,
}
);
const applyButton = await testSubjects.find(
'datasetQualityIncreaseFieldMappingLimitButtonButton'
);
const applyButtonDisabledStatus = await applyButton.getAttribute('disabled');
// The apply button should be disabled
expect(applyButtonDisabledStatus).to.be('true');
const newFieldLimitInput = await testSubjects.find(
'datasetQualityIncreaseFieldMappingProposedLimitFieldText'
);
const invalidStatus = await newFieldLimitInput.getAttribute('aria-invalid');
expect(invalidStatus).to.be('true');
});
it('should validate and show error callout when API call fails', async () => {
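// Log in with the (non-admin) privileged role, under which the apply-new-limit call is expected to fail and surface the error callout; the suite switches back to admin afterwards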
await PageObjects.svlCommonPage.loginWithPrivilegedRole();
await PageObjects.datasetQuality.navigateToDetails({
dataStream: nginxAccessDataStreamName,
expandedDegradedField: 'cloud.project.id',
});
const applyButton = await testSubjects.find(
'datasetQualityIncreaseFieldMappingLimitButtonButton'
);
await applyButton.click();
await retry.tryForTime(5000, async () => {
// Should display the error callout
await testSubjects.existOrFail('datasetQualityDetailsNewFieldLimitErrorCallout');
});
await PageObjects.svlCommonPage.loginAsAdmin();
});
it('should let user increase the field limit for integrations', async () => {
await PageObjects.datasetQuality.navigateToDetails({
dataStream: nginxAccessDataStreamName,
expandedDegradedField: 'cloud.project.id',
});
const applyButton = await testSubjects.find(
'datasetQualityIncreaseFieldMappingLimitButtonButton'
);
await applyButton.click();
await retry.tryForTime(5000, async () => {
// Should display the success callout
await testSubjects.existOrFail(
'datasetQualityDetailsDegradedFlyoutNewLimitSetSuccessCallout'
);
// Should display link to component template edited
await testSubjects.existOrFail(
'datasetQualityDetailsDegradedFlyoutNewLimitSetCheckComponentTemplate'
);
const ctLink = await testSubjects.find(
'datasetQualityDetailsDegradedFlyoutNewLimitSetCheckComponentTemplate'
);
const ctLinkURL = await ctLink.getAttribute('href');
const componentTemplateName = `${type}-${nginxAccessDatasetName}@custom`;
// Should point to the component template page
expect(
ctLinkURL?.endsWith(
`/data/index_management/component_templates/${encodeURIComponent(
componentTemplateName
)}`
)
).to.be(true);
});
// Refresh the time range to get the latest data
await PageObjects.datasetQuality.refreshDetailsPageData();
// The field limit issue should no longer exist in the latest backing index, so the flyout should show a warning
await testSubjects.existOrFail(
'datasetQualityDetailsDegradedFieldFlyoutIssueDoesNotExist'
);
// Should not display the panel to increase field limit
await testSubjects.missingOrFail(
'datasetQualityDetailsDegradedFieldFlyoutIncreaseFieldLimitPanel'
);
});
});
after(async () => {
await synthtrace.clean();
await esClient.indices.deleteIndexTemplate({
name: degradedDatasetWithLimitDataStreamName,
});
await synthtrace.deleteComponentTemplate(customComponentTemplateName);
await PageObjects.observabilityLogsExplorer.uninstallPackage(nginxPkg);
await synthtrace.deleteComponentTemplate(customComponentTemplateNameNginx);
});
});
});