[Logs+] Enforce dataset names (#166654)
## Summary

Closes #163830

This adds server-side validation to enforce dataset name format rules for custom integrations. It then enhances the custom integrations Kibana package to handle this seamlessly in the create form. There is no client-side validation for the rules per se: as long as the dataset name passes the other validations (length etc.), it is always valid; it just comes down to whether or not it is prefixed.

## Other notes

- Added a "fields pipeline" to improve the readability of the context update.

## UI / UX changes

- Users are informed when a prefix will be added.

<img width="886" alt="Screenshot 2023-09-20 at 13 19 49" src="764d2bd0-03ef-40ce-8dae-107079c15feb">

- If the integration name has been touched, and the dataset name is untouched, the dataset name will automatically match the integration name.

---------

Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com>
parent fabaa2f89e
commit 795ec3e4ad
18 changed files with 358 additions and 89 deletions
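
In short, the enforced rule is: a dataset name must either match the integration name exactly or be prefixed with the integration name and a dot. A small sketch using the helpers this commit introduces in `pipelines/fields.ts` (reproduced here so the snippet is self-contained; the sample names are illustrative, not from the PR):

```ts
// Helpers as introduced in state_machines/create/pipelines/fields.ts.
const getDatasetNamePrefix = (integrationName: string) => `${integrationName}.`;
const datasetNameWillBePrefixed = (datasetName: string, integrationName: string) =>
  datasetName !== integrationName;
const prefixDatasetName = (datasetName: string, integrationName: string) =>
  `${getDatasetNamePrefix(integrationName)}${datasetName}`;

// Illustrative names:
console.log(datasetNameWillBePrefixed('mylogs', 'mylogs')); // false: matches the integration name exactly
console.log(datasetNameWillBePrefixed('access', 'mylogs')); // true: will be prefixed on submit
console.log(prefixDatasetName('access', 'mylogs')); // "mylogs.access"
```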
@@ -28,6 +28,11 @@ import {
} from '../../state_machines/create/types';
import { Dataset, IntegrationError } from '../../types';
import { hasFailedSelector } from '../../state_machines/create/selectors';
import {
  datasetNameWillBePrefixed,
  getDatasetNamePrefix,
  prefixDatasetName,
} from '../../state_machines/create/pipelines/fields';

// NOTE: Hardcoded for now. We will likely extend the functionality here to allow the selection of the type.
// And also to allow adding multiple datasets.
@@ -50,6 +55,7 @@ export const ConnectedCreateCustomIntegrationForm = ({
  testSubjects?: CreateTestSubjects;
}) => {
  const [state, send] = useActor(machineRef);

  const updateIntegrationName = useCallback(
    (integrationName: string) => {
      send({ type: 'UPDATE_FIELDS', fields: { integrationName } });
@@ -181,17 +187,32 @@ export const CreateCustomIntegrationForm = ({
  <EuiIconTip
    content={i18n.translate('customIntegrationsPackage.create.dataset.name.tooltip', {
      defaultMessage:
        'Provide a dataset name to help organise these custom logs. This dataset will be associated with the integration.',
        'Provide a dataset name to help organise these custom logs. This dataset will be associated with the integration. ',
    })}
    position="right"
  />
</EuiFlexItem>
</EuiFlexGroup>
}
helpText={i18n.translate('customIntegrationsPackage.create.dataset.helper', {
  defaultMessage:
    "All lowercase, max 100 chars, special characters will be replaced with '_'.",
})}
helpText={[
  i18n.translate('customIntegrationsPackage.create.dataset.helper', {
    defaultMessage:
      "All lowercase, max 100 chars, special characters will be replaced with '_'.",
  }),
  datasetNameWillBePrefixed(datasetName, integrationName)
    ? i18n.translate(
        'customIntegrationsPackage.create.dataset.name.tooltipPrefixMessage',
        {
          defaultMessage:
            'This name will be prefixed with {prefixValue}, e.g. {prefixedDatasetName}',
          values: {
            prefixValue: getDatasetNamePrefix(integrationName),
            prefixedDatasetName: prefixDatasetName(datasetName, integrationName),
          },
        }
      )
    : '',
].join(' ')}
isInvalid={hasErrors(errors?.fields?.datasets?.[0]?.name) && touchedFields.datasets}
error={errorsList(errors?.fields?.datasets?.[0]?.name)}
>
@@ -6,9 +6,9 @@
 * Side Public License, v 1.
 */

export const replaceSpecialChars = (filename: string) => {
export const replaceSpecialChars = (value: string) => {
  // Replace special characters with _
  const replacedSpecialCharacters = filename.replaceAll(/[^a-zA-Z0-9_]/g, '_');
  const replacedSpecialCharacters = value.replaceAll(/[^a-zA-Z0-9_]/g, '_');
  // Allow only one _ in a row
  const noRepetitions = replacedSpecialCharacters.replaceAll(/[\_]{2,}/g, '_');
  return noRepetitions;
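
Aside (not part of the diff): the sanitisation above is easy to sanity-check in isolation. A minimal sketch with the function copied verbatim from the hunk; the sample inputs are illustrative:

```ts
const replaceSpecialChars = (value: string) => {
  // Replace special characters with _
  const replacedSpecialCharacters = value.replaceAll(/[^a-zA-Z0-9_]/g, '_');
  // Allow only one _ in a row
  const noRepetitions = replacedSpecialCharacters.replaceAll(/[\_]{2,}/g, '_');
  return noRepetitions;
};

console.log(replaceSpecialChars('my-logs!!2023')); // "my_logs_2023"
console.log(replaceSpecialChars('nginx.access')); // "nginx_access" (dots are also replaced)
```

Note that dots are replaced too, which is why the machine keeps dataset names unprefixed internally and only applies the `integrationName.` prefix at the edges (see the pipeline file below).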
@@ -27,11 +27,20 @@ export type CreateCustomIntegrationNotificationEvent =
};

export const CreateIntegrationNotificationEventSelectors = {
  integrationCreated: (context: CreateCustomIntegrationContext) =>
    ({
      type: 'INTEGRATION_CREATED',
      fields: context.fields,
    } as CreateCustomIntegrationNotificationEvent),
  integrationCreated: (
    context: CreateCustomIntegrationContext,
    event: CreateCustomIntegrationEvent
  ) => {
    return 'data' in event && 'integrationName' in event.data && 'datasets' in event.data
      ? ({
          type: 'INTEGRATION_CREATED',
          fields: {
            integrationName: event.data.integrationName,
            datasets: event.data.datasets,
          },
        } as CreateCustomIntegrationNotificationEvent)
      : null;
  },
  integrationCleanup: (
    context: CreateCustomIntegrationContext,
    event: CreateCustomIntegrationEvent
@@ -0,0 +1,134 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import { pipe } from 'fp-ts/lib/pipeable';
import { replaceSpecialChars } from '../../../components/create/utils';
import { CreateCustomIntegrationContext, UpdateFieldsEvent, WithTouchedFields } from '../types';

type ValuesTuple = [CreateCustomIntegrationContext, UpdateFieldsEvent];

// Pipeline for updating the fields and touchedFields properties within context
export const executeFieldsPipeline = (
  context: CreateCustomIntegrationContext,
  event: UpdateFieldsEvent
) => {
  return pipe(
    [context, event] as ValuesTuple,
    updateFields(context),
    updateTouchedFields(context),
    maybeMatchDatasetNameToIntegrationName(context),
    replaceSpecialCharacters(context)
  );
};

const updateFields =
  (originalContext: CreateCustomIntegrationContext) =>
  (values: ValuesTuple): ValuesTuple => {
    const [context, event] = values;

    const mergedContext = {
      ...context,
      fields: {
        ...context.fields,
        ...event.fields,
      },
    };
    return [mergedContext, event];
  };

const updateTouchedFields =
  (originalContext: CreateCustomIntegrationContext) =>
  (values: ValuesTuple): ValuesTuple => {
    const [context, event] = values;

    const mergedContext = {
      ...context,
      touchedFields: {
        ...context.touchedFields,
        ...Object.keys(event.fields).reduce<WithTouchedFields['touchedFields']>(
          (acc, field) => ({ ...acc, [field]: true }),
          {} as WithTouchedFields['touchedFields']
        ),
      },
    };
    return [mergedContext, event];
  };

const maybeMatchDatasetNameToIntegrationName =
  (originalContext: CreateCustomIntegrationContext) =>
  (values: ValuesTuple): ValuesTuple => {
    const [context, event] = values;
    if (context.touchedFields.integrationName && !context.touchedFields.datasets) {
      return [
        {
          ...context,
          fields: {
            ...context.fields,
            datasets: context.fields.datasets.map((dataset, index) => ({
              ...dataset,
              name: index === 0 ? context.fields.integrationName : dataset.name,
            })),
          },
        },
        event,
      ];
    } else {
      return [context, event];
    }
  };

const replaceSpecialCharacters =
  (originalContext: CreateCustomIntegrationContext) =>
  (values: ValuesTuple): ValuesTuple => {
    const [context, event] = values;

    const mergedContext = {
      ...context,
      fields: {
        ...context.fields,
        integrationName: replaceSpecialChars(context.fields.integrationName),
        datasets: context.fields.datasets.map((dataset) => ({
          ...dataset,
          name: replaceSpecialChars(dataset.name),
        })),
      },
    };

    return [mergedContext, event];
  };

export const getDatasetNamePrefix = (integrationName: string) => `${integrationName}.`;
export const datasetNameIsPrefixed = (datasetName: string, integrationName: string) =>
  datasetName.startsWith(getDatasetNamePrefix(integrationName));
export const datasetNameWillBePrefixed = (datasetName: string, integrationName: string) =>
  datasetName !== integrationName;
export const prefixDatasetName = (datasetName: string, integrationName: string) =>
  `${getDatasetNamePrefix(integrationName)}${datasetName}`;

// The content after the integration name prefix.
export const getDatasetNameWithoutPrefix = (datasetName: string, integrationName: string) =>
  datasetNameIsPrefixed(datasetName, integrationName)
    ? datasetName.split(getDatasetNamePrefix(integrationName))[1]
    : datasetName;

// The machine holds unprefixed names internally to dramatically reduce complexity and improve performance for input changes in the UI.
// Prefixed names are used at the outermost edges e.g. when providing initial state and submitting to the API.
export const normalizeDatasetNames = (fields: UpdateFieldsEvent['fields']) => {
  const value = {
    ...fields,
    ...(fields.datasets !== undefined && fields.integrationName !== undefined
      ? {
          datasets: fields.datasets.map((dataset) => ({
            ...dataset,
            name: getDatasetNameWithoutPrefix(dataset.name, fields.integrationName!),
          })),
        }
      : {}),
  };
  return value;
};
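
Aside (not part of the diff): the "fields pipeline" threads a `[context, event]` tuple through a sequence of pure steps, each returning a new tuple. A simplified, dependency-free sketch of the pattern; `Ctx` is a trimmed, hypothetical stand-in for `CreateCustomIntegrationContext`, not the real type:

```ts
type Ctx = {
  fields: { integrationName: string; datasets: Array<{ name: string }> };
  touchedFields: { integrationName: boolean; datasets: boolean };
};
type Evt = { type: 'UPDATE_FIELDS'; fields: Partial<Ctx['fields']> };
type Tuple = [Ctx, Evt];

// Each step receives and returns the tuple, so steps compose freely.
const updateFields = ([ctx, evt]: Tuple): Tuple => [
  { ...ctx, fields: { ...ctx.fields, ...evt.fields } },
  evt,
];

// If the integration name was touched but the dataset name was not,
// mirror the integration name into the first dataset.
const matchDatasetToIntegration = ([ctx, evt]: Tuple): Tuple =>
  ctx.touchedFields.integrationName && !ctx.touchedFields.datasets
    ? [
        {
          ...ctx,
          fields: {
            ...ctx.fields,
            datasets: ctx.fields.datasets.map((d, i) =>
              i === 0 ? { ...d, name: ctx.fields.integrationName } : d
            ),
          },
        },
        evt,
      ]
    : [ctx, evt];

const run = (ctx: Ctx, evt: Evt): Ctx =>
  matchDatasetToIntegration(updateFields([ctx, evt]))[0];
```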
@@ -28,10 +28,12 @@ import {
  DefaultCreateCustomIntegrationContext,
  WithErrors,
  WithPreviouslyCreatedIntegration,
  WithTouchedFields,
  WithFields,
} from './types';
import { replaceSpecialChars } from '../../components/create/utils';
import {
  datasetNameWillBePrefixed,
  executeFieldsPipeline,
  prefixDatasetName,
} from './pipelines/fields';

export const createPureCreateCustomIntegrationStateMachine = (
  initialContext: DefaultCreateCustomIntegrationContext = DEFAULT_CONTEXT
@@ -211,32 +213,12 @@ export const createPureCreateCustomIntegrationStateMachine = (
      : {};
  }),
  storeFields: actions.assign((context, event) => {
    return event.type === 'UPDATE_FIELDS'
      ? ({
          fields: {
            ...context.fields,
            ...event.fields,
            integrationName:
              event.fields.integrationName !== undefined
                ? replaceSpecialChars(event.fields.integrationName)
                : context.fields.integrationName,
            datasets:
              event.fields.datasets !== undefined
                ? event.fields.datasets.map((dataset) => ({
                    ...dataset,
                    name: replaceSpecialChars(dataset.name),
                  }))
                : context.fields.datasets,
          },
          touchedFields: {
            ...context.touchedFields,
            ...Object.keys(event.fields).reduce<WithTouchedFields['touchedFields']>(
              (acc, field) => ({ ...acc, [field]: true }),
              {} as WithTouchedFields['touchedFields']
            ),
          },
        } as WithFields & WithTouchedFields)
      : {};
    if (event.type === 'UPDATE_FIELDS') {
      const [contextResult] = executeFieldsPipeline(context, event);
      return contextResult;
    } else {
      return {};
    }
  }),
  resetValues: actions.assign((context, event) => {
    return {
@@ -315,7 +297,15 @@ export const createCreateCustomIntegrationStateMachine = ({
    }),
  }),
  save: (context) => {
    return integrationsClient.createCustomIntegration(context.fields);
    return integrationsClient.createCustomIntegration({
      ...context.fields,
      datasets: context.fields.datasets.map((dataset) => ({
        ...dataset,
        name: datasetNameWillBePrefixed(dataset.name, context.fields.integrationName)
          ? prefixDatasetName(dataset.name, context.fields.integrationName)
          : dataset.name,
      })),
    });
  },
  cleanup: (context) => {
    return integrationsClient.deleteCustomIntegration({
@@ -19,7 +19,9 @@ export interface WithTouchedFields {
  touchedFields: Record<keyof CreateCustomIntegrationOptions, boolean>;
}

export type CreateInitialState = WithOptions & WithFields & WithPreviouslyCreatedIntegration;
export type CreateInitialState = Partial<WithOptions> &
  Partial<WithFields> &
  WithPreviouslyCreatedIntegration;

export interface WithOptions {
  options: {
@@ -91,11 +93,13 @@ export type CreateCustomIntegrationTypestate =

export type CreateCustomIntegrationContext = CreateCustomIntegrationTypestate['context'];

export interface UpdateFieldsEvent {
  type: 'UPDATE_FIELDS';
  fields: Partial<CreateCustomIntegrationOptions>;
}

export type CreateCustomIntegrationEvent =
  | {
      type: 'UPDATE_FIELDS';
      fields: Partial<CreateCustomIntegrationOptions>;
    }
  | UpdateFieldsEvent
  | {
      type: 'INITIALIZE';
    }
@@ -20,6 +20,8 @@ import {
import { createCreateCustomIntegrationStateMachine } from '../create/state_machine';
import { IIntegrationsClient } from '../services/integrations_client';
import { CustomIntegrationsNotificationChannel } from './notifications';
import { executeFieldsPipeline, normalizeDatasetNames } from '../create/pipelines/fields';
import { CreateInitialState } from '../create/types';

export const createPureCustomIntegrationsStateMachine = (
  initialContext: DefaultCustomIntegrationsContext = DEFAULT_CONTEXT
@@ -95,22 +97,32 @@ export const createCustomIntegrationsStateMachine = ({
  return createPureCustomIntegrationsStateMachine(initialContext).withConfig({
    services: {
      createCustomIntegration: (context) => {
        const getInitialContextForCreate = (initialCreateState: CreateInitialState) => {
          const baseAndOptions = {
            ...DEFAULT_CREATE_CONTEXT,
            ...(initialCreateState ? initialCreateState : {}),
            options: {
              ...DEFAULT_CREATE_CONTEXT.options,
              ...(initialCreateState?.options ? initialCreateState.options : {}),
            },
          };
          const fields = initialCreateState.fields
            ? executeFieldsPipeline(baseAndOptions, {
                type: 'UPDATE_FIELDS',
                fields: normalizeDatasetNames(initialCreateState.fields),
              })[0]
            : {};
          return {
            ...baseAndOptions,
            ...fields,
          };
        };

        return createCreateCustomIntegrationStateMachine({
          integrationsClient,
          initialContext:
            initialState.mode === 'create'
              ? {
                  ...DEFAULT_CREATE_CONTEXT,
                  ...(initialState?.context ? initialState?.context : {}),
                  options: {
                    ...DEFAULT_CREATE_CONTEXT.options,
                    ...(initialState?.context?.options ? initialState.context.options : {}),
                  },
                  fields: {
                    ...DEFAULT_CREATE_CONTEXT.fields,
                    ...(initialState?.context?.fields ? initialState.context.fields : {}),
                  },
                }
            initialState.mode === 'create' && initialState.context
              ? getInitialContextForCreate(initialState.context)
              : DEFAULT_CREATE_CONTEXT,
        });
      },
@@ -19,6 +19,8 @@ import {
  IntegrationNotInstalledError,
  NamingCollisionError,
  UnknownError,
  IntegrationName,
  Dataset,
} from '../../types';

const GENERIC_CREATE_ERROR_MESSAGE = i18n.translate(
@@ -70,6 +72,7 @@ export class IntegrationsClient implements IIntegrationsClient {

      return {
        integrationName: params.integrationName,
        datasets: params.datasets,
        installedAssets: data.items,
      };
    } catch (error) {
@@ -128,7 +131,8 @@ export const createCustomIntegrationResponseRT = rt.exact(
);

export interface CreateCustomIntegrationValue {
  integrationName: string;
  integrationName: IntegrationName;
  datasets: Dataset[];
  installedAssets: AssetList;
}
@@ -9,25 +9,26 @@
import * as rt from 'io-ts';

export const integrationNameRT = rt.string;
export type IntegrationName = rt.TypeOf<typeof integrationNameRT>;

const datasetTypes = rt.keyof({
const datasetTypesRT = rt.keyof({
  logs: null,
  metrics: null,
});

const dataset = rt.exact(
export const datasetRT = rt.exact(
  rt.type({
    name: rt.string,
    type: datasetTypes,
    type: datasetTypesRT,
  })
);

export type Dataset = rt.TypeOf<typeof dataset>;
export type Dataset = rt.TypeOf<typeof datasetRT>;

export const customIntegrationOptionsRT = rt.exact(
  rt.type({
    integrationName: integrationNameRT,
    datasets: rt.array(dataset),
    datasets: rt.array(datasetRT),
  })
);
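
Aside (not part of the diff): for readers unfamiliar with io-ts, codecs like the now-exported `datasetRT` validate unknown input at runtime. A small self-contained sketch; the sample payloads are illustrative:

```ts
import * as rt from 'io-ts';
import { isRight } from 'fp-ts/lib/Either';

const datasetTypesRT = rt.keyof({ logs: null, metrics: null });
const datasetRT = rt.exact(rt.type({ name: rt.string, type: datasetTypesRT }));

// decode returns Either<Errors, Dataset>: Right on success, Left on failure.
console.log(isRight(datasetRT.decode({ name: 'mylogs.access', type: 'logs' }))); // true
console.log(isRight(datasetRT.decode({ name: 'mylogs.access', type: 'traces' }))); // false
```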
@@ -85,6 +85,7 @@ import type {
} from '../../types';
import { getDataStreams } from '../../services/epm/data_streams';
import { NamingCollisionError } from '../../services/epm/packages/custom_integrations/validation/check_naming_collision';
import { DatasetNamePrefixError } from '../../services/epm/packages/custom_integrations/validation/check_dataset_name_format';

const CACHE_CONTROL_10_MINUTES_HEADER: HttpResponseOptions['headers'] = {
  'cache-control': 'max-age=600',
@@ -452,6 +453,13 @@ export const createCustomIntegrationHandler: FleetRequestHandler<
        message: error.message,
      },
    });
  } else if (error instanceof DatasetNamePrefixError) {
    return response.customError({
      statusCode: 422,
      body: {
        message: error.message,
      },
    });
  }
  return await defaultFleetErrorHandler({ error, response });
}
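
Aside (not part of the diff): the practical effect for API consumers is that a payload breaking the prefix rule now yields a 422 with the validation message. A hypothetical browser-side call against the endpoint exercised by the tests below; the headers mirror what those tests send:

```ts
// Hypothetical request; 'mylogs' is an illustrative integration name.
const res = await fetch('/api/fleet/epm/custom_integrations', {
  method: 'POST',
  headers: { 'kbn-xsrf': 'xxxx', 'content-type': 'application/json' },
  body: JSON.stringify({
    force: true,
    integrationName: 'mylogs',
    datasets: [{ name: 'error', type: 'logs' }], // not prefixed with 'mylogs.'
  }),
});
console.log(res.status); // 422
```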
@@ -15,7 +15,7 @@ export const generateDatastreamEntries = (
  const { name, type } = dataset;
  return {
    type,
    dataset: `${packageName}.${name}`,
    dataset: `${name}`,
    title: `Data stream for the ${packageName} custom integration, and ${name} dataset.`,
    package: packageName,
    path: name,
@@ -0,0 +1,37 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import type { CustomPackageDatasetConfiguration } from '../../install';

// Dataset name must either match integration name exactly OR be prefixed with integration name and a dot.
export const checkDatasetsNameFormat = (
  datasets: CustomPackageDatasetConfiguration[],
  integrationName: string
) => {
  const invalidNames = datasets
    .filter((dataset) => {
      const { name } = dataset;
      return name !== integrationName && !name.startsWith(`${integrationName}.`);
    })
    .map((dataset) => dataset.name);

  if (invalidNames.length > 0) {
    throw new DatasetNamePrefixError(
      `Dataset names '${invalidNames.join(
        ', '
      )}' must either match integration name '${integrationName}' exactly or be prefixed with integration name and a dot (e.g. '${integrationName}.<dataset_name>').`
    );
  }
};

export class DatasetNamePrefixError extends Error {
  constructor(message?: string) {
    super(message);
    Object.setPrototypeOf(this, new.target.prototype);
    this.name = 'DatasetNamePrefixError';
  }
}
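
Aside (not part of the diff): a standalone sketch of the rule the validator above enforces; `Dataset` here is a trimmed, hypothetical stand-in for `CustomPackageDatasetConfiguration`, and the names are illustrative:

```ts
type Dataset = { name: string; type: 'logs' | 'metrics' };

const isValidDatasetName = (name: string, integrationName: string) =>
  name === integrationName || name.startsWith(`${integrationName}.`);

const datasets: Dataset[] = [
  { name: 'mylogs', type: 'logs' }, // valid: matches the integration name exactly
  { name: 'mylogs.access', type: 'logs' }, // valid: prefixed with 'mylogs.'
  { name: 'error', type: 'logs' }, // invalid: would make checkDatasetsNameFormat throw
];
console.log(datasets.map((d) => isValidDatasetName(d.name, 'mylogs'))); // [true, true, false]
```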
@@ -103,6 +103,7 @@ import { createAssets } from './custom_integrations';
import { cacheAssets } from './custom_integrations/assets/cache';
import { generateDatastreamEntries } from './custom_integrations/assets/dataset/utils';
import { checkForNamingCollision } from './custom_integrations/validation/check_naming_collision';
import { checkDatasetsNameFormat } from './custom_integrations/validation/check_dataset_name_format';

export async function isPackageInstalled(options: {
  savedObjectsClient: SavedObjectsClientContract;

@@ -784,6 +785,7 @@ export async function installCustomPackage(

  // Validate that we can create this package, validations will throw if they don't pass
  await checkForNamingCollision(savedObjectsClient, pkgName);
  checkDatasetsNameFormat(datasets, pkgName);

  // Compose a packageInfo
  const packageInfo = {
@@ -623,7 +623,7 @@ describe('[Logs onboarding] Custom logs - install elastic agent', () => {
      cy.getByTestSubj('obltOnboardingExploreLogs').should('exist').click();

      cy.url().should('include', '/app/observability-log-explorer');
      cy.get('button').contains('[mylogs] mylogs').should('exist');
      cy.get('button').contains('[Mylogs] mylogs').should('exist');
    });
  });
});
@@ -107,9 +107,9 @@ Cypress.Commands.add('installCustomIntegration', (integrationName: string) => {
      force: true,
      integrationName,
      datasets: [
        { name: 'access', type: 'logs' },
        { name: 'error', type: 'metrics' },
        { name: 'warning', type: 'logs' },
        { name: `${integrationName}.access`, type: 'logs' },
        { name: `${integrationName}.error`, type: 'metrics' },
        { name: `${integrationName}.warning`, type: 'logs' },
      ],
    },
    headers: {
@@ -92,10 +92,14 @@ export function ConfigureLogs() {
          resetOnCreation: false,
          errorOnFailedCleanup: false,
        },
        fields: {
          integrationName,
          datasets: [{ name: datasetName, type: 'logs' as const }],
        },
        ...(integrationName !== undefined && datasetName !== undefined
          ? {
              fields: {
                integrationName,
                datasets: [{ name: datasetName, type: 'logs' as const }],
              },
            }
          : {}),
        previouslyCreatedIntegration: lastCreatedIntegrationOptions,
      },
    }}
@@ -17,9 +17,9 @@ import { InstallElasticAgent } from './install_elastic_agent';
import { SelectLogs } from './select_logs';

interface WizardState {
  integrationName: string;
  integrationName?: string;
  lastCreatedIntegrationOptions?: CustomIntegrationOptions;
  datasetName: string;
  datasetName?: string;
  serviceName: string;
  logFilePaths: string[];
  namespace: string;

@@ -40,8 +40,8 @@ interface WizardState {
}

const initialState: WizardState = {
  integrationName: '',
  datasetName: '',
  integrationName: undefined,
  datasetName: undefined,
  serviceName: '',
  logFilePaths: [''],
  namespace: 'default',
@@ -39,9 +39,9 @@ export default function (providerContext: FtrProviderContext) {
          force: true,
          integrationName: INTEGRATION_NAME,
          datasets: [
            { name: 'access', type: 'logs' },
            { name: 'error', type: 'metrics' },
            { name: 'warning', type: 'logs' },
            { name: `${INTEGRATION_NAME}.access`, type: 'logs' },
            { name: `${INTEGRATION_NAME}.error`, type: 'metrics' },
            { name: `${INTEGRATION_NAME}.warning`, type: 'logs' },
          ],
        })
        .expect(200);

@@ -108,9 +108,9 @@ export default function (providerContext: FtrProviderContext) {
          force: true,
          integrationName: INTEGRATION_NAME,
          datasets: [
            { name: 'access', type: 'logs' },
            { name: 'error', type: 'metrics' },
            { name: 'warning', type: 'logs' },
            { name: `${INTEGRATION_NAME}.access`, type: 'logs' },
            { name: `${INTEGRATION_NAME}.error`, type: 'metrics' },
            { name: `${INTEGRATION_NAME}.warning`, type: 'logs' },
          ],
        })
        .expect(200);

@@ -123,9 +123,9 @@ export default function (providerContext: FtrProviderContext) {
          force: true,
          integrationName: INTEGRATION_NAME,
          datasets: [
            { name: 'access', type: 'logs' },
            { name: 'error', type: 'metrics' },
            { name: 'warning', type: 'logs' },
            { name: `${INTEGRATION_NAME}.access`, type: 'logs' },
            { name: `${INTEGRATION_NAME}.error`, type: 'metrics' },
            { name: `${INTEGRATION_NAME}.warning`, type: 'logs' },
          ],
        })
        .expect(409);

@@ -145,7 +145,7 @@ export default function (providerContext: FtrProviderContext) {
        .send({
          force: true,
          integrationName: pkgName,
          datasets: [{ name: 'error', type: 'logs' }],
          datasets: [{ name: `${INTEGRATION_NAME}.error`, type: 'logs' }],
        })
        .expect(409);

@@ -153,5 +153,48 @@ export default function (providerContext: FtrProviderContext) {
        `Failed to create the integration as an integration with the name ${pkgName} already exists in the package registry or as a bundled package.`
      );
    });

    it('Throws an error when dataset names are not prefixed correctly', async () => {
      const response = await supertest
        .post(`/api/fleet/epm/custom_integrations`)
        .set('kbn-xsrf', 'xxxx')
        .type('application/json')
        .send({
          force: true,
          integrationName: INTEGRATION_NAME,
          datasets: [{ name: 'error', type: 'logs' }],
        })
        .expect(422);

      expect(response.body.message).to.be(
        `Dataset names 'error' must either match integration name '${INTEGRATION_NAME}' exactly or be prefixed with integration name and a dot (e.g. '${INTEGRATION_NAME}.<dataset_name>').`
      );

      await uninstallPackage();

      await supertest
        .post(`/api/fleet/epm/custom_integrations`)
        .set('kbn-xsrf', 'xxxx')
        .type('application/json')
        .send({
          force: true,
          integrationName: INTEGRATION_NAME,
          datasets: [{ name: INTEGRATION_NAME, type: 'logs' }],
        })
        .expect(200);

      await uninstallPackage();

      await supertest
        .post(`/api/fleet/epm/custom_integrations`)
        .set('kbn-xsrf', 'xxxx')
        .type('application/json')
        .send({
          force: true,
          integrationName: INTEGRATION_NAME,
          datasets: [{ name: `${INTEGRATION_NAME}.error`, type: 'logs' }],
        })
        .expect(200);
    });
  });
}