[Logs+] API to create a basic integration (#160777)

## Summary

Closes https://github.com/elastic/kibana/issues/159991

The fields used to fulfil the `basic` and `agent` field assets can easily be amended later if any of them turn out to be incorrect.

Multiple datasets are supported, and these can span more than one data stream type (for example, both `logs` and `metrics`).

## Testing

A curl command similar to the following should allow you to hit the API
(adjust the credentials and `<BASE_PATH>` for your setup):

```
curl -XPOST -u 'elastic:changeme' -H 'kbn-xsrf: something' -H 'Content-Type: application/json' -d '{
    "integrationName": "web_custom_nginx",
    "datasets": [{"name": "access", "type": "logs"}, {"name": "error", "type": "metrics"}, {"name": "warning", "type":"logs"}]
}' 'http://localhost:5601/<BASE_PATH>/api/fleet/epm/custom_integrations'
```

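A successful call should return the installed asset references plus an install source marker. A rough sketch of the response shape for the request above (ids vary with the datasets sent; see the handler and the functional test further down):

```
// Sketch only; not the exact server output.
const exampleResponse = {
  items: [
    { id: 'logs-web_custom_nginx.access-1.0.0', type: 'ingest_pipeline' },
    { id: 'logs-web_custom_nginx.access', type: 'index_template' },
    { id: 'logs-web_custom_nginx.access@package', type: 'component_template' },
  ],
  _meta: { install_source: 'custom' },
};
```
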
## History / context

- [Prototype
learnings](https://github.com/elastic/kibana/issues/158552#issuecomment-1598685163)
- [Prototype PR](https://github.com/elastic/kibana/pull/160003)

## Results / expectations

API response (with installed assets):

![Screenshot 2023-07-05 at 16 56 33](fc4a0bab-7057-430a-8c03-18dd4ee17ab7)

We see the custom integration in "installed integrations" (albeit with a
verification warning):

![Screenshot 2023-07-05 at 16 57 14](0c9177d2-2871-490f-9b5c-f338e96484c4)

We see the custom integration in Discover with the logs explorer
profile:

![Screenshot 2023-07-05 at 16 58 20](30c556f2-9fcd-416e-8047-5976fc11ffa2)

The assets are installed correctly:

![Screenshot 2023-07-05 at 16 59 06](abb82632-f619-4fc3-be93-dc6ce97abedd)

![Screenshot 2023-07-05 at 16 59 20](ca1c1da5-1e4b-422c-9edb-0f56e0ed3f98)

![Screenshot 2023-07-05 at 16 59 36](8bd60d7e-aebc-4833-b423-eba3336fb42c)
## Changes

Commit 5b89675835 (parent 52c645f886), authored by Kerry Gallagher, committed via GitHub on 2023-07-13 10:00:58 +01:00. 21 changed files with 821 additions and 3 deletions.

```
@@ -30,6 +30,9 @@ export const PACKAGE_TEMPLATE_SUFFIX = '@package';
export const USER_SETTINGS_TEMPLATE_SUFFIX = '@custom';
export const DATASET_VAR_NAME = 'data_stream.dataset';
export const CUSTOM_INTEGRATION_PACKAGE_SPEC_VERSION = '2.9.0';
/*
Package rules:
| | autoUpdatePackages |
```

```
@@ -34,6 +34,7 @@ export const EPM_API_ROUTES = {
DATA_STREAMS_PATTERN: `${EPM_API_ROOT}/data_streams`,
INSTALL_FROM_REGISTRY_PATTERN: EPM_PACKAGES_ONE,
INSTALL_BY_UPLOAD_PATTERN: EPM_PACKAGES_MANY,
CUSTOM_INTEGRATIONS_PATTERN: `${EPM_API_ROOT}/custom_integrations`,
DELETE_PATTERN: EPM_PACKAGES_ONE,
FILEPATH_PATTERN: `${EPM_PACKAGES_ONE}/{filePath*}`,
CATEGORIES_PATTERN: `${EPM_API_ROOT}/categories`,
```

```
@@ -34,7 +34,7 @@ export interface DefaultPackagesInstallationError {
}
export type InstallType = 'reinstall' | 'reupdate' | 'rollback' | 'update' | 'install' | 'unknown';
-export type InstallSource = 'registry' | 'upload' | 'bundled';
+export type InstallSource = 'registry' | 'upload' | 'bundled' | 'custom';
export type EpmPackageInstallStatus = 'installed' | 'installing' | 'install_failed';
```

```
@@ -52,6 +52,7 @@ import type {
UpdatePackageRequestSchema,
GetLimitedPackagesRequestSchema,
GetBulkAssetsRequestSchema,
CreateCustomIntegrationRequestSchema,
} from '../../types';
import {
bulkInstallPackages,
@@ -403,6 +404,45 @@ export const installPackageFromRegistryHandler: FleetRequestHandler<
return await defaultFleetErrorHandler({ error: res.error, response });
}
};
export const createCustomIntegrationHandler: FleetRequestHandler<
undefined,
undefined,
TypeOf<typeof CreateCustomIntegrationRequestSchema.body>
> = async (context, request, response) => {
const coreContext = await context.core;
const fleetContext = await context.fleet;
const savedObjectsClient = fleetContext.internalSoClient;
const esClient = coreContext.elasticsearch.client.asInternalUser;
const user = (await appContextService.getSecurity()?.authc.getCurrentUser(request)) || undefined;
const kibanaVersion = appContextService.getKibanaVersion();
const authorizationHeader = HTTPAuthorizationHeader.parseFromRequest(request, user?.username);
const spaceId = fleetContext.spaceId;
const { integrationName, force, datasets } = request.body;
const res = await installPackage({
installSource: 'custom',
savedObjectsClient,
pkgName: integrationName,
datasets,
esClient,
spaceId,
force,
authorizationHeader,
kibanaVersion,
});
if (!res.error) {
const body: InstallPackageResponse = {
items: res.assets || [],
_meta: {
install_source: res.installSource,
},
};
return response.ok({ body });
} else {
return await defaultFleetErrorHandler({ error: res.error, response });
}
};
const bulkInstallServiceResponseToHttpEntry = (
result: BulkInstallResponse
```

```
@@ -43,6 +43,7 @@ import {
UpdatePackageRequestSchemaDeprecated,
ReauthorizeTransformRequestSchema,
GetDataStreamsRequestSchema,
CreateCustomIntegrationRequestSchema,
} from '../../types';
import {
@@ -62,6 +63,7 @@
getVerificationKeyIdHandler,
reauthorizeTransformsHandler,
getDataStreamsHandler,
createCustomIntegrationHandler,
} from './handlers';
const MAX_FILE_SIZE_BYTES = 104857600; // 100MB
@@ -196,6 +198,17 @@ export const registerRoutes = (router: FleetAuthzRouter) => {
installPackageByUploadHandler
);
router.post(
{
path: EPM_API_ROUTES.CUSTOM_INTEGRATIONS_PATTERN,
validate: CreateCustomIntegrationRequestSchema,
fleetAuthz: {
integrations: { installPackages: true },
},
},
createCustomIntegrationHandler
);
router.delete(
{
path: EPM_API_ROUTES.DELETE_PATTERN,
```

```
@@ -0,0 +1,24 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { setArchiveEntry, setArchiveFilelist } from '../../../archive';
interface Assets {
path: string;
content: Buffer;
}
export const cacheAssets = (assets: Assets[], name: string, version: string) => {
const paths = assets.map((asset) => asset.path);
setArchiveFilelist({ name, version }, paths);
assets.forEach((asset) => {
setArchiveEntry(asset.path, asset.content);
});
return paths;
};
```

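A usage sketch for this helper (hypothetical values; `installCustomPackage` further down calls it with the output of `createAssets`), so the installer can later read the generated files back out of the in-memory archive by path:

```
// Hypothetical single asset; real callers pass the full generated set.
const assets = [
  {
    path: 'web_custom_nginx-1.0.0/manifest.yml',
    content: Buffer.from('format_version: 2.9.0\n'),
  },
];
const paths = cacheAssets(assets, 'web_custom_nginx', '1.0.0');
// => ['web_custom_nginx-1.0.0/manifest.yml']
```
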
```
@@ -0,0 +1,292 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import * as yaml from 'js-yaml';
export const createBaseFields = () => {
const fields = [
{ name: 'data_stream.type', type: 'constant_keyword', description: 'Data stream type.' },
{ name: 'data_stream.dataset', type: 'constant_keyword', description: 'Data stream dataset.' },
{
name: 'data_stream.namespace',
type: 'constant_keyword',
description: 'Data stream namespace.',
},
{ name: '@timestamp', type: 'date', description: 'Event timestamp.' },
];
return yaml.dump(fields);
};
export const createAgentFields = () => {
const fields = [
{
name: 'cloud',
title: 'Cloud',
group: 2,
description: 'Fields related to the cloud or infrastructure the events are coming from.',
footnote:
'Examples: If Metricbeat is running on an EC2 host and fetches data from its host, the cloud info contains the data about this machine. If Metricbeat runs on a remote machine outside the cloud and fetches data from a service running in the cloud, the field contains cloud data from the machine the service is running on.',
type: 'group',
fields: [
{
name: 'account.id',
level: 'extended',
type: 'keyword',
ignore_above: 1024,
description:
'The cloud account or organization id used to identify different entities in a multi-tenant environment.\nExamples: AWS account id, Google Cloud ORG Id, or other unique identifier.',
example: 666777888999,
},
{
name: 'availability_zone',
level: 'extended',
type: 'keyword',
ignore_above: 1024,
description: 'Availability zone in which this host is running.',
example: 'us-east-1c',
},
{
name: 'instance.id',
level: 'extended',
type: 'keyword',
ignore_above: 1024,
description: 'Instance ID of the host machine.',
example: 'i-1234567890abcdef0',
},
{
name: 'instance.name',
level: 'extended',
type: 'keyword',
ignore_above: 1024,
description: 'Instance name of the host machine.',
},
{
name: 'machine.type',
level: 'extended',
type: 'keyword',
ignore_above: 1024,
description: 'Machine type of the host machine.',
example: 't2.medium',
},
{
name: 'provider',
level: 'extended',
type: 'keyword',
ignore_above: 1024,
description:
'Name of the cloud provider. Example values are aws, azure, gcp, or digitalocean.',
example: 'aws',
},
{
name: 'region',
level: 'extended',
type: 'keyword',
ignore_above: 1024,
description: 'Region in which this host is running.',
example: 'us-east-1',
},
{
name: 'project.id',
type: 'keyword',
description: 'Name of the project in Google Cloud.',
},
{
name: 'image.id',
type: 'keyword',
description: 'Image ID for the cloud instance.',
},
],
},
{
name: 'container',
title: 'Container',
group: 2,
description:
'Container fields are used for meta information about the specific container that is the source of information.\nThese fields help correlate data based containers from any runtime.',
type: 'group',
fields: [
{
name: 'id',
level: 'core',
type: 'keyword',
ignore_above: 1024,
description: 'Unique container id.',
},
{
name: 'image.name',
level: 'extended',
type: 'keyword',
ignore_above: 1024,
description: 'Name of the image the container was built on.',
},
{
name: 'labels',
level: 'extended',
type: 'object',
object_type: 'keyword',
description: 'Image labels.',
},
{
name: 'name',
level: 'extended',
type: 'keyword',
ignore_above: 1024,
description: 'Container name.',
},
],
},
{
name: 'host',
title: 'Host',
group: 2,
description:
'A host is defined as a general computing instance.\nECS host.* fields should be populated with details about the host on which the event happened, or from which the measurement was taken. Host types include hardware, virtual machines, Docker containers, and Kubernetes nodes.',
type: 'group',
fields: [
{
name: 'architecture',
level: 'core',
type: 'keyword',
ignore_above: 1024,
description: 'Operating system architecture.',
example: 'x86_64',
},
{
name: 'domain',
level: 'extended',
type: 'keyword',
ignore_above: 1024,
description:
"Name of the domain of which the host is a member.\nFor example, on Windows this could be the host's Active Directory domain or NetBIOS domain name. For Linux this could be the domain of the host's LDAP provider.",
example: 'CONTOSO',
default_field: false,
},
{
name: 'hostname',
level: 'core',
type: 'keyword',
ignore_above: 1024,
description:
'Hostname of the host.\nIt normally contains what the `hostname` command returns on the host machine',
},
{
name: 'id',
level: 'core',
type: 'keyword',
ignore_above: 1024,
description:
'Unique host id.\nAs hostname is not always unique, use values that are meaningful in your environment.\nExample: The current usage of `beat.name`',
},
{
name: 'ip',
level: 'core',
type: 'ip',
description: 'Host ip addresses.',
},
{
name: 'mac',
level: 'core',
type: 'keyword',
ignore_above: 1024,
description: 'Host mac addresses.',
},
{
name: 'name',
level: 'core',
type: 'keyword',
ignore_above: 1024,
description:
'Name of the host.\nIt can contain what `hostname` returns on Unix systems, the fully qualified domain name, or a name specified by the user. The sender decides which value to use',
},
{
name: 'os.family',
level: 'extended',
type: 'keyword',
ignore_above: 1024,
description: 'OS family (such as redhat, debian, freebsd, windows).',
example: 'debian',
},
{
name: 'os.kernel',
level: 'extended',
type: 'keyword',
ignore_above: 1024,
description: 'Operating system kernel version as a raw string.',
example: '4.4.0-112-generic',
},
{
name: 'os.name',
level: 'extended',
type: 'keyword',
ignore_above: 1024,
multi_fields: [
{
name: 'text',
type: 'text',
norms: false,
default_field: false,
},
],
description: 'Operating system name, without the version.',
example: 'Mac OS X',
},
{
name: 'os.platform',
level: 'extended',
type: 'keyword',
ignore_above: 1024,
description: 'Operating system platform (such centos, ubuntu, windows).',
example: 'darwin',
},
{
name: 'os.version',
level: 'extended',
type: 'keyword',
ignore_above: 1024,
description: 'Operating system version as a raw string.',
example: '10.14.1',
},
{
name: 'type',
level: 'core',
type: 'keyword',
ignore_above: 1024,
description:
'Type of host.\nFor Cloud providers this can be the machine type like `t2.medium`. If vm, this could be the container, for example, or other information meaningful in your environment',
},
{
name: 'containerized',
type: 'boolean',
description: 'If the host is a container.\n',
},
{
name: 'os.build',
type: 'keyword',
example: '18D109',
description: 'OS build information.\n',
},
{
name: 'os.codename',
type: 'keyword',
example: 'stretch',
description: 'OS codename, if any.\n',
},
],
},
{
name: 'input.type',
type: 'keyword',
description: 'Input type',
},
{
name: 'log.offset',
type: 'long',
description: 'Log offset',
},
];
return yaml.dump(fields);
};
```

```
@@ -0,0 +1,10 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
export * from './fields';
export * from './manifest';
export * from './utils';
```

```
@@ -0,0 +1,29 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import * as yaml from 'js-yaml';
// NOTE: The install methods will take care of adding a reference to a @custom pipeline. We don't need to add one here.
export const createDefaultPipeline = (dataset: string, type: string) => {
const pipeline = {
processors: [
{
set: {
description: "If '@timestamp' is missing, set it with the ingest timestamp",
field: '@timestamp',
override: false,
copy_from: '_ingest.timestamp',
},
},
],
_meta: {
description: `default pipeline for the ${dataset} dataset`,
managed: true,
},
};
return yaml.dump(pipeline);
};
```

```
@@ -0,0 +1,21 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import * as yaml from 'js-yaml';
import { convertStringToTitle } from '../../utils';
import type { AssetOptions } from '../generate';
export const createDatasetManifest = (dataset: string, assetOptions: AssetOptions) => {
const { format_version: formatVersion, type } = assetOptions;
const manifest = {
format_version: formatVersion,
dataset,
title: convertStringToTitle(dataset),
type,
};
return yaml.dump(manifest);
};
```

```
@@ -0,0 +1,27 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import type { CustomPackageDatasetConfiguration } from '../../../install';
export const generateDatastreamEntries = (
datasets: CustomPackageDatasetConfiguration[],
packageName: string
) => {
return datasets.map((dataset) => {
const { name, type } = dataset;
return {
type,
dataset: `${packageName}.${name}`,
title: `Data stream for the ${packageName} custom integration, and ${name} dataset.`,
package: packageName,
path: name,
release: 'ga' as const,
// NOTE: This ensures our default.yml pipeline is used as the default_pipeline in the index template
ingest_pipeline: 'default',
};
});
};
```

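As a worked example, the entry generated for the `access` dataset from the curl command above expands as follows (shape read straight from the function; shown as a sketch):

```
const entries = generateDatastreamEntries([{ name: 'access', type: 'logs' }], 'web_custom_nginx');
// entries[0]:
// {
//   type: 'logs',
//   dataset: 'web_custom_nginx.access',
//   title: 'Data stream for the web_custom_nginx custom integration, and access dataset.',
//   package: 'web_custom_nginx',
//   path: 'access',
//   release: 'ga',
//   ingest_pipeline: 'default',
// }
```
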
```
@@ -0,0 +1,63 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import type { ArchivePackage } from '../../../../../../common';
import { pkgToPkgKey } from '../../../registry';
import type { CustomPackageDatasetConfiguration } from '../../install';
import { createAgentFields, createBaseFields, createDatasetManifest } from './dataset';
import { createDefaultPipeline } from './dataset/ingest_pipeline';
import { createManifest } from './manifest';
export type AssetOptions = ArchivePackage & {
kibanaVersion: string;
datasets: CustomPackageDatasetConfiguration[];
};
// Mimic the use of an archive buffer via the same naming conventions
export const createAssets = (assetOptions: AssetOptions) => {
const { name, version, datasets } = assetOptions;
return [
{
path: `${pkgToPkgKey({ name, version })}/manifest.yml`,
content: Buffer.from(createManifest(assetOptions)),
},
...datasets
.map((datasetConfiguration) => {
const { name: datasetName, type: datasetType } = datasetConfiguration;
return [
{
path: `${pkgToPkgKey({ name, version })}/data_stream/${datasetName}/manifest.yml`,
content: Buffer.from(createDatasetManifest(datasetName, assetOptions)),
},
// NOTE: buildDefaultSettings() will add a reference to the global ILM policy when
// building the index template based on the fields assets.
{
path: `${pkgToPkgKey({
name,
version,
})}/data_stream/${datasetName}/fields/base-fields.yml`,
content: Buffer.from(createBaseFields()),
},
{
path: `${pkgToPkgKey({ name, version })}/data_stream/${datasetName}/fields/agent.yml`,
content: Buffer.from(createAgentFields()),
},
{
path: `${pkgToPkgKey({
name,
version,
})}/data_stream/${datasetName}/elasticsearch/ingest_pipeline/default.yml`,
content: Buffer.from(createDefaultPipeline(datasetName, datasetType)),
},
];
})
.flat(),
];
};
```

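For a single `access` dataset, the helper above should produce the following archive layout (a sketch assuming `pkgToPkgKey({ name, version })` renders `name-version`):

```
declare const assetOptions: AssetOptions; // hypothetical, composed as in installCustomPackage()
const paths = createAssets(assetOptions).map((asset) => asset.path);
// => [
//   'web_custom_nginx-1.0.0/manifest.yml',
//   'web_custom_nginx-1.0.0/data_stream/access/manifest.yml',
//   'web_custom_nginx-1.0.0/data_stream/access/fields/base-fields.yml',
//   'web_custom_nginx-1.0.0/data_stream/access/fields/agent.yml',
//   'web_custom_nginx-1.0.0/data_stream/access/elasticsearch/ingest_pipeline/default.yml',
// ]
```
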
```
@@ -0,0 +1,10 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
export * from './generate';
export * from './manifest';
export * from './dataset';
```

```
@@ -0,0 +1,38 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import * as yaml from 'js-yaml';
import type { AssetOptions } from './generate';
export const createManifest = (assetOptions: AssetOptions) => {
const {
format_version: formatVersion,
name,
title,
description,
version,
owner,
kibanaVersion,
} = assetOptions;
const manifest = {
format_version: formatVersion,
name,
title,
description,
version,
owner,
type: 'integration' as const,
conditions: {
kibana: {
version: kibanaVersion,
},
},
};
return yaml.dump(manifest);
};
```

```
@@ -0,0 +1,8 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
export const INITIAL_VERSION = '1.0.0';
```

```
@@ -0,0 +1,10 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
export * from './assets';
export * from './constants';
export * from './utils';
```

```
@@ -0,0 +1,18 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
export const convertStringToTitle = (name: string) => {
return name
.split('_')
.map((word) => {
return word[0].toUpperCase() + word.substring(1);
})
.join(' ');
};
export const generateDescription = (datasetNames: string[]) =>
`Collect logs for the datasets: ${datasetNames.join(', ')}`;
```

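Quick examples of the two helpers, using names from the curl command above:

```
convertStringToTitle('web_custom_nginx'); // => 'Web Custom Nginx'
generateDescription(['access', 'error']); // => 'Collect logs for the datasets: access, error'
```
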
```
@@ -25,6 +25,8 @@ import { uniqBy } from 'lodash';
import type { LicenseType } from '@kbn/licensing-plugin/server';
import type { PackageDataStreamTypes } from '../../../../common/types';
import type { HTTPAuthorizationHeader } from '../../../../common/http_authorization_header';
import { isPackagePrerelease, getNormalizedDataStreams } from '../../../../common/services';
@@ -49,7 +51,11 @@ import type {
PackageVerificationResult,
RegistryDataStream,
} from '../../../types';
import { AUTO_UPGRADE_POLICIES_PACKAGES, DATASET_VAR_NAME } from '../../../../common/constants';
import {
AUTO_UPGRADE_POLICIES_PACKAGES,
CUSTOM_INTEGRATION_PACKAGE_SPEC_VERSION,
DATASET_VAR_NAME,
} from '../../../../common/constants';
import {
type FleetError,
PackageOutdatedError,
@@ -91,6 +97,11 @@ import { _installPackage } from './_install_package';
import { removeOldAssets } from './cleanup';
import { getBundledPackages } from './bundled_packages';
import { withPackageSpan } from './utils';
import { convertStringToTitle, generateDescription } from './custom_integrations/utils';
import { INITIAL_VERSION } from './custom_integrations/constants';
import { createAssets } from './custom_integrations';
import { cacheAssets } from './custom_integrations/assets/cache';
import { generateDatastreamEntries } from './custom_integrations/assets/dataset/utils';
export async function isPackageInstalled(options: {
savedObjectsClient: SavedObjectsClientContract;
@@ -288,6 +299,21 @@ interface InstallRegistryPackageParams {
prerelease?: boolean;
authorizationHeader?: HTTPAuthorizationHeader | null;
}
export interface CustomPackageDatasetConfiguration {
name: string;
type: PackageDataStreamTypes;
}
interface InstallCustomPackageParams {
savedObjectsClient: SavedObjectsClientContract;
pkgName: string;
datasets: CustomPackageDatasetConfiguration[];
esClient: ElasticsearchClient;
spaceId: string;
force?: boolean;
authorizationHeader?: HTTPAuthorizationHeader | null;
kibanaVersion: string;
}
interface InstallUploadedArchiveParams {
savedObjectsClient: SavedObjectsClientContract;
esClient: ElasticsearchClient;
@@ -424,7 +450,7 @@ function getElasticSubscription(packageInfo: ArchivePackage) {
async function installPackageCommon(options: {
pkgName: string;
pkgVersion: string;
-installSource: 'registry' | 'upload';
+installSource: 'registry' | 'upload' | 'custom';
installedPkg?: SavedObject<Installation>;
installType: InstallType;
savedObjectsClient: SavedObjectsClientContract;
@@ -659,6 +685,7 @@ export type InstallPackageParams = {
| ({ installSource: Extract<InstallSource, 'registry'> } & InstallRegistryPackageParams)
| ({ installSource: Extract<InstallSource, 'upload'> } & InstallUploadedArchiveParams)
| ({ installSource: Extract<InstallSource, 'bundled'> } & InstallUploadedArchiveParams)
| ({ installSource: Extract<InstallSource, 'custom'> } & InstallCustomPackageParams)
);
export async function installPackage(args: InstallPackageParams): Promise<InstallResult> {
@@ -723,10 +750,72 @@ export async function installPackage(args: InstallPackageParams): Promise<InstallResult>
authorizationHeader,
});
return response;
} else if (args.installSource === 'custom') {
const { pkgName, force, datasets, spaceId, kibanaVersion } = args;
const response = await installCustomPackage({
savedObjectsClient,
pkgName,
datasets,
esClient,
spaceId,
force,
authorizationHeader,
kibanaVersion,
});
return response;
}
throw new Error(`Unknown installSource: ${args.installSource}`);
}
export async function installCustomPackage(
args: InstallCustomPackageParams
): Promise<InstallResult> {
const {
savedObjectsClient,
esClient,
spaceId,
pkgName,
force,
authorizationHeader,
datasets,
kibanaVersion,
} = args;
// Compose a packageInfo
const packageInfo = {
format_version: CUSTOM_INTEGRATION_PACKAGE_SPEC_VERSION,
name: pkgName,
title: convertStringToTitle(pkgName),
description: generateDescription(datasets.map((dataset) => dataset.name)),
version: INITIAL_VERSION,
owner: { github: authorizationHeader?.username ?? 'unknown' },
type: 'integration' as const,
data_streams: generateDatastreamEntries(datasets, pkgName),
};
const assets = createAssets({
...packageInfo,
kibanaVersion,
datasets,
});
const paths = cacheAssets(assets, pkgName, INITIAL_VERSION);
return await installPackageCommon({
pkgName,
pkgVersion: INITIAL_VERSION,
installSource: 'custom',
installType: 'install',
savedObjectsClient,
esClient,
spaceId,
force,
packageInfo,
paths,
authorizationHeader,
});
}
export const updateVersion = async (
savedObjectsClient: SavedObjectsClientContract,
pkgName: string,
```

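For illustration, a direct call into the new branch would look roughly like this (a sketch: the clients are hypothetical stand-ins for what the route handler pulls from the request context, and `installPackage` is the export above):

```
import type { ElasticsearchClient, SavedObjectsClientContract } from '@kbn/core/server';

// Hypothetical wiring; in Kibana these come from the request context,
// as in createCustomIntegrationHandler above.
declare const savedObjectsClient: SavedObjectsClientContract;
declare const esClient: ElasticsearchClient;

async function installExample() {
  return installPackage({
    installSource: 'custom',
    pkgName: 'web_custom_nginx',
    datasets: [{ name: 'access', type: 'logs' }],
    savedObjectsClient,
    esClient,
    spaceId: 'default',
    kibanaVersion: '8.10.0', // hypothetical version string
  });
}
```
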
```
@@ -194,6 +194,25 @@ export const InstallPackageByUploadRequestSchema = {
body: schema.buffer(),
};
export const CreateCustomIntegrationRequestSchema = {
body: schema.object({
integrationName: schema.string(),
datasets: schema.arrayOf(
schema.object({
name: schema.string(),
type: schema.oneOf([
schema.literal('logs'),
schema.literal('metrics'),
schema.literal('traces'),
schema.literal('synthetics'),
schema.literal('profiling'),
]),
})
),
force: schema.maybe(schema.boolean()),
}),
};
export const DeletePackageRequestSchema = {
params: schema.object({
pkgName: schema.string(),
```

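A body that should validate against this schema; it mirrors the curl example, with the optional `force` flag set:

```
// Sketch: CreateCustomIntegrationRequestSchema.body.validate(body) should accept this.
const body = {
  integrationName: 'web_custom_nginx',
  datasets: [
    { name: 'access', type: 'logs' },
    { name: 'error', type: 'metrics' },
  ],
  force: true,
};
```
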
```
@@ -21,6 +21,7 @@ export default function loadTests({ loadTestFile, getService }) {
loadTestFile(require.resolve('./ilm'));
loadTestFile(require.resolve('./install_bundled'));
loadTestFile(require.resolve('./install_by_upload'));
loadTestFile(require.resolve('./install_custom'));
loadTestFile(require.resolve('./install_endpoint'));
loadTestFile(require.resolve('./install_overrides'));
loadTestFile(require.resolve('./install_prerelease'));
```

```
@@ -0,0 +1,102 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import expect from '@kbn/expect';
import { PACKAGES_SAVED_OBJECT_TYPE } from '@kbn/fleet-plugin/common';
import { FtrProviderContext } from '../../../api_integration/ftr_provider_context';
const INTEGRATION_NAME = 'my_custom_nginx';
const INTEGRATION_VERSION = '1.0.0';
export default function (providerContext: FtrProviderContext) {
const { getService } = providerContext;
const supertest = getService('supertest');
const kibanaServer = getService('kibanaServer');
const uninstallPackage = async () => {
await supertest
.delete(`/api/fleet/epm/packages/${INTEGRATION_NAME}/${INTEGRATION_VERSION}`)
.set('kbn-xsrf', 'xxxx');
};
describe('Installing custom integrations', async () => {
afterEach(async () => {
await uninstallPackage();
});
it("Correctly installs a custom integration and all of its assets", async () => {
const response = await supertest
.post(`/api/fleet/epm/custom_integrations`)
.set('kbn-xsrf', 'xxxx')
.type('application/json')
.send({
force: true,
integrationName: 'my_custom_nginx',
datasets: [
{ name: 'access', type: 'logs' },
{ name: 'error', type: 'metrics' },
{ name: 'warning', type: 'logs' },
],
})
.expect(200);
const expectedIngestPipelines = [
'logs-my_custom_nginx.access-1.0.0',
'metrics-my_custom_nginx.error-1.0.0',
'logs-my_custom_nginx.warning-1.0.0',
];
const expectedIndexTemplates = [
'logs-my_custom_nginx.access',
'metrics-my_custom_nginx.error',
'logs-my_custom_nginx.warning',
];
const expectedComponentTemplates = [
'logs-my_custom_nginx.access@package',
'logs-my_custom_nginx.access@custom',
'metrics-my_custom_nginx.error@package',
'metrics-my_custom_nginx.error@custom',
'logs-my_custom_nginx.warning@package',
'logs-my_custom_nginx.warning@custom',
];
expect(response.body._meta.install_source).to.be('custom');
const actualIngestPipelines = response.body.items
.filter((item: any) => item.type === 'ingest_pipeline')
.map((pipeline: any) => pipeline.id);
const actualIndexTemplates = response.body.items
.filter((item: any) => item.type === 'index_template')
.map((template: any) => template.id);
const actualComponentTemplates = response.body.items
.filter((item: any) => item.type === 'component_template')
.map((template: any) => template.id);
expectedIngestPipelines.forEach((pipeline) => {
expect(actualIngestPipelines).to.contain(pipeline);
});
expectedIndexTemplates.forEach((template) => {
expect(actualIndexTemplates).to.contain(template);
});
expectedComponentTemplates.forEach((template) => {
expect(actualComponentTemplates).to.contain(template);
});
const installation = await kibanaServer.savedObjects.get({
type: PACKAGES_SAVED_OBJECT_TYPE,
id: INTEGRATION_NAME,
});
expect(installation.attributes.name).to.be(INTEGRATION_NAME);
expect(installation.attributes.version).to.be(INTEGRATION_VERSION);
expect(installation.attributes.install_source).to.be('custom');
expect(installation.attributes.install_status).to.be('installed');
});
});
}
```