[Automatic Import] Remove pipeline tests from the package (#211223)

## Summary

This PR removes unused pipeline tests from the integration packaging.

The pipeline tests are not currently run when the integration is built, so this PR removes them for now.
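
For context, the generator previously emitted pipeline test fixtures for each data stream under `_dev/test/pipeline` (see the removed `createPipelineTests` helper and the deleted templates below). Reconstructed from that code, the emitted layout was roughly:

```
<package>/data_stream/<datastream>/_dev/test/pipeline/
├── test-common-config.yml            # copied from ../templates/pipeline_tests
├── ...                               # remaining templates, with '_' renamed to '-'
└── test-<package>-<datastream>.log   # dataStream.rawSamples joined with newlines
```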
Bharat Pasupula authored on 2025-02-17 10:28:57 +01:00 (committed by GitHub)
commit 54b6e65a20, parent 0ecbbd52be
20 changed files with 103 additions and 191 deletions


```diff
@@ -37,6 +37,7 @@ export enum GenerationErrorCode {
   UNSUPPORTED_LOG_SAMPLES_FORMAT = 'unsupported-log-samples-format',
   UNPARSEABLE_CSV_DATA = 'unparseable-csv-data',
   CEF_ERROR = 'cef-not-supported',
+  BUILD_INTEGRATION_ERROR = 'build-integration-error',
 }

 // Size limits
```


```diff
@@ -271,6 +271,12 @@ export const GENERATION_ERROR_TRANSLATION: Record<
     defaultMessage:
       'CEF format detected. Please decode the CEF logs into JSON format using filebeat decode_cef processor.',
   }),
+  [GenerationErrorCode.BUILD_INTEGRATION_ERROR]: i18n.translate(
+    'xpack.automaticImport.errors.buildIntegrationError',
+    {
+      defaultMessage: 'An error occurred while building the integration package. Please try again.',
+    }
+  ),
   [GenerationErrorCode.UNPARSEABLE_CSV_DATA]: (attributes) => {
     if (
       attributes.underlyingMessages !== undefined &&
```


```diff
@@ -85,7 +85,7 @@ describe('EcsGraph', () => {
       throw Error(`getEcsGraph threw an error: ${error}`);
     }
-    expect(response.results).toStrictEqual(ecsMappingExpectedResults);
+    expect(response.results).toEqual(ecsMappingExpectedResults);
     // Check if the functions were called
     expect(handleEcsMapping).toHaveBeenCalled();
```


```diff
@@ -20,6 +20,7 @@ import { createDataStream } from './data_stream';
 import { createFieldMapping } from './fields';
 import { createPipeline } from './pipeline';
 import { createReadme } from './readme_files';
+import { BuildIntegrationError } from '../lib/errors/build_integration_error';

 const initialVersion = '1.0.0';
@@ -37,46 +38,56 @@ export async function buildPackage(integration: Integration): Promise<Buffer> {
   configureNunjucks();

   if (!isValidName(integration.name)) {
-    throw new Error(
+    throw new BuildIntegrationError(
       `Invalid integration name: ${integration.name}, Should only contain letters, numbers and underscores`
     );
   }

   const workingDir = joinPath(getDataPath(), `automatic-import-${generateUniqueId()}`);
-  const packageDirectoryName = `${integration.name}-${initialVersion}`;
-  const packageDir = createDirectories(workingDir, integration, packageDirectoryName);
-  const dataStreamsDir = joinPath(packageDir, 'data_stream');
-  const fieldsPerDatastream = integration.dataStreams.map((dataStream) => {
-    const dataStreamName = dataStream.name;
-    if (!isValidDatastreamName(dataStreamName)) {
-      throw new Error(
-        `Invalid datastream name: ${dataStreamName}, Name must be at least 2 characters long and can only contain lowercase letters, numbers, and underscores`
-      );
-    }
-    const specificDataStreamDir = joinPath(dataStreamsDir, dataStreamName);
-    const dataStreamFields = createDataStream(integration.name, specificDataStreamDir, dataStream);
-    createAgentInput(specificDataStreamDir, dataStream.inputTypes, dataStream.celInput);
-    createPipeline(specificDataStreamDir, dataStream.pipeline);
-    const fields = createFieldMapping(
-      integration.name,
-      dataStreamName,
-      specificDataStreamDir,
-      dataStream.docs
-    );
-    return {
-      datastream: dataStreamName,
-      fields: mergeAndSortFields(fields, dataStreamFields),
-    };
-  });
-  createReadme(packageDir, integration.name, integration.dataStreams, fieldsPerDatastream);
-  const zipBuffer = await createZipArchive(integration, workingDir, packageDirectoryName);
-  removeDirSync(workingDir);
-  return zipBuffer;
+  try {
+    const packageDirectoryName = `${integration.name}-${initialVersion}`;
+    const packageDir = createDirectories(workingDir, integration, packageDirectoryName);
+    const dataStreamsDir = joinPath(packageDir, 'data_stream');
+    const fieldsPerDatastream = integration.dataStreams.map((dataStream) => {
+      const dataStreamName = dataStream.name;
+      if (!isValidDatastreamName(dataStreamName)) {
+        throw new Error(
+          `Invalid datastream name: ${dataStreamName}, Name must be at least 2 characters long and can only contain lowercase letters, numbers, and underscores`
+        );
+      }
+      const specificDataStreamDir = joinPath(dataStreamsDir, dataStreamName);
+      const dataStreamFields = createDataStream(
+        integration.name,
+        specificDataStreamDir,
+        dataStream
+      );
+      createAgentInput(specificDataStreamDir, dataStream.inputTypes, dataStream.celInput);
+      createPipeline(specificDataStreamDir, dataStream.pipeline);
+      const fields = createFieldMapping(
+        integration.name,
+        dataStreamName,
+        specificDataStreamDir,
+        dataStream.docs
+      );
+      return {
+        datastream: dataStreamName,
+        fields: mergeAndSortFields(fields, dataStreamFields),
+      };
+    });
+    createReadme(packageDir, integration.name, integration.dataStreams, fieldsPerDatastream);
+    const zipBuffer = await createZipArchive(integration, workingDir, packageDirectoryName);
+    removeDirSync(workingDir);
+    return zipBuffer;
+  } catch (error) {
+    throw new BuildIntegrationError('Building the Integration failed');
+  } finally {
+    removeDirSync(workingDir);
+  }
 }

 export function isValidName(input: string): boolean {
```
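
As a minimal sketch of the resulting contract (hypothetical caller; the `Integration` import path is an assumption, not taken from this diff):

```ts
import type { Integration } from '../../common'; // assumed path
import { buildPackage } from './build_integration';
import { BuildIntegrationError } from '../lib/errors/build_integration_error';

// Hypothetical caller: any failure inside the try block now surfaces as a
// single BuildIntegrationError, and the finally block removes the temporary
// working directory on both the success and failure paths.
export async function tryBuildPackage(integration: Integration): Promise<Buffer | undefined> {
  try {
    return await buildPackage(integration);
  } catch (e) {
    if (e instanceof BuildIntegrationError) {
      // Serialized via sendResponse() as GenerationErrorCode.BUILD_INTEGRATION_ERROR.
      return undefined;
    }
    throw e;
  }
}
```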


```diff
@@ -89,17 +89,6 @@ describe('createDataStream', () => {
     // dataStream files
     expect(copySync).toHaveBeenCalledWith(expect.any(String), `${dataStreamPath}/fields`);
-    // test files
-    expect(ensureDirSync).toHaveBeenCalledWith(`${dataStreamPath}/_dev/test/pipeline`);
-    expect(copySync).toHaveBeenCalledWith(
-      expect.any(String),
-      `${dataStreamPath}/_dev/test/pipeline/test-common-config.yml`
-    );
-    expect(createSync).toHaveBeenCalledWith(
-      `${dataStreamPath}/_dev/test/pipeline/test-${packageName}-datastream-1.log`,
-      samples
-    );
     // Manifest files
     expect(createSync).toHaveBeenCalledWith(`${dataStreamPath}/manifest.yml`, undefined);
     expect(render).toHaveBeenCalledWith(`filestream_manifest.yml.njk`, expect.anything());
```


```diff
@@ -27,8 +27,6 @@ export function createDataStream(
   ensureDirSync(specificDataStreamDir);
   const fields = createDataStreamFolders(specificDataStreamDir, pipelineDir);
-  createPipelineTests(specificDataStreamDir, dataStream.rawSamples, packageName, dataStreamName);

   const dataStreams: string[] = [];
   for (const inputType of dataStream.inputTypes) {
     let mappedValues = {
@@ -89,30 +87,6 @@ function loadFieldsFromFiles(sourcePath: string, files: string[]): Field[] {
   });
 }

-function createPipelineTests(
-  specificDataStreamDir: string,
-  rawSamples: string[],
-  packageName: string,
-  dataStreamName: string
-): void {
-  const pipelineTestTemplatesDir = joinPath(__dirname, '../templates/pipeline_tests');
-  const pipelineTestsDir = joinPath(specificDataStreamDir, '_dev/test/pipeline');
-  ensureDirSync(pipelineTestsDir);
-  const items = listDirSync(pipelineTestTemplatesDir);
-  for (const item of items) {
-    const s = joinPath(pipelineTestTemplatesDir, item);
-    const d = joinPath(pipelineTestsDir, item.replaceAll('_', '-'));
-    copySync(s, d);
-  }
-  const formattedPackageName = packageName.replace(/_/g, '-');
-  const formattedDataStreamName = dataStreamName.replace(/_/g, '-');
-  const testFileName = joinPath(
-    pipelineTestsDir,
-    `test-${formattedPackageName}-${formattedDataStreamName}.log`
-  );
-  createSync(testFileName, rawSamples.join('\n'));
-}

 function prepareCelValues(mappedValues: object, celInput: CelInput | undefined) {
   if (celInput != null) {
     // Ready the program for printing with correct indentation
```


```diff
@@ -0,0 +1,20 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { KibanaResponseFactory } from '@kbn/core/server';
+import { ErrorThatHandlesItsOwnResponse } from './types';
+import { GenerationErrorCode } from '../../../common/constants';
+
+export class BuildIntegrationError extends Error implements ErrorThatHandlesItsOwnResponse {
+  private readonly errorCode: GenerationErrorCode = GenerationErrorCode.BUILD_INTEGRATION_ERROR;
+
+  public sendResponse(res: KibanaResponseFactory) {
+    return res.badRequest({
+      body: { message: this.message, attributes: { errorCode: this.errorCode } },
+    });
+  }
+}
```
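
For readers unfamiliar with the `ErrorThatHandlesItsOwnResponse` contract from `./types`: errors that implement it serialize themselves instead of going through a generic error handler. A minimal sketch of the pattern (the interface body and the type guard here are assumptions, not necessarily the plugin's exact definitions):

```ts
import type { IKibanaResponse, KibanaResponseFactory } from '@kbn/core/server';

// Assumed shape of the contract exported from './types'.
interface ErrorThatHandlesItsOwnResponse extends Error {
  sendResponse(res: KibanaResponseFactory): IKibanaResponse;
}

// Hypothetical type guard; the plugin may ship its own equivalent.
function handlesItsOwnResponse(e: unknown): e is ErrorThatHandlesItsOwnResponse {
  return (
    e instanceof Error &&
    typeof (e as ErrorThatHandlesItsOwnResponse).sendResponse === 'function'
  );
}

// Inside a route handler's catch block, a self-responding error
// short-circuits the generic error path:
//   if (handlesItsOwnResponse(err)) return err.sendResponse(res);
//   throw err;
```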


```diff
@@ -1,3 +0,0 @@
-fields:
-  tags:
-    - preserve_original_event
```


```diff
@@ -1,3 +0,0 @@
-version: "{{ docker_compose_version }}"
-services: {% for service in services %}
-{{ service }}{% endfor %}
```


```diff
@@ -1,6 +0,0 @@
-{{package_name}}-{{data_stream_name}}-filestream:
-  image: alpine
-  volumes:
-    - ./sample_logs:/sample_logs:ro
-    - ${SERVICE_LOGS_DIR}:/var/log
-  command: /bin/sh -c "cp /sample_logs/* /var/log/"
```


```diff
@@ -1,7 +0,0 @@
-{{package_name}}-{{data_stream_name}}-gcs:
-  image: fsouza/fake-gcs-server:latest
-  command: -host=0.0.0.0 -public-host=elastic-package-service_{{package_name}}-{{data_stream_name}}-gcs_1 -port=4443 -scheme=http
-  volumes:
-    - ./sample_logs:/data
-  ports:
-    - 4443/tcp
```


```diff
@@ -1,6 +0,0 @@
-{{package_name}}-{{data_stream_name}}-logfile:
-  image: alpine
-  volumes:
-    - ./sample_logs:/sample_logs:ro
-    - ${SERVICE_LOGS_DIR}:/var/log
-  command: /bin/sh -c "cp /sample_logs/* /var/log/"
```


```diff
@@ -1,6 +0,0 @@
-{{package_name}}-{{data_stream_name}}-tcp:
-  image: docker.elastic.co/observability/stream:{{stream_version}}
-  volumes:
-    - ./sample_logs:/sample_logs:ro
-  entrypoint: /bin/bash
-  command: -c "/stream log --start-signal=SIGHUP --delay=5s --addr elastic-agent:9025 -p=tcp /sample_logs/{{package_name}}.log"
```


```diff
@@ -1,6 +0,0 @@
-{{package_name}}-{{data_stream_name}}-udp:
-  image: docker.elastic.co/observability/stream:{{stream_version}}
-  volumes:
-    - ./sample_logs:/sample_logs:ro
-  entrypoint: /bin/bash
-  command: -c "/stream log --start-signal=SIGHUP --delay=5s --addr elastic-agent:9025 -p=udp /sample_logs/{{package_name}}.log"
```


```diff
@@ -1,13 +0,0 @@
-service: {{package_name}}-{{data_stream_name}}-filestream
-input: filestream
-data_stream:
-  vars:
-    preserve_original_event: true
-    paths:
-      - '{% raw %}{{SERVICE_LOGS_DIR}}{% endraw %}/test-{{package_name}}-{{data_stream_name}}.log'
-    numeric_keyword_fields:
-      - log.file.device_id
-      - log.file.inode
-      - log.file.idxhi
-      - log.file.idxlo
-      - log.file.vol
```


```diff
@@ -1,10 +0,0 @@
-service: {{package_name}}-{{data_stream_name}}-gcs
-input: gcs
-data_stream:
-  vars:
-    project_id: testproject
-    alternative_host: "http://{% raw %}{{Hostname}}:{{Port}}{% endraw %}"
-    buckets: |
-      - name: testbucket
-        poll: true
-        poll_interval: 15s
```


```diff
@@ -1,13 +0,0 @@
-service: {{package_name}}-{{data_stream_name}}-logfile
-input: logfile
-data_stream:
-  vars:
-    preserve_original_event: true
-    paths:
-      - '{% raw %}{{SERVICE_LOGS_DIR}}{% endraw %}/{{package_name}}-{{data_stream_name}}.log'
-    numeric_keyword_fields:
-      - log.file.device_id
-      - log.file.inode
-      - log.file.idxhi
-      - log.file.idxlo
-      - log.file.vol
```


```diff
@@ -1,7 +0,0 @@
-service: {{package_name}}-{{data_stream_name}}-tcp
-input: tcp
-data_stream:
-  vars:
-    preserve_original_event: true
-    listen_address: 0.0.0.0
-    listen_port: 9025
```


```diff
@@ -1,7 +0,0 @@
-service: {{package_name}}-{{data_stream_name}}-udp
-input: udp
-data_stream:
-  vars:
-    preserve_original_event: true
-    listen_address: 0.0.0.0
-    listen_port: 9025
```


```diff
@@ -153,20 +153,46 @@ export function generateFields(mergedDocs: string): string {
   return dump(fieldsStructure, { sortKeys: false });
 }

+export function isObject(value: any): boolean {
+  return typeof value === 'object' && value !== null;
+}
+
+export function isEmptyValue(value: unknown): boolean {
+  if (value == null) return true;
+  if (isObject(value)) {
+    if (Array.isArray(value)) return value.length === 0;
+    return value && Object.keys(value).length === 0;
+  }
+  return false;
+}
+
+export function isUnsafeProperty(key: string, obj: Record<string, any>): boolean {
+  return (
+    key === '__proto__' || key === 'constructor' || key === 'prototype' || !Object.hasOwn(obj, key)
+  );
+}
+
 export function merge(
   target: Record<string, any>,
   source: Record<string, any>
 ): Record<string, unknown> {
-  const filteredTarget = filterOwnProperties(target);
+  const filteredTarget = Object.create(null);
+
+  for (const [key, targetValue] of Object.entries(target)) {
+    if (!isUnsafeProperty(key, target)) {
+      filteredTarget[key] = targetValue;
+    }
+  }
+
   for (const [key, sourceValue] of Object.entries(source)) {
-    if (!isBuiltInProperties(key, source)) {
+    if (!isUnsafeProperty(key, source)) {
       const targetValue = filteredTarget[key];
       if (Array.isArray(sourceValue)) {
         // Directly assign arrays
-        filteredTarget[key] = sourceValue;
-      } else if (isObject(sourceValue) && !Array.isArray(targetValue)) {
+        filteredTarget[key] = [...sourceValue];
+      } else if (isObject(sourceValue) && !Array.isArray(sourceValue)) {
         if (!isObject(targetValue) || isEmptyValue(targetValue)) {
-          filteredTarget[key] = merge({}, sourceValue);
+          filteredTarget[key] = merge(Object.create(null), sourceValue);
         } else {
           filteredTarget[key] = merge(targetValue, sourceValue);
         }
@@ -178,38 +204,10 @@ export function merge(
       }
     }
   }
   return filteredTarget;
 }

-function isEmptyValue(value: unknown): boolean {
-  if (value == null) return true;
-  if (isObject(value)) {
-    if (Array.isArray(value)) return value.length === 0;
-    return value && Object.keys(value).length === 0;
-  }
-  return false;
-}
-
-function isObject(value: any): boolean {
-  return typeof value === 'object' && value !== null;
-}
-
-function isBuiltInProperties(key: string, obj: Record<string, any>): boolean {
-  return key === 'constructor' || !Object.prototype.hasOwnProperty.call(obj, key);
-}
-
-function filterOwnProperties(obj: Record<string, any>): Record<string, any> {
-  const ownProps: Record<string, any> = {};
-  for (const key of Object.getOwnPropertyNames(obj)) {
-    if (!isBuiltInProperties(key, obj)) {
-      ownProps[key] = (obj as any)[key];
-    }
-  }
-  return ownProps;
-}

 export function mergeSamples(objects: any[]): string {
   let result: Record<string, unknown> = {};
```
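
The point of `isUnsafeProperty` plus the `Object.create(null)` accumulator is prototype-pollution safety: `__proto__`, `constructor`, and `prototype` keys are dropped, and the merged object carries no prototype that could be polluted. A small illustration (hypothetical inputs; the import path is assumed):

```ts
import { merge } from './samples'; // assumed relative path

// JSON.parse creates '__proto__' as an own data property, so it shows up in
// Object.entries(source); isUnsafeProperty() then filters it out.
const source = JSON.parse('{"b": 2, "__proto__": {"polluted": true}}');

// Scalar keys like 'b' are copied by the branch elided from the hunk above.
const result = merge({ a: 1 }, source);

console.log(result); // [Object: null prototype] { a: 1, b: 2 }
console.log(({} as Record<string, unknown>).polluted); // undefined — Object.prototype untouched
```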