# Backport

This will backport the following commits from `main` to `8.16`:

- [[Automatic Import] Add RBAC to APIs (#203882)](https://github.com/elastic/kibana/pull/203882)

### Questions?

Please refer to the [Backport tool documentation](https://github.com/sqren/backport).

---------

[Automatic Import] Add RBAC to APIs (#203882)

## Release Note

Adds RBAC to the Automatic Import APIs.

## Summary

This PR adds RBAC privileges to the Automatic Import APIs: it grants `all` access to users who have `fleet:all`, `fleetv2:all`, and `actions:all` UI access. It also adds validation for `integrationName` and `dataStreamName`.

### Checklist

Check that the PR satisfies the following conditions. Reviewers should verify this list as well.

- [x] [Unit or functional tests](https://www.elastic.co/guide/en/kibana/master/development-tests.html) were updated or added to match the most common scenarios
- [x] The PR description includes the appropriate Release Notes section, and the correct `release_note:*` label is applied per the [guidelines](https://www.elastic.co/guide/en/kibana/master/contributing.html#kibana-release-notes-process)

---------

Co-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com>
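The diff below carries the validation half of this change: integration and data-stream names must contain only letters and underscores. Here is a minimal sketch of that rule, mirroring the `isValidName` helper the diff adds to `build_integration.ts`; the example calls are illustrative, and the route-level RBAC changes are not part of the files shown on this page.

```typescript
// Mirrors the isValidName helper introduced in build_integration.ts:
// accept only ASCII letters and underscores, and require at least one character.
export function isValidName(input: string): boolean {
  const regex = /^[a-zA-Z_]+$/;
  return input.length > 0 && regex.test(input);
}

// Example outcomes, matching the new unit tests in the diff:
isValidName('Valid_Name'); // true
isValidName('datastream_one'); // true
isValidName('invalid-name'); // false: dashes are rejected
isValidName('invalid name 123'); // false: spaces and digits are rejected
```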
Parent: 5b9434c356
Commit: 8e433df4c8

3 changed files, 107 insertions(+), 45 deletions(-)
build_integration.test.ts

@@ -5,7 +5,7 @@
  * 2.0.
  */
 
-import { buildPackage, renderPackageManifestYAML } from './build_integration';
+import { buildPackage, isValidName, renderPackageManifestYAML } from './build_integration';
 import { testIntegration } from '../../__jest__/fixtures/build_integration';
 import { generateUniqueId, ensureDirSync, createSync } from '../util';
 import { createDataStream } from './data_stream';
@@ -39,6 +39,7 @@ jest.mock('adm-zip', () => {
   return jest.fn().mockImplementation(() => ({
     addLocalFolder: jest.fn(),
     toBuffer: jest.fn(),
+    addFile: jest.fn(),
   }));
 });
 
@@ -46,8 +47,8 @@ describe('buildPackage', () => {
   const packagePath = `${mockedDataPath}/integration-assistant-${mockedId}`;
   const integrationPath = `${packagePath}/integration-1.0.0`;
 
-  const firstDatastreamName = 'datastream_1';
-  const secondDatastreamName = 'datastream_2';
+  const firstDatastreamName = 'datastream_one';
+  const secondDatastreamName = 'datastream_two';
 
   const firstDataStreamInputTypes: InputType[] = ['filestream', 'kafka'];
   const secondDataStreamInputTypes: InputType[] = ['kafka'];
@@ -74,8 +75,8 @@ describe('buildPackage', () => {
 
   const firstDataStream: DataStream = {
     name: firstDatastreamName,
-    title: 'Datastream_1',
-    description: 'Datastream_1 description',
+    title: 'datastream_one',
+    description: 'datastream_one description',
     inputTypes: firstDataStreamInputTypes,
     docs: firstDataStreamDocs,
     rawSamples: ['{"test1": "test1"}'],
@@ -85,8 +86,8 @@ describe('buildPackage', () => {
 
   const secondDataStream: DataStream = {
     name: secondDatastreamName,
-    title: 'Datastream_2',
-    description: 'Datastream_2 description',
+    title: 'datastream_two',
+    description: 'datastream_two description',
     inputTypes: secondDataStreamInputTypes,
     docs: secondDataStreamDocs,
     rawSamples: ['{"test1": "test1"}'],
@@ -123,15 +124,6 @@ describe('buildPackage', () => {
     expect(createSync).toHaveBeenCalledWith(`${integrationPath}/manifest.yml`, expect.any(String));
   });
 
-  it('Should create logo files if info is present in the integration', async () => {
-    testIntegration.logo = 'logo';
-
-    await buildPackage(testIntegration);
-
-    expect(ensureDirSync).toHaveBeenCalledWith(`${integrationPath}/img`);
-    expect(createSync).toHaveBeenCalledWith(`${integrationPath}/img/logo.svg`, expect.any(Buffer));
-  });
-
   it('Should not create logo files if info is not present in the integration', async () => {
     jest.clearAllMocks();
     testIntegration.logo = undefined;
@@ -186,19 +178,19 @@ describe('buildPackage', () => {
   it('Should call createReadme once with sorted fields', async () => {
     jest.clearAllMocks();
 
-    const firstDSFieldsMapping = [{ name: 'name a', description: 'description 1', type: 'type 1' }];
+    const firstDSFieldsMapping = [{ name: 'name_a', description: 'description 1', type: 'type 1' }];
 
     const firstDataStreamFields = [
-      { name: 'name b', description: 'description 1', type: 'type 1' },
+      { name: 'name_b', description: 'description 1', type: 'type 1' },
     ];
 
     const secondDSFieldsMapping = [
-      { name: 'name c', description: 'description 2', type: 'type 2' },
-      { name: 'name e', description: 'description 3', type: 'type 3' },
+      { name: 'name_c', description: 'description 2', type: 'type 2' },
+      { name: 'name_e', description: 'description 3', type: 'type 3' },
     ];
 
     const secondDataStreamFields = [
-      { name: 'name d', description: 'description 2', type: 'type 2' },
+      { name: 'name_d', description: 'description 2', type: 'type 2' },
     ];
 
     (createFieldMapping as jest.Mock).mockReturnValueOnce(firstDSFieldsMapping);
@@ -213,17 +205,17 @@ describe('buildPackage', () => {
       {
        datastream: firstDatastreamName,
        fields: [
-          { name: 'name a', description: 'description 1', type: 'type 1' },
-          { name: 'name b', description: 'description 1', type: 'type 1' },
+          { name: 'name_a', description: 'description 1', type: 'type 1' },
+          { name: 'name_b', description: 'description 1', type: 'type 1' },
        ],
      },
      {
        datastream: secondDatastreamName,
        fields: [
-          { name: 'name c', description: 'description 2', type: 'type 2' },
-          { name: 'name d', description: 'description 2', type: 'type 2' },
-          { name: 'name e', description: 'description 3', type: 'type 3' },
+          { name: 'name_c', description: 'description 2', type: 'type 2' },
+          { name: 'name_d', description: 'description 2', type: 'type 2' },
+          { name: 'name_e', description: 'description 3', type: 'type 3' },
        ],
      },
    ]);
@@ -234,13 +226,13 @@ describe('renderPackageManifestYAML', () => {
  test('generates the package manifest correctly', () => {
    const integration: Integration = {
      title: 'Sample Integration',
-      name: 'sample-integration',
+      name: 'sample_integration',
      description:
        ' This is a sample integration\n\nWith multiple lines and weird spacing. \n\n And more lines ',
      logo: 'some-logo.png',
      dataStreams: [
        {
-          name: 'data-stream-1',
+          name: 'data_stream_one',
          title: 'Data Stream 1',
          description: 'This is data stream 1',
          inputTypes: ['filestream'],
@@ -252,7 +244,7 @@ describe('renderPackageManifestYAML', () => {
          samplesFormat: { name: 'ndjson', multiline: false },
        },
        {
-          name: 'data-stream-2',
+          name: 'data_stream_two',
          title: 'Data Stream 2',
          description:
            'This is data stream 2\nWith multiple lines of description\nBut otherwise, nothing special',
@@ -287,3 +279,59 @@ describe('renderPackageManifestYAML', () => {
    });
  });
});
+
+describe('isValidName', () => {
+  it('should return true for valid names', () => {
+    expect(isValidName('validName')).toBe(true);
+    expect(isValidName('Valid_Name')).toBe(true);
+    expect(isValidName('anotherValidName')).toBe(true);
+  });
+
+  it('should return false for names with numbers', () => {
+    expect(isValidName('invalid123')).toBe(false);
+    expect(isValidName('123invalid')).toBe(false);
+    expect(isValidName('invalid_123')).toBe(false);
+  });
+
+  it('should return false for empty string', () => {
+    expect(isValidName('')).toBe(false);
+  });
+
+  it('should return false for names with spaces', () => {
+    expect(isValidName('invalid name')).toBe(false);
+    expect(isValidName(' invalid')).toBe(false);
+    expect(isValidName('invalid ')).toBe(false);
+    expect(isValidName('invalid name with spaces')).toBe(false);
+  });
+
+  it('should return false for names with special characters', () => {
+    expect(isValidName('invalid@name')).toBe(false);
+    expect(isValidName('invalid#name')).toBe(false);
+    expect(isValidName('invalid$name')).toBe(false);
+    expect(isValidName('invalid%name')).toBe(false);
+    expect(isValidName('invalid^name')).toBe(false);
+    expect(isValidName('invalid&name')).toBe(false);
+    expect(isValidName('invalid*name')).toBe(false);
+    expect(isValidName('invalid(name')).toBe(false);
+    expect(isValidName('invalid/name')).toBe(false);
+  });
+
+  it('should return false for names with dashes', () => {
+    expect(isValidName('invalid-name')).toBe(false);
+    expect(isValidName('invalid-name-with-dashes')).toBe(false);
+  });
+
+  it('should return false for names with periods', () => {
+    expect(isValidName('invalid.name')).toBe(false);
+    expect(isValidName('invalid.name.with.periods')).toBe(false);
+  });
+
+  it('should return false for names with mixed invalid characters', () => {
+    expect(isValidName('invalid@name#with$special%characters')).toBe(false);
+    expect(isValidName('invalid name with spaces and 123')).toBe(false);
+  });
+
+  it('should return false for names with empty string', () => {
+    expect(isValidName('')).toBe(false);
+  });
+});
build_integration.ts

@@ -34,6 +34,12 @@ function configureNunjucks() {
 export async function buildPackage(integration: Integration): Promise<Buffer> {
   configureNunjucks();
 
+  if (!isValidName(integration.name)) {
+    throw new Error(
+      `Invalid integration name: ${integration.name}, Should only contain letters and underscores`
+    );
+  }
+
   const workingDir = joinPath(getDataPath(), `integration-assistant-${generateUniqueId()}`);
   const packageDirectoryName = `${integration.name}-${initialVersion}`;
   const packageDir = createDirectories(workingDir, integration, packageDirectoryName);
@@ -41,6 +47,11 @@ export async function buildPackage(integration: Integration): Promise<Buffer> {
   const dataStreamsDir = joinPath(packageDir, 'data_stream');
   const fieldsPerDatastream = integration.dataStreams.map((dataStream) => {
     const dataStreamName = dataStream.name;
+    if (!isValidName(dataStreamName)) {
+      throw new Error(
+        `Invalid datastream name: ${dataStreamName}, Should only contain letters and underscores`
+      );
+    }
     const specificDataStreamDir = joinPath(dataStreamsDir, dataStreamName);
 
     const dataStreamFields = createDataStream(integration.name, specificDataStreamDir, dataStream);
@@ -60,12 +71,14 @@ export async function buildPackage(integration: Integration): Promise<Buffer> {
   });
 
   createReadme(packageDir, integration.name, fieldsPerDatastream);
-  const zipBuffer = await createZipArchive(workingDir, packageDirectoryName);
+
+  const zipBuffer = await createZipArchive(integration, workingDir, packageDirectoryName);
   removeDirSync(workingDir);
   return zipBuffer;
 }
 
+export function isValidName(input: string): boolean {
+  const regex = /^[a-zA-Z_]+$/;
+  return input.length > 0 && regex.test(input);
+}
 function createDirectories(
   workingDir: string,
   integration: Integration,
@@ -84,17 +97,6 @@ function createPackage(packageDir: string, integration: Integration): void {
   createPackageManifest(packageDir, integration);
   // Skipping creation of system tests temporarily for custom package generation
   // createPackageSystemTests(packageDir, integration);
-  if (integration?.logo !== undefined) {
-    createLogo(packageDir, integration.logo);
-  }
 }
 
-function createLogo(packageDir: string, logo: string): void {
-  const logoDir = joinPath(packageDir, 'img');
-  ensureDirSync(logoDir);
-
-  const buffer = Buffer.from(logo, 'base64');
-  createSync(joinPath(logoDir, 'logo.svg'), buffer);
-}
-
 function createBuildFile(packageDir: string): void {
@@ -113,10 +115,20 @@ function createChangelog(packageDir: string): void {
   createSync(joinPath(packageDir, 'changelog.yml'), changelogTemplate);
 }
 
-async function createZipArchive(workingDir: string, packageDirectoryName: string): Promise<Buffer> {
+async function createZipArchive(
+  integration: Integration,
+  workingDir: string,
+  packageDirectoryName: string
+): Promise<Buffer> {
   const tmpPackageDir = joinPath(workingDir, packageDirectoryName);
   const zip = new AdmZip();
   zip.addLocalFolder(tmpPackageDir, packageDirectoryName);
+
+  if (integration.logo) {
+    const logoDir = joinPath(packageDirectoryName, 'img/logo.svg');
+    const logoBuffer = Buffer.from(integration.logo, 'base64');
+    zip.addFile(logoDir, logoBuffer);
+  }
   const buffer = zip.toBuffer();
   return buffer;
 }
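Note the design change in `createZipArchive`: rather than writing `logo.svg` into the working directory on disk (the removed `createLogo` helper) and zipping the folder afterwards, the base64 logo is now decoded and added straight into the archive as an in-memory entry. That is why the removed test's `ensureDirSync`/`createSync` expectations for `${integrationPath}/img` were dropped and the adm-zip mock gained `addFile`. A minimal sketch of the same pattern with `adm-zip` (the `zipWithLogo` wrapper and its parameters are illustrative, not the PR's exact code):

```typescript
import AdmZip from 'adm-zip';

// Zip an on-disk package folder, then inject an in-memory logo entry
// without ever writing the logo to disk.
function zipWithLogo(packageDir: string, entryRoot: string, logoBase64?: string): Buffer {
  const zip = new AdmZip();
  zip.addLocalFolder(packageDir, entryRoot); // folder contents land under entryRoot/
  if (logoBase64) {
    const logoBuffer = Buffer.from(logoBase64, 'base64');
    zip.addFile(`${entryRoot}/img/logo.svg`, logoBuffer); // in-memory entry
  }
  return zip.toBuffer();
}
```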
tsconfig.json

@@ -12,7 +12,9 @@
     "__jest__/**/*",
     "../../typings/**/*"
   ],
-  "exclude": ["target/**/*"],
+  "exclude": [
+    "target/**/*"
+  ],
   "kbn_references": [
     "@kbn/core",
     "@kbn/config-schema",