[Integration AutoImport] Use kibana data directory as integration build working dir (#188661)
## Summary

This PR changes the working directory used to build the integration zip package from `/tmp` to the Kibana data directory, resolved via the `@kbn/utils` library. It also removes the working directory once the integration zip build finishes, to keep the house clean.

This change is necessary to prevent the ENOENT error that occurs in serverless environments when creating the working directory.

Before: *(screenshot)*

After: *(screenshot)*

---------

Co-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com>
Co-authored-by: Elastic Machine <elasticmachine@users.noreply.github.com>
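For orientation, here is a minimal, self-contained sketch of the pattern the PR adopts: build the package in a unique working directory under the Kibana data path (`getDataPath` from `@kbn/utils`) instead of `os.tmpdir()`, zip it with `adm-zip`, and remove the working directory afterwards. `buildContents` and `buildZip` are hypothetical stand-ins for the plugin's real package-generation code, the `randomBytes` id replaces its `generateUniqueId` helper, and the try/finally cleanup is a slight variation (the PR removes the directory after a successful build).

```ts
import AdmZip from 'adm-zip';
import { randomBytes } from 'crypto';
import { mkdirSync, rmSync } from 'fs';
import { join } from 'path';
import { getDataPath } from '@kbn/utils'; // Kibana data directory, writable in serverless

// Hypothetical stand-in for the real steps (manifest, data streams, field mappings, ...).
function buildContents(packageDir: string): void {
  mkdirSync(packageDir, { recursive: true });
}

export function buildZip(packageName: string): Buffer {
  // A unique per-build directory under the data path avoids collisions between concurrent builds.
  const workingDir = join(getDataPath(), `integration-assistant-${randomBytes(8).toString('hex')}`);
  const packageDir = join(workingDir, packageName);
  try {
    buildContents(packageDir);
    const zip = new AdmZip();
    zip.addLocalFolder(packageDir, packageName);
    return zip.toBuffer();
  } finally {
    // Keep the house clean: remove the working directory when the build finishes.
    rmSync(workingDir, { recursive: true, force: true });
  }
}
```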
parent 8e7d634e1c
commit 3dd2034b27

4 changed files with 29 additions and 13 deletions

@@ -7,10 +7,10 @@
 import AdmZip from 'adm-zip';
 import nunjucks from 'nunjucks';
-import { tmpdir } from 'os';
+import { getDataPath } from '@kbn/utils';
 import { join as joinPath } from 'path';
 import type { DataStream, Integration } from '../../common';
-import { createSync, ensureDirSync, generateUniqueId } from '../util';
+import { createSync, ensureDirSync, generateUniqueId, removeDirSync } from '../util';
 import { createAgentInput } from './agent';
 import { createDataStream } from './data_stream';
 import { createFieldMapping } from './fields';
@@ -27,9 +27,10 @@ export async function buildPackage(integration: Integration): Promise<Buffer> {
     autoescape: false,
   });

-  const tmpDir = joinPath(tmpdir(), `integration-assistant-${generateUniqueId()}`);
+  const workingDir = joinPath(getDataPath(), `integration-assistant-${generateUniqueId()}`);
   const packageDirectoryName = `${integration.name}-${initialVersion}`;
-  const packageDir = createDirectories(tmpDir, integration, packageDirectoryName);
+  const packageDir = createDirectories(workingDir, integration, packageDirectoryName);
+
   const dataStreamsDir = joinPath(packageDir, 'data_stream');

   for (const dataStream of integration.dataStreams) {
@@ -42,17 +43,19 @@ export async function buildPackage(integration: Integration): Promise<Buffer> {
     createFieldMapping(integration.name, dataStreamName, specificDataStreamDir, dataStream.docs);
   }

-  const zipBuffer = await createZipArchive(tmpDir, packageDirectoryName);
+  const zipBuffer = await createZipArchive(workingDir, packageDirectoryName);
+
+  removeDirSync(workingDir);
   return zipBuffer;
 }

 function createDirectories(
-  tmpDir: string,
+  workingDir: string,
   integration: Integration,
   packageDirectoryName: string
 ): string {
-  const packageDir = joinPath(tmpDir, packageDirectoryName);
-  ensureDirSync(tmpDir);
+  const packageDir = joinPath(workingDir, packageDirectoryName);
+  ensureDirSync(workingDir);
   ensureDirSync(packageDir);
   createPackage(packageDir, integration);
   return packageDir;
@@ -105,8 +108,8 @@ function createReadme(packageDir: string, integration: Integration) {
   createSync(joinPath(readmeDirPath, 'README.md'), readmeTemplate);
 }

-async function createZipArchive(tmpDir: string, packageDirectoryName: string): Promise<Buffer> {
-  const tmpPackageDir = joinPath(tmpDir, packageDirectoryName);
+async function createZipArchive(workingDir: string, packageDirectoryName: string): Promise<Buffer> {
+  const tmpPackageDir = joinPath(workingDir, packageDirectoryName);
   const zip = new AdmZip();
   zip.addLocalFolder(tmpPackageDir, packageDirectoryName);
   const buffer = zip.toBuffer();

@@ -5,7 +5,7 @@
  * 2.0.
  */

-import { cpSync, mkdirSync, readFileSync, readdirSync, statSync, writeFileSync } from 'fs';
+import { cpSync, mkdirSync, readFileSync, readdirSync, statSync, writeFileSync, rmSync } from 'fs';
 import { dirname } from 'path';

 export function existsSync(path: string): boolean {
@@ -45,3 +45,7 @@ export function listDirSync(path: string): string[] {
 export function readSync(path: string): string {
   return readFileSync(path, { encoding: 'utf-8' });
 }
+
+export function removeDirSync(path: string): void {
+  rmSync(path, { recursive: true, force: true });
+}

@@ -5,7 +5,15 @@
  * 2.0.
  */

-export { existsSync, ensureDirSync, createSync, copySync, listDirSync, readSync } from './files';
+export {
+  existsSync,
+  ensureDirSync,
+  createSync,
+  copySync,
+  listDirSync,
+  readSync,
+  removeDirSync,
+} from './files';

 export { generateFields, mergeSamples } from './samples';
 export { deepCopy, generateUniqueId } from './util';

@@ -37,6 +37,7 @@
     "@kbn/core-http-request-handler-context-server",
     "@kbn/core-http-router-server-mocks",
     "@kbn/core-http-server",
-    "@kbn/kibana-utils-plugin"
+    "@kbn/kibana-utils-plugin",
+    "@kbn/utils"
   ]
 }