Mirror of https://github.com/elastic/kibana.git, synced 2025-04-24 17:59:23 -04:00
Previously, our plugin installation used over 5,000 file descriptors while analyzing and extracting the archive. To mitigate this, we are moving to a better-supported zip library, yauzl.

In addition to replacing the zip library, this refactors the getPackData method, which used to extract the plugin package.json files, read them, and finally delete them. This commit performs all of that in memory, removing the need for any file system operations during this step.

These changes reduced the installation time by an average of 35%, from 211.18 seconds to 137.8 seconds. The majority of that time is now spent during optimize.

Signed-off-by: Tyler Smalley <tyler.smalley@elastic.co>
This commit is contained in:
parent
2530365bcd
commit
af4de542d5
9 changed files with 198 additions and 411 deletions
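The core of the change: instead of unzipping package.json files to disk and reading them back, the installer now streams each matching zip entry into a buffer. A minimal sketch of that technique using yauzl's lazy-entry API (the helper and file names here are illustrative, not part of the commit):

    import yauzl from 'yauzl';

    // Read one named entry from a zip into memory, without touching the file system.
    function readZipEntry(archive, entryName) {
      return new Promise((resolve, reject) => {
        yauzl.open(archive, { lazyEntries: true }, (err, zipfile) => {
          if (err) return reject(err);

          zipfile.readEntry();
          zipfile.on('entry', (entry) => {
            // Skip entries we do not care about; readEntry() pulls the next one.
            if (entry.fileName !== entryName) return zipfile.readEntry();

            zipfile.openReadStream(entry, (err, stream) => {
              if (err) return reject(err);

              const chunks = [];
              stream.on('data', (chunk) => chunks.push(chunk));
              stream.on('end', () => resolve(Buffer.concat(chunks).toString()));
            });
          });
          zipfile.on('end', () => reject(new Error(`${entryName} not found in ${archive}`)));
        });
      });
    }

Because entries are read lazily and sequentially, only one read stream is open at a time, which is what keeps the file-descriptor count flat.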
@@ -70,7 +70,6 @@
     "url": "https://github.com/elastic/kibana.git"
   },
   "dependencies": {
-    "@bigfunger/decompress-zip": "0.2.0-stripfix3",
     "@bigfunger/jsondiffpatch": "0.1.38-webpack",
     "@elastic/datemath": "2.3.0",
     "@elastic/httpolyglot": "0.1.2-elasticpatch1",
@@ -197,7 +196,8 @@
     "vision": "4.1.0",
     "webpack": "github:elastic/webpack#fix/query-params-for-aliased-loaders",
     "whatwg-fetch": "0.9.0",
-    "wreck": "6.2.0"
+    "wreck": "6.2.0",
+    "yauzl": "2.7.0"
   },
   "devDependencies": {
     "@elastic/eslint-config-kibana": "0.5.0",
@@ -258,7 +258,7 @@
     "makelogs": "3.2.3",
     "marked-text-renderer": "0.1.0",
     "mocha": "2.5.3",
-    "mock-fs": "4.0.0",
+    "mock-fs": "4.2.0",
     "murmurhash3js": "3.0.1",
     "ncp": "2.0.0",
     "nock": "8.0.0",
@@ -1,5 +1,6 @@
 import expect from 'expect.js';
 import sinon from 'sinon';
+import mockFs from 'mock-fs';
 import Logger from '../../lib/logger';
 import { join } from 'path';
 import rimraf from 'rimraf';
@@ -13,13 +14,15 @@ describe('kibana cli', function () {
   describe('kibana', function () {
     const testWorkingPath = join(__dirname, '.test.data');
     const tempArchiveFilePath = join(testWorkingPath, 'archive.part');
+    const pluginDir = join(__dirname, 'plugins');

     const settings = {
       workingPath: testWorkingPath,
       tempArchiveFile: tempArchiveFilePath,
       plugin: 'test-plugin',
       version: '1.0.0',
-      plugins: [ { name: 'foo', path: join(testWorkingPath, 'foo') } ]
+      plugins: [ { name: 'foo' } ],
+      pluginDir
     };

     const logger = new Logger(settings);
@@ -130,13 +133,10 @@ describe('kibana cli', function () {
     });

     describe('existingInstall', function () {
-      let testWorkingPath;
       let processExitStub;

       beforeEach(function () {
         processExitStub = sinon.stub(process, 'exit');
-        testWorkingPath = join(__dirname, '.test.data');
-        rimraf.sync(testWorkingPath);
         sinon.stub(logger, 'log');
         sinon.stub(logger, 'error');
       });
@@ -145,26 +145,23 @@ describe('kibana cli', function () {
         processExitStub.restore();
         logger.log.restore();
         logger.error.restore();
-        rimraf.sync(testWorkingPath);
       });

-      it('should throw an error if the workingPath already exists.', function () {
-        mkdirp.sync(settings.plugins[0].path);
-        existingInstall(settings, logger);
+      it('should throw an error if the plugin already exists.', function () {
+        mockFs({ [`${pluginDir}/foo`]: {} });
+
+        existingInstall(settings, logger);
         expect(logger.error.firstCall.args[0]).to.match(/already exists/);
         expect(process.exit.called).to.be(true);
+
+        mockFs.restore();
       });

-      it('should not throw an error if the workingPath does not exist.', function () {
+      it('should not throw an error if the plugin does not exist.', function () {
         existingInstall(settings, logger);
         expect(logger.error.called).to.be(false);
       });

     });

   });

 });

 });
@@ -98,10 +98,9 @@ describe('kibana cli', function () {
       })
       .then(() => {
         expect(settings.plugins[0].name).to.be('test-plugin');
-        expect(settings.plugins[0].folder).to.be('test-plugin');
+        expect(settings.plugins[0].archivePath).to.be('kibana/test-plugin');
         expect(settings.plugins[0].version).to.be('1.0.0');
+        expect(settings.plugins[0].kibanaVersion).to.be('1.0.0');
-        expect(settings.plugins[0].platform).to.be(undefined);
       });
     });

@@ -134,40 +133,28 @@ describe('kibana cli', function () {
       })
       .then(() => {
         expect(settings.plugins[0].name).to.be('funger-plugin');
-        expect(settings.plugins[0].file).to.be('kibana/funger-plugin/package.json');
-        expect(settings.plugins[0].folder).to.be('funger-plugin');
+        expect(settings.plugins[0].archivePath).to.be('kibana/funger-plugin');
         expect(settings.plugins[0].version).to.be('1.0.0');
-        expect(settings.plugins[0].platform).to.be(undefined);

         expect(settings.plugins[1].name).to.be('pdf');
-        expect(settings.plugins[1].file).to.be('kibana/pdf-linux/package.json');
-        expect(settings.plugins[1].folder).to.be('pdf-linux');
+        expect(settings.plugins[1].archivePath).to.be('kibana/pdf-linux');
         expect(settings.plugins[1].version).to.be('1.0.0');
-        expect(settings.plugins[1].platform).to.be('linux');

         expect(settings.plugins[2].name).to.be('pdf');
-        expect(settings.plugins[2].file).to.be('kibana/pdf-win32/package.json');
-        expect(settings.plugins[2].folder).to.be('pdf-win32');
+        expect(settings.plugins[2].archivePath).to.be('kibana/pdf-win32');
         expect(settings.plugins[2].version).to.be('1.0.0');
-        expect(settings.plugins[2].platform).to.be('win32');

         expect(settings.plugins[3].name).to.be('pdf');
-        expect(settings.plugins[3].file).to.be('kibana/pdf-win64/package.json');
-        expect(settings.plugins[3].folder).to.be('pdf-win64');
+        expect(settings.plugins[3].archivePath).to.be('kibana/pdf-win64');
         expect(settings.plugins[3].version).to.be('1.0.0');
-        expect(settings.plugins[3].platform).to.be('win64');

         expect(settings.plugins[4].name).to.be('pdf');
-        expect(settings.plugins[4].file).to.be('kibana/pdf/package.json');
-        expect(settings.plugins[4].folder).to.be('pdf');
+        expect(settings.plugins[4].archivePath).to.be('kibana/pdf');
         expect(settings.plugins[4].version).to.be('1.0.0');
-        expect(settings.plugins[4].platform).to.be(undefined);

         expect(settings.plugins[5].name).to.be('test-plugin');
-        expect(settings.plugins[5].file).to.be('kibana/test-plugin/package.json');
-        expect(settings.plugins[5].folder).to.be('test-plugin');
+        expect(settings.plugins[5].archivePath).to.be('kibana/test-plugin');
         expect(settings.plugins[5].version).to.be('1.0.0');
-        expect(settings.plugins[5].platform).to.be(undefined);
       });
     });
Binary file not shown.
@@ -1,207 +1,67 @@
 import expect from 'expect.js';
-import sinon from 'sinon';
-import glob from 'glob-all';
 import rimraf from 'rimraf';
-import mkdirp from 'mkdirp';
-import Logger from '../../lib/logger';
-import { _downloadSingle } from '../download';
-import { join } from 'path';
-import { listFiles, extractFiles } from '../zip';
+import path from 'path';
+import os from 'os';
+import glob from 'glob';
+import { analyzeArchive, extractArchive } from '../zip';

 describe('kibana cli', function () {

   describe('zip', function () {
+    const repliesPath = path.resolve(__dirname, './replies');
+    const archivePath = path.resolve(repliesPath, 'test_plugin.zip');

-    const testWorkingPath = join(__dirname, '.test.data');
-    const tempArchiveFilePath = join(testWorkingPath, 'archive.part');
-    let logger;
+    let tempPath;

-    const settings = {
-      workingPath: testWorkingPath,
-      tempArchiveFile: tempArchiveFilePath,
-      plugin: 'test-plugin',
-      setPlugin: function () {}
-    };
-
-    function shouldReject() {
-      throw new Error('expected the promise to reject');
-    }
-
-    beforeEach(function () {
-      logger = new Logger(settings);
-      sinon.stub(logger, 'log');
-      sinon.stub(logger, 'error');
-      sinon.stub(settings, 'setPlugin');
-      rimraf.sync(testWorkingPath);
-      mkdirp.sync(testWorkingPath);
+    beforeEach(() => {
+      const randomDir = Math.random().toString(36);
+      tempPath = path.resolve(os.tmpdir(), randomDir);
     });

-    afterEach(function () {
-      logger.log.restore();
-      logger.error.restore();
-      settings.setPlugin.restore();
-      rimraf.sync(testWorkingPath);
+    afterEach(() => {
+      rimraf.sync(tempPath);
     });

-    function copyReplyFile(filename) {
-      const filePath = join(__dirname, 'replies', filename);
-      const sourceUrl = 'file://' + filePath.replace(/\\/g, '/');
+    describe('analyzeArchive', function () {
+      it('returns array of plugins', async () => {
+        const packages = await analyzeArchive(archivePath);
+        const plugin = packages[0];

-      return _downloadSingle(settings, logger, sourceUrl);
-    }
-
-    describe('listFiles', function () {
-
-      it('lists the files in the zip', function () {
-        return copyReplyFile('test_plugin.zip')
-        .then(() => {
-          return listFiles(settings.tempArchiveFile);
-        })
-        .then((actual) => {
-          const expected = [
-            'elasticsearch/',
-            'kibana/',
-            'kibana/test-plugin/',
-            'kibana/test-plugin/.gitignore',
-            'kibana/test-plugin/extra file only in zip.txt',
-            'kibana/test-plugin/index.js',
-            'kibana/test-plugin/package.json',
-            'kibana/test-plugin/public/',
-            'kibana/test-plugin/public/app.js',
-            'kibana/test-plugin/README.md',
-            'logstash/'
-          ];
-
-          expect(actual).to.eql(expected);
-        });
-      });
-
-    });
-
-    describe('extractFiles', function () {
-
-      describe('strip files parameter', function () {
-
-        it('strips specified number of directories', function () {
-
-          return copyReplyFile('strip_test.zip')
-          .then(() => {
-            return extractFiles(settings.tempArchiveFile, settings.workingPath, 1);
-          })
-          .then(() => {
-            const files = glob.sync('**/*', { cwd: testWorkingPath });
-            const expected = [
-              '1 level deep.txt',
-              'test-plugin',
-              'test-plugin/2 levels deep.txt',
-              'test-plugin/public',
-              'test-plugin/public/3 levels deep.txt',
-              'archive.part'
-            ];
-            expect(files.sort()).to.eql(expected.sort());
-          });
-
-        });
-
-        it('throws an exception if it tries to strip too many directories', function () {
-
-          return copyReplyFile('strip_test.zip')
-          .then(() => {
-            return extractFiles(settings.tempArchiveFile, settings.workingPath, 2);
-          })
-          .then(shouldReject, (err) => {
-            expect(err.message).to.match(/You cannot strip more levels than there are directories/i);
-          });
-
-        });
-
-        it('applies the filter before applying the strip directories logic', function () {
-
-          return copyReplyFile('strip_test.zip')
-          .then(() => {
-            const filter = {
-              paths: [
-                'test-plugin'
-              ]
-            };
-
-            return extractFiles(settings.tempArchiveFile, settings.workingPath, 2, filter);
-          })
-          .then(() => {
-            const files = glob.sync('**/*', { cwd: testWorkingPath });
-            const expected = [
-              '2 levels deep.txt',
-              'public',
-              'public/3 levels deep.txt',
-              'archive.part'
-            ];
-            expect(files.sort()).to.eql(expected.sort());
-          });
-
-        });
-
-      });
-
-      it('extracts files using the files filter', function () {
-        return copyReplyFile('test_plugin_many.zip')
-        .then(() => {
-          const filter = {
-            files: [
-              'kibana/funger-plugin/extra file only in zip.txt',
-              'kibana/funger-plugin/index.js',
-              'kibana\\funger-plugin\\package.json'
-            ]
-          };
-
-          return extractFiles(settings.tempArchiveFile, settings.workingPath, 0, filter);
-        })
-        .then(() => {
-          const files = glob.sync('**/*', { cwd: testWorkingPath });
-          const expected = [
-            'kibana',
-            'kibana/funger-plugin',
-            'kibana/funger-plugin/extra file only in zip.txt',
-            'kibana/funger-plugin/index.js',
-            'kibana/funger-plugin/package.json',
-            'archive.part'
-          ];
-          expect(files.sort()).to.eql(expected.sort());
-        });
-      });
-
-      it('extracts files using the paths filter', function () {
-        return copyReplyFile('test_plugin_many.zip')
-        .then(() => {
-          const filter = {
-            paths: [
-              'kibana/funger-plugin',
-              'kibana/test-plugin/public'
-            ]
-          };
-
-          return extractFiles(settings.tempArchiveFile, settings.workingPath, 0, filter);
-        })
-        .then(() => {
-          const files = glob.sync('**/*', { cwd: testWorkingPath });
-          const expected = [
-            'archive.part',
-            'kibana',
-            'kibana/funger-plugin',
-            'kibana/funger-plugin/README.md',
-            'kibana/funger-plugin/extra file only in zip.txt',
-            'kibana/funger-plugin/index.js',
-            'kibana/funger-plugin/package.json',
-            'kibana/funger-plugin/public',
-            'kibana/funger-plugin/public/app.js',
-            'kibana/test-plugin',
-            'kibana/test-plugin/public',
-            'kibana/test-plugin/public/app.js'
-          ];
-          expect(files.sort()).to.eql(expected.sort());
-        });
+        expect(packages).to.be.an(Array);
+        expect(plugin.name).to.be('test-plugin');
+        expect(plugin.archivePath).to.be('kibana/test-plugin');
+        expect(plugin.archive).to.be(archivePath);
+        expect(plugin.kibanaVersion).to.be('1.0.0');
       });
     });

+    describe('extractArchive', () => {
+      it('extracts files using the extractPath filter', async () => {
+        const archive = path.resolve(repliesPath, 'test_plugin_many.zip');
+
+        await extractArchive(archive, tempPath, 'kibana/test-plugin');
+        const files = await glob.sync('**/*', { cwd: tempPath });
+
+        const expected = [
+          'extra file only in zip.txt',
+          'index.js',
+          'package.json',
+          'public',
+          'public/app.js',
+          'README.md'
+        ];
+        expect(files.sort()).to.eql(expected.sort());
+      });
+    });
+
+    it('handles a corrupt zip archive', async (done) => {
+      try {
+        await extractArchive(path.resolve(repliesPath, 'corrupt.zip'));
+        done(false);
+      } catch(e) {
+        done();
+      }
+    });
   });

 });
@@ -1,5 +1,6 @@
 import { download } from './download';
 import Promise from 'bluebird';
+import path from 'path';
 import { cleanPrevious, cleanArtifacts } from './cleanup';
 import { extract, getPackData } from './pack';
 import { renamePlugin } from './rename';
@@ -27,7 +28,7 @@ export default async function install(settings, logger) {

   assertVersion(settings);

-  await renamePlugin(settings.workingPath, settings.plugins[0].path);
+  await renamePlugin(settings.workingPath, path.join(settings.pluginDir, settings.plugins[0].name));

   await rebuildCache(settings, logger);
@@ -1,4 +1,5 @@
 import _ from 'lodash';
+import path from 'path';
 import { fromRoot } from '../../utils';
 import KbnServer from '../../server/kbn_server';
 import readYamlConfig from '../../cli/serve/read_yaml_config';
@@ -7,7 +8,7 @@ import { statSync } from 'fs';

 export function existingInstall(settings, logger) {
   try {
-    statSync(settings.plugins[0].path);
+    statSync(path.join(settings.pluginDir, settings.plugins[0].name));

     logger.error(`Plugin ${settings.plugins[0].name} already exists, please remove before installing a new version`);
     process.exit(70); // eslint-disable-line no-process-exit
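The existence check above leans on fs.statSync throwing when a path is missing: reaching the lines after the call means the plugin directory is already there. A minimal sketch of that pattern (the helper name is illustrative, not part of the codebase):

    import { statSync } from 'fs';

    // Returns true when `p` exists; statSync throws (e.g. ENOENT) when it does not.
    function pathExists(p) {
      try {
        statSync(p);
        return true;
      } catch (err) {
        return false;
      }
    }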
@@ -1,51 +1,10 @@
-import _ from 'lodash';
-import { listFiles, extractFiles } from './zip';
-import { resolve } from 'path';
-import { sync as rimrafSync } from 'rimraf';
+import { analyzeArchive, extractArchive } from './zip';
 import validate from 'validate-npm-package-name';

-/**
- * Returns an array of package objects. There will be one for each of
- * package.json files in the archive
- * @param {object} settings - a plugin installer settings object
- */
-async function listPackages(settings) {
-  const regExp = new RegExp('(kibana/([^/]+))/package.json', 'i');
-  const archiveFiles = await listFiles(settings.tempArchiveFile);
-
-  return _(archiveFiles)
-  .map(file => file.replace(/\\/g, '/'))
-  .map(file => file.match(regExp))
-  .compact()
-  .map(([ file, , folder ]) => ({ file, folder }))
-  .uniq()
-  .value();
-}
-
-/**
- * Extracts the package.json files into the workingPath
- * @param {object} settings - a plugin installer settings object
- * @param {array} packages - array of package objects from listPackages()
- */
-async function extractPackageFiles(settings, packages) {
-  const filter = {
-    files: packages.map((pkg) => pkg.file)
-  };
-  await extractFiles(settings.tempArchiveFile, settings.workingPath, 0, filter);
-}
-
-/**
- * Deletes the package.json files created by extractPackageFiles()
- * @param {object} settings - a plugin installer settings object
- */
-function deletePackageFiles(settings) {
-  const fullPath = resolve(settings.workingPath, 'kibana');
-  rimrafSync(fullPath);
-}
-
 /**
  * Checks the plugin name. Will throw an exception if it does not meet
  * npm package naming conventions
  *
  * @param {object} plugin - a package object from listPackages()
  */
 function assertValidPackageName(plugin) {
@@ -55,89 +14,47 @@ function assertValidPackageName(plugin) {
   }
 }

-/**
- * Examine each package.json file to determine the plugin name,
- * version, kibanaVersion, and platform. Mutates the package objects
- * in the packages array
- * @param {object} settings - a plugin installer settings object
- * @param {array} packages - array of package objects from listPackages()
- */
-async function mergePackageData(settings, packages) {
-  return packages.map((pkg) => {
-    const fullPath = resolve(settings.workingPath, pkg.file);
-    const packageInfo = require(fullPath);
-
-    pkg.version = _.get(packageInfo, 'version');
-    pkg.name = _.get(packageInfo, 'name');
-    pkg.path = resolve(settings.pluginDir, pkg.name);
-
-    // Plugins must specify their version, and by default that version should match
-    // the version of kibana down to the patch level. If these two versions need
-    // to diverge, they can specify a kibana.version to indicate the version of
-    // kibana the plugin is intended to work with.
-    pkg.kibanaVersion = _.get(packageInfo, 'kibana.version', pkg.version);
-
-    const regExp = new RegExp(`${pkg.name}-(.+)`, 'i');
-    const matches = pkg.folder.match(regExp);
-    pkg.platform = (matches) ? matches[1] : undefined;
-
-    return pkg;
-  });
-}
-
-/**
- * Extracts the first plugin in the archive.
- * NOTE: This will need to be changed in later versions of the pack installer
- * that allow for the installation of more than one plugin at once.
- * @param {object} settings - a plugin installer settings object
- */
-async function extractArchive(settings) {
-  const filter = {
-    paths: [ `kibana/${settings.plugins[0].folder}` ]
-  };
-
-  await extractFiles(settings.tempArchiveFile, settings.workingPath, 2, filter);
-}
-
 /**
  * Returns the detailed information about each kibana plugin in the pack.
  * TODO: If there are platform specific folders, determine which one to use.
  *
  * @param {object} settings - a plugin installer settings object
  * @param {object} logger - a plugin installer logger object
  */
 export async function getPackData(settings, logger) {
-  let packages;
+  let packages = [];
+  logger.log('Retrieving metadata from plugin archive');
   try {
-    logger.log('Retrieving metadata from plugin archive');
-
-    packages = await listPackages(settings);
-
-    await extractPackageFiles(settings, packages);
-    await mergePackageData(settings, packages);
-    await deletePackageFiles(settings);
+    packages = await analyzeArchive(settings.tempArchiveFile);
   } catch (err) {
-    logger.error(err);
+    logger.error(err.stack);
     throw new Error('Error retrieving metadata from plugin archive');
   }

+  if (packages.length === 0) {
+    throw new Error('No kibana plugins found in archive');
+  }
+  packages.forEach(assertValidPackageName);

-  packages.forEach(assertValidPackageName);
   settings.plugins = packages;
 }

+/**
+ * Extracts files from a zip archive to a file path using a filter function
+ *
+ * @param {string} archive - file path to a zip archive
+ * @param {string} targetDir - directory path to where the files should
+ * extracted
+ */
 export async function extract(settings, logger) {
   try {
+    const plugin = settings.plugins[0];
+
     logger.log('Extracting plugin archive');
-
-    await extractArchive(settings);
-
+    await extractArchive(settings.tempArchiveFile, settings.workingPath, plugin.archivePath);
     logger.log('Extraction complete');
   } catch (err) {
-    logger.error(err);
+    logger.error(err.stack);
     throw new Error('Error extracting plugin archive');
   }
 }
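As the comments above note, a plugin's own version doubles as its Kibana compatibility version unless it sets kibana.version explicitly. A hypothetical manifest illustrating the fallback, written as a JS object for brevity (names and versions are invented):

    import { get } from 'lodash';

    // What a plugin's package.json might contain:
    const pkg = {
      name: 'my-plugin',
      version: '2.1.0',
      kibana: { version: '1.0.0' }
    };

    // get(pkg, 'kibana.version', pkg.version) === '1.0.0'; the override wins.
    // Without the kibana block, it would fall back to pkg.version, '2.1.0'.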
@@ -1,98 +1,122 @@
-import _ from 'lodash';
-import DecompressZip from '@bigfunger/decompress-zip';
-
-const SYMBOLIC_LINK = 'SymbolicLink';
+import yauzl from 'yauzl';
+import path from 'path';
+import mkdirp from 'mkdirp';
+import { createWriteStream } from 'fs';
+import { get } from 'lodash';

 /**
- * Creates a filter function to be consumed by extractFiles that filters by
- * an array of files
- * @param {array} files - an array of full file paths to extract. Should match
- * exactly a value from listFiles
+ * Returns an array of package objects. There will be one for each of
+ * package.json files in the archive
+ *
+ * @param {string} archive - path to plugin archive zip file
  */
-function extractFilterFromFiles(files) {
-  const filterFiles = files.map((file) => file.replace(/\\/g, '/'));
-  return function filterByFiles(file) {
-    if (file.type === SYMBOLIC_LINK) return false;
-
-    const path = file.path.replace(/\\/g, '/');
-    return _.includes(filterFiles, path);
-  };
-}
+export function analyzeArchive(archive) {
+  const plugins = [];
+  const regExp = new RegExp('(kibana[\\\\/][^\\\\/]+)[\\\\/]package\\.json', 'i');

-/**
- * Creates a filter function to be consumed by extractFiles that filters by
- * an array of root paths
- * @param {array} paths - an array of root paths from the archive. All files and
- * folders will be extracted recursively using these paths as roots.
- */
-function extractFilterFromPaths(paths) {
-  return function filterByRootPath(file) {
-    if (file.type === SYMBOLIC_LINK) return false;
+  return new Promise((resolve, reject) => {
+    yauzl.open(archive, { lazyEntries: true }, function (err, zipfile) {
+      if (err) {
+        return reject(err);
+      }

-    return paths.some(path => {
-      const regex = new RegExp(`${path}($|/)`, 'i');
-      return file.parent.match(regex);
+      zipfile.readEntry();
+      zipfile.on('entry', function (entry) {
+        const match = entry.fileName.match(regExp);
+
+        if (!match) {
+          return zipfile.readEntry();
+        }
+
+        zipfile.openReadStream(entry, function (err, readable) {
+          const chunks = [];
+
+          if (err) {
+            return reject(err);
+          }
+
+          readable.on('data', chunk => chunks.push(chunk));
+
+          readable.on('end', function () {
+            const contents = Buffer.concat(chunks).toString();
+            const pkg = JSON.parse(contents);
+
+            plugins.push(Object.assign(pkg, {
+              archivePath: match[1],
+              archive: archive,
+
+              // Plugins must specify their version, and by default that version should match
+              // the version of kibana down to the patch level. If these two versions need
+              // to diverge, they can specify a kibana.version to indicate the version of
+              // kibana the plugin is intended to work with.
+              kibanaVersion: get(pkg, 'kibana.version', pkg.version)
+            }));
+
+            zipfile.readEntry();
+          });
+        });
+      });
+
+      zipfile.on('close', () => {
+        resolve(plugins);
+      });
     });
-  };
+  });
 }

-/**
- * Creates a filter function to be consumed by extractFiles
- * @param {object} filter - an object with either a files or paths property.
- */
-function extractFilter(filter) {
-  if (filter.files) return extractFilterFromFiles(filter.files);
-  if (filter.paths) return extractFilterFromPaths(filter.paths);
-  return _.noop;
-}
-
-/**
- * Extracts files from a zip archive to a file path using a filter function
- * @param {string} zipPath - file path to a zip archive
- * @param {string} targetPath - directory path to where the files should
- * extracted
- * @param {integer} strip - Number of nested directories within the archive
- * that should be ignored when determining the target path of an archived
- * file.
- * @param {function} filter - A function that accepts a single parameter 'file'
- * and returns true if the file should be extracted from the archive
- */
-export async function extractFiles(zipPath, targetPath, strip, filter) {
-  await new Promise((resolve, reject) => {
-    const unzipper = new DecompressZip(zipPath);
-
-    unzipper.on('error', reject);
-
-    const options = {
-      path: targetPath,
-      strip: strip
-    };
-    if (filter) {
-      options.filter = extractFilter(filter);
-    }
-
-    unzipper.extract(options);
-
-    unzipper.on('extract', resolve);
-  });
-}
-
-/**
- * Returns all files within an archive
- * @param {string} zipPath - file path to a zip archive
- * @returns {array} all files within an archive with their relative paths
- */
-export async function listFiles(zipPath) {
-  return await new Promise((resolve, reject) => {
-    const unzipper = new DecompressZip(zipPath);
+export function extractArchive(archive, targetDir, extractPath) {
+  return new Promise((resolve, reject) => {
+    yauzl.open(archive, { lazyEntries: true }, function (err, zipfile) {
+      if (err) {
+        return reject(err);
+      }

-    unzipper.on('error', reject);
+      zipfile.readEntry();
+      zipfile.on('close', resolve);
+      zipfile.on('entry', function (entry) {
+        let fileName = entry.fileName;

-    unzipper.on('list', (files) => {
-      files = files.map((file) => file.replace(/\\/g, '/'));
-      resolve(files);
+        if (extractPath && fileName.startsWith(extractPath)) {
+          fileName = fileName.substring(extractPath.length);
+        } else {
+          return zipfile.readEntry();
+        }
+
+        if (targetDir) {
+          fileName = path.join(targetDir, fileName);
+        }
+
+        if (/\/$/.test(entry.fileName)) {
+          // directory file names end with '/'
+          mkdirp(fileName, function (err) {
+            if (err) {
+              return reject(err);
+            }
+
+            zipfile.readEntry();
+          });
+        } else {
+          // file entry
+          zipfile.openReadStream(entry, function (err, readStream) {
+            if (err) {
+              return reject(err);
+            }
+
+            // ensure parent directory exists
+            mkdirp(path.dirname(fileName), function (err) {
+              if (err) {
+                return reject(err);
+              }
+
+              readStream.pipe(createWriteStream(fileName));
+              readStream.on('end', function () {
+                zipfile.readEntry();
+              });
+            });
+          });
+        }
+      });
+    });

-    unzipper.list();
   });
 }
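Taken together, the two new exports cover the install flow end to end: analyzeArchive answers "what plugins are in this zip?" entirely in memory, and extractArchive writes only the chosen plugin's subtree to disk. A small usage sketch with the signatures from the diff above (paths and the import specifier are illustrative):

    import { analyzeArchive, extractArchive } from './zip';

    async function inspectAndExtract() {
      // One package object per kibana/<name>/package.json found in the zip.
      const plugins = await analyzeArchive('/tmp/my-plugin.zip');
      console.log(plugins.map((p) => `${p.name}@${p.version} (${p.archivePath})`));

      // Extract only the first plugin's folder, stripping the archive prefix.
      await extractArchive('/tmp/my-plugin.zip', '/tmp/extracted', plugins[0].archivePath);
    }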