Mirror of https://github.com/elastic/kibana.git (synced 2025-04-23 17:28:26 -04:00)

Commit 927f85b428: Merge branch 'master' of github.com:elastic/kibana into implement/clusterWorkerListening

846 changed files with 7726 additions and 5180 deletions

.gitignore (vendored, 1 change)
@@ -1,4 +1,5 @@
 .aws-config.json
 .ackrc
 .DS_Store
+.node_binaries
 node_modules

CONTRIBUTING.md

@@ -55,7 +55,7 @@ Please make sure you have signed the [Contributor License Agreement](http://www.
 npm run elasticsearch
 ```
 
-- Start the development server.
+- Start the development server. _On Windows, you'll need to use Git Bash, Cygwin, or a similar shell that exposes the `sh` command._
 
 ```sh
 npm start
@@ -128,7 +128,7 @@ Runs both server and browser tests, but skips linting
 Run only the server tests
 
 `npm run test:browser`
 
-Run only the browser tests
+Run only the browser tests. Coverage reports are available for browser tests by running `npm run test:coverage`. You can find the results under the `coverage/` directory that will be created upon completion.
 
 `npm run test:dev`
 
 Initializes an environment for debugging the browser tests. Includes a dedicated instance of the Kibana server for building the test bundle, and a Karma server. When running this task the build is optimized for the first time and then a Karma-owned instance of the browser is opened. Click the "debug" button to open a new tab that executes the unit tests.
@@ -146,7 +146,8 @@ Run the tests for just your particular plugin. Assuming your plugin lives outside
 
 #### Running browser automation tests:
 
-*The Selenium server that is started currently only runs the tests in Firefox*
+*The Selenium server that is started currently only runs the tests in a recent version of Firefox.*
+*You can use the `PATH` environment variable to specify which version of Firefox to use.*
 
 The following will start Kibana, Elasticsearch and Selenium for you. To run the functional UI tests use the following commands
@@ -177,7 +178,7 @@ npm run test:ui:runner
 - These tests have been developed and tested with the Chrome and Firefox browsers. In theory, they should work on all browsers (that's the benefit of Intern using Leadfoot).
 - These tests should also work with an external testing service like https://saucelabs.com/ or https://www.browserstack.com/ but that has not been tested.
 - https://theintern.github.io/
-- https://theintern.github.io/leadfoot/Element.html
+- https://theintern.github.io/leadfoot/module-leadfoot_Element.html
 
 #### Building OS packages
@@ -218,7 +219,7 @@ So, you've been assigned a pull to review. What's that look like?
 
 Remember, someone is blocked by a pull awaiting review; make it count. Be thorough: the more action items you catch in the first review, the less back and forth will be required, and the better chance the pull has of being successful. Don't you like success?
 
-1. **Understand the issue** that is being fixed, or the feature being added. Check the description on the pull, and check out the related issue. If you don't understand something, ask the person the submitter for clarification.
+1. **Understand the issue** that is being fixed, or the feature being added. Check the description on the pull, and check out the related issue. If you don't understand something, ask the submitter for clarification.
 1. **Reproduce the bug** (or the lack of feature I guess?) in the destination branch, usually `master`. The referenced issue will help you here. If you're unable to reproduce the issue, contact the issue submitter for clarification.
 1. **Check out the pull** and test it. Is the issue fixed? Does it have nasty side effects? Try to create suspect inputs. If it operates on the value of a field try things like: strings (including an empty string), null, numbers, dates. Try to think of edge cases that might break the code.
 1. **Merge the target branch**. It is possible that tests or the linter have been updated in the target branch since the pull was submitted. Merging the pull could cause core to start failing.

Gruntfile.js

@@ -1,10 +1,11 @@
-require('babel/register')(require('./src/optimize/babelOptions').node);
+const camelCase = require('lodash').camelCase;
+require('babel/register')(require('./src/optimize/babel_options').node);
 
 module.exports = function (grunt) {
   // set the config once before calling load-grunt-config
   // and once during so that we have access to it via
   // grunt.config.get() within the config files
-  var config = {
+  const config = {
     pkg: grunt.file.readJSON('package.json'),
     root: __dirname,
     src: __dirname + '/src',
@@ -12,7 +13,7 @@ module.exports = function (grunt) {
     plugins: __dirname + '/src/plugins',
     server: __dirname + '/src/server',
     target: __dirname + '/target', // location of the compressed build targets
-    testUtilsDir: __dirname + '/src/testUtils',
+    testUtilsDir: __dirname + '/src/test_utils',
     configFile: __dirname + '/src/config/kibana.yml',
 
     karmaBrowser: (function () {
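
The comment block above explains why the config object is registered twice. A minimal sketch of that pattern, assuming load-grunt-config's `configPath`/`data` options (an assumption here, not taken from this diff), could look like:

```js
module.exports = function (grunt) {
  const config = {
    pkg: grunt.file.readJSON('package.json'),
    root: __dirname
  };

  // First registration: values are resolvable via grunt.config.get() immediately.
  grunt.config.merge(config);

  // Second registration: hand the same object to load-grunt-config so the
  // per-task config files it loads can template against it.
  require('load-grunt-config')(grunt, {
    configPath: __dirname + '/tasks/config', // hypothetical location
    data: config
  });
};
```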

bin/kibana-plugin (new executable file, 24 lines added)

@@ -0,0 +1,24 @@
#!/bin/sh
SCRIPT=$0

# SCRIPT may be an arbitrarily deep series of symlinks. Loop until we have the concrete path.
while [ -h "$SCRIPT" ] ; do
  ls=$(ls -ld "$SCRIPT")
  # Drop everything prior to ->
  link=$(expr "$ls" : '.*-> \(.*\)$')
  if expr "$link" : '/.*' > /dev/null; then
    SCRIPT="$link"
  else
    SCRIPT=$(dirname "$SCRIPT")/"$link"
  fi
done

DIR="$(dirname "${SCRIPT}")/.."
NODE="${DIR}/node/bin/node"
test -x "$NODE" || NODE=$(which node)
if [ ! -x "$NODE" ]; then
  echo "unable to find usable node.js executable."
  exit 1
fi

exec "${NODE}" $NODE_OPTIONS "${DIR}/src/cli_plugin" ${@}
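
For comparison, the symlink loop above does by hand what Node's `fs.realpathSync` does in one call; a rough JavaScript equivalent (variable names illustrative only):

```js
const fs = require('fs');
const path = require('path');

// Resolve an arbitrarily deep chain of symlinks to the concrete script path,
// then locate the Kibana root one directory above bin/.
const realScript = fs.realpathSync(process.argv[1]);
const kibanaRoot = path.resolve(path.dirname(realScript), '..');
const bundledNode = path.join(kibanaRoot, 'node', 'bin', 'node');
```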

bin/kibana-plugin.bat (new file, 29 lines added)

@@ -0,0 +1,29 @@
@echo off

SETLOCAL

set SCRIPT_DIR=%~dp0
for %%I in ("%SCRIPT_DIR%..") do set DIR=%%~dpfI

set NODE=%DIR%\node\node.exe

WHERE /Q node
IF %ERRORLEVEL% EQU 0 (
  for /f "delims=" %%i in ('WHERE node') do set SYS_NODE=%%i
)

If Not Exist "%NODE%" (
  IF Exist "%SYS_NODE%" (
    set "NODE=%SYS_NODE%"
  ) else (
    Echo unable to find usable node.js executable.
    Exit /B 1
  )
)

TITLE Kibana Server

"%NODE%" %NODE_OPTIONS% "%DIR%\src\cli_plugin" %*

:finally

ENDLOCAL

config/kibana.yml

@@ -11,6 +11,9 @@
 # The maximum payload size in bytes for incoming server requests.
 # server.maxPayloadBytes: 1048576
 
+# The Kibana server's name. This is used for display purposes.
+# server.name: "your-hostname"
+
 # The URL of the Elasticsearch instance to use for all your queries.
 # elasticsearch.url: "http://localhost:9200"

@@ -1,6 +1,8 @@
 [[kibana-settings-reference]]
 
-WARNING: Modifying the following settings can significantly affect Kibana's performance and cause problems that are difficult to diagnose. Setting a property's value to a blank field will revert to the default behavior, which may not be compatible with other configuration settings. Deleting a custom setting removes it from Kibana permanently.
+WARNING: Modifying the following settings can significantly affect Kibana's performance and cause problems that are
+difficult to diagnose. Setting a property's value to a blank field will revert to the default behavior, which may not be
+compatible with other configuration settings. Deleting a custom setting removes it from Kibana permanently.
 
 .Kibana Settings Reference
 [horizontal]
@@ -8,30 +10,43 @@ WARNING: Modifying the following settings can significantly affect Kibana's perfo
 `sort:options`:: Options for the Elasticsearch https://www.elastic.co/guide/en/elasticsearch/reference/current/search-request-sort.html[sort] parameter.
 `dateFormat`:: The format to use for displaying pretty-formatted dates.
 `dateFormat:tz`:: The timezone that Kibana uses. The default value of `Browser` uses the timezone detected by the browser.
-`dateFormat:scaled`:: These values define the format used to render ordered time-based data. Formatted timestamps must adapt to the interval between measurements. Keys are http://en.wikipedia.org/wiki/ISO_8601#Time_intervals[ISO8601 intervals].
+`dateFormat:scaled`:: These values define the format used to render ordered time-based data. Formatted timestamps must
+adapt to the interval between measurements. Keys are http://en.wikipedia.org/wiki/ISO_8601#Time_intervals[ISO8601 intervals].
 `defaultIndex`:: Default is `null`. This property specifies the default index.
-`metaFields`:: An array of fields outside of `_source`. Kibana merges these fields into the document when displaying the document.
+`metaFields`:: An array of fields outside of `_source`. Kibana merges these fields into the document when displaying the
+document.
 `discover:sampleSize`:: The number of rows to show in the Discover table.
-`doc_table:highlight`:: Highlight results in Discover and Saved Searches Dashboard. Highlighting makes requests slow when working on big documents. Set this property to `false` to disable highlighting.
-`courier:maxSegmentCount`:: Kibana splits requests in the Discover app into segments to limit the size of requests sent to the Elasticsearch cluster. This setting constrains the length of the segment list. Long segment lists can significantly increase request processing time.
+`doc_table:highlight`:: Highlight results in Discover and Saved Searches Dashboard. Highlighting makes requests slow when
+working on big documents. Set this property to `false` to disable highlighting.
+`courier:maxSegmentCount`:: Kibana splits requests in the Discover app into segments to limit the size of requests sent to
+the Elasticsearch cluster. This setting constrains the length of the segment list. Long segment lists can significantly
+increase request processing time.
 `fields:popularLimit`:: This setting governs how many of the top most popular fields are shown.
 `histogram:barTarget`:: When date histograms use the `auto` interval, Kibana attempts to generate this number of bars.
-`histogram:maxBars`:: Date histograms are not generated with more bars than the value of this property, scaling values when necessary.
-`visualization:tileMap:maxPrecision`:: The maximum geoHash precision displayed on tile maps: 7 is high, 10 is very high, 12 is the maximum. http://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-geohashgrid-aggregation.html#_cell_dimensions_at_the_equator[Explanation of cell dimensions].
+`histogram:maxBars`:: Date histograms are not generated with more bars than the value of this property, scaling values
+when necessary.
+`visualization:tileMap:maxPrecision`:: The maximum geoHash precision displayed on tile maps: 7 is high, 10 is very high,
+12 is the maximum. http://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-geohashgrid-aggregation.html#_cell_dimensions_at_the_equator[Explanation of cell dimensions].
 `visualization:tileMap:WMSdefaults`:: Default properties for the WMS map server support in the tile map.
 `visualization:colorMapping`:: Maps values to specified colors within visualizations.
 `visualization:loadingDelay`:: Time to wait before dimming visualizations during query.
 `csv:separator`:: A string that serves as the separator for exported values.
 `csv:quoteValues`:: Set this property to `true` to quote exported values.
-`history:limit`:: In fields that have history, such as query inputs, the value of this property limits how many recent values are shown.
-`shortDots:enable`:: Set this property to `true` to shorten long field names in visualizations. For example, instead of `foo.bar.baz`, show `f.b.baz`.
-`truncate:maxHeight`:: This property specifies the maximum height that a cell occupies in a table. A value of 0 disables truncation.
-`indexPattern:fieldMapping:lookBack`:: The value of this property sets the number of recent matching patterns to query the field mapping for index patterns with names that contain timestamps.
-`format:defaultTypeMap`:: A map of the default format name for each field type. Field types that are not explicitly mentioned use "_default_".
+`history:limit`:: In fields that have history, such as query inputs, the value of this property limits how many recent
+values are shown.
+`shortDots:enable`:: Set this property to `true` to shorten long field names in visualizations. For example, instead of
+`foo.bar.baz`, show `f.b.baz`.
+`truncate:maxHeight`:: This property specifies the maximum height that a cell occupies in a table. A value of 0 disables
+truncation.
+`indexPattern:fieldMapping:lookBack`:: The value of this property sets the number of recent matching patterns to query the
+field mapping for index patterns with names that contain timestamps.
+`format:defaultTypeMap`:: A map of the default format name for each field type. Field types that are not explicitly
+mentioned use "_default_".
 `format:number:defaultPattern`:: Default numeral format for the "number" format.
 `format:bytes:defaultPattern`:: Default numeral format for the "bytes" format.
 `format:percent:defaultPattern`:: Default numeral format for the "percent" format.
 `format:currency:defaultPattern`:: Default numeral format for the "currency" format.
 `savedObjects:perPage`:: The number of objects shown on each page of the list of saved objects. The default value is 5.
 `timepicker:timeDefaults`:: The default time filter selection.
 `timepicker:refreshIntervalDefaults`:: The time filter's default refresh interval.
 `dashboard:defaultDarkTheme`:: Set this property to `true` to make new dashboards use the dark theme by default.

@@ -45,6 +45,8 @@ sub-aggregation from the list of types.
 
 You can use the up or down arrows to the right of the aggregation's type to change the aggregation's priority.
 
+Enter a string in the *Custom Label* field to change the display label.
+
 You can click the *Advanced* link to display more customization options for your metrics or bucket aggregation:
 
 *Exclude Pattern*:: Specify a pattern in this field to exclude from the results.

@@ -5,6 +5,8 @@
 :shield: https://www.elastic.co/guide/en/shield/current
 :k4issue: https://github.com/elastic/kibana/issues/
 :k4pull: https://github.com/elastic/kibana/pull/
+:version: 5.0 alpha
+:esversion: 2.3
 
 include::introduction.asciidoc[]

@@ -13,9 +13,10 @@ dashboards that display changes to Elasticsearch queries in real time.
 Setting up Kibana is a snap. You can install Kibana and start exploring your
 Elasticsearch indices in minutes -- no code, no additional infrastructure required.
 
-NOTE: This guide describes how to use Kibana 4.3. For information about what's new
-in Kibana 4.3, see the <<releasenotes, release notes>>.
+NOTE: This guide describes how to use Kibana {version}. For information about what's new
+in Kibana {version}, see the <<releasenotes, release notes>>.
 
+////
 [float]
 [[data-discovery]]
 === Data Discovery and Visualization
@@ -50,6 +51,7 @@ to correlate related information. For example, we could create a dashboard
 that displays several visualizations of the TFL data:
 
 image:images/TFL-Dashboard.jpg[Dashboard]
+////
 
 For more information about creating and sharing visualizations and dashboards, see the <<visualize, Visualize>>
 and <<dashboard, Dashboard>> topics. A complete <<getting-started,tutorial>> covering several aspects of Kibana's

@@ -4,6 +4,7 @@
 A metric visualization displays a single number for each aggregation you select:
 
+include::y-axis-aggs.asciidoc[]
 
 You can click the *Advanced* link to display more customization options:
 
 *JSON Input*:: A text field where you can add specific JSON-formatted properties to merge with the aggregation
@@ -17,7 +18,7 @@ NOTE: In Elasticsearch releases 1.4.3 and later, this functionality requires you
 
 The availability of these options varies depending on the aggregation you choose.
 
-Click the *Options* tab to change the font used to display the metrics.
+Click the *Options* tab to display the font size slider.
 
 [float]
 [[metric-viewing-detailed-information]]

@@ -11,6 +11,8 @@ field. Select a field from the drop-down.
 *Unique Count*:: The {ref}search-aggregations-metrics-cardinality-aggregation.html[_cardinality_] aggregation returns
 the number of unique values in a field. Select a field from the drop-down.
 
+Enter a string in the *Custom Label* field to change the display label.
+
 The _buckets_ aggregations determine what information is being retrieved from your data set.
 
 Before you choose a buckets aggregation, specify if you are splitting slices within a single chart or splitting into
@@ -57,6 +59,8 @@ aggregation's type to change the aggregation's priority.
 
 include::color-picker.asciidoc[]
 
+Enter a string in the *Custom Label* field to change the display label.
+
 You can click the *Advanced* link to display more customization options for your metrics or bucket aggregation:
 
 *Exclude Pattern*:: Specify a pattern in this field to exclude from the results.

@@ -5,6 +5,8 @@ Add-on functionality for Kibana is implemented with plug-in modules. You can use
 command to manage these modules. You can also install a plugin manually by moving the plugin file to the
 `installedPlugins` directory and unpacking the plugin files into a new directory.
 
+A list of existing Kibana plugins is available on https://github.com/elastic/kibana/wiki/Known-Plugins[GitHub].
+
 [float]
 === Installing Plugins
@@ -65,6 +67,11 @@ bin/kibana plugin --remove marvel
 
 You can also remove a plugin manually by deleting the plugin's subdirectory under the `installedPlugins` directory.
 
+[float]
+=== Listing Installed Plugins
+
+Use the `--list` or `-l` option to list the currently installed plugins.
+
 [float]
 === Updating Plugins

@@ -1,11 +1,7 @@
 [[releasenotes]]
-== Kibana 4.4 Release Notes
+== Kibana {version} Release Notes
 
-The 4.4 release of Kibana requires Elasticsearch 2.2 or later.
-
-Using event times to create index names is no longer supported as of this release. Current versions of Elasticsearch
-include sophisticated date parsing APIs that Kibana uses to determine date information, removing the need to specify dates
-in the index pattern name.
+The {version} release of Kibana requires Elasticsearch {esversion} or later.
 
 [float]
 [[enhancements]]

@@ -19,6 +19,8 @@ numeric field. Select a field from the drop-down.
 *Unique Count*:: The {ref}search-aggregations-metrics-cardinality-aggregation.html[_cardinality_] aggregation returns
 the number of unique values in a field. Select a field from the drop-down.
 
+Enter a string in the *Custom Label* field to change the display label.
+
 The _buckets_ aggregations determine what information is being retrieved from your data set.
 
 Before you choose a buckets aggregation, specify if you are splitting the chart or displaying the buckets as *Geo
@@ -71,6 +73,8 @@ based on the geohash coordinates.
 
 NOTE: By default, the *Change precision on map zoom* box is checked. Uncheck the box to disable this behavior.
 
+Enter a string in the *Custom Label* field to change the display label.
+
 You can click the *Advanced* link to display more customization options for your metrics or bucket aggregation:
 
 *Exclude Pattern*:: Specify a pattern in this field to exclude from the results.

@@ -26,6 +26,8 @@ values field. Click *+Add* to add a values field.
 
 You can add an aggregation by clicking the *+ Add Aggregation* button.
 
+Enter a string in the *Custom Label* field to change the display label.
+
 The _buckets_ aggregations determine what information is being retrieved from your data set.
 
 Before you choose a buckets aggregation, specify if you are splitting slices within a single chart or splitting into
@@ -36,6 +38,8 @@ include::x-axis-aggs.asciidoc[]
 
 include::color-picker.asciidoc[]
 
+Enter a string in the *Custom Label* field to change the display label.
+
 You can click the *Advanced* link to display more customization options for your metrics or bucket aggregation:
 
 *Exclude Pattern*:: Specify a pattern in this field to exclude from the results.

@@ -28,8 +28,8 @@ remove a range.
 or bottom _n_ elements of a given field to display, ordered by count or a custom metric.
 *Filters*:: You can specify a set of {ref}/search-aggregations-bucket-filters-aggregation.html[_filters_] for the data.
 You can specify a filter as a query string or in JSON format, just as in the Discover search bar. Click *Add Filter* to
-add another filter. Click the images:labelbutton.png[] *label* button to open the label field, where you can type in a
-name to display on the visualization.
+add another filter. Click the image:images/labelbutton.png[Label button icon] *label* button to open the label field, where
+you can type in a name to display on the visualization.
 *Significant Terms*:: Displays the results of the experimental
 {ref}/search-aggregations-bucket-significantterms-aggregation.html[_significant terms_] aggregation.

@@ -39,3 +39,5 @@ from the list of types.
 
 When multiple aggregations are defined on a chart's axis, you can use the up or down arrows to the right of the
 aggregation's type to change the aggregation's priority.
+
+Enter a string in the *Custom Label* field to change the display label.

@@ -22,3 +22,5 @@ from the drop-down, then specify one or more percentile rank values in the *Valu
 values field. Click *+Add* to add a values field.
 
 You can add an aggregation by clicking the *+ Add Aggregation* button.
+
+Enter a string in the *Custom Label* field to change the display label.

package.json (18 changes)

@@ -49,7 +49,7 @@
     "test:coverage": "grunt test:coverage",
     "build": "grunt build",
     "build:ospackages": "grunt build --os-packages",
-    "start": "./bin/kibana --dev",
+    "start": "sh ./bin/kibana --dev",
     "precommit": "grunt precommit",
     "karma": "karma start",
     "elasticsearch": "grunt esvm:dev:keepalive",
@@ -81,13 +81,13 @@
     "ansicolors": "0.3.2",
     "autoprefixer": "5.1.1",
     "autoprefixer-loader": "2.0.0",
-    "babel": "5.8.23",
-    "babel-core": "5.8.23",
+    "babel": "5.8.38",
+    "babel-core": "5.8.38",
     "babel-loader": "5.3.2",
-    "babel-runtime": "5.8.20",
+    "babel-runtime": "5.8.38",
     "bluebird": "2.9.34",
     "boom": "2.8.0",
-    "bootstrap": "3.3.5",
+    "bootstrap": "3.3.6",
     "brace": "0.5.1",
     "bunyan": "1.7.1",
     "clipboard": "1.5.5",
@@ -135,6 +135,7 @@
     "style-loader": "0.12.3",
     "tar": "2.2.0",
     "url-loader": "0.5.6",
+    "validate-npm-package-name": "2.2.2",
     "webpack": "1.12.1",
     "webpack-directory-name-as-main": "1.0.0",
     "whatwg-fetch": "0.9.0",
@@ -156,7 +157,7 @@
     "grunt-cli": "0.1.13",
     "grunt-contrib-clean": "0.6.0",
     "grunt-contrib-copy": "0.8.1",
-    "grunt-esvm": "2.1.1",
+    "grunt-esvm": "3.0.4",
     "grunt-karma": "0.12.0",
     "grunt-run": "0.5.0",
     "grunt-s3": "0.2.0-alpha.3",
@@ -170,16 +171,15 @@
     "karma-chrome-launcher": "0.2.0",
     "karma-coverage": "0.5.1",
     "karma-firefox-launcher": "0.1.6",
     "karma-growl-reporter": "0.1.1",
     "karma-ie-launcher": "0.2.0",
     "karma-mocha": "0.2.0",
     "karma-safari-launcher": "0.1.1",
     "libesvm": "3.3.0",
     "license-checker": "3.1.0",
-    "load-grunt-config": "0.7.2",
+    "load-grunt-config": "0.19.1",
     "makelogs": "3.0.0-beta3",
     "marked-text-renderer": "0.1.0",
     "mocha": "2.3.0",
     "ncp": "2.0.0",
     "nock": "2.10.0",
     "npm": "2.11.0",
     "portscanner": "1.0.0",

@@ -1,10 +1,10 @@
 import _ from 'lodash';
-import pkg from '../utils/packageJson';
-import Command from './Command';
+import pkg from '../utils/package_json';
+import Command from './command';
+import serveCommand from './serve/serve';
 
-let argv = process.env.kbnWorkerArgv ? JSON.parse(process.env.kbnWorkerArgv) : process.argv.slice();
-let program = new Command('bin/kibana');
+const argv = process.env.kbnWorkerArgv ? JSON.parse(process.env.kbnWorkerArgv) : process.argv.slice();
+const program = new Command('bin/kibana');
 
 program
   .version(pkg.version)
@@ -14,15 +14,14 @@ program
 );
 
 // attach commands
-require('./serve/serve')(program);
-require('./plugin/plugin')(program);
+serveCommand(program);
 
 program
   .command('help <command>')
   .description('Get the help for a specific command')
   .action(function (cmdName) {
-    var cmd = _.find(program.commands, { _name: cmdName });
-    if (!cmd) return this.error(`unknown command ${cmdName}`);
+    const cmd = _.find(program.commands, { _name: cmdName });
+    if (!cmd) return program.error(`unknown command ${cmdName}`);
     cmd.help();
   });
 
@@ -33,7 +32,7 @@ program
 });
 
 // check for no command name
-var subCommand = argv[2] && !String(argv[2][0]).match(/^-|^\.|\//);
+const subCommand = argv[2] && !String(argv[2][0]).match(/^-|^\.|\//);
 
 if (!subCommand) {
   if (_.intersection(argv.slice(2), ['-h', '--help']).length) {
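
The `help <command>` action above resolves a subcommand by scanning commander's registered command list; isolated, the lookup is just this (a sketch; `_name` is commander's internal property as used in the diff):

```js
const _ = require('lodash');

// Return the registered subcommand whose name matches, or undefined.
function findSubcommand(program, cmdName) {
  return _.find(program.commands, { _name: cmdName });
}
```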

@@ -99,7 +99,7 @@ describe('CLI cluster manager', function () {
     it(`is bound to fork's message event`, async function () {
       const worker = setup();
       await worker.start();
-      sinon.assert.calledWith(worker.fork.on, 'message', worker.parseIncomingMessage);
+      sinon.assert.calledWith(worker.fork.on, 'message');
     });
   });
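
The loosened assertion works because `sinon.assert.calledWith` only checks the arguments you pass and ignores any extras. In isolation:

```js
const sinon = require('sinon');

const on = sinon.spy();
on('message', function someHandler() {});

// Passes: 'message' matches the first argument; the handler that was
// actually bound is no longer part of the expectation.
sinon.assert.calledWith(on, 'message');
```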

@@ -4,7 +4,7 @@ const { format: formatUrl } = require('url');
 import Hapi from 'hapi';
 const { debounce, compact, get, invoke, bindAll, once, sample, uniq } = require('lodash');
 
-import Log from '../Log';
+import Log from '../log';
 import Worker from './worker';
 import BasePathProxy from './base_path_proxy';
 
@@ -83,7 +83,7 @@ module.exports = class ClusterManager {
 
   setupWatching(extraPaths) {
     const chokidar = require('chokidar');
-    const fromRoot = require('../../utils/fromRoot');
+    const fromRoot = require('../../utils/from_root');
 
     const watchPaths = uniq(
       [
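
`setupWatching` builds on chokidar; reduced to a sketch (the paths and options here are placeholders, not Kibana's actual watch list):

```js
const chokidar = require('chokidar');

const watcher = chokidar.watch(['src', 'installedPlugins'], {
  cwd: '/path/to/kibana',                     // hypothetical root
  ignored: /[\\\/](node_modules|\.git)[\\\/]/ // skip dependency and VCS churn
});

watcher.on('change', (file) => {
  console.log('file changed, restarting workers:', file);
});
```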

@@ -1,5 +1,5 @@
 // load the babel options separately so that they can modify the process.env
 // before calling babel/register
-const babelOptions = require('../optimize/babelOptions').node;
+const babelOptions = require('../optimize/babel_options').node;
 require('babel/register')(babelOptions);
 require('./cli');

@@ -1,31 +0,0 @@
import expect from 'expect.js';
import fileType, { ZIP, TAR } from '../file_type';

describe('kibana cli', function () {
  describe('file_type', function () {
    it('returns ZIP for .zip filename', function () {
      const type = fileType('wat.zip');
      expect(type).to.equal(ZIP);
    });
    it('returns TAR for .tar.gz filename', function () {
      const type = fileType('wat.tar.gz');
      expect(type).to.equal(TAR);
    });
    it('returns TAR for .tgz filename', function () {
      const type = fileType('wat.tgz');
      expect(type).to.equal(TAR);
    });
    it('returns undefined for unknown file type', function () {
      const type = fileType('wat.unknown');
      expect(type).to.equal(undefined);
    });
    it('accepts paths', function () {
      const type = fileType('/some/path/to/wat.zip');
      expect(type).to.equal(ZIP);
    });
    it('accepts urls', function () {
      const type = fileType('http://example.com/wat.zip');
      expect(type).to.equal(ZIP);
    });
  });
});
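
The deleted tests pin down `fileType`'s contract exactly; an implementation consistent with them (a sketch, not necessarily the original source; the constant values mirror the `archiveType` strings asserted in the downloader tests below) fits in a few lines:

```js
export const ZIP = '.zip';
export const TAR = '.tar.gz';

export default function fileType(filename) {
  // Match on the suffix only, so bare names, paths, and URLs all work.
  if (/\.zip$/i.test(filename)) return ZIP;
  if (/\.tar\.gz$/i.test(filename) || /\.tgz$/i.test(filename)) return TAR;
  // Unknown extensions yield undefined.
}
```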

@@ -1,335 +0,0 @@
import expect from 'expect.js';
import sinon from 'sinon';
import nock from 'nock';
import glob from 'glob-all';
import rimraf from 'rimraf';
import mkdirp from 'mkdirp';
import pluginLogger from '../plugin_logger';
import pluginDownloader from '../plugin_downloader';
import { join } from 'path';

describe('kibana cli', function () {

  describe('plugin downloader', function () {
    const testWorkingPath = join(__dirname, '.test.data');
    const tempArchiveFilePath = join(testWorkingPath, 'archive.part');
    let logger;
    let downloader;

    function expectWorkingPathEmpty() {
      const files = glob.sync('**/*', { cwd: testWorkingPath });
      expect(files).to.eql([]);
    }

    function expectWorkingPathNotEmpty() {
      const files = glob.sync('**/*', { cwd: testWorkingPath });
      const expected = [
        'archive.part'
      ];

      expect(files.sort()).to.eql(expected.sort());
    }

    function shouldReject() {
      throw new Error('expected the promise to reject');
    }

    beforeEach(function () {
      logger = pluginLogger(false);
      sinon.stub(logger, 'log');
      sinon.stub(logger, 'error');
      rimraf.sync(testWorkingPath);
      mkdirp.sync(testWorkingPath);
    });

    afterEach(function () {
      logger.log.restore();
      logger.error.restore();
      rimraf.sync(testWorkingPath);
    });

    describe('_downloadSingle', function () {

      beforeEach(function () {
        const settings = {
          urls: [],
          workingPath: testWorkingPath,
          tempArchiveFile: tempArchiveFilePath,
          timeout: 0
        };
        downloader = pluginDownloader(settings, logger);
      });

      describe('http downloader', function () {

        it('should download an unsupported file type, but return undefined for archiveType', function () {
          const filePath = join(__dirname, 'replies/banana.jpg');
          const couchdb = nock('http://www.files.com')
            .defaultReplyHeaders({
              'content-length': '10',
              'content-type': 'image/jpeg'
            })
            .get('/banana.jpg')
            .replyWithFile(200, filePath);

          const sourceUrl = 'http://www.files.com/banana.jpg';
          return downloader._downloadSingle(sourceUrl)
          .then(function (data) {
            expect(data.archiveType).to.be(undefined);
            expectWorkingPathNotEmpty();
          });
        });

        it('should throw an ENOTFOUND error for an http url that returns 404', function () {
          const couchdb = nock('http://www.files.com')
            .get('/plugin.tar.gz')
            .reply(404);

          const sourceUrl = 'http://www.files.com/plugin.tar.gz';

          return downloader._downloadSingle(sourceUrl)
          .then(shouldReject, function (err) {
            expect(err.message).to.match(/ENOTFOUND/);
            expectWorkingPathEmpty();
          });
        });

        it('should throw an ENOTFOUND error for an invalid url', function () {
          const sourceUrl = 'i am an invalid url';

          return downloader._downloadSingle(sourceUrl)
          .then(shouldReject, function (err) {
            expect(err.message).to.match(/ENOTFOUND/);
            expectWorkingPathEmpty();
          });
        });

        it('should download a tarball from a valid http url', function () {
          const filePath = join(__dirname, 'replies/test_plugin_master.tar.gz');

          const couchdb = nock('http://www.files.com')
            .defaultReplyHeaders({
              'content-length': '10',
              'content-type': 'application/x-gzip'
            })
            .get('/plugin.tar.gz')
            .replyWithFile(200, filePath);

          const sourceUrl = 'http://www.files.com/plugin.tar.gz';

          return downloader._downloadSingle(sourceUrl)
          .then(function (data) {
            expect(data.archiveType).to.be('.tar.gz');
            expectWorkingPathNotEmpty();
          });
        });

        it('should consider .tgz files as archive type .tar.gz', function () {
          const filePath = join(__dirname, 'replies/test_plugin_master.tar.gz');

          const couchdb = nock('http://www.files.com')
            .defaultReplyHeaders({
              'content-length': '10'
            })
            .get('/plugin.tgz')
            .replyWithFile(200, filePath);

          const sourceUrl = 'http://www.files.com/plugin.tgz';

          return downloader._downloadSingle(sourceUrl)
          .then(function (data) {
            expect(data.archiveType).to.be('.tar.gz');
            expectWorkingPathNotEmpty();
          });
        });

        it('should download a zip from a valid http url', function () {
          const filePath = join(__dirname, 'replies/test_plugin_master.zip');

          const couchdb = nock('http://www.files.com')
            .defaultReplyHeaders({
              'content-length': '341965',
              'content-type': 'application/zip'
            })
            .get('/plugin.zip')
            .replyWithFile(200, filePath);

          const sourceUrl = 'http://www.files.com/plugin.zip';

          return downloader._downloadSingle(sourceUrl)
          .then(function (data) {
            expect(data.archiveType).to.be('.zip');
            expectWorkingPathNotEmpty();
          });
        });

      });

      describe('local file downloader', function () {

        it('should copy an unsupported file type, but return undefined for archiveType', function () {
          const filePath = join(__dirname, 'replies/banana.jpg');
          const sourceUrl = 'file://' + filePath.replace(/\\/g, '/');

          const couchdb = nock('http://www.files.com')
            .defaultReplyHeaders({
              'content-length': '10',
              'content-type': 'image/jpeg'
            })
            .get('/banana.jpg')
            .replyWithFile(200, filePath);

          return downloader._downloadSingle(sourceUrl)
          .then(function (data) {
            expect(data.archiveType).to.be(undefined);
            expectWorkingPathNotEmpty();
          });
        });

        it('should throw an ENOTFOUND error for an invalid local file', function () {
          const filePath = join(__dirname, 'replies/i-am-not-there.tar.gz');
          const sourceUrl = 'file://' + filePath.replace(/\\/g, '/');

          return downloader._downloadSingle(sourceUrl)
          .then(shouldReject, function (err) {
            expect(err.message).to.match(/ENOTFOUND/);
            expectWorkingPathEmpty();
          });
        });

        it('should copy a tarball from a valid local file', function () {
          const filePath = join(__dirname, 'replies/test_plugin_master.tar.gz');
          const sourceUrl = 'file://' + filePath.replace(/\\/g, '/');

          return downloader._downloadSingle(sourceUrl)
          .then(function (data) {
            expect(data.archiveType).to.be('.tar.gz');
            expectWorkingPathNotEmpty();
          });
        });

        it('should copy a zip from a valid local file', function () {
          const filePath = join(__dirname, 'replies/test_plugin_master.zip');
          const sourceUrl = 'file://' + filePath.replace(/\\/g, '/');

          return downloader._downloadSingle(sourceUrl)
          .then(function (data) {
            expect(data.archiveType).to.be('.zip');
            expectWorkingPathNotEmpty();
          });
        });

      });

    });

    describe('download', function () {
      it('should loop through bad urls until it finds a good one.', function () {
        const filePath = join(__dirname, 'replies/test_plugin_master.tar.gz');
        const settings = {
          urls: [
            'http://www.files.com/badfile1.tar.gz',
            'http://www.files.com/badfile2.tar.gz',
            'I am a bad uri',
            'http://www.files.com/goodfile.tar.gz'
          ],
          workingPath: testWorkingPath,
          tempArchiveFile: tempArchiveFilePath,
          timeout: 0
        };
        downloader = pluginDownloader(settings, logger);

        const couchdb = nock('http://www.files.com')
          .defaultReplyHeaders({
            'content-length': '10'
          })
          .get('/badfile1.tar.gz')
          .reply(404)
          .get('/badfile2.tar.gz')
          .reply(404)
          .get('/goodfile.tar.gz')
          .replyWithFile(200, filePath);

        return downloader.download(settings, logger)
        .then(function (data) {
          expect(logger.log.getCall(0).args[0]).to.match(/badfile1.tar.gz/);
          expect(logger.log.getCall(1).args[0]).to.match(/badfile2.tar.gz/);
          expect(logger.log.getCall(2).args[0]).to.match(/I am a bad uri/);
          expect(logger.log.getCall(3).args[0]).to.match(/goodfile.tar.gz/);
          expectWorkingPathNotEmpty();
        });
      });

      it('should stop looping through urls when it finds a good one.', function () {
        const filePath = join(__dirname, 'replies/test_plugin_master.tar.gz');
        const settings = {
          urls: [
            'http://www.files.com/badfile1.tar.gz',
            'http://www.files.com/badfile2.tar.gz',
            'http://www.files.com/goodfile.tar.gz',
            'http://www.files.com/badfile3.tar.gz'
          ],
          workingPath: testWorkingPath,
          tempArchiveFile: tempArchiveFilePath,
          timeout: 0
        };
        downloader = pluginDownloader(settings, logger);

        const couchdb = nock('http://www.files.com')
          .defaultReplyHeaders({
            'content-length': '10'
          })
          .get('/badfile1.tar.gz')
          .reply(404)
          .get('/badfile2.tar.gz')
          .reply(404)
          .get('/goodfile.tar.gz')
          .replyWithFile(200, filePath)
          .get('/badfile3.tar.gz')
          .reply(404);

        return downloader.download(settings, logger)
        .then(function (data) {
          for (let i = 0; i < logger.log.callCount; i++) {
            expect(logger.log.getCall(i).args[0]).to.not.match(/badfile3.tar.gz/);
          }
          expectWorkingPathNotEmpty();
        });
      });

      it('should throw an error when it doesn\'t find a good url.', function () {
        const settings = {
          urls: [
            'http://www.files.com/badfile1.tar.gz',
            'http://www.files.com/badfile2.tar.gz',
            'http://www.files.com/badfile3.tar.gz'
          ],
          workingPath: testWorkingPath,
          tempArchiveFile: tempArchiveFilePath,
          timeout: 0
        };
        downloader = pluginDownloader(settings, logger);

        const couchdb = nock('http://www.files.com')
          .defaultReplyHeaders({
            'content-length': '10'
          })
          .get('/badfile1.tar.gz')
          .reply(404)
          .get('/badfile2.tar.gz')
          .reply(404)
          .get('/badfile3.tar.gz')
          .reply(404);

        return downloader.download(settings, logger)
        .then(shouldReject, function (err) {
          expect(err.message).to.match(/no valid url specified/i);
          expectWorkingPathEmpty();
        });
      });

    });

  });

});
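
Every HTTP case above follows the same nock pattern: declare the expected requests up front, run the downloader against the fake host, then assert. Reduced to its core (host, paths, and body are placeholders):

```js
const nock = require('nock');

const scope = nock('http://www.files.com')
  .get('/badfile1.tar.gz')
  .reply(404)                        // first url fails...
  .get('/goodfile.tar.gz')
  .reply(200, 'fake-archive-bytes'); // ...the next succeeds

// ... exercise the code under test, then optionally verify that every
// declared request was actually made:
scope.done();
```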

@@ -1,131 +0,0 @@
import expect from 'expect.js';
import sinon from 'sinon';
import glob from 'glob-all';
import rimraf from 'rimraf';
import mkdirp from 'mkdirp';

import pluginLogger from '../plugin_logger';
import extract from '../plugin_extractor';
import pluginDownloader from '../plugin_downloader';
import { join } from 'path';

describe('kibana cli', function () {

  describe('plugin extractor', function () {

    const testWorkingPath = join(__dirname, '.test.data');
    const tempArchiveFilePath = join(testWorkingPath, 'archive.part');
    let logger;
    let downloader;

    const settings = {
      workingPath: testWorkingPath,
      tempArchiveFile: tempArchiveFilePath
    };

    function shouldReject() {
      throw new Error('expected the promise to reject');
    }

    beforeEach(function () {
      logger = pluginLogger(false);
      sinon.stub(logger, 'log');
      sinon.stub(logger, 'error');
      rimraf.sync(testWorkingPath);
      mkdirp.sync(testWorkingPath);
      downloader = pluginDownloader(settings, logger);
    });

    afterEach(function () {
      logger.log.restore();
      logger.error.restore();
      rimraf.sync(testWorkingPath);
    });

    function copyReplyFile(filename) {
      const filePath = join(__dirname, 'replies', filename);
      const sourceUrl = 'file://' + filePath.replace(/\\/g, '/');

      return downloader._downloadSingle(sourceUrl);
    }

    describe('extractArchive', function () {

      it('successfully extracts a valid tarball', function () {
        return copyReplyFile('test_plugin_master.tar.gz')
        .then((data) => {
          return extract(settings, logger, data.archiveType);
        })
        .then(() => {
          const files = glob.sync('**/*', { cwd: testWorkingPath });
          const expected = [
            'archive.part',
            'README.md',
            'index.js',
            'package.json',
            'public',
            'public/app.js'
          ];
          expect(files.sort()).to.eql(expected.sort());
        });
      });

      it('successfully extracts a valid zip', function () {
        return copyReplyFile('test_plugin_master.zip')
        .then((data) => {
          return extract(settings, logger, data.archiveType);
        })
        .then(() => {
          const files = glob.sync('**/*', { cwd: testWorkingPath });
          const expected = [
            'archive.part',
            'README.md',
            'index.js',
            'package.json',
            'public',
            'public/app.js',
            'extra file only in zip.txt'
          ];
          expect(files.sort()).to.eql(expected.sort());
        });
      });

      it('throws an error when extracting a corrupt zip', function () {
        return copyReplyFile('corrupt.zip')
        .then((data) => {
          return extract(settings, logger, data.archiveType);
        })
        .then(shouldReject, (err) => {
          expect(err.message).to.match(/error extracting/i);
        });
      });

      it('throws an error when extracting a corrupt tarball', function () {
        return copyReplyFile('corrupt.tar.gz')
        .then((data) => {
          return extract(settings, logger, data.archiveType);
        })
        .then(shouldReject, (err) => {
          expect(err.message).to.match(/error extracting/i);
        });
      });

      it('throws an error when passed an unknown archive type', function () {
        return copyReplyFile('banana.jpg')
        .then((data) => {
          return extract(settings, logger, data.archiveType);
        })
        .then(shouldReject, (err) => {
          expect(err.message).to.match(/unsupported archive format/i);
        });
      });

    });

  });

});

@@ -1,55 +0,0 @@
import expect from 'expect.js';
import sinon from 'sinon';
import rimraf from 'rimraf';
import pluginLogger from '../plugin_logger';
import pluginInstaller from '../plugin_installer';
import { mkdirSync } from 'fs';
import { join } from 'path';

describe('kibana cli', function () {

  describe('plugin installer', function () {

    describe('pluginInstaller', function () {
      let logger;
      let testWorkingPath;
      let processExitStub;

      beforeEach(function () {
        processExitStub = undefined;
        logger = pluginLogger(false);
        testWorkingPath = join(__dirname, '.test.data');
        rimraf.sync(testWorkingPath);
        sinon.stub(logger, 'log');
        sinon.stub(logger, 'error');
      });

      afterEach(function () {
        if (processExitStub) processExitStub.restore();
        logger.log.restore();
        logger.error.restore();
        rimraf.sync(testWorkingPath);
      });

      it('should throw an error if the workingPath already exists.', function () {
        processExitStub = sinon.stub(process, 'exit');
        mkdirSync(testWorkingPath);

        const settings = {
          pluginPath: testWorkingPath
        };

        const errorStub = sinon.stub();
        return pluginInstaller.install(settings, logger)
        .catch(errorStub)
        .then(function (data) {
          expect(logger.error.firstCall.args[0]).to.match(/already exists/);
          expect(process.exit.called).to.be(true);
        });
      });

    });

  });

});
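
The install-failure test above works because stubbing `process.exit` turns a would-be process kill into a recorded call. The pattern in isolation:

```js
const sinon = require('sinon');

const exitStub = sinon.stub(process, 'exit'); // no longer actually exits

// ... run code that is expected to call process.exit(...) on failure ...

console.log(exitStub.called); // true if the code tried to exit
exitStub.restore();           // always restore so the test runner can exit
```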

(4 binary files not shown)

@@ -1,382 +0,0 @@
import path from 'path';
import expect from 'expect.js';

import fromRoot from '../../../utils/fromRoot';
import settingParser from '../setting_parser';

describe('kibana cli', function () {

  describe('plugin installer', function () {

    describe('command line option parsing', function () {

      describe('parseMilliseconds function', function () {

        var parser = settingParser();

        it('should return 0 for an empty string', function () {
          var value = '';

          var result = parser.parseMilliseconds(value);

          expect(result).to.be(0);
        });

        it('should return 0 for a number with an invalid unit of measure', function () {
          var result = parser.parseMilliseconds('1gigablasts');
          expect(result).to.be(0);
        });

        it('should assume a number with no unit of measure is specified as milliseconds', function () {
          var result = parser.parseMilliseconds(1);
          expect(result).to.be(1);

          result = parser.parseMilliseconds('1');
          expect(result).to.be(1);
        });

        it('should interpret a number with "s" as the unit of measure as seconds', function () {
          var result = parser.parseMilliseconds('5s');
          expect(result).to.be(5 * 1000);
        });

        it('should interpret a number with "second" as the unit of measure as seconds', function () {
          var result = parser.parseMilliseconds('5second');
          expect(result).to.be(5 * 1000);
        });

        it('should interpret a number with "seconds" as the unit of measure as seconds', function () {
          var result = parser.parseMilliseconds('5seconds');
          expect(result).to.be(5 * 1000);
        });

        it('should interpret a number with "m" as the unit of measure as minutes', function () {
          var result = parser.parseMilliseconds('9m');
          expect(result).to.be(9 * 1000 * 60);
        });

        it('should interpret a number with "minute" as the unit of measure as minutes', function () {
          var result = parser.parseMilliseconds('9minute');
          expect(result).to.be(9 * 1000 * 60);
        });

        it('should interpret a number with "minutes" as the unit of measure as minutes', function () {
          var result = parser.parseMilliseconds('9minutes');
          expect(result).to.be(9 * 1000 * 60);
        });

      });

      describe('parse function', function () {

        var options;
        var parser;
        beforeEach(function () {
          options = { install: 'dummy/dummy', pluginDir: fromRoot('installedPlugins') };
        });

        it('should require the user to specify either install, remove, or list', function () {
          options.install = null;
          parser = settingParser(options);

          expect(parser.parse).withArgs().to.throwError(/Please specify either --install, --remove, or --list./);
        });

        it('should not allow the user to specify both install and remove', function () {
          options.remove = 'package';
          options.install = 'org/package/version';
          parser = settingParser(options);

          expect(parser.parse).withArgs().to.throwError(/Please specify either --install, --remove, or --list./);
        });

        it('should not allow the user to specify both install and list', function () {
          options.list = true;
          options.install = 'org/package/version';
          parser = settingParser(options);

          expect(parser.parse).withArgs().to.throwError(/Please specify either --install, --remove, or --list./);
        });

        it('should not allow the user to specify both remove and list', function () {
          options.list = true;
          options.remove = 'package';
          parser = settingParser(options);

          expect(parser.parse).withArgs().to.throwError(/Please specify either --install, --remove, or --list./);
        });

        it('should not allow the user to specify install, remove, and list', function () {
          options.list = true;
          options.install = 'org/package/version';
          options.remove = 'package';
          parser = settingParser(options);

          expect(parser.parse).withArgs().to.throwError(/Please specify either --install, --remove, or --list./);
        });

        describe('quiet option', function () {

          it('should default to false', function () {
            parser = settingParser(options);
            var settings = parser.parse(options);

            expect(settings.quiet).to.be(false);
          });

          it('should set settings.quiet property to true', function () {
            options.parent = { quiet: true };
            parser = settingParser(options);
            var settings = parser.parse(options);

            expect(settings.quiet).to.be(true);
          });

        });

        describe('silent option', function () {

          it('should default to false', function () {
            parser = settingParser(options);
            var settings = parser.parse(options);

            expect(settings).to.have.property('silent', false);
          });

          it('should set settings.silent property to true', function () {
            options.silent = true;
            parser = settingParser(options);
            var settings = parser.parse(options);

            expect(settings).to.have.property('silent', true);
          });

        });

        describe('timeout option', function () {

          it('should default to 0 (milliseconds)', function () {
            parser = settingParser(options);
            var settings = parser.parse(options);

            expect(settings).to.have.property('timeout', 0);
          });

          it('should set settings.timeout property to specified value', function () {
            options.timeout = 1234;
            parser = settingParser(options);
            var settings = parser.parse(options);

            expect(settings).to.have.property('timeout', 1234);
          });

        });

        describe('install option', function () {

          it('should set settings.action property to "install"', function () {
            options.install = 'org/package/version';
            parser = settingParser(options);
            var settings = parser.parse(options);

            expect(settings).to.have.property('action', 'install');
          });

          it('should allow two parts to the install parameter', function () {
            options.install = 'kibana/test-plugin';
            parser = settingParser(options);

            expect(parser.parse).withArgs().to.not.throwError();

            var settings = parser.parse(options);

            expect(settings).to.have.property('organization', 'kibana');
            expect(settings).to.have.property('package', 'test-plugin');
            expect(settings).to.have.property('version', undefined);
          });

          it('should allow three parts to the install parameter', function () {
            options.install = 'kibana/test-plugin/v1.0.1';
            parser = settingParser(options);

            expect(parser.parse).withArgs().to.not.throwError();

            var settings = parser.parse(options);

            expect(settings).to.have.property('organization', 'kibana');
            expect(settings).to.have.property('package', 'test-plugin');
            expect(settings).to.have.property('version', 'v1.0.1');
          });

          it('should not allow one part to the install parameter', function () {
            options.install = 'test-plugin';
            parser = settingParser(options);

            expect(parser.parse).withArgs().to.throwError(/Invalid install option. Please use the format <org>\/<plugin>\/<version>./);
          });

          it('should not allow more than three parts to the install parameter', function () {
            options.install = 'kibana/test-plugin/v1.0.1/dummy';
            parser = settingParser(options);

            expect(parser.parse).withArgs().to.throwError(/Invalid install option. Please use the format <org>\/<plugin>\/<version>./);
          });

          it('should populate the urls collection properly when no version specified', function () {
            options.install = 'kibana/test-plugin';
            parser = settingParser(options);

            var settings = parser.parse();

            expect(settings.urls).to.have.property('length', 1);
            expect(settings.urls).to.contain('https://download.elastic.co/kibana/test-plugin/test-plugin-latest.tar.gz');
          });

          it('should populate the urls collection properly when a version is specified', function () {
            options.install = 'kibana/test-plugin/v1.1.1';
            parser = settingParser(options);

            var settings = parser.parse();

            expect(settings.urls).to.have.property('length', 1);
            expect(settings.urls).to.contain('https://download.elastic.co/kibana/test-plugin/test-plugin-v1.1.1.tar.gz');
          });

          it('should populate the pluginPath', function () {
            options.install = 'kibana/test-plugin';
            parser = settingParser(options);

            var settings = parser.parse();
            var expected = fromRoot('installedPlugins/test-plugin');

            expect(settings).to.have.property('pluginPath', expected);
          });

          it('should populate the workingPath', function () {
            options.install = 'kibana/test-plugin';
            parser = settingParser(options);

            var settings = parser.parse();
            var expected = fromRoot('installedPlugins/.plugin.installing');

            expect(settings).to.have.property('workingPath', expected);
          });

          it('should populate the tempArchiveFile', function () {
            options.install = 'kibana/test-plugin';
            parser = settingParser(options);

            var settings = parser.parse();
            var expected = fromRoot('installedPlugins/.plugin.installing/archive.part');

            expect(settings).to.have.property('tempArchiveFile', expected);
          });
describe('with url option', function () {
|
||||
|
||||
it('should allow one part to the install parameter', function () {
|
||||
options.install = 'test-plugin';
|
||||
options.url = 'http://www.google.com/plugin.tar.gz';
|
||||
parser = settingParser(options);
|
||||
|
||||
expect(parser.parse).withArgs().to.not.throwError();
|
||||
|
||||
var settings = parser.parse();
|
||||
|
||||
expect(settings).to.have.property('package', 'test-plugin');
|
||||
});
|
||||
|
||||
it('should not allow more than one part to the install parameter', function () {
|
||||
options.url = 'http://www.google.com/plugin.tar.gz';
|
||||
options.install = 'kibana/test-plugin';
|
||||
parser = settingParser(options);
|
||||
|
||||
expect(parser.parse).withArgs()
|
||||
.to.throwError(/Invalid install option. When providing a url, please use the format <plugin>./);
|
||||
});
|
||||
|
||||
it('should result in only the specified url in urls collection', function () {
|
||||
var url = 'http://www.google.com/plugin.tar.gz';
|
||||
options.install = 'test-plugin';
|
||||
options.url = url;
|
||||
parser = settingParser(options);
|
||||
|
||||
var settings = parser.parse();
|
||||
|
||||
expect(settings).to.have.property('urls');
|
||||
expect(settings.urls).to.be.an('array');
|
||||
expect(settings.urls).to.have.property('length', 1);
|
||||
expect(settings.urls).to.contain(url);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
describe('remove option', function () {
|
||||
|
||||
it('should set settings.action property to "remove"', function () {
|
||||
delete options.install;
|
||||
options.remove = 'package';
|
||||
parser = settingParser(options);
|
||||
|
||||
var settings = parser.parse();
|
||||
|
||||
expect(settings).to.have.property('action', 'remove');
|
||||
});
|
||||
|
||||
it('should allow one part to the remove parameter', function () {
|
||||
delete options.install;
|
||||
options.remove = 'test-plugin';
|
||||
parser = settingParser(options);
|
||||
|
||||
var settings = parser.parse();
|
||||
|
||||
expect(settings).to.have.property('package', 'test-plugin');
|
||||
});
|
||||
|
||||
it('should not allow more than one part to the remove parameter', function () {
|
||||
delete options.install;
|
||||
options.remove = 'kibana/test-plugin';
|
||||
parser = settingParser(options);
|
||||
|
||||
expect(parser.parse).withArgs()
|
||||
.to.throwError(/Invalid remove option. Please use the format <plugin>./);
|
||||
});
|
||||
|
||||
it('should populate the pluginPath', function () {
|
||||
delete options.install;
|
||||
options.remove = 'test-plugin';
|
||||
parser = settingParser(options);
|
||||
|
||||
var settings = parser.parse();
|
||||
var expected = fromRoot('installedPlugins/test-plugin');
|
||||
|
||||
expect(settings).to.have.property('pluginPath', expected);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
describe('list option', function () {
|
||||
|
||||
it('should set settings.action property to "list"', function () {
|
||||
delete options.install;
|
||||
delete options.remove;
|
||||
options.list = true;
|
||||
parser = settingParser(options);
|
||||
|
||||
var settings = parser.parse();
|
||||
|
||||
expect(settings).to.have.property('action', 'list');
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
});
|
|
@@ -1,34 +0,0 @@
import zlib from 'zlib';
import fs from 'fs';
import tar from 'tar';

async function extractArchive(settings) {
  await new Promise((resolve, reject) => {
    const gunzip = zlib.createGunzip();
    const tarExtract = new tar.Extract({ path: settings.workingPath, strip: 1 });
    const readStream = fs.createReadStream(settings.tempArchiveFile);

    readStream.on('error', reject);
    gunzip.on('error', reject);
    tarExtract.on('error', reject);

    readStream
      .pipe(gunzip)
      .pipe(tarExtract);

    tarExtract.on('finish', resolve);
  });
}

export default async function extractTarball(settings, logger) {
  try {
    logger.log('Extracting plugin archive');

    await extractArchive(settings);

    logger.log('Extraction complete');
  } catch (err) {
    logger.error(err);
    throw new Error('Error extracting plugin archive');
  }
};
@@ -1,32 +0,0 @@
import DecompressZip from '@bigfunger/decompress-zip';

async function extractArchive(settings) {
  await new Promise((resolve, reject) => {
    const unzipper = new DecompressZip(settings.tempArchiveFile);

    unzipper.on('error', reject);

    unzipper.extract({
      path: settings.workingPath,
      strip: 1,
      filter(file) {
        return file.type !== 'SymbolicLink';
      }
    });

    unzipper.on('extract', resolve);
  });
}

export default async function extractZip(settings, logger) {
  try {
    logger.log('Extracting plugin archive');

    await extractArchive(settings);

    logger.log('Extraction complete');
  } catch (err) {
    logger.error(err);
    throw new Error('Error extracting plugin archive');
  }
};
@@ -1,14 +0,0 @@
export const TAR = '.tar.gz';
export const ZIP = '.zip';

export default function fileType(filename) {
  if (/\.zip$/i.test(filename)) {
    return ZIP;
  }
  if (/\.tar\.gz$/i.test(filename)) {
    return TAR;
  }
  if (/\.tgz$/i.test(filename)) {
    return TAR;
  }
}
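A quick usage sketch of fileType (an editor's illustration, not part of the commit; the sample filenames are invented):

// matching is case-insensitive, and unknown extensions fall through to undefined
fileType('sample-plugin.tar.gz'); // returns TAR ('.tar.gz')
fileType('sample-plugin.tgz');    // returns TAR ('.tar.gz')
fileType('SAMPLE-PLUGIN.ZIP');    // returns ZIP ('.zip')
fileType('sample-plugin.rar');    // returns undefined; callers must treat this as unsupported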
@@ -1,75 +0,0 @@
import fromRoot from '../../utils/fromRoot';
import settingParser from './setting_parser';
import installer from './plugin_installer';
import remover from './plugin_remover';
import lister from './plugin_lister';
import pluginLogger from './plugin_logger';

export default function pluginCli(program) {
  function processCommand(command, options) {
    let settings;
    try {
      settings = settingParser(command).parse();
    } catch (ex) {
      //The logger has not yet been initialized.
      console.error(ex.message);
      process.exit(64); // eslint-disable-line no-process-exit
    }

    const logger = pluginLogger(settings);

    switch (settings.action) {
      case 'install':
        installer.install(settings, logger);
        break;
      case 'remove':
        remover.remove(settings, logger);
        break;
      case 'list':
        lister.list(settings, logger);
        break;
    }
  }

  program
    .command('plugin')
    .option('-i, --install <org>/<plugin>/<version>', 'The plugin to install')
    .option('-r, --remove <plugin>', 'The plugin to remove')
    .option('-l, --list', 'List installed plugins')
    .option('-q, --quiet', 'Disable all process messaging except errors')
    .option('-s, --silent', 'Disable all process messaging')
    .option('-u, --url <url>', 'Specify download url')
    .option(
      '-c, --config <path>',
      'Path to the config file',
      fromRoot('config/kibana.yml')
    )
    .option(
      '-t, --timeout <duration>',
      'Length of time before failing; 0 for never fail',
      settingParser.parseMilliseconds
    )
    .option(
      '-d, --plugin-dir <path>',
      'The path to the directory where plugins are stored',
      fromRoot('installedPlugins')
    )
    .description(
      'Maintain Plugins',
      `
      Common examples:
        -i username/sample
          attempts to download the latest version from the following url:
          https://download.elastic.co/username/sample/sample-latest.tar.gz

        -i username/sample/v1.1.1
          attempts to download version v1.1.1 from the following url:
          https://download.elastic.co/username/sample/sample-v1.1.1.tar.gz

        -i sample -u http://www.example.com/other_name.tar.gz
          attempts to download from the specified url,
          and installs the plugin found at that url as "sample"
      `
    )
    .action(processCommand);
};
@@ -1,39 +0,0 @@
import rimraf from 'rimraf';
import fs from 'fs';

export default function createPluginCleaner(settings, logger) {
  function cleanPrevious() {
    return new Promise(function (resolve, reject) {
      try {
        fs.statSync(settings.workingPath);

        logger.log('Found previous install attempt. Deleting...');
        try {
          rimraf.sync(settings.workingPath);
        } catch (e) {
          return reject(e);
        }
        return resolve();
      } catch (e) {
        if (e.code !== 'ENOENT') return reject(e);

        return resolve();
      }
    });
  }

  function cleanError() {
    // delete the working directory.
    // At this point we're bailing, so swallow any errors on delete.
    try {
      rimraf.sync(settings.workingPath);
      rimraf.sync(settings.pluginPath);
    }
    catch (e) {} // eslint-disable-line no-empty
  }

  return {
    cleanPrevious: cleanPrevious,
    cleanError: cleanError
  };
};
@@ -1,51 +0,0 @@
import _ from 'lodash';
import downloadHttpFile from './downloaders/http';
import downloadLocalFile from './downloaders/file';
import { parse as urlParse } from 'url';

export default function createPluginDownloader(settings, logger) {
  let archiveType;
  let sourceType;

  //Attempts to download each url in turn until one is successful
  function download() {
    const urls = settings.urls.slice(0);

    function tryNext() {
      const sourceUrl = urls.shift();
      if (!sourceUrl) {
        throw new Error('No valid url specified.');
      }

      logger.log(`Attempting to transfer from ${sourceUrl}`);

      return downloadSingle(sourceUrl)
      .catch((err) => {
        if (err.message === 'ENOTFOUND') {
          return tryNext();
        }
        throw (err);
      });
    }

    return tryNext();
  }

  function downloadSingle(sourceUrl) {
    const urlInfo = urlParse(sourceUrl);
    let downloadPromise;

    if (/^file/.test(urlInfo.protocol)) {
      downloadPromise = downloadLocalFile(logger, urlInfo.path, settings.tempArchiveFile);
    } else {
      downloadPromise = downloadHttpFile(logger, sourceUrl, settings.tempArchiveFile, settings.timeout);
    }

    return downloadPromise;
  }

  return {
    download: download,
    _downloadSingle: downloadSingle
  };
};
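A minimal sketch of how download() walks the url list (the urls and paths here are invented): an ENOTFOUND failure falls through to the next candidate, any other error aborts the chain, and the promise rejects with 'No valid url specified.' once the list is exhausted.

const downloader = createPluginDownloader({
  urls: [
    'http://example.com/not-there.tar.gz', // hypothetical: fails with ENOTFOUND, so the next url is tried
    'file:///tmp/local-plugin.tar.gz'      // hypothetical: handled by the local file downloader
  ],
  tempArchiveFile: '/tmp/.plugin.installing/archive.part',
  timeout: 0
}, logger);

downloader.download(); // resolves after the first successful transfer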
@@ -1,16 +0,0 @@
import zipExtract from './extractors/zip';
import tarGzExtract from './extractors/tar_gz';
import { ZIP, TAR } from './file_type';

export default function extractArchive(settings, logger, archiveType) {
  switch (archiveType) {
    case ZIP:
      return zipExtract(settings, logger);
    case TAR:
      return tarGzExtract(settings, logger);
    default:
      throw new Error('Unsupported archive format.');
  }
};
@@ -1,86 +0,0 @@
import _ from 'lodash';
import fromRoot from '../../utils/fromRoot';
import pluginDownloader from './plugin_downloader';
import pluginCleaner from './plugin_cleaner';
import pluginExtractor from './plugin_extractor';
import KbnServer from '../../server/KbnServer';
import readYamlConfig from '../serve/read_yaml_config';
import Promise from 'bluebird';
import { sync as rimrafSync } from 'rimraf';
import { statSync, renameSync } from 'fs';
const mkdirp = Promise.promisify(require('mkdirp'));

export default {
  install: install
};

function checkForExistingInstall(settings, logger) {
  try {
    statSync(settings.pluginPath);

    logger.error(`Plugin ${settings.package} already exists, please remove before installing a new version`);
    process.exit(70); // eslint-disable-line no-process-exit
  } catch (e) {
    if (e.code !== 'ENOENT') throw e;
  }
}

async function rebuildKibanaCache(settings, logger) {
  logger.log('Optimizing and caching browser bundles...');
  const serverConfig = _.merge(
    readYamlConfig(settings.config),
    {
      env: 'production',
      logging: {
        silent: settings.silent,
        quiet: !settings.silent,
        verbose: false
      },
      optimize: {
        useBundleCache: false
      },
      server: {
        autoListen: false
      },
      plugins: {
        initialize: false,
        scanDirs: [settings.pluginDir, fromRoot('src/plugins')]
      }
    }
  );

  const kbnServer = new KbnServer(serverConfig);
  await kbnServer.ready();
  await kbnServer.close();
}

async function install(settings, logger) {
  logger.log(`Installing ${settings.package}`);

  const cleaner = pluginCleaner(settings, logger);

  try {
    checkForExistingInstall(settings, logger);

    await cleaner.cleanPrevious();

    await mkdirp(settings.workingPath);

    const downloader = pluginDownloader(settings, logger);
    const { archiveType } = await downloader.download();

    await pluginExtractor(settings, logger, archiveType);

    rimrafSync(settings.tempArchiveFile);

    renameSync(settings.workingPath, settings.pluginPath);

    await rebuildKibanaCache(settings, logger);

    logger.log('Plugin installation complete');
  } catch (err) {
    logger.error(`Plugin installation was unsuccessful due to error "${err.message}"`);
    cleaner.cleanError();
    process.exit(70); // eslint-disable-line no-process-exit
  }
}
@@ -1,8 +0,0 @@
import fs from 'fs';

export function list(settings, logger) {
  fs.readdirSync(settings.pluginDir)
  .forEach(function (pluginFile) {
    logger.log(pluginFile);
  });
}
@@ -1,44 +0,0 @@
export default function createPluginLogger(settings) {
  let previousLineEnded = true;
  const silent = !!settings.silent;
  const quiet = !!settings.quiet;

  function log(data, sameLine) {
    if (silent || quiet) return;

    if (!sameLine && !previousLineEnded) {
      process.stdout.write('\n');
    }

    //if data is a stream, pipe it.
    if (data.readable) {
      data.pipe(process.stdout);
      return;
    }

    process.stdout.write(data);
    if (!sameLine) process.stdout.write('\n');
    previousLineEnded = !sameLine;
  }

  function error(data) {
    if (silent) return;

    if (!previousLineEnded) {
      process.stderr.write('\n');
    }

    //if data is a stream, pipe it.
    if (data.readable) {
      data.pipe(process.stderr);
      return;
    }
    process.stderr.write(`${data}\n`);
    previousLineEnded = true;
  }

  return {
    log: log,
    error: error
  };
};
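To illustrate the sameLine bookkeeping (an editor's sketch; the messages are invented): dots written with sameLine = true accumulate on one line, and the next full-line message closes that line first.

const logger = createPluginLogger({ silent: false, quiet: false });

logger.log('Transferring 1000 bytes', true); // no trailing newline: the line stays open
logger.log('.', true);                       // appended to the same line
logger.log('Transfer complete');             // previous line never ended, so a '\n' is written first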
@@ -1,23 +0,0 @@
import fs from 'fs';
import rimraf from 'rimraf';

module.exports = {
  remove: remove
};

function remove(settings, logger) {
  try {
    try {
      fs.statSync(settings.pluginPath);
    } catch (e) {
      logger.log(`Plugin ${settings.package} does not exist`);
      return;
    }

    logger.log(`Removing ${settings.package}...`);
    rimraf.sync(settings.pluginPath);
  } catch (err) {
    logger.error(`Unable to remove plugin "${settings.package}" because of error: "${err.message}"`);
    process.exit(74); // eslint-disable-line no-process-exit
  }
}
@@ -1,38 +0,0 @@
/*
Generates file transfer progress messages
*/
export default function createProgressReporter(logger) {
  let dotCount = 0;
  let runningTotal = 0;
  let totalSize = 0;

  function init(size) {
    totalSize = size;
    let totalDesc = totalSize || 'unknown number of';

    logger.log(`Transferring ${totalDesc} bytes`, true);
  }

  //Should log a dot for every 5% of progress
  function progress(size) {
    if (!totalSize) return;

    runningTotal += size;
    let newDotCount = Math.round(runningTotal / totalSize * 100 / 5);
    if (newDotCount > 20) newDotCount = 20;
    for (let i = 0; i < (newDotCount - dotCount); i++) {
      logger.log('.', true);
    }
    dotCount = newDotCount;
  }

  function complete() {
    logger.log(`Transfer complete`, false);
  }

  return {
    init: init,
    progress: progress,
    complete: complete
  };
};
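A worked example of the dot arithmetic (sizes invented for illustration; assumes the pluginLogger above):

const reporter = createProgressReporter(logger);
reporter.init(1000);     // logs "Transferring 1000 bytes"
reporter.progress(275);  // Math.round(275 / 1000 * 100 / 5) = 6, so six dots are printed
reporter.progress(725);  // runningTotal reaches 1000 -> 20 dots total (the cap), so 14 more
reporter.complete();     // logs "Transfer complete"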
@@ -1,114 +0,0 @@
import expiry from 'expiry-js';
import { intersection } from 'lodash';
import { resolve } from 'path';

export default function createSettingParser(options) {
  function parseMilliseconds(val) {
    let result;

    try {
      let timeVal = expiry(val);
      result = timeVal.asMilliseconds();
    } catch (ex) {
      result = 0;
    }

    return result;
  }

  function generateDownloadUrl(settings) {
    const version = (settings.version) || 'latest';
    const filename = settings.package + '-' + version + '.tar.gz';

    return 'https://download.elastic.co/' + settings.organization + '/' + settings.package + '/' + filename;
  }

  function areMultipleOptionsChosen(options, choices) {
    return intersection(Object.keys(options), choices).length > 1;
  }

  function parse() {
    let parts;
    let settings = {
      timeout: 0,
      silent: false,
      quiet: false,
      urls: []
    };

    if (options.timeout) {
      settings.timeout = options.timeout;
    }

    if (options.parent && options.parent.quiet) {
      settings.quiet = options.parent.quiet;
    }

    if (options.silent) {
      settings.silent = options.silent;
    }

    if (options.url) {
      settings.urls.push(options.url);
    }

    if (options.config) {
      settings.config = options.config;
    }

    if (options.install) {
      settings.action = 'install';
      parts = options.install.split('/');

      if (options.url) {
        if (parts.length !== 1) {
          throw new Error('Invalid install option. When providing a url, please use the format <plugin>.');
        }

        settings.package = parts.shift();
      } else {
        if (parts.length < 2 || parts.length > 3) {
          throw new Error('Invalid install option. Please use the format <org>/<plugin>/<version>.');
        }

        settings.organization = parts.shift();
        settings.package = parts.shift();
        settings.version = parts.shift();

        settings.urls.push(generateDownloadUrl(settings));
      }
    }

    if (options.remove) {
      settings.action = 'remove';
      parts = options.remove.split('/');

      if (parts.length !== 1) {
        throw new Error('Invalid remove option. Please use the format <plugin>.');
      }
      settings.package = parts.shift();
    }

    if (options.list) {
      settings.action = 'list';
    }

    if (!settings.action || areMultipleOptionsChosen(options, [ 'install', 'remove', 'list' ])) {
      throw new Error('Please specify either --install, --remove, or --list.');
    }

    settings.pluginDir = options.pluginDir;
    if (settings.package) {
      settings.pluginPath = resolve(settings.pluginDir, settings.package);
      settings.workingPath = resolve(settings.pluginDir, '.plugin.installing');
      settings.tempArchiveFile = resolve(settings.workingPath, 'archive.part');
    }

    return settings;
  }

  return {
    parse: parse,
    parseMilliseconds: parseMilliseconds
  };
};
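A usage sketch for the setting parser (the pluginDir path is invented): the three-part install form yields both the download url and the filesystem locations the later stages rely on.

const parser = createSettingParser({
  install: 'kibana/test-plugin/v1.0.1',
  pluginDir: '/opt/kibana/installedPlugins' // hypothetical directory
});
const settings = parser.parse();
// settings.urls[0]         -> 'https://download.elastic.co/kibana/test-plugin/test-plugin-v1.0.1.tar.gz'
// settings.pluginPath      -> '/opt/kibana/installedPlugins/test-plugin'
// settings.workingPath     -> '/opt/kibana/installedPlugins/.plugin.installing'
// settings.tempArchiveFile -> '/opt/kibana/installedPlugins/.plugin.installing/archive.part'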
@@ -2,7 +2,7 @@ import _ from 'lodash';
 import fs from 'fs';
 import yaml from 'js-yaml';
 
-import fromRoot from '../../utils/fromRoot';
+import { fromRoot } from '../../utils';
 
 let legacySettingMap = {
   // server
@@ -48,7 +48,14 @@ module.exports = function (path) {
     _.forOwn(val, function (subVal, subKey) {
       apply(config, subVal, key + '.' + subKey);
     });
-  } else {
+  }
+  else if (_.isArray(val)) {
+    config[key] = [];
+    val.forEach((subVal, i) => {
+      apply(config, subVal, key + '.' + i);
+    });
+  }
+  else {
     _.set(config, key, val);
   }
 }
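A minimal sketch of what the new array branch does, assuming apply() is the recursive helper shown in this hunk (the key and values are invented):

const config = {};
apply(config, ['host-a', 'host-b'], 'elasticsearch.hosts');
// recurses as apply(config, 'host-a', 'elasticsearch.hosts.0') and
// apply(config, 'host-b', 'elasticsearch.hosts.1'), which _.set expands
// into nested objects/arrays instead of one literal dotted key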
@@ -3,7 +3,7 @@ const { isWorker } = require('cluster');
 const { resolve } = require('path');
 
 const cwd = process.cwd();
-import fromRoot from '../../utils/fromRoot';
+import { fromRoot } from '../../utils';
 
 let canCluster;
 try {
@@ -126,7 +126,7 @@ module.exports = function (program) {
   }
 
   let kbnServer = {};
-  const KbnServer = require('../../server/KbnServer');
+  const KbnServer = require('../../server/kbn_server');
   try {
     kbnServer = new KbnServer(settings);
     await kbnServer.ready();
43 src/cli_plugin/cli.js Normal file
@@ -0,0 +1,43 @@
import _ from 'lodash';
import pkg from '../utils/package_json';
import Command from '../cli/command';
import listCommand from './list';
import installCommand from './install';
import removeCommand from './remove';

let argv = process.env.kbnWorkerArgv ? JSON.parse(process.env.kbnWorkerArgv) : process.argv.slice();
let program = new Command('bin/kibana-plugin');

program
  .version(pkg.version)
  .description(
    'The Kibana plugin manager enables you to install and remove plugins that ' +
    'provide additional functionality to Kibana'
  );

listCommand(program);
installCommand(program);
removeCommand(program);

program
  .command('help <command>')
  .description('get the help for a specific command')
  .action(function (cmdName) {
    var cmd = _.find(program.commands, { _name: cmdName });
    if (!cmd) return program.error(`unknown command ${cmdName}`);
    cmd.help();
  });

program
  .command('*', null, { noHelp: true })
  .action(function (cmd, options) {
    program.error(`unknown command ${cmd}`);
  });

// check for no command name
var subCommand = argv[2] && !String(argv[2][0]).match(/^-|^\.|\//);
if (!subCommand) {
  program.defaultHelp();
}

program.parse(argv);
5 src/cli_plugin/index.js Normal file
@@ -0,0 +1,5 @@
// load the babel options separately so that they can modify the process.env
// before calling babel/register
const babelOptions = require('../optimize/babel_options').node;
require('babel/register')(babelOptions);
require('./cli');
@@ -3,8 +3,8 @@ import sinon from 'sinon';
 import fs from 'fs';
 import rimraf from 'rimraf';
 
-import pluginCleaner from '../plugin_cleaner';
-import pluginLogger from '../plugin_logger';
+import { cleanPrevious, cleanArtifacts } from '../cleanup';
+import Logger from '../../lib/logger';
 
 describe('kibana cli', function () {
 
@@ -24,8 +24,7 @@ describe('kibana cli', function () {
 
   beforeEach(function () {
     errorStub = sinon.stub();
-    logger = pluginLogger(false);
-    cleaner = pluginCleaner(settings, logger);
+    logger = new Logger(settings);
     sinon.stub(logger, 'log');
     sinon.stub(logger, 'error');
     request = {
@@ -49,7 +48,7 @@ describe('kibana cli', function () {
       throw error;
     });
 
-    return cleaner.cleanPrevious(logger)
+    return cleanPrevious(settings, logger)
     .catch(errorStub)
     .then(function (data) {
       expect(errorStub.called).to.be(false);
@@ -64,7 +63,7 @@ describe('kibana cli', function () {
     });
 
     errorStub = sinon.stub();
-    return cleaner.cleanPrevious(logger)
+    return cleanPrevious(settings, logger)
     .catch(errorStub)
     .then(function () {
       expect(errorStub.called).to.be(true);
@@ -75,7 +74,7 @@ describe('kibana cli', function () {
     sinon.stub(rimraf, 'sync');
     sinon.stub(fs, 'statSync');
 
-    return cleaner.cleanPrevious(logger)
+    return cleanPrevious(settings, logger)
     .catch(errorStub)
     .then(function (data) {
       expect(logger.log.calledWith('Found previous install attempt. Deleting...')).to.be(true);
@@ -89,7 +88,7 @@ describe('kibana cli', function () {
     });
 
     errorStub = sinon.stub();
-    return cleaner.cleanPrevious(logger)
+    return cleanPrevious(settings, logger)
    .catch(errorStub)
     .then(function () {
       expect(errorStub.called).to.be(true);
@@ -100,7 +99,7 @@ describe('kibana cli', function () {
     sinon.stub(rimraf, 'sync');
     sinon.stub(fs, 'statSync');
 
-    return cleaner.cleanPrevious(logger)
+    return cleanPrevious(settings, logger)
     .catch(errorStub)
     .then(function (data) {
       expect(errorStub.called).to.be(false);
@@ -109,13 +108,11 @@ describe('kibana cli', function () {
 
   });
 
-  describe('cleanError', function () {
-    let cleaner;
+  describe('cleanArtifacts', function () {
     let logger;
 
     beforeEach(function () {
-      logger = pluginLogger(false);
-      cleaner = pluginCleaner(settings, logger);
+      logger = new Logger(settings);
     });
 
     afterEach(function () {
@@ -125,7 +122,7 @@ describe('kibana cli', function () {
   it('should attempt to delete the working directory', function () {
     sinon.stub(rimraf, 'sync');
 
-    cleaner.cleanError();
+    cleanArtifacts(settings);
     expect(rimraf.sync.calledWith(settings.workingPath)).to.be(true);
   });
 
@@ -134,7 +131,7 @@ describe('kibana cli', function () {
       throw new Error('Something bad happened.');
     });
 
-    expect(cleaner.cleanError).withArgs(settings).to.not.throwError();
+    expect(cleanArtifacts).withArgs(settings).to.not.throwError();
   });
 
 });
226 src/cli_plugin/install/__tests__/download.js Normal file
@@ -0,0 +1,226 @@
import expect from 'expect.js';
import sinon from 'sinon';
import nock from 'nock';
import glob from 'glob-all';
import rimraf from 'rimraf';
import mkdirp from 'mkdirp';
import Logger from '../../lib/logger';
import { download, _downloadSingle } from '../download';
import { join } from 'path';

describe('kibana cli', function () {

  describe('plugin downloader', function () {
    const testWorkingPath = join(__dirname, '.test.data');
    const tempArchiveFilePath = join(testWorkingPath, 'archive.part');

    const settings = {
      urls: [],
      workingPath: testWorkingPath,
      tempArchiveFile: tempArchiveFilePath,
      timeout: 0
    };
    const logger = new Logger(settings);

    function expectWorkingPathEmpty() {
      const files = glob.sync('**/*', { cwd: testWorkingPath });
      expect(files).to.eql([]);
    }

    function expectWorkingPathNotEmpty() {
      const files = glob.sync('**/*', { cwd: testWorkingPath });
      const expected = [
        'archive.part'
      ];

      expect(files.sort()).to.eql(expected.sort());
    }

    function shouldReject() {
      throw new Error('expected the promise to reject');
    }

    beforeEach(function () {
      sinon.stub(logger, 'log');
      sinon.stub(logger, 'error');
      rimraf.sync(testWorkingPath);
      mkdirp.sync(testWorkingPath);
    });

    afterEach(function () {
      logger.log.restore();
      logger.error.restore();
      rimraf.sync(testWorkingPath);
    });

    describe('_downloadSingle', function () {

      beforeEach(function () {
      });

      describe('http downloader', function () {

        it('should throw an ENOTFOUND error for an http url that returns 404', function () {
          const couchdb = nock('http://example.com')
          .get('/plugin.tar.gz')
          .reply(404);

          const sourceUrl = 'http://example.com/plugin.tar.gz';

          return _downloadSingle(settings, logger, sourceUrl)
          .then(shouldReject, function (err) {
            expect(err.message).to.match(/ENOTFOUND/);
            expectWorkingPathEmpty();
          });
        });

        it('should throw an ENOTFOUND error for an invalid url', function () {
          const sourceUrl = 'i am an invalid url';

          return _downloadSingle(settings, logger, sourceUrl)
          .then(shouldReject, function (err) {
            expect(err.message).to.match(/ENOTFOUND/);
            expectWorkingPathEmpty();
          });
        });

        it('should download a file from a valid http url', function () {
          const filePath = join(__dirname, 'replies/banana.jpg');

          const couchdb = nock('http://example.com')
          .defaultReplyHeaders({
            'content-length': '341965',
            'content-type': 'application/zip'
          })
          .get('/plugin.zip')
          .replyWithFile(200, filePath);

          const sourceUrl = 'http://example.com/plugin.zip';

          return _downloadSingle(settings, logger, sourceUrl)
          .then(function () {
            expectWorkingPathNotEmpty();
          });
        });

      });

      describe('local file downloader', function () {

        it('should throw an ENOTFOUND error for an invalid local file', function () {
          const filePath = join(__dirname, 'replies/i-am-not-there.zip');
          const sourceUrl = 'file://' + filePath.replace(/\\/g, '/');

          return _downloadSingle(settings, logger, sourceUrl)
          .then(shouldReject, function (err) {
            expect(err.message).to.match(/ENOTFOUND/);
            expectWorkingPathEmpty();
          });
        });

        it('should copy a valid local file', function () {
          const filePath = join(__dirname, 'replies/banana.jpg');
          const sourceUrl = 'file://' + filePath.replace(/\\/g, '/');

          return _downloadSingle(settings, logger, sourceUrl)
          .then(function () {
            expectWorkingPathNotEmpty();
          });
        });

      });

    });

    describe('download', function () {
      it('should loop through bad urls until it finds a good one.', function () {
        const filePath = join(__dirname, 'replies/test_plugin.zip');
        settings.urls = [
          'http://example.com/badfile1.tar.gz',
          'http://example.com/badfile2.tar.gz',
          'I am a bad uri',
          'http://example.com/goodfile.tar.gz'
        ];

        const couchdb = nock('http://example.com')
        .defaultReplyHeaders({
          'content-length': '10'
        })
        .get('/badfile1.tar.gz')
        .reply(404)
        .get('/badfile2.tar.gz')
        .reply(404)
        .get('/goodfile.tar.gz')
        .replyWithFile(200, filePath);

        return download(settings, logger)
        .then(function () {
          expect(logger.log.getCall(0).args[0]).to.match(/badfile1.tar.gz/);
          expect(logger.log.getCall(1).args[0]).to.match(/badfile2.tar.gz/);
          expect(logger.log.getCall(2).args[0]).to.match(/I am a bad uri/);
          expect(logger.log.getCall(3).args[0]).to.match(/goodfile.tar.gz/);
          expectWorkingPathNotEmpty();
        });
      });

      it('should stop looping through urls when it finds a good one.', function () {
        const filePath = join(__dirname, 'replies/test_plugin.zip');
        settings.urls = [
          'http://example.com/badfile1.tar.gz',
          'http://example.com/badfile2.tar.gz',
          'http://example.com/goodfile.tar.gz',
          'http://example.com/badfile3.tar.gz'
        ];

        const couchdb = nock('http://example.com')
        .defaultReplyHeaders({
          'content-length': '10'
        })
        .get('/badfile1.tar.gz')
        .reply(404)
        .get('/badfile2.tar.gz')
        .reply(404)
        .get('/goodfile.tar.gz')
        .replyWithFile(200, filePath)
        .get('/badfile3.tar.gz')
        .reply(404);

        return download(settings, logger)
        .then(function () {
          for (let i = 0; i < logger.log.callCount; i++) {
            expect(logger.log.getCall(i).args[0]).to.not.match(/badfile3.tar.gz/);
          }
          expectWorkingPathNotEmpty();
        });
      });

      it('should throw an error when it doesn\'t find a good url.', function () {
        settings.urls = [
          'http://example.com/badfile1.tar.gz',
          'http://example.com/badfile2.tar.gz',
          'http://example.com/badfile3.tar.gz'
        ];

        const couchdb = nock('http://example.com')
        .defaultReplyHeaders({
          'content-length': '10'
        })
        .get('/badfile1.tar.gz')
        .reply(404)
        .get('/badfile2.tar.gz')
        .reply(404)
        .get('/badfile3.tar.gz')
        .reply(404);

        return download(settings, logger)
        .then(shouldReject, function (err) {
          expect(err.message).to.match(/no valid url specified/i);
          expectWorkingPathEmpty();
        });
      });

    });

  });

});
@@ -1,6 +1,6 @@
 import expect from 'expect.js';
 import sinon from 'sinon';
-import plugin from '../plugin';
+import index from '../index';
 
 describe('kibana cli', function () {
 
@@ -18,8 +18,8 @@ describe('kibana cli', function () {
   it('should define the command', function () {
     sinon.spy(program, 'command');
 
-    plugin(program);
-    expect(program.command.calledWith('plugin')).to.be(true);
+    index(program);
+    expect(program.command.calledWith('install <plugin/url>')).to.be(true);
 
     program.command.restore();
   });
@@ -27,8 +27,8 @@ describe('kibana cli', function () {
   it('should define the description', function () {
     sinon.spy(program, 'description');
 
-    plugin(program);
-    expect(program.description.calledWith('Maintain Plugins')).to.be(true);
+    index(program);
+    expect(program.description.calledWith('install a plugin')).to.be(true);
 
     program.description.restore();
   });
@@ -37,14 +37,14 @@ describe('kibana cli', function () {
     const spy = sinon.spy(program, 'option');
 
     const options = [
-      /-i/,
-      /-r/,
       /-q/,
       /-s/,
       /-u/,
-      /-t/
+      /-c/,
+      /-t/,
+      /-d/
     ];
 
-    plugin(program);
+    index(program);
 
     for (let i = 0; i < spy.callCount; i++) {
       const call = spy.getCall(i);
@@ -63,7 +63,7 @@ describe('kibana cli', function () {
   it('should call the action function', function () {
     sinon.spy(program, 'action');
 
-    plugin(program);
+    index(program);
     expect(program.action.calledOnce).to.be(true);
 
     program.action.restore();
170 src/cli_plugin/install/__tests__/kibana.js Normal file
@@ -0,0 +1,170 @@
import expect from 'expect.js';
import sinon from 'sinon';
import Logger from '../../lib/logger';
import { join } from 'path';
import rimraf from 'rimraf';
import mkdirp from 'mkdirp';
import { existingInstall, assertVersion } from '../kibana';

describe('kibana cli', function () {

  describe('plugin installer', function () {

    describe('kibana', function () {
      const testWorkingPath = join(__dirname, '.test.data');
      const tempArchiveFilePath = join(testWorkingPath, 'archive.part');

      const settings = {
        workingPath: testWorkingPath,
        tempArchiveFile: tempArchiveFilePath,
        plugin: 'test-plugin',
        version: '1.0.0',
        plugins: [ { name: 'foo', path: join(testWorkingPath, 'foo') } ]
      };

      const logger = new Logger(settings);

      describe('assertVersion', function () {

        beforeEach(function () {
          rimraf.sync(testWorkingPath);
          mkdirp.sync(testWorkingPath);
          sinon.stub(logger, 'log');
          sinon.stub(logger, 'error');
        });

        afterEach(function () {
          logger.log.restore();
          logger.error.restore();
          rimraf.sync(testWorkingPath);
        });

        it('should succeed with exact match', function () {
          const settings = {
            workingPath: testWorkingPath,
            tempArchiveFile: tempArchiveFilePath,
            plugin: 'test-plugin',
            version: '5.0.0-snapshot',
            plugins: [ { name: 'foo', path: join(testWorkingPath, 'foo'), version: '5.0.0-snapshot' } ]
          };
          const errorStub = sinon.stub();

          try {
            assertVersion(settings);
          }
          catch (err) {
            errorStub(err);
          }

          expect(errorStub.called).to.be(false);
        });

        it('should throw an error if plugin does not contain a version.', function () {
          const errorStub = sinon.stub();

          try {
            assertVersion(settings);
          }
          catch (err) {
            errorStub(err);
          }

          expect(errorStub.firstCall.args[0]).to.match(/plugin version not found/i);
        });

        it('should throw an error if plugin version does not match kibana version', function () {
          const errorStub = sinon.stub();
          settings.plugins[0].version = '1.2.3.4';

          try {
            assertVersion(settings);
          }
          catch (err) {
            errorStub(err);
          }

          expect(errorStub.firstCall.args[0]).to.match(/incorrect version/i);
        });

        it('should not throw an error if plugin version matches kibana version', function () {
          const errorStub = sinon.stub();
          settings.plugins[0].version = '1.0.0';

          try {
            assertVersion(settings);
          }
          catch (err) {
            errorStub(err);
          }

          expect(errorStub.called).to.be(false);
        });

        it('should ignore version info after the dash in checks on valid version', function () {
          const errorStub = sinon.stub();
          settings.plugins[0].version = '1.0.0-foo-bar-version-1.2.3';

          try {
            assertVersion(settings);
          }
          catch (err) {
            errorStub(err);
          }

          expect(errorStub.called).to.be(false);
        });

        it('should ignore version info after the dash in checks on invalid version', function () {
          const errorStub = sinon.stub();
          settings.plugins[0].version = '2.0.0-foo-bar-version-1.2.3';

          try {
            assertVersion(settings);
          }
          catch (err) {
            errorStub(err);
          }

          expect(errorStub.firstCall.args[0]).to.match(/incorrect version/i);
        });
      });

      describe('existingInstall', function () {
        let testWorkingPath;
        let processExitStub;

        beforeEach(function () {
          processExitStub = sinon.stub(process, 'exit');
          testWorkingPath = join(__dirname, '.test.data');
          rimraf.sync(testWorkingPath);
          sinon.stub(logger, 'log');
          sinon.stub(logger, 'error');
        });

        afterEach(function () {
          processExitStub.restore();
          logger.log.restore();
          logger.error.restore();
          rimraf.sync(testWorkingPath);
        });

        it('should throw an error if the workingPath already exists.', function () {
          mkdirp.sync(settings.plugins[0].path);
          existingInstall(settings, logger);

          expect(logger.error.firstCall.args[0]).to.match(/already exists/);
          expect(process.exit.called).to.be(true);
        });

        it('should not throw an error if the workingPath does not exist.', function () {
          existingInstall(settings, logger);
          expect(logger.error.called).to.be(false);
        });

      });

    });

  });

});
174 src/cli_plugin/install/__tests__/pack.js Normal file
@@ -0,0 +1,174 @@
import expect from 'expect.js';
import sinon from 'sinon';
import glob from 'glob-all';
import rimraf from 'rimraf';
import mkdirp from 'mkdirp';
import Logger from '../../lib/logger';
import { extract, getPackData } from '../pack';
import { _downloadSingle } from '../download';
import { join } from 'path';

describe('kibana cli', function () {

  describe('pack', function () {

    const testWorkingPath = join(__dirname, '.test.data');
    const tempArchiveFilePath = join(testWorkingPath, 'archive.part');
    const testPluginPath = join(testWorkingPath, '.installedPlugins');
    let logger;

    const settings = {
      workingPath: testWorkingPath,
      tempArchiveFile: tempArchiveFilePath,
      pluginDir: testPluginPath,
      plugin: 'test-plugin'
    };

    beforeEach(function () {
      logger = new Logger(settings);
      sinon.stub(logger, 'log');
      sinon.stub(logger, 'error');
      rimraf.sync(testWorkingPath);
      mkdirp.sync(testWorkingPath);
    });

    afterEach(function () {
      logger.log.restore();
      logger.error.restore();
      rimraf.sync(testWorkingPath);
    });

    function copyReplyFile(filename) {
      const filePath = join(__dirname, 'replies', filename);
      const sourceUrl = 'file://' + filePath.replace(/\\/g, '/');

      return _downloadSingle(settings, logger, sourceUrl);
    }

    function shouldReject() {
      throw new Error('expected the promise to reject');
    }

    describe('extract', function () {

      //Also only extracts the content from the kibana folder.
      //Ignores the others.
      it('successfully extract a valid zip', function () {
        return copyReplyFile('test_plugin.zip')
        .then(() => {
          return getPackData(settings, logger);
        })
        .then(() => {
          return extract(settings, logger);
        })
        .then(() => {
          const files = glob.sync('**/*', { cwd: testWorkingPath });
          const expected = [
            'archive.part',
            'README.md',
            'index.js',
            'package.json',
            'public',
            'public/app.js',
            'extra file only in zip.txt'
          ];
          expect(files.sort()).to.eql(expected.sort());
        });
      });

    });

    describe('getPackData', function () {

      it('populate settings.plugins', function () {
        return copyReplyFile('test_plugin.zip')
        .then(() => {
          return getPackData(settings, logger);
        })
        .then(() => {
          expect(settings.plugins[0].name).to.be('test-plugin');
          expect(settings.plugins[0].folder).to.be('test-plugin');
          expect(settings.plugins[0].version).to.be('1.0.0');
          expect(settings.plugins[0].platform).to.be(undefined);
        });
      });

      it('populate settings.plugins with multiple plugins', function () {
        return copyReplyFile('test_plugin_many.zip')
        .then(() => {
          return getPackData(settings, logger);
        })
        .then(() => {
          expect(settings.plugins[0].name).to.be('funger-plugin');
          expect(settings.plugins[0].file).to.be('kibana/funger-plugin/package.json');
          expect(settings.plugins[0].folder).to.be('funger-plugin');
          expect(settings.plugins[0].version).to.be('1.0.0');
          expect(settings.plugins[0].platform).to.be(undefined);

          expect(settings.plugins[1].name).to.be('pdf');
          expect(settings.plugins[1].file).to.be('kibana/pdf-linux/package.json');
          expect(settings.plugins[1].folder).to.be('pdf-linux');
          expect(settings.plugins[1].version).to.be('1.0.0');
          expect(settings.plugins[1].platform).to.be('linux');

          expect(settings.plugins[2].name).to.be('pdf');
          expect(settings.plugins[2].file).to.be('kibana/pdf-win32/package.json');
          expect(settings.plugins[2].folder).to.be('pdf-win32');
          expect(settings.plugins[2].version).to.be('1.0.0');
          expect(settings.plugins[2].platform).to.be('win32');

          expect(settings.plugins[3].name).to.be('pdf');
          expect(settings.plugins[3].file).to.be('kibana/pdf-win64/package.json');
          expect(settings.plugins[3].folder).to.be('pdf-win64');
          expect(settings.plugins[3].version).to.be('1.0.0');
          expect(settings.plugins[3].platform).to.be('win64');

          expect(settings.plugins[4].name).to.be('pdf');
          expect(settings.plugins[4].file).to.be('kibana/pdf/package.json');
          expect(settings.plugins[4].folder).to.be('pdf');
          expect(settings.plugins[4].version).to.be('1.0.0');
          expect(settings.plugins[4].platform).to.be(undefined);

          expect(settings.plugins[5].name).to.be('test-plugin');
          expect(settings.plugins[5].file).to.be('kibana/test-plugin/package.json');
          expect(settings.plugins[5].folder).to.be('test-plugin');
          expect(settings.plugins[5].version).to.be('1.0.0');
          expect(settings.plugins[5].platform).to.be(undefined);
        });
      });

      it('throw an error if there is no kibana plugin', function () {
        return copyReplyFile('test_plugin_no_kibana.zip')
        .then((data) => {
          return getPackData(settings, logger);
        })
        .then(shouldReject, (err) => {
          expect(err.message).to.match(/No kibana plugins found in archive/i);
        });
      });

      it('throw an error with a corrupt zip', function () {
        return copyReplyFile('corrupt.zip')
        .then((data) => {
          return getPackData(settings, logger);
        })
        .then(shouldReject, (err) => {
          expect(err.message).to.match(/error retrieving/i);
        });
      });

      it('throw an error if there is an invalid plugin name', function () {
        return copyReplyFile('invalid_name.zip')
        .then((data) => {
          return getPackData(settings, logger);
        })
        .then(shouldReject, (err) => {
          expect(err.message).to.match(/invalid plugin name/i);
        });
      });

    });

  });

});
@@ -1,23 +1,22 @@
 import expect from 'expect.js';
 import sinon from 'sinon';
-import progressReporter from '../progress_reporter';
-import pluginLogger from '../plugin_logger';
+import Progress from '../progress';
+import Logger from '../../lib/logger';
 
 describe('kibana cli', function () {
 
   describe('plugin installer', function () {
 
     describe('progressReporter', function () {
 
       let logger;
       let progress;
       let request;
 
       beforeEach(function () {
-        logger = pluginLogger({ silent: false, quiet: false });
+        logger = new Logger({ silent: false, quiet: false });
         sinon.stub(logger, 'log');
         sinon.stub(logger, 'error');
-        progress = progressReporter(logger);
+        progress = new Progress(logger);
       });
 
       afterEach(function () {
(image file unchanged: 204 KiB before and after; not shown)
BIN src/cli_plugin/install/__tests__/replies/invalid_name.zip Normal file (binary file not shown)
@@ -0,0 +1,3 @@
{
  "name": "test-plugin",
}
BIN src/cli_plugin/install/__tests__/replies/test_plugin.zip Normal file (binary file not shown)
BIN src/cli_plugin/install/__tests__/replies/test_plugin_many.zip Normal file (binary file not shown)
Binary file not shown.
228 src/cli_plugin/install/__tests__/settings.js Normal file
@@ -0,0 +1,228 @@
import path from 'path';
import expect from 'expect.js';
import { fromRoot } from '../../../utils';
import { resolve } from 'path';
import { parseMilliseconds, parse, getPlatform } from '../settings';

describe('kibana cli', function () {

  describe('plugin installer', function () {

    describe('command line option parsing', function () {

      describe('parseMilliseconds function', function () {

        it('should return 0 for an empty string', function () {
          const value = '';
          const result = parseMilliseconds(value);

          expect(result).to.be(0);
        });

        it('should return 0 for a number with an invalid unit of measure', function () {
          const result = parseMilliseconds('1gigablasts');
          expect(result).to.be(0);
        });

        it('should assume a number with no unit of measure is specified as milliseconds', function () {
          const result = parseMilliseconds(1);
          expect(result).to.be(1);

          const result2 = parseMilliseconds('1');
          expect(result2).to.be(1);
        });

        it('should interpret a number with "s" as the unit of measure as seconds', function () {
          const result = parseMilliseconds('5s');
          expect(result).to.be(5 * 1000);
        });

        it('should interpret a number with "second" as the unit of measure as seconds', function () {
          const result = parseMilliseconds('5second');
          expect(result).to.be(5 * 1000);
        });

        it('should interpret a number with "seconds" as the unit of measure as seconds', function () {
          const result = parseMilliseconds('5seconds');
          expect(result).to.be(5 * 1000);
        });

        it('should interpret a number with "m" as the unit of measure as minutes', function () {
          const result = parseMilliseconds('9m');
          expect(result).to.be(9 * 1000 * 60);
        });

        it('should interpret a number with "minute" as the unit of measure as minutes', function () {
          const result = parseMilliseconds('9minute');
          expect(result).to.be(9 * 1000 * 60);
        });

        it('should interpret a number with "minutes" as the unit of measure as minutes', function () {
          const result = parseMilliseconds('9minutes');
          expect(result).to.be(9 * 1000 * 60);
        });

      });

      describe('parse function', function () {

        const command = 'plugin name';
        let options = {};
        const kbnPackage = { version: 1234 };
        beforeEach(function () {
          options = { pluginDir: fromRoot('installedPlugins') };
        });

        describe('timeout option', function () {

          it('should default to 0 (milliseconds)', function () {
            const settings = parse(command, options, kbnPackage);

            expect(settings.timeout).to.be(0);
          });

          it('should set settings.timeout property', function () {
            options.timeout = 1234;
            const settings = parse(command, options, kbnPackage);

            expect(settings.timeout).to.be(1234);
          });

        });

        describe('quiet option', function () {

          it('should default to false', function () {
            const settings = parse(command, options, kbnPackage);

            expect(settings.quiet).to.be(false);
          });

          it('should set settings.quiet property to true', function () {
            options.quiet = true;
            const settings = parse(command, options, kbnPackage);

            expect(settings.quiet).to.be(true);
          });

        });

        describe('silent option', function () {

          it('should default to false', function () {
            const settings = parse(command, options, kbnPackage);

            expect(settings.silent).to.be(false);
          });

          it('should set settings.silent property to true', function () {
            options.silent = true;
            const settings = parse(command, options, kbnPackage);

            expect(settings.silent).to.be(true);
          });

        });

        describe('config option', function () {

          it('should default to an empty string', function () {
            const settings = parse(command, options, kbnPackage);

            expect(settings.config).to.be('');
          });

          it('should set settings.config property', function () {
            options.config = 'foo bar baz';
            const settings = parse(command, options, kbnPackage);

            expect(settings.config).to.be('foo bar baz');
          });

        });

        describe('pluginDir option', function () {

          it('should default to installedPlugins', function () {
            const settings = parse(command, options, kbnPackage);

            expect(settings.pluginDir).to.be(fromRoot('installedPlugins'));
          });

          it('should set settings.pluginDir property', function () {
            options.pluginDir = 'foo bar baz';
            const settings = parse(command, options, kbnPackage);

            expect(settings.pluginDir).to.be('foo bar baz');
          });

        });

        describe('command value', function () {

          it('should set settings.plugin property', function () {
            const settings = parse(command, options, kbnPackage);

            expect(settings.plugin).to.be(command);
          });

        });

        describe('urls collection', function () {

          it('should populate the settings.urls property', function () {
            const settings = parse(command, options, kbnPackage);

            const expected = [
              command,
              `https://download.elastic.co/packs/${command}/${command}-1234.zip`
            ];

            expect(settings.urls).to.eql(expected);
          });

        });

        describe('workingPath value', function () {

          it('should set settings.workingPath property', function () {
            options.pluginDir = 'foo/bar/baz';
            const settings = parse(command, options, kbnPackage);
            const expected = resolve('foo/bar/baz', '.plugin.installing');

            expect(settings.workingPath).to.be(expected);
          });

        });

        describe('tempArchiveFile value', function () {

          it('should set settings.tempArchiveFile property', function () {
            options.pluginDir = 'foo/bar/baz';
            const settings = parse(command, options, kbnPackage);
            const expected = resolve('foo/bar/baz', '.plugin.installing', 'archive.part');

            expect(settings.tempArchiveFile).to.be(expected);
          });

        });

        describe('tempPackageFile value', function () {

          it('should set settings.tempPackageFile property', function () {
            options.pluginDir = 'foo/bar/baz';
            const settings = parse(command, options, kbnPackage);
            const expected = resolve('foo/bar/baz', '.plugin.installing', 'package.json');

            expect(settings.tempPackageFile).to.be(expected);
          });

        });

      });

    });

  });

});
145
src/cli_plugin/install/__tests__/zip.js
Normal file
145
src/cli_plugin/install/__tests__/zip.js
Normal file
|
@ -0,0 +1,145 @@
|
|||
import expect from 'expect.js';
import sinon from 'sinon';
import glob from 'glob-all';
import rimraf from 'rimraf';
import mkdirp from 'mkdirp';
import Logger from '../../lib/logger';
import { _downloadSingle } from '../download';
import { join } from 'path';
import { listFiles, extractFiles } from '../zip';

describe('kibana cli', function () {

  describe('zip', function () {

    const testWorkingPath = join(__dirname, '.test.data');
    const tempArchiveFilePath = join(testWorkingPath, 'archive.part');
    let logger;

    const settings = {
      workingPath: testWorkingPath,
      tempArchiveFile: tempArchiveFilePath,
      plugin: 'test-plugin',
      setPlugin: function (plugin) {}
    };

    function shouldReject() {
      throw new Error('expected the promise to reject');
    }

    beforeEach(function () {
      logger = new Logger(settings);
      sinon.stub(logger, 'log');
      sinon.stub(logger, 'error');
      sinon.stub(settings, 'setPlugin');
      rimraf.sync(testWorkingPath);
      mkdirp.sync(testWorkingPath);
    });

    afterEach(function () {
      logger.log.restore();
      logger.error.restore();
      settings.setPlugin.restore();
      rimraf.sync(testWorkingPath);
    });

    function copyReplyFile(filename) {
      const filePath = join(__dirname, 'replies', filename);
      const sourceUrl = 'file://' + filePath.replace(/\\/g, '/');

      return _downloadSingle(settings, logger, sourceUrl);
    }

    describe('listFiles', function () {

      it('lists the files in the zip', function () {
        return copyReplyFile('test_plugin.zip')
        .then(() => {
          return listFiles(settings.tempArchiveFile);
        })
        .then((actual) => {
          const expected = [
            'elasticsearch/',
            'kibana/',
            'kibana/test-plugin/',
            'kibana/test-plugin/.gitignore',
            'kibana/test-plugin/extra file only in zip.txt',
            'kibana/test-plugin/index.js',
            'kibana/test-plugin/package.json',
            'kibana/test-plugin/public/',
            'kibana/test-plugin/public/app.js',
            'kibana/test-plugin/README.md',
            'logstash/'
          ];

          expect(actual).to.eql(expected);
        });
      });

    });

    describe('extractFiles', function () {

      it('extracts files using the files filter', function () {
        return copyReplyFile('test_plugin_many.zip')
        .then(() => {
          const filter = {
            files: [
              'kibana/funger-plugin/extra file only in zip.txt',
              'kibana/funger-plugin/index.js',
              'kibana\\funger-plugin\\package.json'
            ]
          };

          return extractFiles(settings.tempArchiveFile, settings.workingPath, 0, filter);
        })
        .then(() => {
          const files = glob.sync('**/*', { cwd: testWorkingPath });
          const expected = [
            'kibana',
            'kibana/funger-plugin',
            'kibana/funger-plugin/extra file only in zip.txt',
            'kibana/funger-plugin/index.js',
            'kibana/funger-plugin/package.json',
            'archive.part'
          ];
          expect(files.sort()).to.eql(expected.sort());
        });
      });

      it('extracts files using the paths filter', function () {
        return copyReplyFile('test_plugin_many.zip')
        .then(() => {
          const filter = {
            paths: [
              'kibana/funger-plugin',
              'kibana/test-plugin/public'
            ]
          };

          return extractFiles(settings.tempArchiveFile, settings.workingPath, 0, filter);
        })
        .then(() => {
          const files = glob.sync('**/*', { cwd: testWorkingPath });
          const expected = [
            'archive.part',
            'kibana',
            'kibana/funger-plugin',
            'kibana/funger-plugin/README.md',
            'kibana/funger-plugin/extra file only in zip.txt',
            'kibana/funger-plugin/index.js',
            'kibana/funger-plugin/package.json',
            'kibana/funger-plugin/public',
            'kibana/funger-plugin/public/app.js',
            'kibana/test-plugin',
            'kibana/test-plugin/public',
            'kibana/test-plugin/public/app.js'
          ];
          expect(files.sort()).to.eql(expected.sort());
        });
      });
    });

  });

});
32 src/cli_plugin/install/cleanup.js Normal file
@@ -0,0 +1,32 @@
import rimraf from 'rimraf';
import fs from 'fs';

export function cleanPrevious(settings, logger) {
  return new Promise(function (resolve, reject) {
    try {
      fs.statSync(settings.workingPath);

      logger.log('Found previous install attempt. Deleting...');
      try {
        rimraf.sync(settings.workingPath);
      } catch (e) {
        reject(e);
      }
      resolve();
    } catch (e) {
      if (e.code !== 'ENOENT') reject(e);

      resolve();
    }
  });
};

export function cleanArtifacts(settings) {
  // delete the working directory.
  // At this point we're bailing, so swallow any errors on delete.
  try {
    rimraf.sync(settings.workingPath);
    rimraf.sync(settings.plugins[0].path);
  }
  catch (e) {} // eslint-disable-line no-empty
};
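The promise wrapper above leans on `fs.statSync` throwing `ENOENT` to mean "nothing to clean". A minimal sketch of how a caller consumes it; the settings literal here is hypothetical, not part of the commit:

```js
import Logger from '../lib/logger';
import { cleanPrevious } from './cleanup';

// Hypothetical settings; only the fields cleanup.js touches are shown.
const settings = { workingPath: '/tmp/.plugin.installing', silent: false, quiet: false };
const logger = new Logger(settings);

cleanPrevious(settings, logger).then(() => {
  // Resolves when no previous attempt exists (ENOENT is expected) or after
  // a leftover working directory was deleted; rejects on any other error.
});
```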
40 src/cli_plugin/install/download.js Normal file
@@ -0,0 +1,40 @@
import downloadHttpFile from './downloaders/http';
import downloadLocalFile from './downloaders/file';
import { parse } from 'url';

export function _downloadSingle(settings, logger, sourceUrl) {
  const urlInfo = parse(sourceUrl);
  let downloadPromise;

  if (/^file/.test(urlInfo.protocol)) {
    downloadPromise = downloadLocalFile(logger, decodeURI(urlInfo.path), settings.tempArchiveFile);
  } else {
    downloadPromise = downloadHttpFile(logger, sourceUrl, settings.tempArchiveFile, settings.timeout);
  }

  return downloadPromise;
}

//Attempts to download each url in turn until one is successful
export function download(settings, logger) {
  const urls = settings.urls.slice(0);

  function tryNext() {
    const sourceUrl = urls.shift();
    if (!sourceUrl) {
      throw new Error('No valid url specified.');
    }

    logger.log(`Attempting to transfer from ${sourceUrl}`);

    return _downloadSingle(settings, logger, sourceUrl)
    .catch((err) => {
      if (err.message === 'ENOTFOUND') {
        return tryNext();
      }
      throw (err);
    });
  }

  return tryNext();
};
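To make the fallback behaviour concrete, here is a standalone sketch of the same try-each-URL pattern; `fakeFetch` stands in for `_downloadSingle` and is purely illustrative:

```js
// Reject with ENOTFOUND to mimic a URL that cannot be resolved.
function fakeFetch(url) {
  return url.startsWith('https://good')
    ? Promise.resolve(`downloaded from ${url}`)
    : Promise.reject(new Error('ENOTFOUND'));
}

function tryEach(urls) {
  const queue = urls.slice(0);
  function tryNext() {
    const sourceUrl = queue.shift();
    if (!sourceUrl) throw new Error('No valid url specified.');
    return fakeFetch(sourceUrl).catch((err) => {
      if (err.message === 'ENOTFOUND') return tryNext(); // only ENOTFOUND falls through
      throw err; // any other failure aborts the whole chain
    });
  }
  return tryNext();
}

tryEach(['not-a-real-plugin', 'https://good.example/plugin.zip'])
  .then(console.log); // "downloaded from https://good.example/plugin.zip"
```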
src/cli_plugin/install/downloaders/file.js
@@ -1,6 +1,5 @@
-import getProgressReporter from '../progress_reporter';
+import Progress from '../progress';
 import { createWriteStream, createReadStream, unlinkSync, statSync } from 'fs';
 import fileType from '../file_type';

 function openSourceFile({ sourcePath }) {
   try {
@@ -18,7 +17,7 @@ function openSourceFile({ sourcePath }) {
   }
 }

-async function copyFile({ readStream, writeStream, progressReporter }) {
+async function copyFile({ readStream, writeStream, progress }) {
   await new Promise((resolve, reject) => {
     // if either stream errors, fail quickly
     readStream.on('error', reject);
@@ -26,7 +25,7 @@ async function copyFile({ readStream, writeStream, progressReporter }) {

     // report progress as we transfer
     readStream.on('data', (chunk) => {
-      progressReporter.progress(chunk.length);
+      progress.progress(chunk.length);
     });

     // write the download to the file system
@@ -46,21 +45,17 @@ export default async function copyLocalFile(logger, sourcePath, targetPath) {
     const writeStream = createWriteStream(targetPath);

     try {
-      const progressReporter = getProgressReporter(logger);
-      progressReporter.init(fileInfo.size);
+      const progress = new Progress(logger);
+      progress.init(fileInfo.size);

-      await copyFile({ readStream, writeStream, progressReporter });
+      await copyFile({ readStream, writeStream, progress });

-      progressReporter.complete();
+      progress.complete();
     } catch (err) {
       readStream.close();
       writeStream.close();
       throw err;
     }
-
-    // all is well, return our archive type
-    const archiveType = fileType(sourcePath);
-    return { archiveType };
   } catch (err) {
     logger.error(err);
     throw err;
src/cli_plugin/install/downloaders/http.js
@@ -1,8 +1,7 @@
 import Wreck from 'wreck';
-import getProgressReporter from '../progress_reporter';
+import Progress from '../progress';
 import { fromNode as fn } from 'bluebird';
 import { createWriteStream, unlinkSync } from 'fs';
 import fileType, { ZIP, TAR } from '../file_type';

 function sendRequest({ sourceUrl, timeout }) {
   const maxRedirects = 11; //Because this one goes to 11.
@@ -25,7 +24,7 @@ function sendRequest({ sourceUrl, timeout }) {
   });
 }

-function downloadResponse({ resp, targetPath, progressReporter }) {
+function downloadResponse({ resp, targetPath, progress }) {
   return new Promise((resolve, reject) => {
     const writeStream = createWriteStream(targetPath);

@@ -35,7 +34,7 @@ function downloadResponse({ resp, targetPath, progressReporter }) {

     // report progress as we download
     resp.on('data', (chunk) => {
-      progressReporter.progress(chunk.length);
+      progress.progress(chunk.length);
     });

     // write the download to the file system
@@ -46,19 +45,6 @@ function downloadResponse({ resp, targetPath, progressReporter }) {
   });
 }

-function getArchiveTypeFromResponse(resp, sourceUrl) {
-  const contentType = (resp.headers['content-type'] || '');
-
-  switch (contentType.toLowerCase()) {
-    case 'application/zip': return ZIP;
-    case 'application/x-gzip': return TAR;
-    default:
-      //If we can't infer the archive type from the content-type header,
-      //fall back to checking the extension in the url
-      return fileType(sourceUrl);
-  }
-}
-
 /*
 Responsible for managing http transfers
 */
@@ -68,20 +54,16 @@ export default async function downloadUrl(logger, sourceUrl, targetPath, timeout) {

   try {
     let totalSize = parseFloat(resp.headers['content-length']) || 0;
-    const progressReporter = getProgressReporter(logger);
-    progressReporter.init(totalSize);
+    const progress = new Progress(logger);
+    progress.init(totalSize);

-    await downloadResponse({ resp, targetPath, progressReporter });
+    await downloadResponse({ resp, targetPath, progress });

-    progressReporter.complete();
+    progress.complete();
   } catch (err) {
     req.abort();
     throw err;
   }
-
-  // all is well, return our archive type
-  const archiveType = getArchiveTypeFromResponse(resp, sourceUrl);
-  return { archiveType };
 } catch (err) {
   if (err.message !== 'ENOTFOUND') {
     logger.error(err);
47 src/cli_plugin/install/index.js Normal file
@@ -0,0 +1,47 @@
import { fromRoot } from '../../utils';
import install from './install';
import Logger from '../lib/logger';
import pkg from '../../utils/package_json';
import { parse, parseMilliseconds } from './settings';

function processCommand(command, options) {
  let settings;
  try {
    settings = parse(command, options, pkg);
  } catch (ex) {
    //The logger has not yet been initialized.
    console.error(ex.message);
    process.exit(64); // eslint-disable-line no-process-exit
  }

  const logger = new Logger(settings);
  install(settings, logger);
}

export default function pluginInstall(program) {
  program
    .command('install <plugin/url>')
    .option('-q, --quiet', 'disable all process messaging except errors')
    .option('-s, --silent', 'disable all process messaging')
    .option(
      '-c, --config <path>',
      'path to the config file',
      fromRoot('config/kibana.yml')
    )
    .option(
      '-t, --timeout <duration>',
      'length of time before failing; 0 for never fail',
      parseMilliseconds
    )
    .option(
      '-d, --plugin-dir <path>',
      'path to the directory where plugins are stored',
      fromRoot('installedPlugins')
    )
    .description('install a plugin',
    `Common examples:
      install xpack
      install file:///Path/to/my/xpack.zip
      install https://path.to/my/xpack.zip`)
    .action(processCommand);
};
40 src/cli_plugin/install/install.js Normal file
@@ -0,0 +1,40 @@
import { download } from './download';
import Promise from 'bluebird';
import { cleanPrevious, cleanArtifacts } from './cleanup';
import { extract, getPackData } from './pack';
import { sync as rimrafSync } from 'rimraf';
import { renameSync } from 'fs';
import { existingInstall, rebuildCache, assertVersion } from './kibana';
import mkdirp from 'mkdirp';

const mkdir = Promise.promisify(mkdirp);

export default async function install(settings, logger) {
  try {
    await cleanPrevious(settings, logger);

    await mkdir(settings.workingPath);

    await download(settings, logger);

    await getPackData(settings, logger);

    await extract(settings, logger);

    rimrafSync(settings.tempArchiveFile);

    existingInstall(settings, logger);

    assertVersion(settings);

    renameSync(settings.workingPath, settings.plugins[0].path);

    await rebuildCache(settings, logger);

    logger.log('Plugin installation complete');
  } catch (err) {
    logger.error(`Plugin installation was unsuccessful due to error "${err.message}"`);
    cleanArtifacts(settings);
    process.exit(70); // eslint-disable-line no-process-exit
  }
}
59 src/cli_plugin/install/kibana.js Normal file
@@ -0,0 +1,59 @@
import _ from 'lodash';
import { fromRoot } from '../../utils';
import KbnServer from '../../server/kbn_server';
import readYamlConfig from '../../cli/serve/read_yaml_config';
import { versionSatisfies, cleanVersion } from './version';
import { statSync } from 'fs';

export function existingInstall(settings, logger) {
  try {
    statSync(settings.plugins[0].path);

    logger.error(`Plugin ${settings.plugins[0].name} already exists, please remove before installing a new version`);
    process.exit(70); // eslint-disable-line no-process-exit
  } catch (e) {
    if (e.code !== 'ENOENT') throw e;
  }
}

export async function rebuildCache(settings, logger) {
  logger.log('Optimizing and caching browser bundles...');
  const serverConfig = _.merge(
    readYamlConfig(settings.config),
    {
      env: 'production',
      logging: {
        silent: settings.silent,
        quiet: !settings.silent,
        verbose: false
      },
      optimize: {
        useBundleCache: false
      },
      server: {
        autoListen: false
      },
      plugins: {
        initialize: false,
        scanDirs: [settings.pluginDir, fromRoot('src/plugins')]
      }
    }
  );

  const kbnServer = new KbnServer(serverConfig);
  await kbnServer.ready();
  await kbnServer.close();
}

export function assertVersion(settings) {
  if (!settings.plugins[0].version) {
    throw new Error(`Plugin version not found. Check package.json in archive`);
  }

  const actual = cleanVersion(settings.plugins[0].version);
  const expected = cleanVersion(settings.version);
  if (!versionSatisfies(actual, expected)) {
    throw new Error(`Incorrect version in plugin [${settings.plugins[0].name}]. ` +
      `Expected [${expected}]; found [${actual}]`);
  }
}
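For a sense of what `assertVersion` accepts, a hedged sketch; the settings literal is hypothetical, and both versions are reduced to plain x.y.z by `cleanVersion` before comparison:

```js
import { assertVersion } from './kibana';

const settings = {
  version: '5.0.0-snapshot',                           // Kibana's own version
  plugins: [{ name: 'test-plugin', version: '5.0.0' }] // from the archive's package.json
};

assertVersion(settings); // passes: both sides clean to '5.0.0'

settings.plugins[0].version = '4.5.2';
// assertVersion(settings) now throws:
// Incorrect version in plugin [test-plugin]. Expected [5.0.0]; found [4.5.2]
```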
136 src/cli_plugin/install/pack.js Normal file
@@ -0,0 +1,136 @@
import _ from 'lodash';
import { listFiles, extractFiles } from './zip';
import { resolve } from 'path';
import { sync as rimrafSync } from 'rimraf';
import validate from 'validate-npm-package-name';

/**
 * Returns an array of package objects. There will be one for each of
 * the package.json files in the archive
 * @param {object} settings - a plugin installer settings object
 */
async function listPackages(settings) {
  const regExp = new RegExp('(kibana/([^/]+))/package.json', 'i');
  const archiveFiles = await listFiles(settings.tempArchiveFile);

  return _(archiveFiles)
    .map(file => file.replace(/\\/g, '/'))
    .map(file => file.match(regExp))
    .compact()
    .map(([ file, _, folder ]) => ({ file, folder }))
    .uniq()
    .value();
}

/**
 * Extracts the package.json files into the workingPath
 * @param {object} settings - a plugin installer settings object
 * @param {array} packages - array of package objects from listPackages()
 */
async function extractPackageFiles(settings, packages) {
  const filter = {
    files: packages.map((pkg) => pkg.file)
  };
  await extractFiles(settings.tempArchiveFile, settings.workingPath, 0, filter);
}

/**
 * Deletes the package.json files created by extractPackageFiles()
 * @param {object} settings - a plugin installer settings object
 */
function deletePackageFiles(settings) {
  const fullPath = resolve(settings.workingPath, 'kibana');
  rimrafSync(fullPath);
}

/**
 * Checks the plugin name. Will throw an exception if it does not meet
 * npm package naming conventions
 * @param {object} plugin - a package object from listPackages()
 */
function assertValidPackageName(plugin) {
  const validation = validate(plugin.name);
  if (!validation.validForNewPackages) {
    throw new Error(`Invalid plugin name [${plugin.name}] in package.json`);
  }
}

/**
 * Examine each package.json file to determine the plugin name,
 * version, and platform. Mutates the package objects in the packages array
 * @param {object} settings - a plugin installer settings object
 * @param {array} packages - array of package objects from listPackages()
 */
async function mergePackageData(settings, packages) {
  return packages.map((pkg) => {
    const fullPath = resolve(settings.workingPath, pkg.file);
    const packageInfo = require(fullPath);

    pkg.version = _.get(packageInfo, 'version');
    pkg.name = _.get(packageInfo, 'name');
    pkg.path = resolve(settings.pluginDir, pkg.name);

    const regExp = new RegExp(`${pkg.name}-(.+)`, 'i');
    const matches = pkg.folder.match(regExp);
    pkg.platform = (matches) ? matches[1] : undefined;

    return pkg;
  });
}

/**
 * Extracts the first plugin in the archive.
 * NOTE: This will need to be changed in later versions of the pack installer
 * that allow for the installation of more than one plugin at once.
 * @param {object} settings - a plugin installer settings object
 */
async function extractArchive(settings) {
  const filter = {
    paths: [ settings.plugins[0].folder ]
  };
  await extractFiles(settings.tempArchiveFile, settings.workingPath, 2, filter);
}

/**
 * Returns the detailed information about each kibana plugin in the pack.
 * TODO: If there are platform specific folders, determine which one to use.
 * @param {object} settings - a plugin installer settings object
 * @param {object} logger - a plugin installer logger object
 */
export async function getPackData(settings, logger) {
  let packages;
  try {
    logger.log('Retrieving metadata from plugin archive');

    packages = await listPackages(settings);

    await extractPackageFiles(settings, packages);
    await mergePackageData(settings, packages);
    await deletePackageFiles(settings);
  } catch (err) {
    logger.error(err);
    throw new Error('Error retrieving metadata from plugin archive');
  }

  if (packages.length === 0) {
    throw new Error('No kibana plugins found in archive');
  }
  packages.forEach(assertValidPackageName);

  settings.plugins = packages;
}

export async function extract(settings, logger) {
  try {
    logger.log('Extracting plugin archive');

    await extractArchive(settings);

    logger.log('Extraction complete');
  } catch (err) {
    logger.error(err);
    throw new Error('Error extracting plugin archive');
  }
};
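The `kibana/<folder>/package.json` convention carries all of the metadata discovery above. A quick standalone check of the same regular expression `listPackages` uses (the input string is illustrative):

```js
const regExp = new RegExp('(kibana/([^/]+))/package.json', 'i');

const match = 'kibana/test-plugin/package.json'.match(regExp);
// match[0] -> 'kibana/test-plugin/package.json'  (kept as pkg.file)
// match[2] -> 'test-plugin'                      (kept as pkg.folder)
console.log({ file: match[0], folder: match[2] });
```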
38 src/cli_plugin/install/progress.js Normal file
@@ -0,0 +1,38 @@
/**
 * Generates file transfer progress messages
 */
export default class Progress {

  constructor(logger) {
    const self = this;

    self.dotCount = 0;
    self.runningTotal = 0;
    self.totalSize = 0;
    self.logger = logger;
  }

  init(size) {
    this.totalSize = size;
    const totalDesc = this.totalSize || 'unknown number of';

    this.logger.log(`Transferring ${totalDesc} bytes`, true);
  }

  progress(size) {
    if (!this.totalSize) return;

    this.runningTotal += size;
    let newDotCount = Math.round(this.runningTotal / this.totalSize * 100 / 5);
    if (newDotCount > 20) newDotCount = 20;
    for (let i = 0; i < (newDotCount - this.dotCount); i++) {
      this.logger.log('.', true);
    }
    this.dotCount = newDotCount;
  }

  complete() {
    this.logger.log(`Transfer complete`, false);
  }

}
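A minimal sketch of driving `Progress` by hand, assuming a stub logger with the `log(data, sameLine)` shape it expects:

```js
import Progress from './progress';

// Stub logger: writes without a newline when sameLine is true.
const logger = {
  log(data, sameLine) {
    process.stdout.write(String(data) + (sameLine ? '' : '\n'));
  }
};

const progress = new Progress(logger);
progress.init(1000);    // "Transferring 1000 bytes"
progress.progress(250); // 25% of the 20-dot scale -> prints 5 dots
progress.progress(750); // prints the remaining 15 dots
progress.complete();    // "Transfer complete"
```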
47 src/cli_plugin/install/settings.js Normal file
@@ -0,0 +1,47 @@
import expiry from 'expiry-js';
import { intersection } from 'lodash';
import { resolve } from 'path';
import { arch, platform } from 'os';

function generateUrls({ version, plugin }) {
  return [
    plugin,
    `https://download.elastic.co/packs/${plugin}/${plugin}-${version}.zip`
  ];
}

export function parseMilliseconds(val) {
  let result;

  try {
    const timeVal = expiry(val);
    result = timeVal.asMilliseconds();
  } catch (ex) {
    result = 0;
  }

  return result;
};

export function parse(command, options, kbnPackage) {
  const settings = {
    timeout: options.timeout || 0,
    quiet: options.quiet || false,
    silent: options.silent || false,
    config: options.config || '',
    plugin: command,
    version: kbnPackage.version,
    pluginDir: options.pluginDir || ''
  };

  settings.urls = generateUrls(settings);
  settings.workingPath = resolve(settings.pluginDir, '.plugin.installing');
  settings.tempArchiveFile = resolve(settings.workingPath, 'archive.part');
  settings.tempPackageFile = resolve(settings.workingPath, 'package.json');
  settings.setPlugin = function (plugin) {
    settings.plugin = plugin;
    settings.pluginPath = resolve(settings.pluginDir, settings.plugin.name);
  };

  return settings;
};
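A hedged sketch of what `parse` derives from its inputs; the paths and version here are hypothetical, and the output assumes a POSIX filesystem:

```js
import { parse } from './settings';

const settings = parse(
  'xpack',                                        // the <plugin/url> command value
  { pluginDir: '/opt/kibana/installedPlugins' },  // commander options
  { version: '5.0.0-snapshot' }                   // normally Kibana's package.json
);

settings.urls;
// ['xpack',
//  'https://download.elastic.co/packs/xpack/xpack-5.0.0-snapshot.zip']
settings.workingPath;     // '/opt/kibana/installedPlugins/.plugin.installing'
settings.tempArchiveFile; // '/opt/kibana/installedPlugins/.plugin.installing/archive.part'
```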
15 src/cli_plugin/install/version.js Normal file
@@ -0,0 +1,15 @@
import semver from 'semver';

export function versionSatisfies(cleanActual, cleanExpected) {
  try {
    return (cleanActual === cleanExpected);
  } catch (err) {
    return false;
  }
}

export function cleanVersion(version) {
  const match = version.match(/\d+\.\d+\.\d+/);
  if (!match) return version;
  return match[0];
}
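Concretely:

```js
import { cleanVersion, versionSatisfies } from './version';

cleanVersion('5.0.0-snapshot');      // '5.0.0' -- the first x.y.z found wins
cleanVersion('main');                // 'main'  -- returned untouched when nothing matches
versionSatisfies('5.0.0', '5.0.0');  // true    -- strict equality after cleaning
versionSatisfies('5.0.0', '5.0.1');  // false
```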
94 src/cli_plugin/install/zip.js Normal file
@@ -0,0 +1,94 @@
import _ from 'lodash';
import DecompressZip from '@bigfunger/decompress-zip';

const SYMBOLIC_LINK = 'SymbolicLink';

/**
 * Creates a filter function to be consumed by extractFiles that filters by
 * an array of files
 * @param {array} files - an array of full file paths to extract. Should match
 * exactly a value from listFiles
 */
function extractFilterFromFiles(files) {
  const filterFiles = files.map((file) => file.replace(/\\/g, '/'));
  return function filterByFiles(file) {
    if (file.type === SYMBOLIC_LINK) return false;

    const path = file.path.replace(/\\/g, '/');
    return _.includes(filterFiles, path);
  };
}

/**
 * Creates a filter function to be consumed by extractFiles that filters by
 * an array of root paths
 * @param {array} paths - an array of root paths from the archive. All files and
 * folders will be extracted recursively using these paths as roots.
 */
function extractFilterFromPaths(paths) {
  return function filterByRootPath(file) {
    if (file.type === SYMBOLIC_LINK) return false;

    return paths.some(path => {
      const regex = new RegExp(`${path}($|/)`, 'i');
      return file.parent.match(regex);
    });
  };
}

/**
 * Creates a filter function to be consumed by extractFiles
 * @param {object} filter - an object with either a files or paths property.
 */
function extractFilter(filter) {
  if (filter.files) return extractFilterFromFiles(filter.files);
  if (filter.paths) return extractFilterFromPaths(filter.paths);
  return _.noop;
}

/**
 * Extracts files from a zip archive to a file path using a filter function
 * @param {string} zipPath - file path to a zip archive
 * @param {string} targetPath - directory path to where the files should
 * be extracted
 * @param {integer} strip - number of nested directories within the archive
 * that should be ignored when determining the target path of an archived
 * file.
 * @param {function} filter - a function that accepts a single parameter 'file'
 * and returns true if the file should be extracted from the archive
 */
export async function extractFiles(zipPath, targetPath, strip, filter) {
  await new Promise((resolve, reject) => {
    const unzipper = new DecompressZip(zipPath);

    unzipper.on('error', reject);

    unzipper.extract({
      path: targetPath,
      strip: strip,
      filter: extractFilter(filter)
    });

    unzipper.on('extract', resolve);
  });
}

/**
 * Returns all files within an archive
 * @param {string} zipPath - file path to a zip archive
 * @returns {array} all files within an archive with their relative paths
 */
export async function listFiles(zipPath) {
  return await new Promise((resolve, reject) => {
    const unzipper = new DecompressZip(zipPath);

    unzipper.on('error', reject);

    unzipper.on('list', (files) => {
      files = files.map((file) => file.replace(/\\/g, '/'));
      resolve(files);
    });

    unzipper.list();
  });
}
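A hedged usage sketch of the two exports; the archive path is hypothetical, and the filter/strip values mirror the tests above:

```js
import { listFiles, extractFiles } from './zip';

const archive = '/tmp/my-plugin.zip'; // hypothetical path

listFiles(archive).then((files) => {
  // e.g. ['kibana/', 'kibana/my-plugin/', 'kibana/my-plugin/index.js', ...]
  console.log(files);
});

// Extract only the plugin's folder; strip=2 drops the two leading path
// segments ('kibana/my-plugin') so files land directly under the target.
extractFiles(archive, '/tmp/extracted', 2, { paths: ['kibana/my-plugin'] });
```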
src/cli_plugin/lib/__tests__/logger.js
@@ -1,6 +1,6 @@
 import expect from 'expect.js';
 import sinon from 'sinon';
-import pluginLogger from '../plugin_logger';
+import Logger from '../logger';

 describe('kibana cli', function () {

@@ -20,7 +20,7 @@ describe('kibana cli', function () {
     });

     it('should log messages to the console and append a new line', function () {
-      logger = pluginLogger({ silent: false, quiet: false });
+      logger = new Logger({ silent: false, quiet: false });
       const message = 'this is my message';

       logger.log(message);
@@ -31,7 +31,7 @@ describe('kibana cli', function () {
     });

     it('should log messages to the console and not append a new line', function () {
-      logger = pluginLogger({ silent: false, quiet: false });
+      logger = new Logger({ silent: false, quiet: false });
       for (let i = 0; i < 10; i++) {
         logger.log('.', true);
       }
@@ -54,7 +54,7 @@ describe('kibana cli', function () {
     });

     it('should not log any messages when quiet is set', function () {
-      logger = pluginLogger({ silent: false, quiet: true });
+      logger = new Logger({ silent: false, quiet: true });

       const message = 'this is my message';
       logger.log(message);
@@ -68,7 +68,7 @@ describe('kibana cli', function () {
     });

     it('should not log any messages when silent is set', function () {
-      logger = pluginLogger({ silent: true, quiet: false });
+      logger = new Logger({ silent: true, quiet: false });

       const message = 'this is my message';
       logger.log(message);
@@ -94,7 +94,7 @@ describe('kibana cli', function () {
     });

     it('should log error messages to the console and append a new line', function () {
-      logger = pluginLogger({ silent: false, quiet: false });
+      logger = new Logger({ silent: false, quiet: false });
       const message = 'this is my error';

       logger.error(message);
@@ -102,7 +102,7 @@ describe('kibana cli', function () {
     });

     it('should log error messages to the console when quiet is set', function () {
-      logger = pluginLogger({ silent: false, quiet: true });
+      logger = new Logger({ silent: false, quiet: true });
       const message = 'this is my error';

       logger.error(message);
@@ -110,7 +110,7 @@ describe('kibana cli', function () {
     });

     it('should not log any error messages when silent is set', function () {
-      logger = pluginLogger({ silent: true, quiet: false });
+      logger = new Logger({ silent: true, quiet: false });
       const message = 'this is my error';

       logger.error(message);
46 src/cli_plugin/lib/logger.js Normal file
@@ -0,0 +1,46 @@
/**
 * Logs messages and errors
 */
export default class Logger {

  constructor(settings) {
    this.previousLineEnded = true;
    this.silent = !!settings.silent;
    this.quiet = !!settings.quiet;
  }

  log(data, sameLine) {
    if (this.silent || this.quiet) return;

    if (!sameLine && !this.previousLineEnded) {
      process.stdout.write('\n');
    }

    //if data is a stream, pipe it.
    if (data.readable) {
      data.pipe(process.stdout);
      return;
    }

    process.stdout.write(data);
    if (!sameLine) process.stdout.write('\n');
    this.previousLineEnded = !sameLine;
  }

  error(data) {
    if (this.silent) return;

    if (!this.previousLineEnded) {
      process.stderr.write('\n');
    }

    //if data is a stream, pipe it.
    if (data.readable) {
      data.pipe(process.stderr);
      return;
    }
    process.stderr.write(`${data}\n`);
    this.previousLineEnded = true;
  }

}
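A short usage sketch showing the same-line bookkeeping the progress dots rely on:

```js
import Logger from './logger';

const logger = new Logger({ quiet: false, silent: false });

logger.log('starting transfer');
logger.log('.', true);  // sameLine: dots accumulate on one line
logger.log('.', true);
logger.log('done');     // a newline is emitted first to close the dotted line
logger.error('goes to stderr; suppressed only when silent is set');
```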
75 src/cli_plugin/list/__tests__/list.js Normal file
@@ -0,0 +1,75 @@
import expect from 'expect.js';
import sinon from 'sinon';
import rimraf from 'rimraf';
import mkdirp from 'mkdirp';
import Logger from '../../lib/logger';
import list from '../list';
import { join } from 'path';
import { writeFileSync } from 'fs';

describe('kibana cli', function () {

  describe('plugin lister', function () {

    const pluginDir = join(__dirname, '.test.data');
    let logger;

    const settings = {
      pluginDir: pluginDir
    };

    beforeEach(function () {
      logger = new Logger(settings);
      sinon.stub(logger, 'log');
      sinon.stub(logger, 'error');
      rimraf.sync(pluginDir);
      mkdirp.sync(pluginDir);
    });

    afterEach(function () {
      logger.log.restore();
      logger.error.restore();
      rimraf.sync(pluginDir);
    });

    it('list all of the folders in the plugin folder', function () {
      mkdirp.sync(join(pluginDir, 'plugin1'));
      mkdirp.sync(join(pluginDir, 'plugin2'));
      mkdirp.sync(join(pluginDir, 'plugin3'));

      list(settings, logger);

      expect(logger.log.calledWith('plugin1')).to.be(true);
      expect(logger.log.calledWith('plugin2')).to.be(true);
      expect(logger.log.calledWith('plugin3')).to.be(true);
    });

    it('ignore folders that start with a period', function () {
      mkdirp.sync(join(pluginDir, '.foo'));
      mkdirp.sync(join(pluginDir, 'plugin1'));
      mkdirp.sync(join(pluginDir, 'plugin2'));
      mkdirp.sync(join(pluginDir, 'plugin3'));
      mkdirp.sync(join(pluginDir, '.bar'));

      list(settings, logger);

      expect(logger.log.calledWith('.foo')).to.be(false);
      expect(logger.log.calledWith('.bar')).to.be(false);
    });

    it('list should only list folders', function () {
      mkdirp.sync(join(pluginDir, 'plugin1'));
      mkdirp.sync(join(pluginDir, 'plugin2'));
      mkdirp.sync(join(pluginDir, 'plugin3'));
      writeFileSync(join(pluginDir, 'plugin4'), 'This is a file, and not a folder.');

      list(settings, logger);

      expect(logger.log.calledWith('plugin1')).to.be(true);
      expect(logger.log.calledWith('plugin2')).to.be(true);
      expect(logger.log.calledWith('plugin3')).to.be(true);
    });

  });

});
44 src/cli_plugin/list/__tests__/settings.js Normal file
@@ -0,0 +1,44 @@
import path from 'path';
import expect from 'expect.js';
import fromRoot from '../../../utils/from_root';
import { resolve } from 'path';
import { parseMilliseconds, parse } from '../settings';

describe('kibana cli', function () {

  describe('plugin installer', function () {

    describe('command line option parsing', function () {

      describe('parse function', function () {

        let command;
        const options = {};
        beforeEach(function () {
          command = { pluginDir: fromRoot('installedPlugins') };
        });

        describe('pluginDir option', function () {

          it('should default to installedPlugins', function () {
            const settings = parse(command, options);

            expect(settings.pluginDir).to.be(fromRoot('installedPlugins'));
          });

          it('should set settings.pluginDir property', function () {
            command.pluginDir = 'foo bar baz';
            const settings = parse(command, options);

            expect(settings.pluginDir).to.be('foo bar baz');
          });

        });

      });

    });

  });

});
30 src/cli_plugin/list/index.js Normal file
@@ -0,0 +1,30 @@
import { fromRoot } from '../../utils';
import list from './list';
import Logger from '../lib/logger';
import { parse } from './settings';

function processCommand(command, options) {
  let settings;
  try {
    settings = parse(command, options);
  } catch (ex) {
    //The logger has not yet been initialized.
    console.error(ex.message);
    process.exit(64); // eslint-disable-line no-process-exit
  }

  const logger = new Logger(settings);
  list(settings, logger);
}

export default function pluginList(program) {
  program
    .command('list')
    .option(
      '-d, --plugin-dir <path>',
      'path to the directory where plugins are stored',
      fromRoot('installedPlugins')
    )
    .description('list installed plugins')
    .action(processCommand);
};
14 src/cli_plugin/list/list.js Normal file
@@ -0,0 +1,14 @@
import { statSync, readdirSync } from 'fs';
import { join } from 'path';

export default function list(settings, logger) {
  readdirSync(settings.pluginDir)
  .forEach((filename) => {
    const stat = statSync(join(settings.pluginDir, filename));

    if (stat.isDirectory() && filename[0] !== '.') {
      logger.log(filename);
    }
  });
  logger.log(''); //intentional blank line for aesthetics
}
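A quick illustrative call; the directory is hypothetical:

```js
import Logger from '../lib/logger';
import list from './list';

// Prints one line per plugin folder, skipping dot-folders and plain
// files, then logs a trailing blank line.
const settings = { pluginDir: '/opt/kibana/installedPlugins' };
list(settings, new Logger(settings));
```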
9 src/cli_plugin/list/settings.js Normal file
@@ -0,0 +1,9 @@
import { resolve } from 'path';

export function parse(command, options) {
  const settings = {
    pluginDir: command.pluginDir || ''
  };

  return settings;
};
68 src/cli_plugin/remove/__tests__/remove.js Normal file
@@ -0,0 +1,68 @@
import expect from 'expect.js';
import sinon from 'sinon';
import glob from 'glob-all';
import rimraf from 'rimraf';
import mkdirp from 'mkdirp';
import Logger from '../../lib/logger';
import remove from '../remove';
import { join } from 'path';
import { writeFileSync } from 'fs';

describe('kibana cli', function () {

  describe('plugin remover', function () {

    const pluginDir = join(__dirname, '.test.data');
    let processExitStub;
    let logger;

    const settings = { pluginDir };

    beforeEach(function () {
      processExitStub = sinon.stub(process, 'exit');
      logger = new Logger(settings);
      sinon.stub(logger, 'log');
      sinon.stub(logger, 'error');
      rimraf.sync(pluginDir);
      mkdirp.sync(pluginDir);
    });

    afterEach(function () {
      processExitStub.restore();
      logger.log.restore();
      logger.error.restore();
      rimraf.sync(pluginDir);
    });

    it('throw an error if the plugin is not installed.', function () {
      settings.pluginPath = join(pluginDir, 'foo');
      settings.plugin = 'foo';

      remove(settings, logger);
      expect(logger.error.firstCall.args[0]).to.match(/not installed/);
      expect(process.exit.called).to.be(true);
    });

    it('throw an error if the specified plugin is not a folder.', function () {
      writeFileSync(join(pluginDir, 'foo'), 'This is a file, and not a folder.');

      remove(settings, logger);
      expect(logger.error.firstCall.args[0]).to.match(/not a plugin/);
      expect(process.exit.called).to.be(true);
    });

    it('delete the specified folder.', function () {
      settings.pluginPath = join(pluginDir, 'foo');
      mkdirp.sync(join(pluginDir, 'foo'));
      mkdirp.sync(join(pluginDir, 'bar'));

      remove(settings, logger);

      const files = glob.sync('**/*', { cwd: pluginDir });
      const expected = ['bar'];
      expect(files.sort()).to.eql(expected.sort());
    });

  });

});
106 src/cli_plugin/remove/__tests__/settings.js Normal file
@@ -0,0 +1,106 @@
import path from 'path';
import expect from 'expect.js';
import fromRoot from '../../../utils/from_root';
import { resolve } from 'path';
import { parseMilliseconds, parse } from '../settings';

describe('kibana cli', function () {

  describe('plugin installer', function () {

    describe('command line option parsing', function () {

      describe('parse function', function () {

        const command = 'plugin name';
        let options = {};
        const kbnPackage = { version: 1234 };
        beforeEach(function () {
          options = { pluginDir: fromRoot('installedPlugins') };
        });

        describe('quiet option', function () {

          it('should default to false', function () {
            const settings = parse(command, options, kbnPackage);

            expect(settings.quiet).to.be(false);
          });

          it('should set settings.quiet property to true', function () {
            options.quiet = true;
            const settings = parse(command, options, kbnPackage);

            expect(settings.quiet).to.be(true);
          });

        });

        describe('silent option', function () {

          it('should default to false', function () {
            const settings = parse(command, options, kbnPackage);

            expect(settings.silent).to.be(false);
          });

          it('should set settings.silent property to true', function () {
            options.silent = true;
            const settings = parse(command, options, kbnPackage);

            expect(settings.silent).to.be(true);
          });

        });

        describe('config option', function () {

          it('should default to ZLS', function () {
            const settings = parse(command, options, kbnPackage);

            expect(settings.config).to.be('');
          });

          it('should set settings.config property', function () {
            options.config = 'foo bar baz';
            const settings = parse(command, options, kbnPackage);

            expect(settings.config).to.be('foo bar baz');
          });

        });

        describe('pluginDir option', function () {

          it('should default to installedPlugins', function () {
            const settings = parse(command, options, kbnPackage);

            expect(settings.pluginDir).to.be(fromRoot('installedPlugins'));
          });

          it('should set settings.pluginDir property', function () {
            options.pluginDir = 'foo bar baz';
            const settings = parse(command, options, kbnPackage);

            expect(settings.pluginDir).to.be('foo bar baz');
          });

        });

        describe('command value', function () {

          it('should set settings.plugin property', function () {
            const settings = parse(command, options, kbnPackage);

            expect(settings.plugin).to.be(command);
          });

        });

      });

    });

  });

});
39 src/cli_plugin/remove/index.js Normal file
@@ -0,0 +1,39 @@
import { fromRoot } from '../../utils';
import remove from './remove';
import Logger from '../lib/logger';
import { parse } from './settings';

function processCommand(command, options) {
  let settings;
  try {
    settings = parse(command, options);
  } catch (ex) {
    //The logger has not yet been initialized.
    console.error(ex.message);
    process.exit(64); // eslint-disable-line no-process-exit
  }

  const logger = new Logger(settings);
  remove(settings, logger);
}

export default function pluginRemove(program) {
  program
    .command('remove <plugin>')
    .option('-q, --quiet', 'disable all process messaging except errors')
    .option('-s, --silent', 'disable all process messaging')
    .option(
      '-c, --config <path>',
      'path to the config file',
      fromRoot('config/kibana.yml')
    )
    .option(
      '-d, --plugin-dir <path>',
      'path to the directory where plugins are stored',
      fromRoot('installedPlugins')
    )
    .description('remove a plugin',
    `common examples:
      remove xpack`)
    .action(processCommand);
};
23 src/cli_plugin/remove/remove.js Normal file
@@ -0,0 +1,23 @@
import { statSync } from 'fs';
import rimraf from 'rimraf';

export default function remove(settings, logger) {
  try {
    let stat;
    try {
      stat = statSync(settings.pluginPath);
    } catch (e) {
      throw new Error(`Plugin [${settings.plugin}] is not installed`);
    }

    if (!stat.isDirectory()) {
      throw new Error(`[${settings.plugin}] is not a plugin`);
    }

    logger.log(`Removing ${settings.plugin}...`);
    rimraf.sync(settings.pluginPath);
  } catch (err) {
    logger.error(`Unable to remove plugin because of error: "${err.message}"`);
    process.exit(74); // eslint-disable-line no-process-exit
  }
}
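A hedged end-to-end sketch combining the remover with its settings parser; the plugin name and directory are hypothetical:

```js
import Logger from '../lib/logger';
import remove from './remove';
import { parse } from './settings';

// parse() derives pluginPath from pluginDir + plugin name.
const settings = parse('test-plugin', { pluginDir: '/opt/kibana/installedPlugins' });

remove(settings, new Logger(settings));
// "Removing test-plugin..." then deletes the folder; a missing or
// non-directory path is reported and the process exits with code 74.
```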
15 src/cli_plugin/remove/settings.js Normal file
@@ -0,0 +1,15 @@
import { resolve } from 'path';

export function parse(command, options) {
  const settings = {
    quiet: options.quiet || false,
    silent: options.silent || false,
    config: options.config || '',
    pluginDir: options.pluginDir || '',
    plugin: command
  };

  settings.pluginPath = resolve(settings.pluginDir, settings.plugin);

  return settings;
};
@@ -1,4 +1,4 @@
-module.exports = {
+export default {
   "took": 35,
   "timed_out": false,
   "_shards": {
src/fixtures/logstash_fields.js
@@ -16,7 +16,7 @@ function stubbedLogstashFields() {
   { name: 'extension', type: 'string', indexed: true, analyzed: true, sortable: true, filterable: true },
   { name: 'machine.os', type: 'string', indexed: true, analyzed: true, sortable: true, filterable: true },
   { name: 'geo.src', type: 'string', indexed: true, analyzed: true, sortable: true, filterable: true },
-  { name: '_type', type: 'string', indexed: true, analyzed: true, sortable: true, filterable: true },
+  { name: '_type', type: 'string', indexed: false, analyzed: true, sortable: true, filterable: true },
   { name: '_id', type: 'string', indexed: false, analyzed: false, sortable: false, filterable: true},
   { name: '_source', type: 'string', indexed: false, analyzed: false, sortable: false, filterable: false},
   { name: 'custom_user_field', type: 'conflict', indexed: false, analyzed: false, sortable: false, filterable: true },
@@ -1,5 +1,5 @@
 import _ from 'lodash';
-import TestUtilsStubIndexPatternProvider from 'testUtils/stub_index_pattern';
+import TestUtilsStubIndexPatternProvider from 'test_utils/stub_index_pattern';
 import IndexPatternsFieldTypesProvider from 'ui/index_patterns/_field_types';
 import FixturesLogstashFieldsProvider from 'fixtures/logstash_fields';
 export default function stubbedLogstashIndexPatternService(Private) {
src/optimize/base_optimizer.js
@@ -3,9 +3,11 @@ import Boom from 'boom';
 import DirectoryNameAsMain from 'webpack-directory-name-as-main';
 import ExtractTextPlugin from 'extract-text-webpack-plugin';
 import CommonsChunkPlugin from 'webpack/lib/optimize/CommonsChunkPlugin';
+import DefinePlugin from 'webpack/lib/DefinePlugin';
+import UglifyJsPlugin from 'webpack/lib/optimize/UglifyJsPlugin';

-import fromRoot from '../utils/fromRoot';
-import babelOptions from './babelOptions';
+import fromRoot from '../utils/from_root';
+import babelOptions from './babel_options';
 import { inherits } from 'util';
 import { defaults, transform } from 'lodash';
 import { resolve } from 'path';
@@ -63,6 +65,7 @@ class BaseOptimizer {

   getConfig() {
     let mapQ = this.sourceMaps ? '?sourceMap' : '';
+    let mapQPre = mapQ ? mapQ + '&' : '?';

     return {
       context: fromRoot('.'),
@@ -93,6 +96,7 @@ class BaseOptimizer {
         name: 'commons',
         filename: 'commons.bundle.js'
       }),
+      ...this.pluginsForEnv(this.env.context.env)
     ],

     module: {
@@ -101,7 +105,7 @@ class BaseOptimizer {
           test: /\.less$/,
           loader: ExtractTextPlugin.extract(
             'style',
-            `css${mapQ}!autoprefixer${mapQ ? mapQ + '&' : '?'}{ "browsers": ["last 2 versions","> 5%"] }!less${mapQ}`
+            `css${mapQ}!autoprefixer${mapQPre}{ "browsers": ["last 2 versions","> 5%"] }!less${mapQPre}dumpLineNumbers=comments`
           )
         },
         { test: /\.css$/, loader: ExtractTextPlugin.extract('style', `css${mapQ}`) },
@@ -150,6 +154,27 @@ class BaseOptimizer {
     };
   }

+  pluginsForEnv(env) {
+    if (env !== 'production') {
+      return [];
+    }
+
+    return [
+      new DefinePlugin({
+        'process.env': {
+          'NODE_ENV': '"production"'
+        }
+      }),
+      new UglifyJsPlugin({
+        compress: {
+          warnings: false
+        },
+        sourceMap: false,
+        mangle: false
+      }),
+    ];
+  }
+
   failedStatsToError(stats) {
     let statFormatOpts = {
       hash: false, // add the hash of the compilation
src/optimize/fs_optimizer.js
@@ -1,6 +1,6 @@

-import BaseOptimizer from './BaseOptimizer';
-import fromRoot from '../utils/fromRoot';
+import BaseOptimizer from './base_optimizer';
+import fromRoot from '../utils/from_root';
 import { fromNode } from 'bluebird';
 import { writeFile } from 'fs';
src/optimize/index.js
@@ -1,4 +1,4 @@
-import FsOptimizer from './FsOptimizer';
+import FsOptimizer from './fs_optimizer';
 module.exports = async (kbnServer, server, config) => {
   if (!config.get('optimize.enabled')) return;

@@ -21,11 +21,11 @@ module.exports = async (kbnServer, server, config) => {
   */
   switch (process.env.kbnWorkerType) {
     case 'optmzr':
-      await kbnServer.mixin(require('./optmzrRole'));
+      await kbnServer.mixin(require('./optmzr_role'));
       break;

     case 'server':
-      await kbnServer.mixin(require('./proxyRole'));
+      await kbnServer.mixin(require('./proxy_role'));
       break;

     default:
@@ -1,7 +1,7 @@
 import Boom from 'boom';

-import BaseOptimizer from '../BaseOptimizer';
-import WeirdControlFlow from './WeirdControlFlow';
+import BaseOptimizer from '../base_optimizer';
+import WeirdControlFlow from './weird_control_flow';
 import { once, pick, size } from 'lodash';
 import { join } from 'path';
Some files were not shown because too many files have changed in this diff.