Merge remote-tracking branch 'origin/master' into angularUpgrade

This commit is contained in:
Khalah Jones-Golden 2015-09-08 13:16:31 -04:00
commit 92104ee751
264 changed files with 4030 additions and 2940 deletions

7
.gitignore vendored
View file

@ -5,10 +5,15 @@ node_modules
trash
bundles
target
/build
.jruby
.idea
*.iml
*.log
esvm
/esvm
.htpasswd
installedPlugins
disabledPlugins
webpackstats.json
config/kibana.dev.yml
coverage

View file

@ -1 +1 @@
iojs-v2.4
0.12.7

View file

@ -1,6 +1,8 @@
language: node_js
node_js: 'iojs-v2.4'
install: npm install
node_js: '0.12.7'
install:
- npm install -g npm@3.2
- npm install
script: ./node_modules/.bin/grunt travis
sudo: false
cache:

View file

@ -23,16 +23,22 @@ Please make sure you have signed the [Contributor License Agreement](http://www.
nvm install "$(cat .node-version)"
```
- Install npm 3.2
```sh
npm install -g npm@3.2
```
- Install dependencies
```sh
npm install
```
- Start elasticsearch, you can use [esvm](https://github.com/simianhacker/esvm) to make that easier.
- Start elasticsearch
```sh
grunt esvm:dev:keepalive
npm run elasticsearch
```
- Start the development server.
@ -59,15 +65,35 @@ Here are some hints for getting eslint setup in your favorite editor:
To ensure that your changes will not break other functionality, please run the test suite and build process before submitting your pull request.
Before running the tests you will need to install the projects dependencies as described below.
Before running the tests you will need to install the project's dependencies as described above.
Once that is complete just run:
```sh
./node_modules/.bin/grunt test build
npm run test && npm run build
```
Distributable, built packages can be found in `target/` after the build completes.
Distributable packages can be found in `target/` after the build completes.
#### Debugging test failures
The standard `npm run test` task runs several sub tasks and can take several minutes to complete, making debugging failures pretty painful. In order to ease the pain specialized tasks provide alternate methods for running the tests.
<dl>
<dt><code>npm run test:quick</code></dt>
<dd>Runs both server and browser tests, but skips linting</dd>
<dt><code>npm run test:server</code> or <code>npm run test:browser</code></dt>
<dd>Runs the tests for just the server or browser</dd>
<dt><code>npm run test:dev</code></dt>
<dd>
Initializes an environment for debugging the browser tests. Includes a dedicated instance of the kibana server for building the test bundle, and a karma server. When running this task the build is optimized for the first time and then a karma-owned instance of the browser is opened. Click the "debug" button to open a new tab that executes the unit tests.
<br>
<img src="http://i.imgur.com/DwHxgfq.png">
</dd>
</dl>
### Submit a pull request

View file

@ -1,4 +1,4 @@
require('babel/register');
require('babel/register')(require('./src/optimize/babelOptions').node);
module.exports = function (grunt) {
// set the config once before calling load-grunt-config
@ -16,6 +16,10 @@ module.exports = function (grunt) {
configFile: __dirname + '/src/config/kibana.yml',
karmaBrowser: (function () {
if (grunt.option('browser')) {
return grunt.option('browser');
}
switch (require('os').platform()) {
case 'win32':
return 'IE';
@ -26,16 +30,7 @@ module.exports = function (grunt) {
}
}()),
nodeVersion: '0.10.35',
platforms: ['darwin-x64', 'linux-x64', 'linux-x86', 'windows'],
services: [
['launchd', '10.9'],
['upstart', '1.5'],
['systemd', 'default'],
['sysv', 'lsb-3.1']
],
devPlugins: 'devMode',
nodeVersion: grunt.file.read('.node-version').trim(),
meta: {
banner: '/*! <%= package.name %> - v<%= package.version %> - ' +
@ -44,6 +39,7 @@ module.exports = function (grunt) {
' * Copyright (c) <%= grunt.template.today("yyyy") %> <%= package.author.company %>;' +
' Licensed <%= package.license %> */\n'
},
lintThese: [
'Gruntfile.js',
'<%= root %>/tasks/**/*.js',
@ -54,6 +50,13 @@ module.exports = function (grunt) {
grunt.config.merge(config);
config.userScriptsDir = __dirname + '/build/userScripts';
// ensure that these run first, other configs need them
config.services = require('./tasks/config/services')(grunt);
config.platforms = require('./tasks/config/platforms')(grunt);
grunt.config.merge(config);
// load plugins
require('load-grunt-config')(grunt, {
configPath: __dirname + '/tasks/config',
@ -66,4 +69,5 @@ module.exports = function (grunt) {
// load task definitions
grunt.task.loadTasks('tasks');
grunt.task.loadTasks('tasks/build');
};

View file

@ -35,4 +35,11 @@ Visit [Elastic.co](http://www.elastic.co/guide/en/kibana/current/index.html) for
## Snapshot Builds
***Snapshots are currently disabled*** until [#4597](https://github.com/elastic/kibana/issues/4597) is complete, the snapshot builds can not be built. Master can be started for development or experimentation by running `./bin/kibana` from the root of the project.
For the daring, snapshot builds are available. These builds are created after each commit to the master branch, and therefore are not something you should run in production.
| platform | | |
| --- | --- | --- |
| OSX | [tar](http://download.elastic.co/kibana/kibana-snapshot/kibana-4.2.0-snapshot-darwin-x64.tar.gz) | [zip](http://download.elastic.co/kibana/kibana-snapshot/kibana-4.2.0-snapshot-darwin-x64.zip) |
| Linux x64 | [tar](http://download.elastic.co/kibana/kibana-snapshot/kibana-4.2.0-snapshot-linux-x64.tar.gz) | [zip](http://download.elastic.co/kibana/kibana-snapshot/kibana-4.2.0-snapshot-linux-x64.zip) |
| Linux x86 | [tar](http://download.elastic.co/kibana/kibana-snapshot/kibana-4.2.0-snapshot-linux-x86.tar.gz) | [zip](http://download.elastic.co/kibana/kibana-snapshot/kibana-4.2.0-snapshot-linux-x86.zip) |
| Windows | [tar](http://download.elastic.co/kibana/kibana-snapshot/kibana-4.2.0-snapshot-windows.tar.gz) | [zip](http://download.elastic.co/kibana/kibana-snapshot/kibana-4.2.0-snapshot-windows.zip) |

View file

@ -2,6 +2,7 @@ This is a collection of style guides for Kibana projects. The include guides for
- [JavaScript](#javascript-style-guide)
- [Kibana Project](#kibana-style-guide)
- [Html](#html-style-guide)
# JavaScript Style Guide
@ -855,6 +856,24 @@ require('ui/routes')
});
```
# Html Style Guide
### Multiple attribute values
When a node has multiple attributes that would cause it to exceed the line character limit, each attribute including the first should be on its own line with a single indent. Also, when a node that is styled in this way has child nodes, there should be a blank line between the opening parent tag and the first child tag.
```
<ul
attribute1="value1"
attribute2="value2"
attribute3="value3">
<li></li>
<li></li>
...
</ul>
```
# Attribution
This JavaScript guide forked from the [node style guide](https://github.com/felixge/node-style-guide) created by [Felix Geisendörfer](http://felixge.de/) and is

View file

@ -20,7 +20,7 @@
# If your Elasticsearch is protected with basic auth, this is the user credentials
# used by the Kibana server to perform maintenance on the kibana_index at startup. Your Kibana
# users will still need to authenticate with Elasticsearch (which is proxied thorugh
# users will still need to authenticate with Elasticsearch (which is proxied through
# the Kibana server)
# elasticsearch.username: user
# elasticsearch.password: pass
@ -53,11 +53,16 @@
# elasticsearch.startupTimeout: 5000
# SSL for outgoing requests from the Kibana Server (PEM formatted)
# server.ssl.cert: /path/to/your/server.key
# server.ssl.key: /path/to/your/server.crt
# server.ssl.cert: /path/to/your/server.crt
# server.ssl.key: /path/to/your/server.key
# Set the path to where you would like the process id file to be created.
# pid.file: /var/run/kibana.pid
# If you would like to send the log output to a file you can set the path below.
# logging.dest: stdout
#
optimize:
sourceMaps: '#cheap-source-map'
unsafeCache: true
lazyPrebuild: false

View file

@ -5,8 +5,16 @@ Kibana is a web application that you access through port 5601. All you need to d
machine where Kibana is running and specify the port number. For example, `localhost:5601` or
`http://YOURDOMAIN.com:5601`.
When you access Kibana, the Discover page loads by default with the default index pattern selected. The time filter is
set to the last 15 minutes and the search query is set to match-all (\*).
When you access Kibana, the <<discover,Discover>> page loads by default with the default index pattern selected. The
time filter is set to the last 15 minutes and the search query is set to match-all (\*).
If you don't see any documents, try setting the time filter to a wider time range.
If you still don't see any results, it's possible that you don't *have* any documents.
[[status]]
=== Checking Kibana Status
You can reach the Kibana server's status page by navigating to `localhost:5601/status`. The status page displays
information about the server's resource usage and lists the installed plugins.
image::images/kibana-status-page.png[]

View file

@ -31,7 +31,8 @@ remove a range.
or bottom _n_ elements of a given field to display, ordered by count or a custom metric.
*Filters*:: You can specify a set of {ref}/search-aggregations-bucket-filters-aggregation.html[_filters_] for the data.
You can specify a filter as a query string or in JSON format, just as in the Discover search bar. Click *Add Filter* to
add another filter.
add another filter. Click the image:images/labelbutton.png[] *label* button to open the label field, where you can type
in a name to display on the visualization.
*Significant Terms*:: Displays the results of the experimental
{ref}/search-aggregations-bucket-significantterms-aggregation.html[_significant terms_] aggregation. The value of the
*Size* parameter defines the number of entries this aggregation returns.

Binary file not shown.

After

Width:  |  Height:  |  Size: 105 KiB

BIN
docs/images/labelbutton.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 582 B

View file

@ -42,7 +42,8 @@ remove a range.
or bottom _n_ elements of a given field to display, ordered by count or a custom metric.
*Filters*:: You can specify a set of {ref}/search-aggregations-bucket-filters-aggregation.html[_filters_] for the data.
You can specify a filter as a query string or in JSON format, just as in the Discover search bar. Click *Add Filter* to
add another filter.
add another filter. Click the image:images/labelbutton.png[] *label* button to open the label field, where you can type
in a name to display on the visualization.
*Significant Terms*:: Displays the results of the experimental
{ref}/search-aggregations-bucket-significantterms-aggregation.html[_significant terms_] aggregation. The value of the
*Size* parameter defines the number of entries this aggregation returns.

View file

@ -81,7 +81,7 @@ If you are using a self-signed certificate for Elasticsearch, set the `ca` prope
[source,text]
----
# If you need to provide a CA certificate for your Elasticsarech instance, put
# If you need to provide a CA certificate for your Elasticsearch instance, put
# the path of the pem file here.
ca: /path/to/your/ca/cacert.pem
----

View file

@ -1,64 +1,57 @@
[[settings]]
== Settings
To use Kibana, you have to tell it about the Elasticsearch indices that you
want to explore by configuring one or more index patterns. You can also:
To use Kibana, you have to tell it about the Elasticsearch indices that you want to explore by configuring one or more
index patterns. You can also:
* Create scripted fields that are computed on the fly from your data. You can
browse and visualize scripted fields, but you cannot search them.
* Set advanced options such as the number of rows to show in a table and
how many of the most popular fields to show. Use caution when modifying advanced options,
as it's possible to set values that are incompatible with one another.
* Create scripted fields that are computed on the fly from your data. You can browse and visualize scripted fields, but
you cannot search them.
* Set advanced options such as the number of rows to show in a table and how many of the most popular fields to show.
Use caution when modifying advanced options, as it's possible to set values that are incompatible with one another.
* Configure Kibana for a production environment
[float]
[[settings-create-pattern]]
=== Creating an Index Pattern to Connect to Elasticsearch
An _index pattern_ identifies one or more Elasticsearch indices that you want to
explore with Kibana. Kibana looks for index names that match the specified pattern.
An asterisk (*) in the pattern matches zero or more characters. For example, the pattern
`myindex-*` matches all indices whose names start with `myindex-`, such as `myindex-1`
and `myindex-2`.
An _index pattern_ identifies one or more Elasticsearch indices that you want to explore with Kibana. Kibana looks for
index names that match the specified pattern.
An asterisk (*) in the pattern matches zero or more characters. For example, the pattern `myindex-*` matches all
indices whose names start with `myindex-`, such as `myindex-1` and `myindex-2`.
If you use event times to create index names (for example, if you're pushing data
into Elasticsearch from Logstash), the index pattern can also contain a date format.
In this case, the static text in the pattern must be enclosed in brackets, and you
specify the date format using the tokens described in <<date-format-tokens>>.
If you use event times to create index names (for example, if you're pushing data into Elasticsearch from Logstash),
the index pattern can also contain a date format.
In this case, the static text in the pattern must be enclosed in brackets, and you specify the date format using the
tokens described in <<date-format-tokens>>.
For example, `[logstash-]YYYY.MM.DD` matches all indices whose names have a
timestamp of the form `YYYY.MM.DD` appended to the prefix `logstash-`, such as
`logstash-2015.01.31` and `logstash-2015-02-01`.
For example, `[logstash-]YYYY.MM.DD` matches all indices whose names have a timestamp of the form `YYYY.MM.DD` appended
to the prefix `logstash-`, such as `logstash-2015.01.31` and `logstash-2015-02-01`.
An index pattern can also simply be the name of a single index.
To create an index pattern to connect to Elasticsearch:
. Go to the *Settings > Indices* tab.
. Specify an index pattern that matches the name of one or more of your Elasticsearch
indices. By default, Kibana guesses that you're you're working with log data being
fed into Elasticsearch by Logstash.
. Specify an index pattern that matches the name of one or more of your Elasticsearch indices. By default, Kibana
guesses that you're working with log data being fed into Elasticsearch by Logstash.
+
NOTE: When you switch between top-level tabs, Kibana remembers where you were.
For example, if you view a particular index pattern from the Settings tab, switch
to the Discover tab, and then go back to the Settings tab, Kibana displays the
index pattern you last looked at. To get to the create pattern form, click
the *Add* button in the Index Patterns list.
NOTE: When you switch between top-level tabs, Kibana remembers where you were. For example, if you view a particular
index pattern from the Settings tab, switch to the Discover tab, and then go back to the Settings tab, Kibana displays
the index pattern you last looked at. To get to the create pattern form, click the *Add* button in the Index Patterns
list.
. If your index contains a timestamp field that you want to use to perform
time-based comparisons, select the *Index contains time-based events* option
and select the index field that contains the timestamp. Kibana reads the
index mapping to list all of the fields that contain a timestamp.
. If your index contains a timestamp field that you want to use to perform time-based comparisons, select the *Index
contains time-based events* option and select the index field that contains the timestamp. Kibana reads the index
mapping to list all of the fields that contain a timestamp.
. If new indices are generated periodically and have a timestamp appended to
the name, select the *Use event times to create index names* option and select
the *Index pattern interval*. This enables Kibana to search only those indices
that could possibly contain data in the time range you specify. This is
primarily applicable if you are using Logstash to feed data into Elasticsearch.
. If new indices are generated periodically and have a timestamp appended to the name, select the *Use event times to
create index names* option and select the *Index pattern interval*. This enables Kibana to search only those indices
that could possibly contain data in the time range you specify. This is primarily applicable if you are using Logstash
to feed data into Elasticsearch.
. Click *Create* to add the index pattern.
. To designate the new pattern as the default pattern to load when you view
the Discover tab, click the *favorite* button.
. To designate the new pattern as the default pattern to load when you view the Discover tab, click the *favorite*
button.
[float]
[[date-format-tokens]]
@ -116,10 +109,9 @@ the Discover tab, click the *favorite* button.
[float]
[[set-default-pattern]]
=== Setting the Default Index Pattern
The default index pattern is loaded by automatically when you view the *Discover* tab.
Kibana displays a star to the left of the name of the default pattern in the Index Patterns list
on the *Settings > Indices* tab. The first pattern you create is automatically
designated as the default pattern.
The default index pattern is loaded automatically when you view the *Discover* tab. Kibana displays a star to the
left of the name of the default pattern in the Index Patterns list on the *Settings > Indices* tab. The first pattern
you create is automatically designated as the default pattern.
To set a different pattern as the default index pattern:
@ -132,13 +124,11 @@ NOTE: You can also manually set the default index pattern in *Advanced > Setting
[float]
[[reload-fields]]
=== Reloading the Index Fields List
When you add an index mapping, Kibana automatically scans the indices that
match the pattern to display a list of the index fields. You can reload the
index fields list to pick up any newly-added fields.
When you add an index mapping, Kibana automatically scans the indices that match the pattern to display a list of the
index fields. You can reload the index fields list to pick up any newly-added fields.
Reloading the index fields list also resets Kibana's popularity counters for the fields.
The popularity counters keep track of the fields you've used most often within Kibana
and are used to sort fields within lists.
Reloading the index fields list also resets Kibana's popularity counters for the fields. The popularity counters keep
track of the fields you've used most often within Kibana and are used to sort fields within lists.
To reload the index fields list:
@ -168,20 +158,17 @@ You can also set the field's popularity value in the *Popularity* text entry box
[float]
[[create-scripted-field]]
=== Creating a Scripted Field
Scripted fields compute data on the fly from the data in your
Elasticsearch indices. Scripted field data is shown on the Discover tab as
part of the document data, and you can use scripted fields in your visualizations.
(Scripted field values are computed at query time so they aren't indexed and
cannot be searched.)
Scripted fields compute data on the fly from the data in your Elasticsearch indices. Scripted field data is shown on
the Discover tab as part of the document data, and you can use scripted fields in your visualizations.
Scripted field values are computed at query time so they aren't indexed and cannot be searched.
WARNING: Computing data on the fly with scripted fields can be very resource
intensive and can have a direct impact on Kibana's performance. Keep in mind
that there's no built-in validation of a scripted field. If your scripts are
buggy, you'll get exceptions whenever you try to view the dynamically generated
data.
WARNING: Computing data on the fly with scripted fields can be very resource intensive and can have a direct impact on
Kibana's performance. Keep in mind that there's no built-in validation of a scripted field. If your scripts are
buggy, you'll get exceptions whenever you try to view the dynamically generated data.
Scripted fields use the Lucene expression syntax. For more information,
see http://www.elastic.co/guide/en/elasticsearch/reference/current/modules-scripting.html#_lucene_expressions_scripts[Lucene Expressions Scripts].
see http://www.elastic.co/guide/en/elasticsearch/reference/current/modules-scripting.html#_lucene_expressions_scripts[
Lucene Expressions Scripts].
You can reference any single value numeric field in your expressions, for example:
@ -196,8 +183,7 @@ To create a scripted field:
. Go to the pattern's *Scripted Fields* tab.
. Click *Add Scripted Field*.
. Enter a name for the scripted field.
. Enter the expression that you want to use to compute a value on the fly
from your index data.
. Enter the expression that you want to use to compute a value on the fly from your index data.
. Click *Save Scripted Field*.
For more information about scripted fields in Elasticsearch, see
@ -215,10 +201,8 @@ To modify a scripted field:
. Click the *Edit* button for the scripted field you want to change.
. Make your changes and then click *Save Scripted Field* to update the field.
WARNING: Keep in mind
that there's no built-in validation of a scripted field. If your scripts are
buggy, you'll get exceptions whenever you try to view the dynamically generated
data.
WARNING: Keep in mind that there's no built-in validation of a scripted field. If your scripts are buggy, you'll get
exceptions whenever you try to view the dynamically generated data.
[float]
[[delete-scripted-field]]
@ -231,13 +215,12 @@ To delete a scripted field:
[[advanced-options]]
=== Setting Advanced Options
The Advanced Settings page enables you to directly edit settings that control
the behavior of the Kibana application. For example, you can change the format
used to display dates, specify the default index pattern, and set the precision
The Advanced Settings page enables you to directly edit settings that control the behavior of the Kibana application.
For example, you can change the format used to display dates, specify the default index pattern, and set the precision
for displayed decimal values.
WARNING: Changing advanced settings can have unintended consequences. If you aren't
sure what you're doing, it's best to leave these settings as-is.
WARNING: Changing advanced settings can have unintended consequences. If you aren't sure what you're doing, it's best
to leave these settings as-is.
To set advanced options:
@ -253,30 +236,26 @@ To set advanced options:
You can view, edit, and delete saved searches, visualizations, and dashboards from *Settings > Objects*. You can also
export or import sets of searches, visualizations, and dashboards.
Viewing a saved object displays the selected item in the *Discover*, *Visualize*,
or *Dashboard* page. To view a saved object:
Viewing a saved object displays the selected item in the *Discover*, *Visualize*, or *Dashboard* page. To view a saved
object:
. Go to *Settings > Objects*.
. Select the object you want to view.
. Click the *View* button.
Editing a saved object enables you to directly modify the object definition.
You can change the name of the object, add a description, and modify the
JSON that defines the object's properties.
Editing a saved object enables you to directly modify the object definition. You can change the name of the object, add
a description, and modify the JSON that defines the object's properties.
If you attempt to access an object whose index has been deleted, Kibana displays
its Edit Object page. You can:
If you attempt to access an object whose index has been deleted, Kibana displays its Edit Object page. You can:
* Recreate the index so you can continue using the object.
* Delete the object and recreate it using a different index.
* Change the index name referenced in the object's `kibanaSavedObjectMeta.searchSourceJSON`
to point to an existing index pattern. This is useful if the index you were working
with has been renamed.
* Change the index name referenced in the object's `kibanaSavedObjectMeta.searchSourceJSON` to point to an existing
index pattern. This is useful if the index you were working with has been renamed.
WARNING: No validation is performed for object properties. Submitting invalid
changes will render the object unusable. Generally, you should use the
*Discover*, *Visualize*, or *Dashboard* pages to create new objects instead of
directly editing existing ones.
WARNING: No validation is performed for object properties. Submitting invalid changes will render the object unusable.
Generally, you should use the *Discover*, *Visualize*, or *Dashboard* pages to create new objects instead of directly
editing existing ones.
To edit a saved object:
@ -310,64 +289,135 @@ To import a set of objects:
[[kibana-server-properties]]
=== Setting Kibana Server Properties
The Kibana server reads properties from the `kibana.yml` file on startup. The default
settings configure Kibana to run on `localhost:5601`. To change the host or port number, or
connect to Elasticsearch running on a different machine, you'll need to update your `kibana.yml` file. You can also
enable SSL and set a variety of other options.
The Kibana server reads properties from the `kibana.yml` file on startup. The default settings configure Kibana to run
on `localhost:5601`. To change the host or port number, or connect to Elasticsearch running on a different machine,
you'll need to update your `kibana.yml` file. You can also enable SSL and set a variety of other options.
deprecated[4.2, The names of several Kibana server properties changed in the 4.2 release of Kibana. The previous names remain as functional aliases, but are now deprecated and will be removed in a future release of Kibana]
[horizontal]
.Kibana Server Properties
|===
|Property |Description
`server.port` added[4.2]:: The port that the Kibana server runs on.
+
*alias*: `port` deprecated[4.2]
+
*default*: `5601`
|`port`
|The port that the Kibana server runs on. Default: `port: 5601`.
`server.host` added[4.2]:: The host to bind the Kibana server to.
+
*alias*: `host` deprecated[4.2]
+
*default*: `"0.0.0.0"`
|`host`
|The host to bind the Kibana server to. Default: `host: "0.0.0.0"`.
`elasticsearch.url` added[4.2]:: The Elasticsearch instance where the indices you want to query reside.
+
*alias*: `elasticsearch_url` deprecated[4.2]
+
*default*: `"http://localhost:9200"`
|`elasticsearch_url`
|The Elasticsearch instance where the indices you want to query reside. Default:&nbsp;&nbsp;`elasticsearch_url:
"http://localhost:9200"`.
`elasticsearch.preserveHost` added[4.2]:: By default, the host specified in the incoming request from the browser is specified as the host in the corresponding request Kibana sends to Elasticsearch. If you set this option to `false`, Kibana uses the host specified in `elasticsearch_url`.
+
*alias*: `elasticsearch_preserve_host` deprecated[4.2]
+
*default*: `true`
|`elasticsearch_preserve_host`
|By default, the host specified in the incoming request from the browser is specified as the host in the
corresponding request Kibana sends to Elasticsearch. If you set this option to `false`, Kibana uses the host
specified in `elasticsearch_url`. You probably don't need to worry about this setting--just use the default.
Default: `elasticsearch_preserve_host: true`.
`elasticsearch.ssl.cert` added[4.2]:: This parameter specifies the path to the SSL certificate for Elasticsearch instances that require a client certificate.
+
*alias*: `kibana_elasticsearch_client_crt` deprecated[4.2]
|`kibana_index`
|The name of the index where saved searched, visualizations, and dashboards will be stored. Default: `kibana_index: .kibana`.
`elasticsearch.ssl.key` added[4.2]:: This parameter specifies the path to the SSL key for Elasticsearch instances that require a client key.
+
*alias*: `kibana_elasticsearch_client_key` deprecated[4.2]
|`default_app_id`
|The page that will be displayed when you launch Kibana: `discover`, `visualize`, `dashboard`, or `settings`. Default:
`default_app_id: "discover"`.
`elasticsearch.password` added[4.2]:: This parameter specifies the password for Elasticsearch instances that use HTTP basic authentication. Kibana users still need to authenticate with Elasticsearch, which is proxied through the Kibana server.
+
*alias*: `kibana_elasticsearch_password` deprecated [4.2]
|`request_timeout`
|How long to wait for responses from the Kibana backend or Elasticsearch, in milliseconds. Default: `request_timeout: 500000`.
`elasticsearch.username` added[4.2]:: This parameter specifies the username for Elasticsearch instances that use HTTP basic authentication. Kibana users still need to authenticate with Elasticsearch, which is proxied through the Kibana server.
+
*alias*: `kibana_elasticsearch_username` deprecated[4.2]
|`shard_timeout`
|How long Elasticsearch should wait for responses from shards. Set to 0 to disable. Default: `shard_timeout: 0`.
`elasticsearch.pingTimeout` added[4.2]:: This parameter specifies the maximum wait time in milliseconds for ping responses by Elasticsearch.
+
*alias*: `ping_timeout` deprecated[4.2]
+
*default*: `1500`
|`verify_ssl`
|Indicates whether or not to validate the Elasticsearch SSL certificate. Set to false to disable SSL verification.
Default: `verify_ssl: true`.
`elasticsearch.startupTimeout` added[4.2]:: This parameter specifies the maximum wait time in milliseconds for Elasticsearch discovery at Kibana startup. Kibana repeats attempts to discover an Elasticsearch cluster after the specified time elapses.
+
*alias*: `startup_timeout` deprecated[4.2]
+
*default*: `5000`
|`ca`
|The path to the CA certificate for your Elasticsearch instance. Specify if you are using a self-signed certificate
so the certificate can be verified. (Otherwise, you have to disable `verify_ssl`.) Default: none.
`kibana.index` added[4.2]:: The name of the index where saved searches, visualizations, and dashboards will be stored.
+
*alias*: `kibana_index` deprecated[4.2]
+
*default*: `.kibana`
|`ssl_key_file`
|The path to your Kibana server's key file. Must be set to encrypt communications between the browser and Kibana. Default: none.
`kibana.defaultAppId` added[4.2]:: The page that will be displayed when you launch Kibana: `discover`, `visualize`, `dashboard`, or `settings`.
+
*alias*: `default_app_id` deprecated[4.2]
+
*default*: `"discover"`
|`ssl_cert_file`
|The path to your Kibana server's certificate file. Must be set to encrypt communications between the browser and Kibana. Default: none.
`logging.silent` added[4.2]:: Set this value to `true` to suppress all logging output.
+
*default*: `false`
|`pid_file`
|The location where you want to store the process ID file. If not specified, the PID file is stored in
`/var/run/kibana.pid`. Default: none.
`logging.quiet` added[4.2]:: Set this value to `true` to suppress all logging output except for log messages tagged `error`, `fatal`, or Hapi.js errors.
+
*default*: `false`
|`log_file`
|The location where you want to store the Kibana's log output. If not specified, log output is written to standard
output and not stored. Specifying a log file suppresses log writes to standard output. Default: none.
`logging.verbose` added[4.2]:: Set this value to `true` to log all events, including system usage information and all requests.
+
*default*: `false`
|===
`logging.events` added[4.2]:: You can specify a map of log types to output tags for this parameter to create a customized set of loggable events, as in the following example:
+
[source,json]
{
log: ['info', 'warning', 'error', 'fatal'],
response: '*',
error: '*'
}
`elasticsearch.requestTimeout` added[4.2]:: How long to wait for responses from the Kibana backend or Elasticsearch, in milliseconds.
+
*alias*: `request_timeout` deprecated[4.2]
+
*default*: `500000`
`elasticsearch.shardTimeout` added[4.2]:: How long Elasticsearch should wait for responses from shards. Set to 0 to disable.
+
*alias*: `shard_timeout` deprecated[4.2]
+
*default*: `0`
`elasticsearch.ssl.verify` added[4.2]:: Indicates whether or not to validate the Elasticsearch SSL certificate. Set to false to disable SSL verification.
+
*alias*: `verify_ssl` deprecated[4.2]
+
*default*: `true`
`elasticsearch.ssl.ca` added[4.2]:: The path to the CA certificate for your Elasticsearch instance. Specify if you are using a self-signed certificate so the certificate can be verified. Disable `elasticsearch.ssl.verify` otherwise.
+
*alias*: `ca` deprecated[4.2]
`server.ssl.key` added[4.2]:: The path to your Kibana server's key file. Must be set to encrypt communications between the browser and Kibana.
+
*alias*: `ssl_key_file` deprecated[4.2]
`server.ssl.cert` added[4.2]:: The path to your Kibana server's certificate file. Must be set to encrypt communications between the browser and Kibana.
+
*alias*: `ssl_cert_file` deprecated[4.2]
`pid.file` added[4.2]:: The location where you want to store the process ID file.
+
*alias*: `pid_file` deprecated[4.2]
+
*default*: `/var/run/kibana.pid`
`logging.dest` added[4.2]:: The location where you want to store the Kibana's log output. If not specified, log output is written to standard output and not stored. Specifying a log file suppresses log writes to standard output.
+
*alias*: `log_file` deprecated[4.2]

View file

@ -62,7 +62,8 @@ remove a range.
or bottom _n_ elements of a given field to display, ordered by count or a custom metric.
*Filters*:: You can specify a set of {ref}/search-aggregations-bucket-filters-aggregation.html[_filters_] for the data.
You can specify a filter as a query string or in JSON format, just as in the Discover search bar. Click *Add Filter* to
add another filter.
add another filter. Click the image:images/labelbutton.png[] *label* button to open the label field, where you can type
in a name to display on the visualization.
*Significant Terms*:: Displays the results of the experimental
{ref}/search-aggregations-bucket-significantterms-aggregation.html[_significant terms_] aggregation. The value of the
*Size* parameter defines the number of entries this aggregation returns.

View file

@ -28,7 +28,8 @@ remove a range.
or bottom _n_ elements of a given field to display, ordered by count or a custom metric.
*Filters*:: You can specify a set of {ref}/search-aggregations-bucket-filters-aggregation.html[_filters_] for the data.
You can specify a filter as a query string or in JSON format, just as in the Discover search bar. Click *Add Filter* to
add another filter.
add another filter. Click the image:images/labelbutton.png[] *label* button to open the label field, where you can type in a
name to display on the visualization.
*Significant Terms*:: Displays the results of the experimental
{ref}/search-aggregations-bucket-significantterms-aggregation.html[_significant terms_] aggregation.

0
installedPlugins/.empty Normal file
View file

View file

@ -1,62 +0,0 @@
// Karma configuration
// Generated on Mon Jul 27 2015 04:03:51 GMT-0700 (MST)
module.exports = function (config) {
config.set({
// base path that will be used to resolve all patterns (eg. files, exclude)
basePath: '',
captureTimeout: 30000,
browserNoActivityTimeout: 120000,
// frameworks to use
// available frameworks: https://npmjs.org/browse/keyword/karma-adapter
frameworks: ['mocha'],
// list of files / patterns to load in the browser
files: [
'http://localhost:5601/bundles/tests.bundle.js',
'http://localhost:5601/bundles/tests.bundle.style.css'
],
proxies: {
'/tests/': 'http://localhost:5601/tests/',
'/bundles/': 'http://localhost:5601/bundles/'
},
// test results reporter to use
// possible values: 'dots', 'progress'
// available reporters: https://npmjs.org/browse/keyword/karma-reporter
reporters: ['progress', 'growl'],
// web server port
port: 9876,
// enable / disable colors in the output (reporters and logs)
colors: true,
// level of logging
// possible values: config.LOG_DISABLE || config.LOG_ERROR || config.LOG_WARN || config.LOG_INFO || config.LOG_DEBUG
logLevel: config.LOG_INFO,
// enable / disable watching file and executing tests whenever any file changes
autoWatch: false,
// start these browsers
// available browser launchers: https://npmjs.org/browse/keyword/karma-launcher
browsers: [
require('os').platform() === 'win32' ? 'IE' : 'Chrome'
],
// Continuous Integration mode
// if true, Karma captures browsers, runs the tests and exits
singleRun: false
});
};

View file

@ -13,7 +13,8 @@
"private": false,
"version": "4.2.0-snapshot",
"build": {
"num": 8095
"number": 8467,
"sha": "6cb7fec4e154faa0a4a3fee4b33dfef91b9870d9"
},
"main": "src/server/KbnServer.js",
"homepage": "https://www.elastic.co/products/kibana",
@ -33,9 +34,16 @@
],
"scripts": {
"test": "grunt test",
"start": "node ./src/server/cli/index.js --dev",
"test:dev": "grunt test:dev",
"test:quick": "grunt test:quick",
"test:browser": "grunt test:browser",
"test:server": "grunt test:server",
"test:coverage": "grunt test:coverage",
"build": "grunt build",
"start": "./bin/kibana --dev",
"precommit": "grunt lintStagedFiles",
"karma": "karma start"
"karma": "karma start",
"elasticsearch": "grunt esvm:dev:keepalive"
},
"repository": {
"type": "git",
@ -43,71 +51,65 @@
},
"dependencies": {
"@spalger/angular-bootstrap": "^0.10.0",
"@spalger/angular-nvd3": "^1.0.0-beta",
"@spalger/filesaver": "^1.1.2",
"@spalger/leaflet-draw": "^0.2.3",
"@spalger/leaflet-heat": "^0.1.3",
"@spalger/nvd3": "^1.8.1",
"@spalger/ui-ace": "^0.2.3",
"Nonsense": "^0.1.2",
"angular": "1.4.5",
"angular-elastic": "2.5.0",
"angular-mocks": "1.4.5",
"angular-nvd3": "panda01/angular-nvd3#kibana",
"angular-route": "1.4.5",
"ansicolors": "^0.3.2",
"auto-preload-rjscommon-deps-loader": "^1.0.4",
"autoprefixer": "^5.2.0",
"autoprefixer-loader": "^2.0.0",
"babel": "^5.8.19",
"babel-core": "^5.8.19",
"autoprefixer": "5.1.x",
"autoprefixer-loader": "2.0.x",
"babel": "^5.8.21",
"babel-core": "^5.8.22",
"babel-loader": "^5.3.2",
"babel-runtime": "^5.8.19",
"babel-runtime": "^5.8.20",
"bluebird": "^2.9.27",
"boom": "^2.8.0",
"bootstrap": "^3.3.5",
"brace": "^0.5.1",
"bunyan": "^1.2.3",
"chokidar": "^1.0.4",
"commander": "^2.8.1",
"css-loader": "^0.15.1",
"d3": "^3.5.6",
"elasticsearch": "^5.0.0",
"elasticsearch-browser": "^5.0.0",
"elasticsearch": "^8.0.1",
"elasticsearch-browser": "^8.0.1",
"expiry-js": "^0.1.7",
"exports-loader": "^0.6.2",
"expose-loader": "^0.7.0",
"extract-text-webpack-plugin": "^0.8.2",
"file-loader": "^0.8.4",
"font-awesome": "^4.3.0",
"glob": "^4.3.2",
"good": "^6.2.0",
"good-squeeze": "^2.1.0",
"gridster": "^0.5.6",
"hapi": "^8.6.1",
"imports-loader": "^0.6.4",
"is-array": "^1.0.1",
"jade": "^1.7.2",
"jade-loader": "^0.7.1",
"joi": "^6.4.3",
"jquery": "^2.1.4",
"js-yaml": "^3.2.5",
"json-stringify-safe": "^5.0.1",
"jstimezonedetect": "^1.0.5",
"leaflet": "^0.7.3",
"less": "^2.5.1",
"less-loader": "^2.2.0",
"lodash": "^3.10.0",
"marked": "^0.3.3",
"memory-fs": "^0.2.0",
"marked": "0.3.3",
"minimatch": "^2.0.8",
"mkdirp": "^0.5.1",
"moment": "^2.10.3",
"moment-timezone": "^0.4.0",
"ng-clip": "^0.2.6",
"numeral": "^1.5.3",
"nvd3": "panda01/nvd3#kibana",
"raw-loader": "^0.5.1",
"request": "^2.40.0",
"request": "^2.60.0",
"requirefrom": "^0.2.0",
"rimraf": "^2.4.1",
"rjs-repack-loader": "^1.0.6",
"script-loader": "^0.6.1",
"semver": "^4.3.6",
"style-loader": "^0.12.3",
@ -119,44 +121,52 @@
"zeroclipboard": "^2.2.0"
},
"devDependencies": {
"Nonsense": "^0.1.2",
"angular-mocks": "1.2.28",
"auto-release-sinon": "^1.0.3",
"babel-eslint": "^4.0.5",
"eslint": "1.0.x",
"babel-eslint": "^4.1.1",
"chokidar": "^1.0.4",
"eslint": "^1.3.1",
"expect.js": "^0.3.1",
"faker": "^1.1.0",
"glob": "^4.3.2",
"grunt": "^0.4.5",
"grunt-babel": "^5.0.1",
"grunt-cli": "0.1.13",
"grunt-contrib-clean": "^0.6.0",
"grunt-contrib-compress": "^0.13.0",
"grunt-contrib-copy": "^0.8.0",
"grunt-esvm": "^1.1.3",
"grunt-esvm": "^1.1.5",
"grunt-karma": "^0.12.0",
"grunt-replace": "^0.7.9",
"grunt-run": "spalger/grunt-run#master",
"grunt-run": "^0.4.0",
"grunt-s3": "^0.2.0-alpha.3",
"grunt-simple-mocha": "^0.4.0",
"gruntify-eslint": "^1.0.0",
"gruntify-eslint": "^1.0.1",
"html-entities": "^1.1.1",
"husky": "^0.8.1",
"istanbul-instrumenter-loader": "^0.1.3",
"karma": "^0.13.3",
"karma-chrome-launcher": "^0.2.0",
"karma-coverage": "^0.5.0",
"karma-firefox-launcher": "^0.1.6",
"karma-growl-reporter": "^0.1.1",
"karma-ie-launcher": "^0.2.0",
"karma-mocha": "^0.2.0",
"karma-safari-launcher": "^0.1.1",
"libesvm": "^1.0.1",
"license-checker": "^3.1.0",
"load-grunt-config": "^0.7.0",
"marked-text-renderer": "^0.1.0",
"mocha": "^2.2.5",
"nock": "^2.9.0",
"npm": "^2.11.0",
"npm": "3.2",
"portscanner": "^1.0.0",
"simple-git": "^1.3.0",
"sinon": "^1.15.4",
"source-map": "^0.4.4"
"source-map": "^0.4.4",
"wreck": "^6.1.0"
},
"engines": {
"node": ">=2"
"node": "2.5",
"npm": "3.2"
}
}

View file

@ -80,4 +80,16 @@ Command.prototype.parseOptions = _.wrap(Command.prototype.parseOptions, function
return opts;
});
Command.prototype.action = _.wrap(Command.prototype.action, function (action, fn) {
return action.call(this, function (...args) {
var ret = fn.apply(this, args);
if (ret && typeof ret.then === 'function') {
ret.then(null, function (e) {
console.log('FATAL CLI ERROR', e.stack);
process.exit(1);
});
}
});
});
module.exports = Command;

View file

@ -0,0 +1,126 @@
let cluster = require('cluster');
let { join } = require('path');
let { debounce, compact, invoke, bindAll, once } = require('lodash');
let Log = require('../Log');
let Worker = require('./Worker');
// Runs Kibana in dev mode as a two-worker cluster: an "optimizer" worker
// that only builds bundles (server listening disabled) and a "server"
// worker that handles requests. Messages are relayed between the workers,
// and source directories can be watched so workers restart on change.
module.exports = class ClusterManager {
  constructor(opts) {
    this.log = new Log(opts.quiet, opts.silent);
    // count of files seen by the watcher before its "ready" event fired
    this.addedCount = 0;

    this.workers = [
      // the array literal doubles as assignment: both the workers list and
      // the named this.optimizer / this.server references are populated here
      this.optimizer = new Worker({
        type: 'optmzr',
        title: 'optimizer',
        log: this.log,
        argv: compact([
          '--plugins.initialize=false',
          '--server.autoListen=false'
        ]),
        // the optimizer is never restarted by file changes
        watch: false
      }),
      this.server = new Worker({
        type: 'server',
        log: this.log
      })
    ];

    // broker messages between workers
    this.workers.forEach((worker) => {
      worker.on('broadcast', (msg) => {
        this.workers.forEach((to) => {
          // forward to every *other* worker that is currently online
          if (to !== worker && to.online) {
            to.fork.send(msg);
          }
        });
      });
    });

    bindAll(this, 'onWatcherAdd', 'onWatcherError', 'onWatcherChange');

    if (opts.watch) this.setupWatching();
    else this.startCluster();
  }

  // Start every worker and enable the manual-restart keyboard shortcut.
  startCluster() {
    this.setupManualRestart();
    invoke(this.workers, 'start');
  }

  // Watch source/config directories with chokidar; workers are only started
  // once the initial scan completes (the "ready" event).
  setupWatching() {
    var chokidar = require('chokidar');
    let utils = require('requirefrom')('src/utils');
    let fromRoot = utils('fromRoot');

    this.watcher = chokidar.watch([
      'src/plugins',
      'src/server',
      'src/ui',
      'src/utils',
      'config',
      'installedPlugins'
    ], {
      cwd: fromRoot('.'),
      // skip dotfiles, dependency dirs, public assets and test dirs
      ignored: /[\\\/](\..*|node_modules|bower_components|public|__tests__)[\\\/]/
    });

    this.watcher.on('add', this.onWatcherAdd);
    this.watcher.on('error', this.onWatcherError);

    this.watcher.on('ready', once(() => {
      // start sending changes to workers
      this.watcher.removeListener('add', this.onWatcherAdd);
      this.watcher.on('all', this.onWatcherChange);

      this.log.good('watching for changes', `(${this.addedCount} files)`);
      this.startCluster();
    }));
  }

  // Restart the server worker when the user hits <enter> twice within 2s.
  setupManualRestart() {
    let readline = require('readline');
    let rl = readline.createInterface(process.stdin, process.stdout);

    let nls = 0;
    let clear = () => nls = 0;
    // reset the newline counter if the second <enter> never arrives
    let clearSoon = debounce(clear, 2000);

    rl.setPrompt('');
    rl.prompt();

    rl.on('line', line => {
      nls = nls + 1;

      if (nls >= 2) {
        clearSoon.cancel();
        clear();
        this.server.start();
      } else {
        clearSoon();
      }

      rl.prompt();
    });

    rl.on('SIGINT', () => {
      // release stdin, then re-raise SIGINT so the process exits normally
      rl.pause();
      process.kill(process.pid, 'SIGINT');
    });
  }

  // Tally files discovered during the watcher's initial scan.
  onWatcherAdd() {
    this.addedCount += 1;
  }

  // Fan a file-change event out to every worker; each decides whether to restart.
  onWatcherChange(e, path) {
    invoke(this.workers, 'onChange', path);
  }

  onWatcherError(err) {
    this.log.bad('failed to watch files!\n', err.stack);
    process.exit(1); // eslint-disable-line no-process-exit
  }
};

View file

@ -14,6 +14,17 @@ cluster.setupMaster({
silent: false
});
let dead = fork => {
return fork.isDead() || fork.killed;
};
let kill = fork => {
// fork.kill() waits for process to disconnect, but causes occasional
// "ipc disconnected" errors and is too slow for the proc's "exit" event
fork.process.kill();
fork.killed = true;
};
module.exports = class Worker extends EventEmitter {
constructor(opts) {
opts = opts || {};
@ -22,7 +33,9 @@ module.exports = class Worker extends EventEmitter {
this.log = opts.log;
this.type = opts.type;
this.title = opts.title || opts.type;
this.filters = opts.filters;
this.watch = (opts.watch !== false);
this.startCount = 0;
this.online = false;
this.changes = [];
let argv = _.union(baseArgv, opts.argv || []);
@ -31,7 +44,7 @@ module.exports = class Worker extends EventEmitter {
kbnWorkerArgv: JSON.stringify(argv)
};
_.bindAll(this, ['onExit', 'onMessage', 'shutdown', 'start']);
_.bindAll(this, ['onExit', 'onMessage', 'onOnline', 'onDisconnect', 'shutdown', 'start']);
this.start = _.debounce(this.start, 25);
cluster.on('exit', this.onExit);
@ -46,6 +59,7 @@ module.exports = class Worker extends EventEmitter {
if (code) {
this.log.bad(`${this.title} crashed`, 'with status code', code);
if (!this.watch) process.exit(code);
} else {
// restart after graceful shutdowns
this.start();
@ -53,23 +67,17 @@ module.exports = class Worker extends EventEmitter {
}
onChange(path) {
var valid = true;
if (this.filters) {
valid = _.any(this.filters, function (filter) {
return filter.test(path);
});
}
if (!valid) return;
if (!this.watch) return;
this.changes.push(path);
this.start();
}
shutdown() {
if (this.fork && !this.fork.isDead()) {
this.fork.kill();
if (this.fork && !dead(this.fork)) {
kill(this.fork);
this.fork.removeListener('message', this.onMessage);
this.fork.removeListener('online', this.onOnline);
this.fork.removeListener('disconnect', this.onDisconnect);
}
}
@ -78,6 +86,14 @@ module.exports = class Worker extends EventEmitter {
this.emit('broadcast', msg[1]);
}
onOnline() {
this.online = true;
}
onDisconnect() {
this.online = false;
}
flushChangeBuffer() {
let files = _.unique(this.changes.splice(0));
let prefix = files.length > 1 ? '\n - ' : '';
@ -87,18 +103,19 @@ module.exports = class Worker extends EventEmitter {
}
start() {
if (this.fork && !this.fork.isDead()) {
// once "exit" event is received with 0 status, start() is called again
return this.shutdown();
}
// once "exit" event is received with 0 status, start() is called again
if (this.fork) return this.shutdown();
if (this.fork && this.changes.length) {
this.log.warn(`${this.title} restarting`, `due to changes in ${this.flushChangeBuffer()}`);
} else {
this.log.warn(`${this.title} starting`);
if (this.changes.length) {
this.log.warn(`restarting ${this.title}`, `due to changes in ${this.flushChangeBuffer()}`);
}
else if (this.startCount++) {
this.log.warn(`restarting ${this.title}...`);
}
this.fork = cluster.fork(this.env);
this.fork.on('message', this.onMessage);
this.fork.on('online', this.onOnline);
this.fork.on('disconnect', this.onDisconnect);
}
};

View file

@ -1,2 +1,2 @@
require('babel/register')(require('../optimize/babelOptions'));
require('babel/register')(require('../optimize/babelOptions').node);
require('./cli');

View file

@ -1,69 +0,0 @@
var expect = require('expect.js');
var nock = require('nock');
var glob = require('glob');
var rimraf = require('rimraf');
var fs = require('fs');
var { join } = require('path');
var sinon = require('sinon');
var pluginLogger = require('../pluginLogger');
var npmInstall = require('../npmInstall');
// Mocha suite for the plugin installer's npmInstall step. Each test runs
// against a scratch directory that is wiped before and after every case.
describe('kibana cli', function () {

  describe('plugin installer', function () {

    describe('npmInstall', function () {

      // scratch plugin directory created/removed per test
      var testWorkingPath = join(__dirname, '.test.data');
      var logger;
      var statSyncStub;

      beforeEach(function () {
        statSyncStub = undefined;
        logger = pluginLogger(false);
        rimraf.sync(testWorkingPath);
        // silence logger output and record calls for assertions
        sinon.stub(logger, 'log');
        sinon.stub(logger, 'error');
      });

      afterEach(function () {
        logger.log.restore();
        logger.error.restore();
        rimraf.sync(testWorkingPath);
        // only restore fs.statSync if the test actually stubbed it
        if (statSyncStub) statSyncStub.restore();
      });

      it('should throw an error if there is no package.json file in the archive', function () {
        // empty dir: statSync on package.json will throw ENOENT
        fs.mkdirSync(testWorkingPath);

        var errorStub = sinon.stub();
        return npmInstall(testWorkingPath, logger)
        .catch(errorStub)
        .then(function (data) {
          expect(errorStub.called).to.be(true);
          expect(errorStub.lastCall.args[0].message).to.match(/package.json/);
        });
      });

      it('should rethrow any errors other than "ENOENT" from fs.statSync', function () {
        fs.mkdirSync(testWorkingPath);

        // force a non-ENOENT failure out of fs.statSync
        statSyncStub = sinon.stub(fs, 'statSync', function () {
          throw new Error('This is unexpected.');
        });

        var errorStub = sinon.stub();
        return npmInstall(testWorkingPath, logger)
        .catch(errorStub)
        .then(function (data) {
          expect(errorStub.called).to.be(true);
          expect(errorStub.lastCall.args[0].message).to.match(/This is unexpected./);
        });
      });

    });

  });

});

View file

@ -1,7 +1,6 @@
var expect = require('expect.js');
var sinon = require('sinon');
var nock = require('nock');
var glob = require('glob');
var rimraf = require('rimraf');
var fs = require('fs');
var { join } = require('path');

View file

@ -1,35 +0,0 @@
var Promise = require('bluebird');
var fs = require('fs');
var path = require('path');
var exec = require('child_process').exec;
// Runs `npm install --production` inside `dest` (an extracted plugin
// directory). Resolves when npm exits with code 0; rejects if the directory
// has no package.json, the child fails to spawn, or npm exits non-zero.
module.exports = function (dest, logger) {
  return new Promise(function (resolve, reject) {
    //throw an exception if package.json does not exist
    try {
      var packageFile = path.join(dest, 'package.json');
      fs.statSync(packageFile);
    } catch (e) {
      // anything other than "file missing" is a real fs error — rethrow
      if (e.code !== 'ENOENT') throw e;
      return reject(new Error('Plugin does not contain package.json file'));
    }

    // use the npm binary shipped next to the current node executable; quoted
    // to survive spaces in the path, backslashes normalized for exec
    var cmd = '"' + path.resolve(path.dirname(process.execPath), 'npm').replace(/\\/g, '/') + '" install --production';

    var child = exec(cmd, { cwd: dest });
    child.on('error', function (err) {
      reject(err);
    });
    child.on('exit', function (code, signal) {
      if (code === 0) {
        resolve();
      } else {
        reject(new Error('npm install failed with code ' + code));
      }
    });

    // NOTE(review): these hand the child's stdout/stderr *streams* to the
    // logger rather than their text — presumably pluginLogger accepts
    // streams; confirm, otherwise this prints "[object Object]".
    logger.error(child.stderr);
    logger.log(child.stdout);
  });
};

View file

@ -1,6 +1,8 @@
var _ = require('lodash');
var zlib = require('zlib');
var Promise = require('bluebird');
var url = require('url');
var fs = require('fs');
var request = require('request');
var tar = require('tar');
var progressReporter = require('./progressReporter');
@ -17,7 +19,7 @@ module.exports = function (settings, logger) {
throw new Error('Not a valid url.');
}
logger.log('attempting to download ' + sourceUrl);
logger.log('Attempting to extract from ' + sourceUrl);
return Promise.try(function () {
return downloadSingle(sourceUrl, settings.workingPath, settings.timeout, logger)
@ -26,7 +28,7 @@ module.exports = function (settings, logger) {
return tryNext();
}
if (err.message === 'EEXTRACT') {
throw (new Error('Error extracting the plugin archive'));
throw (new Error('Error extracting the plugin archive... is this a valid tar.gz file?'));
}
throw (err);
});
@ -54,10 +56,10 @@ module.exports = function (settings, logger) {
}
return wrappedRequest(requestOptions)
.then(function (req) {
var reporter = progressReporter(logger, req);
.then(function (fileStream) {
var reporter = progressReporter(logger, fileStream);
req
fileStream
.on('response', reporter.handleResponse)
.on('data', reporter.handleData)
.on('error', _.partial(reporter.handleError, 'ENOTFOUND'))
@ -73,7 +75,12 @@ module.exports = function (settings, logger) {
function wrappedRequest(requestOptions) {
return Promise.try(function () {
return request.get(requestOptions);
let urlInfo = url.parse(requestOptions.url);
if (/^file/.test(urlInfo.protocol)) {
return fs.createReadStream(urlInfo.path);
} else {
return request.get(requestOptions);
}
})
.catch(function (err) {
if (err.message.match(/invalid uri/i)) {

View file

@ -1,6 +1,5 @@
var pluginDownloader = require('./pluginDownloader');
var pluginCleaner = require('./pluginCleaner');
var npmInstall = require('./npmInstall');
var fs = require('fs');
module.exports = {
@ -8,12 +7,12 @@ module.exports = {
};
function install(settings, logger) {
logger.log(`installing ${settings.package}`);
logger.log(`Installing ${settings.package}`);
try {
fs.statSync(settings.pluginPath);
logger.error(`Plugin ${settings.package} already exists. Please remove before installing a new version.`);
logger.error(`Plugin ${settings.package} already exists, please remove before installing a new version`);
process.exit(70); // eslint-disable-line no-process-exit
} catch (e) {
if (e.code !== 'ENOENT') throw e;
@ -26,12 +25,9 @@ function install(settings, logger) {
.then(function () {
return downloader.download();
})
.then(function () {
return npmInstall(settings.workingPath, logger);
})
.then(function (curious) {
fs.renameSync(settings.workingPath, settings.pluginPath);
logger.log('Plugin installation complete!');
logger.log('Plugin installation complete');
})
.catch(function (e) {
logger.error(`Plugin installation was unsuccessful due to error "${e.message}"`);

View file

@ -10,7 +10,7 @@ function remove(settings, logger) {
try {
fs.statSync(settings.pluginPath);
} catch (e) {
logger.log(`Plugin ${settings.package} does not exist.`);
logger.log(`Plugin ${settings.package} does not exist`);
return;
}

View file

@ -1,9 +1,9 @@
var Promise = require('bluebird');
/*
Responsible for reporting the progress of the file request stream
Responsible for reporting the progress of the file stream
*/
module.exports = function (logger, request) {
module.exports = function (logger, stream) {
var oldDotCount = 0;
var runningTotal = 0;
var totalSize = 0;
@ -22,7 +22,7 @@ module.exports = function (logger, request) {
if (err) logger.error(err);
hasError = true;
request.abort();
if (stream.abort) stream.abort();
_reject(new Error(errorMessage));
}
@ -56,7 +56,7 @@ module.exports = function (logger, request) {
function handleEnd() {
if (hasError) return;
logger.log('Download Complete.');
logger.log('Extraction complete');
_resolve();
}

View file

@ -3,10 +3,16 @@ let { isWorker } = require('cluster');
let { resolve } = require('path');
let cwd = process.cwd();
let readYamlConfig = require('./readYamlConfig');
let src = require('requirefrom')('src');
let fromRoot = src('utils/fromRoot');
let KbnServer = src('server/KbnServer');
let canCluster;
try {
require.resolve('../cluster/ClusterManager');
canCluster = true;
} catch (e) {
canCluster = false;
}
let pathCollector = function () {
let paths = [];
@ -20,8 +26,9 @@ let pluginDirCollector = pathCollector();
let pluginPathCollector = pathCollector();
module.exports = function (program) {
program
.command('serve')
let command = program.command('serve');
command
.description('Run the kibana server')
.collectUnknownOptions()
.option('-e, --elasticsearch <uri>', 'Elasticsearch instance')
@ -49,22 +56,39 @@ module.exports = function (program) {
pluginPathCollector,
[]
)
.option('--plugins <path>', 'an alias for --plugin-dir', pluginDirCollector)
.option('--dev', 'Run the server with development mode defaults')
.option('--no-watch', 'Prevent watching, use with --dev to prevent server restarts')
.action(function (opts) {
if (opts.dev && opts.watch && !isWorker) {
// stop processing the action and handoff to watch cluster manager
return require('../watch/watch')(opts);
.option('--plugins <path>', 'an alias for --plugin-dir', pluginDirCollector);
if (canCluster) {
command
.option('--dev', 'Run the server with development mode defaults')
.option('--no-watch', 'Prevents automatic restarts of the server in --dev mode');
}
command
.action(async function (opts) {
if (canCluster && opts.dev && !isWorker) {
// stop processing the action and handoff to cluster manager
let ClusterManager = require('../cluster/ClusterManager');
new ClusterManager(opts);
return;
}
let readYamlConfig = require('./readYamlConfig');
let KbnServer = src('server/KbnServer');
let settings = readYamlConfig(opts.config || fromRoot('config/kibana.yml'));
if (opts.dev) {
try { _.merge(settings, readYamlConfig(fromRoot('config/kibana.dev.yml'))); }
catch (e) { null; }
}
let set = _.partial(_.set, settings);
let get = _.partial(_.get, settings);
if (opts.dev) {
set('env', 'development');
set('optimize.watch', opts.watch);
set('optimize.lazy', true);
}
if (opts.elasticsearch) set('elasticsearch.url', opts.elasticsearch);
@ -82,13 +106,22 @@ module.exports = function (program) {
set('plugins.paths', [].concat(opts.pluginPath || []));
let server = new KbnServer(_.merge(settings, this.getUnknownOptions()));
let kbnServer = {};
server.ready().catch(function (err) {
console.error(err.stack);
try {
kbnServer = new KbnServer(_.merge(settings, this.getUnknownOptions()));
await kbnServer.ready();
}
catch (err) {
let { server } = kbnServer;
if (server) server.log(['fatal'], err);
else console.error('FATAL', err);
kbnServer.close();
process.exit(1); // eslint-disable-line no-process-exit
});
}
return server;
return kbnServer;
});
};

View file

@ -1,90 +0,0 @@
let cluster = require('cluster');
let { join } = require('path');
let _ = require('lodash');
var chokidar = require('chokidar');
let utils = require('requirefrom')('src/utils');
let fromRoot = utils('fromRoot');
let Log = require('../Log');
let Worker = require('./Worker');
module.exports = function (opts) {
let watcher = chokidar.watch([
'src/cli',
'src/fixtures',
'src/server',
'src/utils',
'src/plugins',
'config',
], {
cwd: fromRoot('.'),
ignore: 'src/plugins/*/public/**/*'
});
let log = new Log(opts.quiet, opts.silent);
let customLogging = opts.quiet || opts.silent || opts.verbose;
let workers = [
new Worker({
type: 'optmzr',
title: 'optimizer',
log: log,
argv: _.compact([
customLogging ? null : '--quiet',
'--plugins.initialize=false',
'--server.autoListen=false',
'--optimize._workerRole=send'
]),
filters: [
/src\/server\/(optimize|ui|plugins)\//,
/src\/plugins\/[^\/]+\/[^\/]+\.js$/,
/src\/cli\//
]
}),
new Worker({
type: 'server',
log: log,
argv: [
'--optimize._workerRole=receive'
]
})
];
workers.forEach(function (worker) {
worker.on('broadcast', function (msg) {
workers.forEach(function (to) {
if (to !== worker && to.fork) to.fork.send(msg);
});
});
});
var addedCount = 0;
function onAddBeforeReady() {
addedCount += 1;
}
function onReady() {
// start sending changes to workers
watcher.removeListener('add', onAddBeforeReady);
watcher.on('all', onAnyChangeAfterReady);
log.good('Watching for changes', `(${addedCount} files)`);
_.invoke(workers, 'start');
}
function onAnyChangeAfterReady(e, path) {
_.invoke(workers, 'onChange', path);
}
function onError(err) {
log.bad('Failed to watch files!\n', err.stack);
process.exit(1); // eslint-disable-line no-process-exit
}
watcher.on('add', onAddBeforeReady);
watcher.on('ready', onReady);
watcher.on('error', onError);
};

View file

@ -0,0 +1,37 @@
// Canned Elasticsearch search response used as a test fixture: no hits are
// returned (hits array empty despite total 218512) and aggregation "1" is a
// range aggregation with three numeric buckets (below 1024, 1024-2560, above).
module.exports = {
  "took": 35,
  "timed_out": false,
  "_shards": {
    "total": 7,
    "successful": 7,
    "failed": 0
  },
  "hits": {
    "total": 218512,
    "max_score": 0,
    "hits": []
  },
  "aggregations": {
    "1": {
      "buckets": {
        "*-1024.0": {
          "to": 1024,
          "to_as_string": "1024.0",
          "doc_count": 20904
        },
        "1024.0-2560.0": {
          "from": 1024,
          "from_as_string": "1024.0",
          "to": 2560,
          "to_as_string": "2560.0",
          "doc_count": 23358
        },
        "2560.0-*": {
          "from": 2560,
          "from_as_string": "2560.0",
          "doc_count": 174250
        }
      }
    }
  }
};

View file

@ -1,32 +1,63 @@
let { EventEmitter } = require('events');
let { inherits } = require('util');
let _ = require('lodash');
let { join } = require('path');
let write = require('fs').writeFileSync;
let { defaults } = require('lodash');
let { resolve } = require('path');
let { writeFile } = require('fs');
let webpack = require('webpack');
var Boom = require('boom');
let DirectoryNameAsMain = require('webpack-directory-name-as-main');
let ExtractTextPlugin = require('extract-text-webpack-plugin');
var CommonsChunkPlugin = require('webpack/lib/optimize/CommonsChunkPlugin');
let utils = require('requirefrom')('src/utils');
let fromRoot = utils('fromRoot');
let OptmzBundles = require('./OptmzBundles');
let OptmzUiModules = require('./OptmzUiModules');
let babelOptions = require('./babelOptions');
let babelExclude = [/[\/\\](webpackShims|node_modules|bower_components)[\/\\]/];
let kbnTag = `Kibana ${ utils('packageJson').version }`;
class BaseOptimizer extends EventEmitter {
class BaseOptimizer {
constructor(opts) {
super();
this.env = opts.env;
this.bundles = opts.bundles;
this.profile = opts.profile || false;
this.sourceMaps = opts.sourceMaps || false;
this.modules = new OptmzUiModules(opts.plugins);
this.bundles = new OptmzBundles(
opts,
`${kbnTag} ${this.constructor.name} ${ this.sourceMaps ? ' (with source maps)' : ''}`
);
switch (opts.sourceMaps) {
case true:
this.sourceMaps = 'source-map';
break;
_.bindAll(this, 'getConfig');
case 'fast':
this.sourceMaps = 'cheap-module-eval-source-map';
break;
default:
this.sourceMaps = opts.sourceMaps || false;
break;
}
this.unsafeCache = opts.unsafeCache || false;
if (typeof this.unsafeCache === 'string') {
this.unsafeCache = [
new RegExp(this.unsafeCache.slice(1, -1))
];
}
}
async initCompiler() {
if (this.compiler) return this.compiler;
let compilerConfig = this.getConfig();
this.compiler = webpack(compilerConfig);
this.compiler.plugin('done', stats => {
if (!this.profile) return;
let path = resolve(this.env.workingDir, 'stats.json');
let content = JSON.stringify(stats.toJson());
writeFile(path, content, function (err) {
if (err) throw err;
});
});
return this.compiler;
}
getConfig() {
@ -34,18 +65,21 @@ class BaseOptimizer extends EventEmitter {
return {
context: fromRoot('.'),
entry: this.bundles.getEntriesConfig(),
entry: this.bundles.toWebpackEntries(),
devtool: this.sourceMaps ? '#source-map' : false,
devtool: this.sourceMaps,
profile: this.profile || false,
output: {
path: this.bundles.dir,
path: this.env.workingDir,
filename: '[name].bundle.js',
sourceMapFilename: '[file].map',
publicPath: '/bundles/',
devtoolModuleFilenameTemplate: '[absolute-resource-path]'
},
recordsPath: resolve(this.env.workingDir, 'webpack.records'),
plugins: [
new webpack.ResolverPlugin([
new DirectoryNameAsMain()
@ -54,7 +88,11 @@ class BaseOptimizer extends EventEmitter {
new webpack.optimize.DedupePlugin(),
new ExtractTextPlugin('[name].style.css', {
allChunks: true
})
}),
new CommonsChunkPlugin({
name: 'commons',
filename: 'commons.bundle.js'
}),
],
module: {
@ -71,36 +109,66 @@ class BaseOptimizer extends EventEmitter {
{ test: /\.(html|tmpl)$/, loader: 'raw' },
{ test: /\.png$/, loader: 'url?limit=10000&name=[path][name].[ext]' },
{ test: /\.(woff|woff2|ttf|eot|svg|ico)(\?|$)/, loader: 'file?name=[path][name].[ext]' },
{ test: /[\/\\]src[\/\\](plugins|ui)[\/\\].+\.js$/, loader: `auto-preload-rjscommon-deps${mapQ}` },
{ test: /[\/\\]src[\/\\](plugins|ui)[\/\\].+\.js$/, loader: `rjs-repack${mapQ}` },
{
test: /\.js$/,
exclude: /(node_modules|bower_components)/,
exclude: babelExclude.concat(this.env.noParse),
loader: 'babel',
query: babelOptions
query: babelOptions.webpack
},
{
// explicitly require .jsx extension to support jsx
test: /\.jsx$/,
exclude: /(node_modules|bower_components)/,
exclude: babelExclude.concat(this.env.noParse),
loader: 'babel',
query: _.defaults({
nonStandard: true
}, babelOptions)
query: defaults({
nonStandard: true,
}, babelOptions.webpack)
}
].concat(this.modules.loaders),
noParse: this.modules.noParse,
].concat(this.env.loaders),
postLoaders: this.env.postLoaders || [],
noParse: this.env.noParse,
},
resolve: {
extensions: ['.js', '.less', ''],
extensions: ['.js', '.jsx', '.less', ''],
postfixes: [''],
modulesDirectories: ['node_modules'],
modulesDirectories: ['webpackShims', 'node_modules'],
loaderPostfixes: ['-loader', ''],
root: fromRoot('.'),
alias: this.modules.aliases
}
alias: this.env.aliases,
unsafeCache: this.unsafeCache,
},
};
}
failedStatsToError(stats) {
let statFormatOpts = {
hash: false, // add the hash of the compilation
version: false, // add webpack version information
timings: false, // add timing information
assets: false, // add assets information
chunks: false, // add chunk information
chunkModules: false, // add built modules information to chunk information
modules: false, // add built modules information
cached: false, // add also information about cached (not built) modules
reasons: false, // add information about the reasons why modules are included
source: false, // add the source code of modules
errorDetails: false, // add details to errors (like resolving log)
chunkOrigins: false, // add the origins of chunks and chunk merging info
modulesSort: false, // (string) sort the modules by that field
chunksSort: false, // (string) sort the chunks by that field
assetsSort: false, // (string) sort the assets by that field
children: false,
};
let details = stats.toString(defaults({ colors: true }, statFormatOpts));
return Boom.create(
500,
`Optimizations failure.\n${details.split('\n').join('\n ')}\n`,
stats.toJson(statFormatOpts)
);
}
}
module.exports = BaseOptimizer;

View file

@ -1,47 +0,0 @@
let _ = require('lodash');
let webpack = require('webpack');
let BaseOptimizer = require('./BaseOptimizer');
module.exports = class CachedOptimizer extends BaseOptimizer {
constructor(opts) {
super(opts);
_.bindAll(this, 'init', 'setupCompiler', 'run');
}
init(autoRun) {
return this.bundles.ensureAllEntriesExist().then(autoRun ? this.run : this.setupCompiler);
}
setupCompiler(autoRun) {
this.entries = this.bundles.getMissingEntries();
if (!this.entries.length) return;
this.compilerConfig = this.getConfig();
this.compiler = webpack(this.compilerConfig);
if (autoRun) this.run();
}
run() {
if (!this.compiler) {
return this.setupCompiler(true);
}
var self = this;
let entries = self.entries;
self.emit('build-start', entries);
self.compiler.run(function (err, stats) {
if (err) {
self.emit('error', entries, stats, err);
}
else if (stats.hasErrors() || stats.hasWarnings()) {
self.emit('error', entries, stats, new Error('Optimization must not produce errors or warnings'));
}
else {
self.emit('done', entries, stats);
}
});
}
};

View file

@ -0,0 +1,28 @@
let { fromNode } = require('bluebird');
let { writeFile } = require('fs'); // NOTE(review): unused in the visible code — confirm before removing
let BaseOptimizer = require('./BaseOptimizer');
let fromRoot = require('../utils/fromRoot'); // NOTE(review): unused in the visible code — confirm before removing

/**
 * One-shot optimizer: runs a single webpack compilation using the compiler
 * prepared by BaseOptimizer (presumably writing bundles to disk via
 * webpack's default output file system — verify in BaseOptimizer).
 */
module.exports = class FsOptimizer extends BaseOptimizer {
  // prepare the webpack compiler (initCompiler comes from BaseOptimizer)
  async init() {
    await this.initCompiler();
  }

  /**
   * Run the compilation once. Rejects with the formatted Boom error from
   * failedStatsToError() when the build reports errors or warnings.
   */
  async run() {
    if (!this.compiler) await this.init();

    // NOTE(review): `stats` is never read after the await in the visible
    // code — confirm before simplifying
    let stats = await fromNode(cb => {
      this.compiler.run((err, stats) => {
        // webpack reports fatal problems through `err`; a missing stats
        // object is treated the same way
        if (err || !stats) return cb(err);

        if (stats.hasErrors() || stats.hasWarnings()) {
          return cb(this.failedStatsToError(stats));
        }
        else {
          cb(null, stats);
        }
      });
    });
  }
};

View file

@ -1,53 +0,0 @@
let _ = require('lodash');
let { join } = require('path');
let { promisify } = require('bluebird');
let webpack = require('webpack');
let MemoryFileSystem = require('memory-fs');
let BaseOptimizer = require('./BaseOptimizer');
module.exports = class LiveOptimizer extends BaseOptimizer {
constructor(opts) {
super(opts);
this.compilerConfig = this.getConfig();
// this.compilerConfig.profile = true;
this.compiler = webpack(this.compilerConfig);
this.outFs = this.compiler.outputFileSystem = new MemoryFileSystem();
_.bindAll(this, 'get', 'init');
this.compile = promisify(this.compiler.run, this.compiler);
}
init() {
return this.bundles.ensureAllEntriesExist(true);
}
get(id) {
let self = this;
let fs = self.outFs;
let filename = join(self.compiler.outputPath, `${id}.bundle.js`);
let mapFilename = join(self.compiler.outputPath, `${id}.bundle.js.map`);
let styleFilename = join(self.compiler.outputPath, `${id}.style.css`);
if (!self.active) {
self.active = self.compile().finally(function () {
self.active = null;
});
}
return self.active.then(function (stats) {
if (stats.hasErrors() || stats.hasWarnings()) {
console.log(stats.toString({ colors: true }));
return null;
}
return {
bundle: fs.readFileSync(filename),
sourceMap: self.sourceMaps ? fs.readFileSync(mapFilename) : false,
style: fs.readFileSync(styleFilename)
};
});
}
};

View file

@ -1,111 +0,0 @@
let _ = require('lodash');
let { join } = require('path');
let { resolve, promisify } = require('bluebird');

let rimraf = promisify(require('rimraf'));
let mkdirp = promisify(require('mkdirp'));
let stat = promisify(require('fs').stat);
let read = promisify(require('fs').readFile);
let write = promisify(require('fs').writeFile);
let unlink = promisify(require('fs').unlink); // NOTE(review): unused in the visible code — confirm
let readdir = promisify(require('fs').readdir); // NOTE(review): unused in the visible code — confirm
let readSync = require('fs').readFileSync;

// default lodash template used to generate each *.entry.js file
let entryFileTemplate = _.template(readSync(join(__dirname, 'entry.js.tmpl')));

/**
 * Tracks the entry files and bundle outputs that live in the optimizer's
 * working directory, and knows how to (re)generate the entry files.
 */
class OptmzBundles {
  /**
   * @param {Object} opts - expects `bundleDir`, `sourceMaps`, `entries`
   * @param {string} optimizerTagline - embedded in generated entry files
   * @throws {TypeError} when `bundleDir` is not a string
   */
  constructor(opts, optimizerTagline) {
    this.dir = _.get(opts, 'bundleDir');
    if (!_.isString(this.dir)) {
      throw new TypeError('Optimizer requires a working directory');
    }

    this.sourceMaps = _.get(opts, 'sourceMaps');

    // normalize each entry spec into { id, deps, modules, path, bundlePath,
    // exists, content } and render its entry-file content up front
    this.entries = _.get(opts, 'entries', []).map(function (spec) {
      let entry = {
        id: spec.id,
        deps: _.get(spec, 'deps', []),
        modules: _.get(spec, 'modules', []),

        path: join(this.dir, spec.id + '.entry.js'),
        bundlePath: join(this.dir, spec.id + '.bundle.js'),

        exists: undefined, // filled in by checkIfEntryExists
        content: undefined
      };

      // specs may supply a custom template; the rendered text is compared
      // against the on-disk file to detect staleness
      entry.content = _.get(spec, 'template', entryFileTemplate)({
        entry: entry,
        optimizerTagline: optimizerTagline
      });

      return entry;
    }, this);

    _.bindAll(this, [
      'ensureDir',
      'ensureAllEntriesExist',
      'checkIfEntryExists',
      'writeEntryFile',
      'clean',
      'getMissingEntries',
      'getEntriesConfig'
    ]);
  }

  // create the working directory if needed
  ensureDir() {
    return mkdirp(this.dir);
  }

  /**
   * Ensure the working directory and all entry files exist.
   * NOTE(review): the ternary runs checkIfEntryExists only when `overwrite`
   * is truthy, and writeEntryFile is then mapped over every entry regardless
   * — this reads backwards; confirm the intended semantics before changing.
   */
  ensureAllEntriesExist(overwrite) {
    return this.ensureDir()
    .return(this.entries)
    .map(overwrite ? this.checkIfEntryExists : _.noop)
    .return(this.entries)
    .map(this.writeEntryFile)
    .return(undefined);
  }

  /**
   * Set entry.exists: true only when the on-disk entry file matches the
   * freshly rendered content AND a bundle file is present.
   */
  checkIfEntryExists(entry) {
    return resolve([
      read(entry.path),
      stat(entry.bundlePath)
    ])
    .settle()
    .spread(function (readEntry, statBundle) {
      let existingEntry = readEntry.isFulfilled() && readEntry.value().toString('utf8');
      let bundleExists = statBundle.isFulfilled() && !statBundle.value().isDirectory();
      entry.exists = existingEntry && bundleExists && (existingEntry === entry.content);
    });
  }

  // remove any stale entry/bundle files, then write the fresh entry file
  writeEntryFile(entry) {
    return this.clean([entry.path, entry.bundlePath]).then(function () {
      entry.exists = false;
      return write(entry.path, entry.content, { encoding: 'utf8' });
    });
  }

  // unlinks files, swallows missing file errors
  clean(paths) {
    return resolve(
      _.flatten([paths]).map(function (path) {
        return rimraf(path);
      })
    )
    .settle()
    .return(undefined);
  }

  // entries whose `exists` flag is falsy (i.e. need to be built)
  getMissingEntries() {
    return _.reject(this.entries, 'exists');
  }

  // webpack `entry` config object mapping bundle id -> entry file path
  getEntriesConfig() {
    return _.transform(this.getMissingEntries(), function (map, entry) {
      map[entry.id] = entry.path;
    }, {});
  }
}

module.exports = OptmzBundles;

View file

@ -1,98 +0,0 @@
var _ = require('lodash');
var fromRoot = require('../utils/fromRoot');

// turn a module path into a RegExp matching that path with an optional
// trailing .js extension
var asRegExp = _.flow(
  _.escapeRegExp,
  function (path) {
    return path + '(?:\\.js)?$';
  },
  RegExp
);

/**
 * Collect the webpack-related exports (aliases, loaders, noParse patterns)
 * declared by every plugin.
 *
 * @param {Array} plugins - plugins with a `uiExportsSpecs` property
 * @throws {TypeError} when two plugins claim the same module id, or a spec
 *   has no resolvable path
 */
function OptmzUiExports(plugins) {
  // regular expressions which will prevent webpack from parsing the file
  var noParse = this.noParse = [];

  // webpack aliases, like require paths, mapping a prefix to a directory
  var aliases = this.aliases = {
    ui: fromRoot('src/ui/public'),
    testHarness: fromRoot('src/testHarness/public')
  };

  // webpack loaders map loader configuration to regexps
  var loaders = this.loaders = [];

  // module id -> plugin id, used to detect conflicting claims
  var claimedModuleIds = {};

  _.each(plugins, function (plugin) {
    var specs = plugin.uiExportsSpecs;

    // add an alias for this plugin's public directory
    if (plugin.publicDir) {
      aliases[`plugins/${plugin.id}`] = plugin.publicDir;
    }

    // consume the plugin's "modules" exports
    _.forOwn(specs.modules, function (spec, id) {
      if (claimedModuleIds[id]) {
        throw new TypeError(`Plugin ${plugin.id} attempted to override export "${id}" from ${claimedModuleIds[id]}`);
      }
      claimedModuleIds[id] = plugin.id;

      // configurable via spec
      var modulePath;
      var parse = true;
      var importsSpec = null;
      var exportsSpec = null;
      var exposeSpec = null;

      // basic style, just a path
      if (_.isString(spec)) modulePath = spec;

      // array style: [path, imports, exports]
      if (_.isArray(spec)) {
        modulePath = spec[0];
        importsSpec = spec[1];
        exportsSpec = spec[2];
      }

      // object style: every field optional except path
      if (_.isPlainObject(spec)) {
        modulePath = spec.path;
        parse = _.get(spec, 'parse', parse);
        importsSpec = _.get(spec, 'imports', importsSpec);
        exportsSpec = _.get(spec, 'exports', exportsSpec);
        exposeSpec = _.get(spec, 'expose', exposeSpec);
      }

      if (!modulePath) {
        throw new TypeError('Invalid spec definition, unable to identify path');
      }

      aliases[id] = modulePath;

      // build an imports!exports!expose loader chain when requested
      var loaderParts = [];
      if (importsSpec) {
        loaderParts.push(`imports?${importsSpec}`);
      }
      if (exportsSpec) loaderParts.push(`exports?${exportsSpec}`);
      if (exposeSpec) loaderParts.push(`expose?${exposeSpec}`);
      if (loaderParts.length) loaders.push({ test: asRegExp(modulePath), loader: loaderParts.join('!') });
      if (!parse) noParse.push(asRegExp(modulePath));
    });

    // consume the plugin's "loaders" exports
    _.each(specs.loaders, function (loader) {
      loaders.push(loader);
    });

    // consume the plugin's "noParse" exports
    _.each(specs.noParse, function (regExp) {
      noParse.push(regExp);
    });
  });
}

module.exports = OptmzUiExports;

View file

@ -1,118 +0,0 @@
let _ = require('lodash');
let webpack = require('webpack');
let BaseOptimizer = require('./BaseOptimizer');

// bundle lifecycle states; the current state is emitted as an event name
const STATUS_BUNDLE_INVALID = 'bundle invalid';
const STATUS_BUNDLING = 'optimizing';
const STATUS_REBUNDLING = 'bundle invalid during optimizing';
const STATUS_ERROR = 'error';
const STATUS_DONE = 'done';

/**
 * Optimizer that keeps a webpack watcher running and rebuilds bundles
 * whenever source files change, emitting its status transitions as events.
 */
class WatchingOptimizer extends BaseOptimizer {
  constructor(opts) {
    super(opts);
    this.bundleStatus = null;
    _.bindAll(this, 'init', 'setupCompiler', 'onBundlesInvalid', 'setStatus', 'enable', 'disable');
    this.run = this.enable; // enable makes a bit more sense here, but alias for consistency with CachedOptimizer
  }

  // regenerate all entry files, then prepare (and optionally start) the watcher
  init(autoEnable) {
    return this.bundles.ensureAllEntriesExist(true).then(autoEnable ? this.enable : this.setupCompiler);
  }

  /**
   * Create the webpack compiler and wire its plugin hooks to our status
   * transitions. Skipped entirely when there are no entries to build.
   */
  setupCompiler(autoEnable) {
    if (!_.size(this.bundles.entries)) return;

    this.compilerConfig = this.getConfig();
    this.compiler = webpack(this.compilerConfig);

    this.compiler.plugin('watch-run', _.partial(this.setStatus, STATUS_BUNDLING));
    this.compiler.plugin('invalid', this.onBundlesInvalid);
    this.compiler.plugin('failed', _.partial(this.setStatus, STATUS_ERROR));
    this.compiler.plugin('done', _.partial(this.setStatus, STATUS_DONE));

    if (autoEnable) this.enable();
  }

  // translate webpack's "invalid" hook into the right status for our state
  onBundlesInvalid() {
    switch (this.bundleStatus || null) {
      case STATUS_BUNDLING:
      case STATUS_REBUNDLING:
        // if the source changed during building, we immediately rebuild
        return this.setStatus(STATUS_REBUNDLING);
      case null:
        // the bundle has to be something before that something can be invalid
        return;
      default:
        return this.setStatus(STATUS_BUNDLE_INVALID);
    }
  }

  /**
   * Record and emit a status change. For final states ('done'/'error') the
   * stats/error are read off the watcher.
   * NOTE(review): assumes webpack's Watching instance exposes `.stats` and
   * `.error` by the time these hooks fire — confirm for this webpack version.
   */
  setStatus(status) {
    let self = this;
    let entries = self.bundles.entries;

    let stats;
    let error;
    let shouldBeFinal = false;

    switch (status) {
      case 'done':
        stats = self.watcher.stats;
        error = null;
        shouldBeFinal = true;

        // a "done" build that produced errors is downgraded to 'error'
        if (stats.hasErrors()) {
          error = new Error('Optimization must not produce errors or warnings');
          status = 'error';
        }
        break;

      case 'error':
        stats = self.watcher.stats;
        error = self.watcher.error;
    }

    let apply = function () {
      clearTimeout(self.tentativeStatusChange);
      self.tentativeStatusChange = null;
      self.emit(self.bundleStatus = status, entries, stats, error);
    };

    if (shouldBeFinal) {
      // this looks race-y, but it's how webpack does it: https://goo.gl/ShVo2o
      self.tentativeStatusChange = setTimeout(apply, 0);
    } else {
      apply();
    }

    // webpack allows some plugins to be async, we don't want to hold up webpack,
    // so just always callback if we get a cb();
    let cb = _.last(arguments);
    if (typeof cb === 'function') cb();
  }

  // start (or lazily create) the watcher; throws if already watching
  enable() {
    if (!this.compiler) {
      return this.setupCompiler(true);
    }

    if (this.watcher) {
      throw new Error('WatchingOptimizer already watching!');
    }

    this.watcher = this.compiler.watch({}, _.noop);
  }

  // tear down the watcher and compiler; safe to call when not running
  disable() {
    if (!this.compiler) return;
    if (!this.watcher) return;

    this.watcher.close();
    this.watcher = null;
    this.compiler = null;
  }
}

module.exports = WatchingOptimizer;

View file

@ -0,0 +1,14 @@
var fromRoot = require('requirefrom')('src/utils')('fromRoot');

// babel options shared with the webpack (browser) build
exports.webpack = {
  stage: 1,
  nonStandard: false,
  optional: ['runtime']
};

// babel options for node: same as the webpack options, but skip transpiling
// our own src/ tree and anything under node_modules/bower_components
exports.node = Object.assign({
  ignore: [
    fromRoot('src'),
    /[\\\/](node_modules|bower_components)[\\\/]/
  ]
}, exports.webpack);

View file

@ -1,5 +1,7 @@
module.exports = {
optional: ['runtime'],
exports.webpack = {
stage: 1,
nonStandard: false
nonStandard: false,
optional: ['runtime']
};
exports.node = Object.assign({}, exports.webpack);

View file

@ -1,78 +0,0 @@
let _ = require('lodash');
let { resolve } = require('path');
let { readFileSync } = require('fs');

let src = require('requirefrom')('src');
let fromRoot = src('utils/fromRoot');
let pathContains = src('utils/pathContains'); // NOTE(review): unused in the visible code — confirm
let LiveOptimizer = src('optimize/LiveOptimizer');

// id of the single bundle this class produces
let id = 'tests';
let globAll = require('./globAll');
let testEntryFileTemplate = _.template(readFileSync(resolve(__dirname, './testBundleEntry.js.tmpl')));

/**
 * Builds the in-memory "tests" bundle: every public __tests__ file under
 * src/ plus each plugin app's modules, compiled via a LiveOptimizer.
 */
class TestBundler {
  constructor(kbnServer) {
    this.kbnServer = kbnServer;
    // the glob + optimizer setup only ever runs once
    this.init = _.once(this.init);
    _.bindAll(this, ['init', 'findTestFiles', 'setupOptimizer', 'render']);
  }

  init() {
    return this.findTestFiles().then(this.setupOptimizer);
  }

  // collect every test file inside a public directory under src/
  findTestFiles() {
    return globAll(fromRoot('src'), [
      '**/public/**/__tests__/**/*.js'
    ]);
  }

  /**
   * Build the LiveOptimizer for the tests bundle from the discovered test
   * files plus each plugin app's modules/related plugins.
   *
   * @param {string[]} testFiles - absolute paths from findTestFiles()
   * @returns {Promise} resolves when the optimizer entry files are written
   */
  setupOptimizer(testFiles) {
    let plugins = this.kbnServer.plugins;
    let bundleDir = this.kbnServer.config.get('optimize.bundleDir');

    let deps = [];
    let modules = [];

    if (testFiles) {
      modules = modules.concat(testFiles);
    }

    plugins.forEach(function (plugin) {
      if (!plugin.app) return;
      modules = modules.concat(plugin.app.getModules());
      deps = deps.concat(plugin.app.getRelatedPlugins());
    });

    this.optimizer = new LiveOptimizer({
      sourceMaps: true,
      bundleDir: bundleDir,
      entries: [
        {
          id: id,
          deps: deps,
          modules: modules,
          template: testEntryFileTemplate
        }
      ],
      plugins: plugins
    });

    return this.optimizer.init();
  }

  /**
   * Compile (or re-compile) the tests bundle and resolve with the
   * LiveOptimizer output for it.
   */
  render() {
    let self = this;
    // true only on the very first render, before setupOptimizer has run
    let first = !this.optimizer;
    let server = this.kbnServer.server;

    return self.init()
    .then(function () {
      server.log(['optimize', 'testHarness', first ? 'info' : 'debug'], 'Test harness built, compiling test bundle');
      return self.optimizer.get(id);
    });
  }
}

module.exports = TestBundler;

View file

@ -1,17 +0,0 @@
let _ = require('lodash');
let { resolve } = require('path');
let { promisify } = require('bluebird');
let { all } = require('bluebird');
let glob = promisify(require('glob'));
module.exports = function (path, patterns) {
return all([].concat(patterns || []))
.map(function (pattern) {
return glob(pattern, { cwd: path, ignore: '**/_*.js' });
})
.then(_.flatten)
.then(_.uniq)
.map(function (match) {
return resolve(path, match);
});
};

View file

@ -1,52 +0,0 @@
module.exports = function (kbnServer, server, config) {
if (!config.get('env.dev')) return;
let Boom = require('boom');
let src = require('requirefrom')('src');
let fromRoot = src('utils/fromRoot');
let TestBundler = require('./TestBundler');
let bundler = new TestBundler(kbnServer, fromRoot('src'));
let renderPromise = false;
let renderComplete = false;
function send(reply, part, mimeType) {
if (!renderPromise || (part === 'bundle' && renderComplete)) {
renderPromise = bundler.render();
renderComplete = false;
renderPromise.then(function () { renderComplete = true; });
}
renderPromise.then(function (output) {
if (!output || !output.bundle) {
return reply(Boom.create(500, 'failed to build test bundle'));
}
return reply(output[part]).type(mimeType);
}, reply);
}
server.route({
path: '/bundles/tests.bundle.js',
method: 'GET',
handler: function (req, reply) {
send(reply, 'bundle', 'application/javascript');
}
});
server.route({
path: '/bundles/tests.bundle.js.map',
method: 'GET',
handler: function (req, reply) {
send(reply, 'sourceMap', 'text/plain');
}
});
server.route({
path: '/bundles/tests.bundle.style.css',
method: 'GET',
handler: function (req, reply) {
send(reply, 'style', 'text/css');
}
});
};

View file

@ -1,20 +0,0 @@
/**
* Optimized application entry file
*
* This is programatically created and updated, do not modify
*
* built using: <%= optimizerTagline %>
* includes code from:
<%
entry.deps.sort().forEach(function (plugin) {
print(` * - ${plugin}\n`);
})
%> *
*/
require('ui/testHarness');
<%
entry.modules.slice(0).reverse().forEach(function (id) {
print(`require('${id}');\n`);
});
%>require('ui/testHarness').bootstrap();

View file

@ -1,20 +0,0 @@
/**
* Optimized application entry file
*
* This is programatically created and updated, do not modify
*
* built using: <%= optimizerTagline %>
* includes code from:
<%
entry.deps.sort().forEach(function (plugin) {
print(` * - ${plugin}\n`);
})
%> *
*/
require('ui/chrome');
<%
entry.modules.forEach(function (id) {
if (id !== 'ui/chrome') print(`require('${id}');\n`);
});
%>require('ui/chrome').bootstrap(/* xoxo */);

View file

@ -1,115 +1,53 @@
module.exports = function (kbnServer, server, config) {
module.exports = async (kbnServer, server, config) => {
if (!config.get('optimize.enabled')) return;
var _ = require('lodash');
var { resolve } = require('path');
var fromRoot = require('../utils/fromRoot');
var CachedOptimizer = require('./CachedOptimizer');
var WatchingOptimizer = require('./WatchingOptimizer');
var bundleDir = resolve(config.get('optimize.bundleDir'));
var status = kbnServer.status.create('optimize');
server.exposeStaticDir('/bundles/{path*}', bundleDir);
function logStats(tag, stats) {
if (config.get('logging.json')) {
server.log(['optimize', tag], _.pick(stats.toJson(), 'errors', 'warnings'));
} else {
server.log(['optimize', tag], `\n${ stats.toString({ colors: true }) }`);
}
// the lazy optimizer sets up two threads, one is the server listening
// on 5601 and the other is a server listening on 5602 that builds the
// bundles in a "middleware" style.
//
// the server listening on 5601 may be restarted a number of times, depending
// on the watch setup managed by the cli. It proxies all bundles/* requests to
// the other server. The server on 5602 is long running, in order to prevent
// complete rebuilds of the optimize content.
let lazy = config.get('optimize.lazy');
if (lazy) {
return await kbnServer.mixin(require('./lazy/lazy'));
}
function describeEntries(entries) {
let ids = _.pluck(entries, 'id').join('", "');
return `application${ entries.length === 1 ? '' : 's'} "${ids}"`;
let bundles = kbnServer.bundles;
server.exposeStaticDir('/bundles/{path*}', bundles.env.workingDir);
await bundles.writeEntryFiles();
// in prod, only bundle what looks invalid or missing
if (config.get('env.prod')) bundles = await kbnServer.bundles.getInvalidBundles();
// we might not have any work to do
if (!bundles.getIds().length) {
server.log(
['debug', 'optimize'],
`All bundles are cached and ready to go!`
);
return;
}
function onMessage(handle, filter) {
filter = filter || _.constant(true);
process.on('message', function (msg) {
var optimizeMsg = msg && msg.optimizeMsg;
if (!optimizeMsg || !filter(optimizeMsg)) return;
handle(optimizeMsg);
});
}
var role = config.get('optimize._workerRole');
if (role === 'receive') {
// query for initial status
process.send(['WORKER_BROADCAST', { optimizeMsg: '?' }]);
onMessage(function (wrkrStatus) {
status[wrkrStatus.state](wrkrStatus.message);
});
}
if (role === 'send') {
let send = function () {
process.send(['WORKER_BROADCAST', { optimizeMsg: status }]);
};
status.on('change', send);
onMessage(send, _.partial(_.eq, '?'));
send();
}
let watching = config.get('optimize.watch');
let Optimizer = watching ? WatchingOptimizer : CachedOptimizer;
let optmzr = kbnServer.optimizer = new Optimizer({
// only require the FsOptimizer when we need to
let FsOptimizer = require('./FsOptimizer');
let optimizer = new FsOptimizer({
env: bundles.env,
bundles: bundles,
profile: config.get('optimize.profile'),
sourceMaps: config.get('optimize.sourceMaps'),
bundleDir: bundleDir,
entries: _.map(kbnServer.uiExports.allApps(), function (app) {
return {
id: app.id,
deps: app.getRelatedPlugins(),
modules: app.getModules()
};
}),
plugins: kbnServer.plugins
unsafeCache: config.get('optimize.unsafeCache'),
});
server.on('close', _.bindKey(optmzr.disable || _.noop, optmzr));
server.log(
['info', 'optimize'],
`Optimizing and caching ${bundles.desc()}. This may take a few minutes`
);
kbnServer.mixin(require('./browserTests'))
.then(function () {
let start = Date.now();
await optimizer.run();
let seconds = ((Date.now() - start) / 1000).toFixed(2);
if (role === 'receive') return;
optmzr.on('bundle invalid', function () {
status.yellow('Source file change detected, reoptimizing source files');
});
optmzr.on('done', function (entries, stats) {
logStats('debug', stats);
status.green(`Optimization of ${describeEntries(entries)} complete`);
});
optmzr.on('error', function (entries, stats, err) {
if (stats) logStats('fatal', stats);
status.red('Optimization failure! ' + err.message);
});
return optmzr.init()
.then(function () {
let entries = optmzr.bundles.getMissingEntries();
if (!entries.length) {
if (watching) {
status.red('No optimizable applications found');
} else {
status.green('Reusing previously cached application source files');
}
return;
}
if (watching) {
status.yellow(`Optimizing and watching all application source files`);
} else {
status.yellow(`Optimizing and caching ${describeEntries(entries)}`);
}
optmzr.run();
return null;
});
});
server.log(['info', 'optimize'], `Optimization of ${bundles.desc()} complete in ${seconds} seconds`);
};

View file

@ -0,0 +1,117 @@
let { once, pick, size } = require('lodash'); // NOTE(review): `pick` and `size` unused in the visible code — confirm
let { join } = require('path');
let Boom = require('boom'); // NOTE(review): unused in the visible code — confirm
let BaseOptimizer = require('../BaseOptimizer');
let WeirdControlFlow = require('./WeirdControlFlow');

/**
 * Optimizer for "lazy" mode: keeps a webpack watcher running and gates
 * asset requests behind the current build via a WeirdControlFlow instance,
 * so requests block until the in-progress build finishes.
 */
module.exports = class LazyOptimizer extends BaseOptimizer {
  constructor(opts) {
    super(opts);
    this.log = opts.log || (() => null);
    this.prebuild = opts.prebuild || false;

    // tiny stopwatch for log messages; end() returns seconds as a string
    this.timer = {
      ms: null,
      start: () => this.timer.ms = Date.now(),
      end: () => this.timer.ms = ((Date.now() - this.timer.ms) / 1000).toFixed(2)
    };

    // build gate that getPath() callers wait on
    this.build = new WeirdControlFlow();
  }

  /**
   * Write entry files, create the compiler, hook the build gate into
   * webpack's watch lifecycle, and start watching. When `prebuild` is set,
   * also wait for the first build to finish.
   */
  async init() {
    this.initializing = true;

    await this.bundles.writeEntryFiles();
    await this.initCompiler();

    // close the gate whenever webpack starts a watch run
    this.compiler.plugin('watch-run', (w, webpackCb) => {
      this.build.work(once(() => {
        this.timer.start();
        this.logRunStart();
        webpackCb();
      }));
    });

    // open or fail the gate when a run completes
    this.compiler.plugin('done', stats => {
      if (!stats.hasErrors() && !stats.hasWarnings()) {
        this.logRunSuccess();
        this.build.success();
        return;
      }

      let err = this.failedStatsToError(stats);
      this.logRunFailure(err);
      this.build.failure(err);
      // force another run so a later fix is picked up
      this.watching.invalidate();
    });

    this.watching = this.compiler.watch({ aggregateTimeout: 200 }, err => {
      if (err) {
        this.log('fatal', err);
        process.exit(1);
      }
    });

    let buildPromise = this.build.get();
    if (this.prebuild) await buildPromise;

    this.initializing = false;
    this.log(['info', 'optimize'], {
      tmpl: `Lazy optimization of ${this.bundles.desc()} ready`,
      bundles: this.bundles.getIds()
    });
  }

  // resolve an output-relative path once the current build has finished
  async getPath(relativePath) {
    await this.build.get();
    return join(this.compiler.outputPath, relativePath);
  }

  // register the /bundles asset route on the given hapi server
  bindToServer(server) {
    server.route({
      path: '/bundles/{asset*}',
      method: 'GET',
      handler: async (request, reply) => {
        try {
          let path = await this.getPath(request.params.asset);
          return reply.file(path);
        } catch (error) {
          console.log(error.stack);
          return reply(error);
        }
      }
    });
  }

  logRunStart() {
    this.log(['info', 'optimize'], {
      tmpl: `Lazy optimization started`,
      bundles: this.bundles.getIds()
    });
  }

  logRunSuccess() {
    this.log(['info', 'optimize'], {
      tmpl: 'Lazy optimization <%= status %> in <%= seconds %> seconds',
      bundles: this.bundles.getIds(),
      status: 'success',
      seconds: this.timer.end()
    });
  }

  logRunFailure(err) {
    // errors during initialization are reported to the server elsewhere,
    // unlike the rest of the errors produced here; let's not muddy the
    // console with extra errors
    if (this.initializing) return;

    this.log(['fatal', 'optimize'], {
      tmpl: 'Lazy optimization <%= status %> in <%= seconds %> seconds<%= err %>',
      bundles: this.bundles.getIds(),
      status: 'failed',
      seconds: this.timer.end(),
      err: err
    });
  }
};

View file

@ -0,0 +1,22 @@
let { Server } = require('hapi');
let { fromNode } = require('bluebird');
let Boom = require('boom');
module.exports = class LazyServer {
constructor(host, port, optimizer) {
this.optimizer = optimizer;
this.server = new Server();
this.server.connection({
host: host,
port: port
});
}
async init() {
await this.optimizer.init();
this.optimizer.bindToServer(this.server);
await fromNode(cb => this.server.start(cb));
}
};

View file

@ -0,0 +1,58 @@
let { fromNode } = require('bluebird');
module.exports = class WeirdControlFlow {
constructor(work) {
this.handlers = [];
}
get() {
return fromNode(cb => {
if (this.ready) return cb();
this.handlers.push(cb);
this.start();
});
}
work(work) {
this._work = work;
this.stop();
if (this.handlers.length) {
this.start();
}
}
start() {
if (this.running) return;
this.stop();
if (this._work) {
this.running = true;
this._work();
}
}
stop() {
this.ready = false;
this.error = false;
this.running = false;
}
success(...args) {
this.stop();
this.ready = true;
this._flush(args);
}
failure(err) {
this.stop();
this.error = err;
this._flush([err]);
}
_flush(args) {
for (let fn of this.handlers.splice(0)) {
fn.apply(null, args);
}
}
};

35
src/optimize/lazy/lazy.js Normal file
View file

@ -0,0 +1,35 @@
module.exports = async (kbnServer, server, config) => {
let { isWorker } = require('cluster');
if (!isWorker) {
throw new Error(`lazy optimization is only available in "watch" mode`);
}
/**
* When running in lazy mode two workers/threads run in one
* of the modes: 'optmzr' or 'server'
*
* optmzr: this thread runs the LiveOptimizer and the LazyServer
* which serves the LiveOptimizer's output and blocks requests
* while the optimizer is running
*
* server: this thread runs the entire kibana server and proxies
* all requests for /bundles/* to the optmzr
*
* @param {string} process.env.kbnWorkerType
*/
switch (process.env.kbnWorkerType) {
case 'optmzr':
await kbnServer.mixin(require('./optmzrRole'));
break;
case 'server':
await kbnServer.mixin(require('./proxyRole'));
break;
default:
throw new Error(`unkown kbnWorkerType "${process.env.kbnWorkerType}"`);
}
};

View file

@ -0,0 +1,41 @@
module.exports = async (kbnServer, kibanaHapiServer, config) => {
let src = require('requirefrom')('src');
let fromRoot = src('utils/fromRoot');
let LazyServer = require('./LazyServer');
let LazyOptimizer = require('./LazyOptimizer');
let server = new LazyServer(
config.get('optimize.lazyHost'),
config.get('optimize.lazyPort'),
new LazyOptimizer({
log: (tags, data) => kibanaHapiServer.log(tags, data),
env: kbnServer.bundles.env,
bundles: kbnServer.bundles,
profile: config.get('optimize.profile'),
sourceMaps: config.get('optimize.sourceMaps'),
prebuild: config.get('optimize.lazyPrebuild'),
unsafeCache: config.get('optimize.unsafeCache'),
})
);
let ready = false;
let sendReady = () => {
if (!process.connected) return;
process.send(['WORKER_BROADCAST', { optimizeReady: ready }]);
};
process.on('message', (msg) => {
if (msg && msg.optimizeReady === '?') sendReady();
});
sendReady();
await server.init();
ready = true;
sendReady();
};

View file

@ -0,0 +1,44 @@
let { fromNode } = require('bluebird');
let { get, once } = require('lodash');

/**
 * "server" worker role: proxy every /bundles/* request to the sibling
 * optimizer worker, and hold startup until that worker reports it is ready
 * (or the lazyProxyTimeout elapses).
 */
module.exports = (kbnServer, server, config) => {
  server.route({
    path: '/bundles/{path*}',
    method: 'GET',
    handler: {
      proxy: {
        host: config.get('optimize.lazyHost'),
        port: config.get('optimize.lazyPort'),
        passThrough: true,
        xforward: true
      }
    }
  });

  return fromNode(cb => {
    // fail startup if the optimizer never reports ready in time
    let timeout = setTimeout(() => {
      cb(new Error('Server timedout waiting for the optimizer to become ready'));
    }, config.get('optimize.lazyProxyTimeout'));

    // log the "waiting" message at most once
    let waiting = once(() => {
      server.log(['info', 'optimize'], 'Waiting for optimizer completion');
    });

    // NOTE(review): returning here leaves the promise pending until the
    // timeout above rejects it — confirm this is the intended failure mode
    // when no IPC channel exists
    if (!process.connected) return;

    // query the optimizer worker for its current readiness state
    process.send(['WORKER_BROADCAST', { optimizeReady: '?' }]);

    process.on('message', (msg) => {
      switch (get(msg, 'optimizeReady')) {
        case true:
          clearTimeout(timeout);
          cb();
          break;
        case false:
          waiting();
          break;
      }
    });
  });
};

View file

@ -2,7 +2,7 @@ module.exports = function (kibana) {
return new kibana.Plugin({
uiExports: {
app: {
id: 'switcher',
id: 'appSwitcher',
main: 'plugins/appSwitcher/appSwitcher',
hidden: true,
autoload: kibana.autoload.styles

View file

@ -1,24 +0,0 @@
module.exports = function (kibana) {
let _ = require('lodash');
let fromRoot = require('../../utils/fromRoot');
let { readdirSync } = require('fs');
let { resolve, basename } = require('path');
let modules = {};
let metaLibs = resolve(__dirname, 'metaLibs');
readdirSync(metaLibs).forEach(function (file) {
if (file[0] === '.') return;
let name = basename(file, '.js') + '$';
modules[name] = resolve(metaLibs, file);
});
return new kibana.Plugin({
init: false,
uiExports: {
modules: modules,
noParse: [
/node_modules\/(angular|elasticsearch-browser|mocha)\//
]
}
});
};

View file

@ -1,4 +0,0 @@
{
"name": "bundledLibs",
"version": "1.0.0"
}

View file

@ -1,24 +1,10 @@
module.exports = function (kibana) {
module.exports = (kibana) => {
if (!kibana.config.get('env.dev')) return;
let utils = require('requirefrom')('src/utils');
let fromRoot = utils('fromRoot');
return new kibana.Plugin({
uiExports: {
spyModes: [
'plugins/devMode/visDebugSpyPanel'
],
modules: {
ngMock$: fromRoot('src/plugins/devMode/public/ngMock'),
fixtures: fromRoot('src/fixtures'),
testUtils: fromRoot('src/testUtils'),
'angular-mocks': {
path: require.resolve('angular-mocks'),
imports: 'angular'
},
}
]
}
});
};

View file

@ -23,7 +23,8 @@ module.exports = function (kibana) {
cert: Joi.string(),
key: Joi.string()
}).default(),
minimumVerison: Joi.string().default('1.4.4')
apiVersion: Joi.string().default('2.0'),
minimumVerison: Joi.string().default('2.0.0')
}).default();
},
@ -32,9 +33,10 @@ module.exports = function (kibana) {
// Expose the client to the server
exposeClient(server);
createProxy(server, 'GET', '/elasticsearch/{paths*}');
createProxy(server, 'POST', '/elasticsearch/_mget');
createProxy(server, 'POST', '/elasticsearch/_msearch');
createProxy(server, 'GET', '/{paths*}');
createProxy(server, 'POST', '/_mget');
createProxy(server, 'POST', '/{index}/_search');
createProxy(server, 'POST', '/_msearch');
function noBulkCheck(request, reply) {
if (/\/_bulk/.test(request.path)) {
@ -48,10 +50,9 @@ module.exports = function (kibana) {
createProxy(
server,
['PUT', 'POST', 'DELETE'],
'/elasticsearch/' + config.get('kibana.index') + '/{paths*}',
'/' + config.get('kibana.index') + '/{paths*}',
{
prefix: '/' + config.get('kibana.index'),
config: { pre: [ noBulkCheck ] }
pre: [ noBulkCheck ]
}
);

View file

@ -2,11 +2,13 @@ var portscanner = require('portscanner');
var path = require('path');
var Promise = require('bluebird');
var libesvm = require('libesvm');
var fromRoot = require('requirefrom')('src/utils')('fromRoot');
function startEs() {
var options = {
version: '1.4.4',
directory: path.join(__dirname, '..', '..', 'esvm'),
branch: 'master',
directory: fromRoot('esvm/test-es'),
purge: true,
config: {
'cluster.name': 'test',
'network.host': '127.0.0.1'
@ -28,7 +30,7 @@ function maybeStartES() {
return new Promise(function (resolve, reject) {
portscanner.checkPortStatus(9200, '127.0.0.1', function (err, status) {
if (err) return reject(err);
if (status === 'closed') return startEs().then(resolve);
if (status === 'closed') return startEs().then(resolve, reject);
resolve();
});
});

View file

@ -13,6 +13,7 @@ describe('plugins/elasticsearch', function () {
var get = sinon.stub().withArgs('elasticserach.minimumVerison').returns('1.4.3');
var config = function () { return { get: get }; };
server = {
log: _.noop,
config: config,
plugins: {
elasticsearch: {

View file

@ -9,14 +9,11 @@ describe('plugins/elasticsearch', function () {
describe('lib/health_check', function () {
var health;
var plugin;
var server;
var get;
var client;
beforeEach(function () {
// setup the plugin stub
plugin = {
@ -75,7 +72,7 @@ describe('plugins/elasticsearch', function () {
});
it('should set the cluster red if the ping fails, then to green', function () {
this.timeout(3000);
get.withArgs('elasticsearch.url').returns('http://localhost:9200');
get.withArgs('elasticsearch.minimumVerison').returns('1.4.4');
get.withArgs('kibana.index').returns('.my-kibana');
@ -100,7 +97,6 @@ describe('plugins/elasticsearch', function () {
});
it('should set the cluster red if the health check status is red, then to green', function () {
this.timeout(3000);
get.withArgs('elasticsearch.url').returns('http://localhost:9200');
get.withArgs('elasticsearch.minimumVerison').returns('1.4.4');
get.withArgs('kibana.index').returns('.my-kibana');
@ -124,7 +120,6 @@ describe('plugins/elasticsearch', function () {
});
it('should set the cluster yellow if the health check timed_out and create index', function () {
this.timeout(3000);
get.withArgs('elasticsearch.url').returns('http://localhost:9200');
get.withArgs('elasticsearch.minimumVerison').returns('1.4.4');
get.withArgs('kibana.index').returns('.my-kibana');

View file

@ -13,8 +13,6 @@ describe('plugins/elasticsearch', function () {
var kbnServer;
before(function () {
this.timeout(10000);
kbnServer = new KbnServer({
server: { autoListen: false },
logging: { quiet: true },
@ -38,17 +36,19 @@ describe('plugins/elasticsearch', function () {
function testRoute(options) {
if (typeof options.payload === 'object') {
options.payload = JSON.stringify(options.payload);
}
var statusCode = options.statusCode || 200;
describe(format('%s %s', options.method, options.url), function () {
it('should should return ' + statusCode, function (done) {
kbnServer.server.inject(options, function (res) {
try {
expect(res.statusCode).to.be(statusCode);
done();
} catch (e) {
done(e);
done = null;
} finally {
done && done();
}
});
});
@ -69,8 +69,8 @@ describe('plugins/elasticsearch', function () {
testRoute({
method: 'POST',
url: '/elasticsearch/.kibana',
payload: '{settings: {number_of_shards: 1, number_of_replicas: 1}}',
statusCode: 201
payload: {settings: { number_of_shards: 1, number_of_replicas: 1 }},
statusCode: 200
});
testRoute({
@ -85,27 +85,16 @@ describe('plugins/elasticsearch', function () {
statusCode: 400
});
testRoute({
method: 'GET',
url: '/elasticsearch/.kibana/_mapping/*/field/_source'
});
testRoute({
method: 'POST',
url: '/elasticsearch/.kibana/index-pattern/_search?fields=',
payload: '{query: {match_all: {}}, size: 2147483647}'
});
testRoute({
method: 'POST',
url: '/elasticsearch/.kibana/__kibanaQueryValidator/_validate/query?explain=true&ignore_unavailable=true',
payload: '{query: {query_string: {analyze_wildcard: true, query: "*"}}}'
payload: {query: {query_string: {analyze_wildcard: true, query: '*'}}}
});
testRoute({
method: 'POST',
url: '/elasticsearch/_mget?timeout=0&ignore_unavailable=true&preference=1429574531063',
payload: '{docs: [{_index: ".kibana", _type: "index-pattern", _id: "[logstash-]YYYY.MM.DD"}]}'
payload: {docs: [{_index: '.kibana', _type: 'index-pattern', _id: '[logstash-]YYYY.MM.DD'}]}
});
testRoute({

View file

@ -28,7 +28,7 @@ describe('plugins/elasticsearch', function () {
});
after(function () {
kbnServer.close();
return kbnServer.close();
});
describe('lib/validate', function () {

View file

@ -4,6 +4,8 @@ var versionMath = require('./version_math');
var SetupError = require('./setup_error');
module.exports = function (server) {
server.log(['plugin', 'debug'], 'Checking Elasticsearch version');
var client = server.plugins.elasticsearch.client;
var minimumElasticsearchVersion = server.config().get('elasticsearch.minimumVerison');
@ -31,7 +33,6 @@ module.exports = function (server) {
`${minimumElasticsearchVersion} or higher on all nodes. I found ` +
`the following incompatible nodes in your cluster: ${badNodeNames.join(',')}`;
server.plugins.elasticsearch.status.red(message);
throw new SetupError(server, message);
});
};

View file

@ -1,30 +1,32 @@
var url = require('url');
var fs = require('fs');
var _ = require('lodash');
var readFile = _.partialRight(require('fs').readFileSync, 'utf8');
var http = require('http');
var agentOptions;
module.exports = function (server) {
var https = require('https');
module.exports = _.memoize(function (server) {
var config = server.config();
var target = url.parse(config.get('elasticsearch.url'));
if (!agentOptions) {
agentOptions = {
rejectUnauthorized: config.get('elasticsearch.ssl.verify')
};
if (!/^https/.test(target.protocol)) return new http.Agent();
var customCA;
if (/^https/.test(target.protocol) && config.get('elasticsearch.ssl.ca')) {
customCA = fs.readFileSync(config.get('elasticsearch.ssl.ca'), 'utf8');
agentOptions.ca = [customCA];
}
var agentOptions = {
rejectUnauthorized: config.get('elasticsearch.ssl.verify')
};
// Add client certificate and key if required by elasticsearch
if (/^https/.test(target.protocol) &&
config.get('elasticsearch.ssl.cert') &&
config.get('elasticsearch.ssl.key')) {
agentOptions.crt = fs.readFileSync(config.get('elasticsearch.ssl.cert'), 'utf8');
agentOptions.key = fs.readFileSync(config.get('elasticsearch.ssl.key'), 'utf8');
}
if (config.get('elasticsearch.ssl.ca')) {
agentOptions.ca = [readFile(config.get('elasticsearch.ssl.ca'))];
}
return new http.Agent(agentOptions);
};
// Add client certificate and key if required by elasticsearch
if (config.get('elasticsearch.ssl.cert') && config.get('elasticsearch.ssl.key')) {
agentOptions.cert = readFile(config.get('elasticsearch.ssl.cert'));
agentOptions.key = readFile(config.get('elasticsearch.ssl.key'));
}
return new https.Agent(agentOptions);
});
// See https://lodash.com/docs#memoize: We use a Map() instead of the default, because we want the keys in the cache
// to be the server objects, and by default these would be coerced to strings as keys (which wouldn't be useful)
module.exports.cache = new Map();

View file

@ -1,19 +1,26 @@
var createAgent = require('./create_agent');
var mapUri = require('./map_uri');
module.exports = function createProxy(server, method, route, opts) {
opts = opts || {};
var { resolve } = require('url');
module.exports = function createProxy(server, method, route, config) {
var pre = '/elasticsearch';
var sep = route[0] === '/' ? '' : '/';
var path = `${pre}${sep}${route}`;
var options = {
method: method,
path: route,
path: path,
handler: {
proxy: {
mapUri: mapUri(server, opts.prefix),
mapUri: mapUri(server),
passThrough: true,
agent: createAgent(server)
agent: createAgent(server),
xforward: true
}
}
},
};
if (opts && opts.config) options.config = opts.config;
if (config) options.config = config;
server.route(options);
};

View file

@ -13,6 +13,7 @@ module.exports = function (server) {
var clientCrt = config.get('elasticsearch.ssl.cert');
var clientKey = config.get('elasticsearch.ssl.key');
var ca = config.get('elasticsearch.ssl.ca');
var apiVersion = config.get('elasticsearch.apiVersion');
if (username && password) {
uri.auth = util.format('%s:%s', username, password);
@ -30,7 +31,7 @@ module.exports = function (server) {
var client = new elasticsearch.Client({
host: url.format(uri),
ssl: ssl,
apiVersion: '1.4',
apiVersion: apiVersion,
log: function () {
this.error = function (err) {
server.log(['error', 'elasticsearch'], err);

View file

@ -15,7 +15,6 @@ module.exports = function (plugin, server) {
plugin.status.yellow('Waiting for Elasticsearch');
function waitForPong() {
return client.ping({ requestTimeout: 1500 }).catch(function (err) {
if (!(err instanceof NoConnections)) throw err;
@ -29,12 +28,13 @@ module.exports = function (plugin, server) {
function waitForShards() {
return client.cluster.health({
timeout: '5s', // tells es to not sit around and wait forever
index: config.get('kibana.index')
index: config.get('kibana.index'),
ignore: [408]
})
.then(function (resp) {
// if "timed_out" === true then elasticsearch could not
// find any indices matching our filter within 5 seconds
if (resp.timed_out) {
if (!resp || resp.timed_out) {
plugin.status.yellow('No existing Kibana index found');
return createKibanaIndex(server);
}
@ -53,13 +53,12 @@ module.exports = function (plugin, server) {
function check() {
return waitForPong()
.then(_.partial(checkEsVersion, server, plugin))
.then(_.partial(checkEsVersion, server))
.then(waitForShards)
.then(_.partial(migrateConfig, server))
.catch(_.bindKey(server, 'log', 'error'));
.catch(err => plugin.status.red(err));
}
var timeoutId = null;
function scheduleCheck(ms) {

View file

@ -3,16 +3,10 @@ var resolve = require('url').resolve;
module.exports = function mapUri(server, prefix) {
var config = server.config();
return function (request, done) {
var paths = request.params.paths;
if (!paths) {
paths = request.path.replace('/elasticsearch', '');
}
if (prefix) {
paths = prefix + '/' + paths;
}
var path = request.path.replace('/elasticsearch', '');
var url = config.get('elasticsearch.url');
if (!/\/$/.test(url)) url += '/';
if (paths) url = resolve(url, paths);
if (path) url = resolve(url, path);
var query = querystring.stringify(request.query);
if (query) url += '?' + query;
done(null, url);

View file

@ -113,6 +113,85 @@
Desaturate map tiles
<kbn-info info="Reduce the vibrancy of tile colors, this does not work in any version of Internet Explorer"
<kbn-info info="Reduce the vibrancy of tile colors, this does not work in any version of Internet Explorer"></kbn-info>
</label>
</div>
<div class="vis-option-item form-group">
<label>
<input type="checkbox"
name="wms.enabled"
ng-model="vis.params.wms.enabled">
WMS compliant map server
<kbn-info info="Use WMS compliant map tile server. For advanced users only."></kbn-info>
</label>
</div>
<div ng-show="vis.params.wms.enabled" class="well">
<div class="vis-option-item form-group">
<p>
WMS maps are 3rd party mapping services that have not been verified to work with Kibana.
These should be considered expert settings.
</p>
<label>
WMS url*
</label>
<input type="text" class="form-control"
name="wms.url"
ng-model="vis.params.wms.url">
</div>
<div class="vis-option-item form-group">
<label>
WMS layers* <kbn-info info="A comma separated list of layers to use."></kbn-info>
</label>
<input type="text" class="form-control"
ng-require="vis.params.wms.enabled"
ng-model="vis.params.wms.options.layers"
name="wms.options.layers">
</div>
<div class="vis-option-item form-group">
<label>
WMS version* <kbn-info info="The version of WMS the server supports"></kbn-info>
</label>
<input type="text" class="form-control"
name="wms.options.version"
ng-model="vis.params.wms.options.version">
</div>
<div class="vis-option-item form-group">
<label>
WMS format* <kbn-info info="Usually image/png or image/jpeg. Use png if the server will return transparent layers"></kbn-info>
</label>
<input type="text" class="form-control"
name="wms.options.format"
ng-model="vis.params.wms.options.format">
</div>
<div class="vis-option-item form-group">
<label>
WMS attribution <kbn-info info="Attribution string for the lower right corner"></kbn-info>
</label>
<input type="text" class="form-control"
name="wms.options.attribution"
ng-model="vis.params.wms.options.attribution">
</div>
<div class="vis-option-item form-group">
<label>
WMS styles* <kbn-info info="A comma separated list of WMS server supported styles to use. Blank in most cases."></kbn-info>
</label>
<input type="text" class="form-control"
name="wms.options.styles"
ng-model="vis.params.wms.options.styles">
</div>
<p>* if this parameter is incorrect, maps will fail to load.</p>
</div>

View file

@ -22,6 +22,7 @@ define(function (require) {
heatRadius: 25,
heatBlur: 15,
heatNormalizeData: true,
wms: config.get('visualization:tileMap:WMSdefaults')
},
mapTypes: ['Scaled Circle Markers', 'Shaded Circle Markers', 'Shaded Geohash Grid', 'Heatmap'],
canDesaturate: !!supports.cssFilters,

View file

@ -34,9 +34,13 @@ module.exports = function (kibana) {
'leaflet'
),
constants: function (server, options) {
injectVars: function (server, options) {
let config = server.config();
return {
defaultAppId: options.defaultAppId
kbnIndex: config.get('kibana.index'),
esShardTimeout: config.get('elasticsearch.shardTimeout'),
esApiVersion: config.get('elasticsearch.apiVersion'),
};
}
}

View file

@ -1,6 +1,14 @@
<div class="panel panel-default" ng-switch on="panel.type" ng-if="savedObj || error">
<div class="panel-heading">
<span class="panel-title">{{savedObj.title}}</span>
<span class="panel-title">
<i
class="fa"
ng-class="savedObj.vis.type.icon"
aria-label="{{savedObj.vis.type.title}} Icon"
title="{{savedObj.vis.type.title}}">
</i>
{{savedObj.title}}
</span>
<div class="btn-group">
<a aria-label="Edit" ng-show="chrome.getVisible() && editUrl" ng-href="{{editUrl}}">
<i aria-hidden="true" class="fa fa-pencil"></i>

View file

@ -16,6 +16,9 @@ define(function (require) {
require('plugins/kibana/dashboard/services/saved_dashboards');
require('plugins/kibana/dashboard/styles/main.less');
require('ui/saved_objects/saved_object_registry').register(require('plugins/kibana/dashboard/services/saved_dashboard_register'));
var app = require('ui/modules').get('app/dashboard', [
'elasticsearch',
'ngRoute',
@ -49,6 +52,7 @@ define(function (require) {
app.directive('dashboardApp', function (Notifier, courier, AppState, timefilter, kbnUrl) {
return {
controller: function ($scope, $route, $routeParams, $location, Private, getAppState) {
var queryFilter = Private(require('ui/filter_bar/query_filter'));
var notify = new Notifier({

View file

@ -0,0 +1,5 @@
define(function (require) {
  // Factory registered with the saved object registry: simply hands back
  // the savedDashboards service so the registry can enumerate dashboards.
  return function savedDashboardFn(savedDashboards) {
    var registryEntry = savedDashboards;
    return registryEntry;
  };
});

View file

@ -17,6 +17,13 @@ define(function (require) {
this.type = SavedDashboard.type;
this.Class = SavedDashboard;
this.loaderProperties = {
name: 'dashboards',
noun: 'Dashboard',
nouns: 'dashboards'
};
// Returns a single dashboard by ID, should be the name of the dashboard
this.get = function (id) {

View file

@ -76,6 +76,12 @@ dashboard-grid {
.ellipsis();
flex: 1 1 auto;
i {
opacity: 0.3;
font-size: 1.2em;
margin-right: 4px;
}
}
a {

View file

@ -16,19 +16,18 @@ describe('hit sort function', function () {
var groupSize = _.random(10, 30);
var total = sortOpts.length * groupSize;
var hits = new Array(total);
sortOpts = sortOpts.map(function (opt) {
if (_.isArray(opt)) return opt;
else return [opt];
});
var sortOptLength = sortOpts.length;
for (let i = 0; i < hits.length; i++) {
hits[i] = {
var hits = _.times(total, function (i) {
return {
_source: {},
sort: sortOpts[i % sortOptLength]
};
}
});
hits.sort(createHitSortFn(dir))
.forEach(function (hit, i) {

View file

@ -324,6 +324,7 @@ define(function (require) {
*/
var sortBy = (function () {
if (!_.isArray(sort)) return 'implicit';
else if (sort[0] === '_score') return 'implicit';
else if (sort[0] === timeField) return 'time';
else return 'non-time';
}());
@ -334,6 +335,7 @@ define(function (require) {
}
$scope.updateTime();
if (sort[0] === '_score') segmented.setMaxSegments(1);
segmented.setDirection(sortBy === 'time' ? (sort[1] || 'desc') : 'desc');
segmented.setSize(sortBy === 'time' ? $scope.opts.sampleSize : false);

View file

@ -5,4 +5,10 @@ define(function (require, module, exports) {
require('plugins/kibana/discover/components/field_chooser/field_chooser');
require('plugins/kibana/discover/controllers/discover');
require('plugins/kibana/discover/styles/main.less');
// preload
require('ui/doc_table/components/table_row');
require('ui/saved_objects/saved_object_registry').register(require('plugins/kibana/discover/saved_searches/saved_search_register'));
});

View file

@ -0,0 +1,5 @@
define(function (require) {
  // Factory registered with the saved object registry: simply hands back
  // the savedSearches service so the registry can enumerate saved searches.
  return function savedSearchObjectFn(savedSearches) {
    var registryEntry = savedSearches;
    return registryEntry;
  };
});

View file

@ -25,6 +25,12 @@ define(function (require) {
this.type = SavedSearch.type;
this.Class = SavedSearch;
this.loaderProperties = {
name: 'searches',
noun: 'Saved Search',
nouns: 'saved searches'
};
this.get = function (id) {
return (new SavedSearch(id)).init();
};

View file

@ -4,14 +4,18 @@ require('plugins/kibana/dashboard/index');
require('plugins/kibana/settings/index');
require('plugins/kibana/doc/index');
var chrome = require('ui/chrome');
var routes = require('ui/routes');
var modules = require('ui/modules');
var kibanaLogoUrl = require('ui/images/kibana.png');
require('ui/routes')
routes
.otherwise({
redirectTo: '/discover'
});
require('ui/chrome')
chrome
.setBrand({
'logo': 'url(' + kibanaLogoUrl + ') left no-repeat',
'smallLogo': 'url(' + kibanaLogoUrl + ') left no-repeat'
@ -49,3 +53,4 @@ require('ui/chrome')
courier.start();
});
});

View file

@ -36,4 +36,9 @@ define(function (require, module, exports) {
}
};
});
// preload
require('ui/field_editor');
require('plugins/kibana/settings/sections/indices/_indexed_fields');
require('plugins/kibana/settings/sections/indices/_scripted_fields');
});

View file

@ -18,7 +18,7 @@
<tr>
<td>Commit SHA</td>
<td>{{commitSha | limitTo:7}}</td>
<td>{{buildSha | limitTo:7}}</td>
</tr>
</table>

View file

@ -7,10 +7,10 @@ define(function (require) {
});
require('ui/modules').get('apps/settings')
.controller('settingsAbout', function ($scope, kbnVersion, buildNum, commitSha) {
.controller('settingsAbout', function ($scope, kbnVersion, buildNum, buildSha) {
$scope.kbnVersion = kbnVersion;
$scope.buildNum = buildNum;
$scope.commitSha = commitSha;
$scope.buildSha = buildSha;
});
return {

View file

@ -5,7 +5,7 @@
(Default: <i>{{conf.defVal == undefined ? 'null' : conf.defVal}}</i>)
</span>
<br>
<span class="smaller">{{conf.description}}</span>
<span class="smaller" ng-bind-html="conf.description | trustAsHtml"></span>
</td>
<td class="value">

View file

@ -5,6 +5,7 @@ define(function (require) {
require('plugins/kibana/settings/sections/indices/index'),
require('plugins/kibana/settings/sections/advanced/index'),
require('plugins/kibana/settings/sections/objects/index'),
require('plugins/kibana/settings/sections/status/index'),
require('plugins/kibana/settings/sections/about/index')
];
});

View file

@ -155,10 +155,13 @@ define(function (require) {
var pattern = mockIndexPattern(index);
return indexPatterns.mapper.getIndicesForIndexPattern(pattern)
.catch(notify.error)
.catch(function (err) {
if (err instanceof IndexPatternMissingIndices) return;
notify.error(err);
})
.then(function (existing) {
var all = existing.all;
var matches = existing.matches;
var all = _.get(existing, 'all', []);
var matches = _.get(existing, 'matches', []);
if (all.length) {
index.existing = {
class: 'success',

View file

@ -129,6 +129,8 @@ define(function (require) {
if (_.contains(loadedEditors, editor)) return;
loadedEditors.push(editor);
editor.$blockScrolling = Infinity;
var session = editor.getSession();
var fieldName = editor.container.id;

View file

@ -0,0 +1,10 @@
define(function (require) {
  // Settings section descriptor for the Status page.
  // NOTE(review): the original required lodash into `_` but never used it;
  // the dead local has been removed.
  return {
    order: 3,          // position of this section among the settings tabs
    name: 'status',    // section identifier
    display: 'Status', // label shown in the settings navigation
    url: '/status'     // route the section links to
  };
});

View file

@ -19,7 +19,7 @@
<i aria-hidden="true" class="fa fa-chain-broken"></i> Unlinked!
</div>
<form ng-if="vis.type.requiresSearch && $state.linked" class="inline-form" name="queryInput">
<form ng-if="vis.type.requiresSearch && $state.linked" class="inline-form fill" name="queryInput">
<div class="typeahead">
<div class="input-group">
<input
@ -39,7 +39,7 @@
<form
ng-if="vis.type.requiresSearch && !$state.linked"
ng-submit="fetch()"
class="inline-form"
class="inline-form fill"
name="queryInput">
<div class="typeahead" kbn-typeahead="visualize">
<div class="input-group"
@ -130,8 +130,9 @@
<div class="vis-editor-canvas" ng-class="{ embedded: !chrome.getVisible() }">
<div class="visualize-info" ng-if="savedVis.id">
<div class="visualize-info-tab">
<span ng-bind="::savedVis.title"></span>
<div class="visualize-info-tab" title="{{::savedVis.vis.type.title}}">
<i class="fa" aria-label="{{::savedVis.vis.type.title}} Icon" ng-class="savedVis.vis.type.icon"></i>
<span bindonce bo-bind="savedVis.title"></span>
</div>
</div>

View file

@ -8,4 +8,23 @@ define(function (require) {
.when('/visualize', {
redirectTo: '/visualize/step/1'
});
// preloading
require('plugins/kibana/visualize/editor/add_bucket_agg');
require('plugins/kibana/visualize/editor/agg');
require('plugins/kibana/visualize/editor/agg_add');
require('plugins/kibana/visualize/editor/agg_filter');
require('plugins/kibana/visualize/editor/agg_group');
require('plugins/kibana/visualize/editor/agg_param');
require('plugins/kibana/visualize/editor/agg_params');
require('plugins/kibana/visualize/editor/editor');
require('plugins/kibana/visualize/editor/nesting_indicator');
require('plugins/kibana/visualize/editor/sidebar');
require('plugins/kibana/visualize/editor/vis_options');
require('plugins/kibana/visualize/saved_visualizations/_saved_vis');
require('plugins/kibana/visualize/saved_visualizations/saved_visualizations');
require('ui/saved_objects/saved_object_registry')
.register(require('plugins/kibana/visualize/saved_visualizations/saved_visualization_register'));
});

View file

@ -0,0 +1,5 @@
define(function (require) {
  // Factory registered with the saved object registry: simply hands back
  // the savedVisualizations service so the registry can enumerate visualizations.
  return function savedVisualizationFn(savedVisualizations) {
    var registryEntry = savedVisualizations;
    return registryEntry;
  };
});

View file

@ -20,6 +20,12 @@ define(function (require) {
this.type = SavedVis.type;
this.Class = SavedVis;
this.loaderProperties = {
name: 'visualizations',
noun: 'Visualization',
nouns: 'visualizations'
};
this.get = function (id) {
return (new SavedVis(id)).init();
};

View file

@ -56,4 +56,4 @@
}
@import "../editor/styles/.editor.less";
@import "../editor/styles/_editor.less";

View file

@ -11,22 +11,7 @@ module.exports = function (kibana) {
'ui/chrome',
'angular'
)
},
modules: {
nvd3$: {
path: 'nvd3/build/nv.d3.js',
exports: 'window.nv',
imports: 'd3,nvd3Styles'
},
nvd3Styles$: {
path: 'nvd3/build/nv.d3.css'
}
},
loaders: [
{ test: /\/angular-nvd3\//, loader: 'imports?angular,nv=nvd3,d3' }
]
}
}
});
};

Some files were not shown because too many files have changed in this diff Show more