Merge branch 'master' into testing

This commit is contained in:
leedr 2015-09-02 10:40:49 -05:00
commit 7bf6266c26
91 changed files with 6456 additions and 575 deletions

2
.gitignore vendored
View file

@ -13,5 +13,7 @@ target
/esvm
.htpasswd
installedPlugins
disabledPlugins
webpackstats.json
config/kibana.dev.yml
coverage

View file

@ -1 +1 @@
iojs-v2.4
0.12.7

View file

@ -1,6 +1,8 @@
language: node_js
node_js: 'iojs-v2.4'
install: npm install
node_js: '0.12.7'
install:
- npm install -g npm@3.2
- npm install
script: ./node_modules/.bin/grunt travis
sudo: false
cache:

View file

@ -23,16 +23,22 @@ Please make sure you have signed the [Contributor License Agreement](http://www.
nvm install "$(cat .node-version)"
```
- Install npm 3.2
```sh
npm install -g npm@3.2
```
- Install dependencies
```sh
npm install
```
- Start elasticsearch, you can use [esvm](https://github.com/simianhacker/esvm) to make that easier.
- Start elasticsearch
```sh
grunt esvm:dev:keepalive
npm run elasticsearch
```
- Start the development server.

View file

@ -1,4 +1,4 @@
require('babel/register')(require('./src/optimize/babelOptions'));
require('babel/register')(require('./src/optimize/babelOptions').node);
module.exports = function (grunt) {
// set the config once before calling load-grunt-config
@ -30,7 +30,7 @@ module.exports = function (grunt) {
}
}()),
nodeVersion: '2.5.0',
nodeVersion: grunt.file.read('.node-version').trim(),
meta: {
banner: '/*! <%= package.name %> - v<%= package.version %> - ' +
@ -45,23 +45,7 @@ module.exports = function (grunt) {
'<%= root %>/tasks/**/*.js',
'<%= src %>/**/*.js',
'!<%= src %>/fixtures/**/*.js'
],
deepModules: {
'caniuse-db': '1.0.30000265',
'chalk': '1.1.0',
'glob': '4.5.3',
'har-validator': '1.8.0',
'json5': '0.4.0',
'loader-utils': '0.2.11',
'micromatch': '2.2.0',
'postcss-normalize-url': '2.1.1',
'postcss-reduce-idents': '1.0.2',
'postcss-unique-selectors': '1.0.0',
'postcss-minify-selectors': '1.4.6',
'postcss-single-charset': '0.3.0',
'regenerator': '0.8.36'
}
]
};
grunt.config.merge(config);

View file

@ -35,4 +35,11 @@ Visit [Elastic.co](http://www.elastic.co/guide/en/kibana/current/index.html) for
## Snapshot Builds
***Snapshots are currently disabled*** until [#4597](https://github.com/elastic/kibana/issues/4597) is complete, the snapshot builds can not be built. Master can be started for development or experimentation by running `./bin/kibana` from the root of the project.
For the daring, snapshot builds are available. These builds are created after each commit to the master branch, and therefore are not something you should run in production.
| platform | | |
| --- | --- | --- |
| OSX | [tar](http://download.elastic.co/kibana/kibana-snapshot/kibana-4.2.0-snapshot-darwin-x64.tar.gz) | [zip](http://download.elastic.co/kibana/kibana-snapshot/kibana-4.2.0-snapshot-darwin-x64.zip) |
| Linux x64 | [tar](http://download.elastic.co/kibana/kibana-snapshot/kibana-4.2.0-snapshot-linux-x64.tar.gz) | [zip](http://download.elastic.co/kibana/kibana-snapshot/kibana-4.2.0-snapshot-linux-x64.zip) |
| Linux x86 | [tar](http://download.elastic.co/kibana/kibana-snapshot/kibana-4.2.0-snapshot-linux-x86.tar.gz) | [zip](http://download.elastic.co/kibana/kibana-snapshot/kibana-4.2.0-snapshot-linux-x86.zip) |
| Windows | [tar](http://download.elastic.co/kibana/kibana-snapshot/kibana-4.2.0-snapshot-windows.tar.gz) | [zip](http://download.elastic.co/kibana/kibana-snapshot/kibana-4.2.0-snapshot-windows.zip) |

View file

@ -2,6 +2,7 @@ This is a collection of style guides for Kibana projects. These include guides for
- [JavaScript](#javascript-style-guide)
- [Kibana Project](#kibana-style-guide)
- [Html](#html-style-guide)
# JavaScript Style Guide
@ -855,6 +856,24 @@ require('ui/routes')
});
```
# Html Style Guide
### Multiple attribute values
When a node has multiple attributes that would cause it to exceed the line character limit, each attribute including the first should be on its own line with a single indent. Also, when a node that is styled in this way has child nodes, there should be a blank line between the opening parent tag and the first child tag.
```
<ul
attribute1="value1"
attribute2="value2"
attribute3="value3">
<li></li>
<li></li>
...
</ul>
```
# Attribution
This JavaScript guide forked from the [node style guide](https://github.com/felixge/node-style-guide) created by [Felix Geisendörfer](http://felixge.de/) and is

View file

@ -20,7 +20,7 @@
# If your Elasticsearch is protected with basic auth, this is the user credentials
# used by the Kibana server to perform maintenance on the kibana_index at startup. Your Kibana
# users will still need to authenticate with Elasticsearch (which is proxied thorugh
# users will still need to authenticate with Elasticsearch (which is proxied through
# the Kibana server)
# elasticsearch.username: user
# elasticsearch.password: pass
@ -53,8 +53,8 @@
# elasticsearch.startupTimeout: 5000
# SSL for outgoing requests from the Kibana Server (PEM formatted)
# server.ssl.cert: /path/to/your/server.key
# server.ssl.key: /path/to/your/server.crt
# server.ssl.cert: /path/to/your/server.crt
# server.ssl.key: /path/to/your/server.key
# Set the path to where you would like the process id file to be created.
# pid.file: /var/run/kibana.pid

@ -1 +0,0 @@
Subproject commit fb7fd94954edf8d5fcd270add6ee1f2de40a2c9e

View file

@ -1,64 +1,57 @@
[[settings]]
== Settings
To use Kibana, you have to tell it about the Elasticsearch indices that you
want to explore by configuring one or more index patterns. You can also:
To use Kibana, you have to tell it about the Elasticsearch indices that you want to explore by configuring one or more
index patterns. You can also:
* Create scripted fields that are computed on the fly from your data. You can
browse and visualize scripted fields, but you cannot search them.
* Set advanced options such as the number of rows to show in a table and
how many of the most popular fields to show. Use caution when modifying advanced options,
as it's possible to set values that are incompatible with one another.
* Create scripted fields that are computed on the fly from your data. You can browse and visualize scripted fields, but
you cannot search them.
* Set advanced options such as the number of rows to show in a table and how many of the most popular fields to show.
Use caution when modifying advanced options, as it's possible to set values that are incompatible with one another.
* Configure Kibana for a production environment
[float]
[[settings-create-pattern]]
=== Creating an Index Pattern to Connect to Elasticsearch
An _index pattern_ identifies one or more Elasticsearch indices that you want to
explore with Kibana. Kibana looks for index names that match the specified pattern.
An asterisk (*) in the pattern matches zero or more characters. For example, the pattern
`myindex-*` matches all indices whose names start with `myindex-`, such as `myindex-1`
and `myindex-2`.
An _index pattern_ identifies one or more Elasticsearch indices that you want to explore with Kibana. Kibana looks for
index names that match the specified pattern.
An asterisk (*) in the pattern matches zero or more characters. For example, the pattern `myindex-*` matches all
indices whose names start with `myindex-`, such as `myindex-1` and `myindex-2`.
If you use event times to create index names (for example, if you're pushing data
into Elasticsearch from Logstash), the index pattern can also contain a date format.
In this case, the static text in the pattern must be enclosed in brackets, and you
specify the date format using the tokens described in <<date-format-tokens>>.
If you use event times to create index names (for example, if you're pushing data into Elasticsearch from Logstash),
the index pattern can also contain a date format.
In this case, the static text in the pattern must be enclosed in brackets, and you specify the date format using the
tokens described in <<date-format-tokens>>.
For example, `[logstash-]YYYY.MM.DD` matches all indices whose names have a
timestamp of the form `YYYY.MM.DD` appended to the prefix `logstash-`, such as
`logstash-2015.01.31` and `logstash-2015-02-01`.
For example, `[logstash-]YYYY.MM.DD` matches all indices whose names have a timestamp of the form `YYYY.MM.DD` appended
to the prefix `logstash-`, such as `logstash-2015.01.31` and `logstash-2015-02-01`.
An index pattern can also simply be the name of a single index.
To create an index pattern to connect to Elasticsearch:
. Go to the *Settings > Indices* tab.
. Specify an index pattern that matches the name of one or more of your Elasticsearch
indices. By default, Kibana guesses that you're you're working with log data being
fed into Elasticsearch by Logstash.
. Specify an index pattern that matches the name of one or more of your Elasticsearch indices. By default, Kibana
guesses that you're working with log data being fed into Elasticsearch by Logstash.
+
NOTE: When you switch between top-level tabs, Kibana remembers where you were.
For example, if you view a particular index pattern from the Settings tab, switch
to the Discover tab, and then go back to the Settings tab, Kibana displays the
index pattern you last looked at. To get to the create pattern form, click
the *Add* button in the Index Patterns list.
NOTE: When you switch between top-level tabs, Kibana remembers where you were. For example, if you view a particular
index pattern from the Settings tab, switch to the Discover tab, and then go back to the Settings tab, Kibana displays
the index pattern you last looked at. To get to the create pattern form, click the *Add* button in the Index Patterns
list.
. If your index contains a timestamp field that you want to use to perform
time-based comparisons, select the *Index contains time-based events* option
and select the index field that contains the timestamp. Kibana reads the
index mapping to list all of the fields that contain a timestamp.
. If your index contains a timestamp field that you want to use to perform time-based comparisons, select the *Index
contains time-based events* option and select the index field that contains the timestamp. Kibana reads the index
mapping to list all of the fields that contain a timestamp.
. If new indices are generated periodically and have a timestamp appended to
the name, select the *Use event times to create index names* option and select
the *Index pattern interval*. This enables Kibana to search only those indices
that could possibly contain data in the time range you specify. This is
primarily applicable if you are using Logstash to feed data into Elasticsearch.
. If new indices are generated periodically and have a timestamp appended to the name, select the *Use event times to
create index names* option and select the *Index pattern interval*. This enables Kibana to search only those indices
that could possibly contain data in the time range you specify. This is primarily applicable if you are using Logstash
to feed data into Elasticsearch.
. Click *Create* to add the index pattern.
. To designate the new pattern as the default pattern to load when you view
the Discover tab, click the *favorite* button.
. To designate the new pattern as the default pattern to load when you view the Discover tab, click the *favorite*
button.
[float]
[[date-format-tokens]]
@ -116,10 +109,9 @@ the Discover tab, click the *favorite* button.
[float]
[[set-default-pattern]]
=== Setting the Default Index Pattern
The default index pattern is loaded by automatically when you view the *Discover* tab.
Kibana displays a star to the left of the name of the default pattern in the Index Patterns list
on the *Settings > Indices* tab. The first pattern you create is automatically
designated as the default pattern.
The default index pattern is loaded automatically when you view the *Discover* tab. Kibana displays a star to the
left of the name of the default pattern in the Index Patterns list on the *Settings > Indices* tab. The first pattern
you create is automatically designated as the default pattern.
To set a different pattern as the default index pattern:
@ -132,13 +124,11 @@ NOTE: You can also manually set the default index pattern in *Advanced > Setting
[float]
[[reload-fields]]
=== Reloading the Index Fields List
When you add an index mapping, Kibana automatically scans the indices that
match the pattern to display a list of the index fields. You can reload the
index fields list to pick up any newly-added fields.
When you add an index mapping, Kibana automatically scans the indices that match the pattern to display a list of the
index fields. You can reload the index fields list to pick up any newly-added fields.
Reloading the index fields list also resets Kibana's popularity counters for the fields.
The popularity counters keep track of the fields you've used most often within Kibana
and are used to sort fields within lists.
Reloading the index fields list also resets Kibana's popularity counters for the fields. The popularity counters keep
track of the fields you've used most often within Kibana and are used to sort fields within lists.
To reload the index fields list:
@ -168,20 +158,17 @@ You can also set the field's popularity value in the *Popularity* text entry box
[float]
[[create-scripted-field]]
=== Creating a Scripted Field
Scripted fields compute data on the fly from the data in your
Elasticsearch indices. Scripted field data is shown on the Discover tab as
part of the document data, and you can use scripted fields in your visualizations.
(Scripted field values are computed at query time so they aren't indexed and
cannot be searched.)
Scripted fields compute data on the fly from the data in your Elasticsearch indices. Scripted field data is shown on
the Discover tab as part of the document data, and you can use scripted fields in your visualizations.
Scripted field values are computed at query time so they aren't indexed and cannot be searched.
WARNING: Computing data on the fly with scripted fields can be very resource
intensive and can have a direct impact on Kibana's performance. Keep in mind
that there's no built-in validation of a scripted field. If your scripts are
buggy, you'll get exceptions whenever you try to view the dynamically generated
data.
WARNING: Computing data on the fly with scripted fields can be very resource intensive and can have a direct impact on
Kibana's performance. Keep in mind that there's no built-in validation of a scripted field. If your scripts are
buggy, you'll get exceptions whenever you try to view the dynamically generated data.
Scripted fields use the Lucene expression syntax. For more information,
see http://www.elastic.co/guide/en/elasticsearch/reference/current/modules-scripting.html#_lucene_expressions_scripts[Lucene Expressions Scripts].
see http://www.elastic.co/guide/en/elasticsearch/reference/current/modules-scripting.html#_lucene_expressions_scripts[
Lucene Expressions Scripts].
You can reference any single value numeric field in your expressions, for example:
@ -196,8 +183,7 @@ To create a scripted field:
. Go to the pattern's *Scripted Fields* tab.
. Click *Add Scripted Field*.
. Enter a name for the scripted field.
. Enter the expression that you want to use to compute a value on the fly
from your index data.
. Enter the expression that you want to use to compute a value on the fly from your index data.
. Click *Save Scripted Field*.
For more information about scripted fields in Elasticsearch, see
@ -215,10 +201,8 @@ To modify a scripted field:
. Click the *Edit* button for the scripted field you want to change.
. Make your changes and then click *Save Scripted Field* to update the field.
WARNING: Keep in mind
that there's no built-in validation of a scripted field. If your scripts are
buggy, you'll get exceptions whenever you try to view the dynamically generated
data.
WARNING: Keep in mind that there's no built-in validation of a scripted field. If your scripts are buggy, you'll get
exceptions whenever you try to view the dynamically generated data.
[float]
[[delete-scripted-field]]
@ -231,13 +215,12 @@ To delete a scripted field:
[[advanced-options]]
=== Setting Advanced Options
The Advanced Settings page enables you to directly edit settings that control
the behavior of the Kibana application. For example, you can change the format
used to display dates, specify the default index pattern, and set the precision
The Advanced Settings page enables you to directly edit settings that control the behavior of the Kibana application.
For example, you can change the format used to display dates, specify the default index pattern, and set the precision
for displayed decimal values.
WARNING: Changing advanced settings can have unintended consequences. If you aren't
sure what you're doing, it's best to leave these settings as-is.
WARNING: Changing advanced settings can have unintended consequences. If you aren't sure what you're doing, it's best
to leave these settings as-is.
To set advanced options:
@ -253,30 +236,26 @@ To set advanced options:
You can view, edit, and delete saved searches, visualizations, and dashboards from *Settings > Objects*. You can also
export or import sets of searches, visualizations, and dashboards.
Viewing a saved object displays the selected item in the *Discover*, *Visualize*,
or *Dashboard* page. To view a saved object:
Viewing a saved object displays the selected item in the *Discover*, *Visualize*, or *Dashboard* page. To view a saved
object:
. Go to *Settings > Objects*.
. Select the object you want to view.
. Click the *View* button.
Editing a saved object enables you to directly modify the object definition.
You can change the name of the object, add a description, and modify the
JSON that defines the object's properties.
Editing a saved object enables you to directly modify the object definition. You can change the name of the object, add
a description, and modify the JSON that defines the object's properties.
If you attempt to access an object whose index has been deleted, Kibana displays
its Edit Object page. You can:
If you attempt to access an object whose index has been deleted, Kibana displays its Edit Object page. You can:
* Recreate the index so you can continue using the object.
* Delete the object and recreate it using a different index.
* Change the index name referenced in the object's `kibanaSavedObjectMeta.searchSourceJSON`
to point to an existing index pattern. This is useful if the index you were working
with has been renamed.
* Change the index name referenced in the object's `kibanaSavedObjectMeta.searchSourceJSON` to point to an existing
index pattern. This is useful if the index you were working with has been renamed.
WARNING: No validation is performed for object properties. Submitting invalid
changes will render the object unusable. Generally, you should use the
*Discover*, *Visualize*, or *Dashboard* pages to create new objects instead of
directly editing existing ones.
WARNING: No validation is performed for object properties. Submitting invalid changes will render the object unusable.
Generally, you should use the *Discover*, *Visualize*, or *Dashboard* pages to create new objects instead of directly
editing existing ones.
To edit a saved object:
@ -310,64 +289,135 @@ To import a set of objects:
[[kibana-server-properties]]
=== Setting Kibana Server Properties
The Kibana server reads properties from the `kibana.yml` file on startup. The default
settings configure Kibana to run on `localhost:5601`. To change the host or port number, or
connect to Elasticsearch running on a different machine, you'll need to update your `kibana.yml` file. You can also
enable SSL and set a variety of other options.
The Kibana server reads properties from the `kibana.yml` file on startup. The default settings configure Kibana to run
on `localhost:5601`. To change the host or port number, or connect to Elasticsearch running on a different machine,
you'll need to update your `kibana.yml` file. You can also enable SSL and set a variety of other options.
deprecated[4.2, The names of several Kibana server properties changed in the 4.2 release of Kibana. The previous names remain as functional aliases, but are now deprecated and will be removed in a future release of Kibana]
[horizontal]
.Kibana Server Properties
|===
|Property |Description
`server.port` added[4.2]:: The port that the Kibana server runs on.
+
*alias*: `port` deprecated[4.2]
+
*default*: `5601`
|`port`
|The port that the Kibana server runs on. Default: `port: 5601`.
`server.host` added[4.2]:: The host to bind the Kibana server to.
+
*alias*: `host` deprecated[4.2]
+
*default*: `"0.0.0.0"`
|`host`
|The host to bind the Kibana server to. Default: `host: "0.0.0.0"`.
`elasticsearch.url` added[4.2]:: The Elasticsearch instance where the indices you want to query reside.
+
*alias*: `elasticsearch_url` deprecated[4.2]
+
*default*: `"http://localhost:9200"`
|`elasticsearch_url`
|The Elasticsearch instance where the indices you want to query reside. Default:&nbsp;&nbsp;`elasticsearch_url:
"http://localhost:9200"`.
`elasticsearch.preserveHost` added[4.2]:: By default, the host specified in the incoming request from the browser is specified as the host in the corresponding request Kibana sends to Elasticsearch. If you set this option to `false`, Kibana uses the host specified in `elasticsearch_url`.
+
*alias*: `elasticsearch_preserve_host` deprecated[4.2]
+
*default*: `true`
|`elasticsearch_preserve_host`
|By default, the host specified in the incoming request from the browser is specified as the host in the
corresponding request Kibana sends to Elasticsearch. If you set this option to `false`, Kibana uses the host
specified in `elasticsearch_url`. You probably don't need to worry about this setting--just use the default.
Default: `elasticsearch_preserve_host: true`.
`elasticsearch.ssl.cert` added[4.2]:: This parameter specifies the path to the SSL certificate for Elasticsearch instances that require a client certificate.
+
*alias*: `kibana_elasticsearch_client_crt` deprecated[4.2]
|`kibana_index`
|The name of the index where saved searched, visualizations, and dashboards will be stored. Default: `kibana_index: .kibana`.
`elasticsearch.ssl.key` added[4.2]:: This parameter specifies the path to the SSL key for Elasticsearch instances that require a client key.
+
*alias*: `kibana_elasticsearch_client_key` deprecated[4.2]
|`default_app_id`
|The page that will be displayed when you launch Kibana: `discover`, `visualize`, `dashboard`, or `settings`. Default:
`default_app_id: "discover"`.
`elasticsearch.password` added[4.2]:: This parameter specifies the password for Elasticsearch instances that use HTTP basic authentication. Kibana users still need to authenticate with Elasticsearch, which is proxied through the Kibana server.
+
*alias*: `kibana_elasticsearch_password` deprecated[4.2]
|`request_timeout`
|How long to wait for responses from the Kibana backend or Elasticsearch, in milliseconds. Default: `request_timeout: 500000`.
`elasticsearch.username` added[4.2]:: This parameter specifies the username for Elasticsearch instances that use HTTP basic authentication. Kibana users still need to authenticate with Elasticsearch, which is proxied through the Kibana server.
+
*alias*: `kibana_elasticsearch_username` deprecated[4.2]
|`shard_timeout`
|How long Elasticsearch should wait for responses from shards. Set to 0 to disable. Default: `shard_timeout: 0`.
`elasticsearch.pingTimeout` added[4.2]:: This parameter specifies the maximum wait time in milliseconds for ping responses by Elasticsearch.
+
*alias*: `ping_timeout` deprecated[4.2]
+
*default*: `1500`
|`verify_ssl`
|Indicates whether or not to validate the Elasticsearch SSL certificate. Set to false to disable SSL verification.
Default: `verify_ssl: true`.
`elasticsearch.startupTimeout` added[4.2]:: This parameter specifies the maximum wait time in milliseconds for Elasticsearch discovery at Kibana startup. Kibana repeats attempts to discover an Elasticsearch cluster after the specified time elapses.
+
*alias*: `startup_timeout` deprecated[4.2]
+
*default*: `5000`
|`ca`
|The path to the CA certificate for your Elasticsearch instance. Specify if you are using a self-signed certificate
so the certificate can be verified. (Otherwise, you have to disable `verify_ssl`.) Default: none.
`kibana.index` added[4.2]:: The name of the index where saved searches, visualizations, and dashboards will be stored.
+
*alias*: `kibana_index` deprecated[4.2]
+
*default*: `.kibana`
|`ssl_key_file`
|The path to your Kibana server's key file. Must be set to encrypt communications between the browser and Kibana. Default: none.
`kibana.defaultAppId` added[4.2]:: The page that will be displayed when you launch Kibana: `discover`, `visualize`, `dashboard`, or `settings`.
+
*alias*: `default_app_id` deprecated[4.2]
+
*default*: `"discover"`
|`ssl_cert_file`
|The path to your Kibana server's certificate file. Must be set to encrypt communications between the browser and Kibana. Default: none.
`logging.silent` added[4.2]:: Set this value to `true` to suppress all logging output.
+
*default*: `false`
|`pid_file`
|The location where you want to store the process ID file. If not specified, the PID file is stored in
`/var/run/kibana.pid`. Default: none.
`logging.quiet` added[4.2]:: Set this value to `true` to suppress all logging output except for log messages tagged `error`, `fatal`, or Hapi.js errors.
+
*default*: `false`
|`log_file`
|The location where you want to store the Kibana's log output. If not specified, log output is written to standard
output and not stored. Specifying a log file suppresses log writes to standard output. Default: none.
`logging.verbose` added[4.2]:: Set this value to `true` to log all events, including system usage information and all requests.
+
*default*: `false`
|===
`logging.events` added[4.2]:: You can specify a map of log types to output tags for this parameter to create a customized set of loggable events, as in the following example:
+
[source,json]
{
log: ['info', 'warning', 'error', 'fatal'],
response: '*',
error: '*'
}
`elasticsearch.requestTimeout` added[4.2]:: How long to wait for responses from the Kibana backend or Elasticsearch, in milliseconds.
+
*alias*: `request_timeout` deprecated[4.2]
+
*default*: `500000`
`elasticsearch.shardTimeout` added[4.2]:: How long Elasticsearch should wait for responses from shards. Set to 0 to disable.
+
*alias*: `shard_timeout` deprecated[4.2]
+
*default*: `0`
`elasticsearch.ssl.verify` added[4.2]:: Indicates whether or not to validate the Elasticsearch SSL certificate. Set to false to disable SSL verification.
+
*alias*: `verify_ssl` deprecated[4.2]
+
*default*: `true`
`elasticsearch.ssl.ca` added[4.2]:: The path to the CA certificate for your Elasticsearch instance. Specify if you are using a self-signed certificate so the certificate can be verified. Disable `elasticsearch.ssl.verify` otherwise.
+
*alias*: `ca` deprecated[4.2]
`server.ssl.key` added[4.2]:: The path to your Kibana server's key file. Must be set to encrypt communications between the browser and Kibana.
+
*alias*: `ssl_key_file` deprecated[4.2]
`server.ssl.cert` added[4.2]:: The path to your Kibana server's certificate file. Must be set to encrypt communications between the browser and Kibana.
+
*alias*: `ssl_cert_file` deprecated[4.2]
`pid.file` added[4.2]:: The location where you want to store the process ID file.
+
*alias*: `pid_file` deprecated[4.2]
+
*default*: `/var/run/kibana.pid`
`logging.dest` added[4.2]:: The location where you want to store Kibana's log output. If not specified, log output is written to standard output and not stored. Specifying a log file suppresses log writes to standard output.
+
*alias*: `log_file` deprecated[4.2]

5560
npm-shrinkwrap.json generated Normal file

File diff suppressed because it is too large Load diff

View file

@ -13,8 +13,8 @@
"private": false,
"version": "4.2.0-snapshot",
"build": {
"number": 8173,
"sha": "0102888deb393e4141369fbd1644a5d77f8732da"
"number": 8467,
"sha": "6cb7fec4e154faa0a4a3fee4b33dfef91b9870d9"
},
"main": "src/server/KbnServer.js",
"homepage": "https://www.elastic.co/products/kibana",
@ -38,10 +38,12 @@
"test:quick": "grunt test:quick",
"test:browser": "grunt test:browser",
"test:server": "grunt test:server",
"test:coverage": "grunt test:coverage",
"build": "grunt build",
"start": "./bin/kibana --dev",
"precommit": "grunt lintStagedFiles",
"karma": "karma start"
"karma": "karma start",
"elasticsearch": "grunt esvm:dev:keepalive"
},
"repository": {
"type": "git",
@ -49,22 +51,21 @@
},
"dependencies": {
"@spalger/angular-bootstrap": "^0.10.0",
"@spalger/angular-nvd3": "^1.0.0-beta",
"@spalger/filesaver": "^1.1.2",
"@spalger/leaflet-draw": "^0.2.3",
"@spalger/leaflet-heat": "^0.1.3",
"@spalger/nvd3": "^1.8.1",
"@spalger/ui-ace": "^0.2.3",
"Nonsense": "^0.1.2",
"angular": "1.2.28",
"angular-bindonce": "0.3.1",
"angular-elastic": "2.5.0",
"angular-mocks": "1.2.28",
"angular-nvd3": "panda01/angular-nvd3#kibana",
"angular-route": "1.2.28",
"ansicolors": "^0.3.2",
"autoprefixer": "^5.2.0",
"autoprefixer-loader": "^2.0.0",
"babel": "^5.8.21",
"babel-core": "^5.8.21",
"babel-core": "^5.8.22",
"babel-loader": "^5.3.2",
"babel-runtime": "^5.8.20",
"bluebird": "^2.9.27",
@ -72,7 +73,6 @@
"bootstrap": "^3.3.5",
"brace": "^0.5.1",
"bunyan": "^1.2.3",
"chokidar": "^1.0.4",
"commander": "^2.8.1",
"css-loader": "^0.15.1",
"d3": "^3.5.6",
@ -84,13 +84,11 @@
"extract-text-webpack-plugin": "^0.8.2",
"file-loader": "^0.8.4",
"font-awesome": "^4.3.0",
"glob": "^4.3.2",
"good": "^6.2.0",
"good-squeeze": "^2.1.0",
"gridster": "^0.5.6",
"hapi": "^8.6.1",
"imports-loader": "^0.6.4",
"is-array": "^1.0.1",
"jade": "^1.7.2",
"jade-loader": "^0.7.1",
"joi": "^6.4.3",
@ -103,14 +101,11 @@
"less-loader": "^2.2.0",
"lodash": "^3.10.0",
"marked": "0.3.3",
"memory-fs": "^0.2.0",
"minimatch": "^2.0.8",
"mkdirp": "^0.5.1",
"moment": "^2.10.3",
"moment-timezone": "^0.4.0",
"ng-clip": "^0.2.6",
"numeral": "^1.5.3",
"nvd3": "panda01/nvd3#kibana",
"raw-loader": "^0.5.1",
"request": "^2.60.0",
"requirefrom": "^0.2.0",
@ -127,26 +122,32 @@
"zeroclipboard": "^2.2.0"
},
"devDependencies": {
"Nonsense": "^0.1.2",
"angular-mocks": "1.2.28",
"auto-release-sinon": "^1.0.3",
"babel-eslint": "^4.0.5",
"chokidar": "^1.0.4",
"eslint": "1.0.x",
"expect.js": "^0.3.1",
"faker": "^1.1.0",
"glob": "^4.3.2",
"grunt": "^0.4.5",
"grunt-babel": "^5.0.1",
"grunt-cli": "0.1.13",
"grunt-contrib-clean": "^0.6.0",
"grunt-contrib-copy": "^0.8.0",
"grunt-esvm": "^1.1.3",
"grunt-esvm": "^1.1.5",
"grunt-karma": "^0.12.0",
"grunt-run": "spalger/grunt-run#master",
"grunt-run": "^0.4.0",
"grunt-s3": "^0.2.0-alpha.3",
"grunt-simple-mocha": "^0.4.0",
"gruntify-eslint": "^1.0.0",
"html-entities": "^1.1.1",
"husky": "^0.8.1",
"istanbul-instrumenter-loader": "^0.1.3",
"karma": "^0.13.3",
"karma-chrome-launcher": "^0.2.0",
"karma-coverage": "^0.5.0",
"karma-firefox-launcher": "^0.1.6",
"karma-growl-reporter": "^0.1.1",
"karma-ie-launcher": "^0.2.0",
@ -158,7 +159,7 @@
"marked-text-renderer": "^0.1.0",
"mocha": "^2.2.5",
"nock": "^2.9.0",
"npm": "^2.11.0",
"npm": "3.2",
"portscanner": "^1.0.0",
"simple-git": "^1.3.0",
"sinon": "^1.15.4",
@ -166,6 +167,7 @@
"wreck": "^6.1.0"
},
"engines": {
"node": ">=2"
"node": "2.5",
"npm": "3.2"
}
}

View file

@ -92,7 +92,8 @@ module.exports = class ClusterManager {
rl.prompt();
rl.on('line', line => {
nls = line.trim() ? 0 : nls + 1;
nls = nls + 1;
if (nls >= 2) {
clearSoon.cancel();
clear();

View file

@ -19,7 +19,9 @@ let dead = fork => {
};
let kill = fork => {
fork.kill('SIGINT'); // make it snappy
// fork.kill() waits for process to disconnect, but causes occasional
// "ipc disconnected" errors and is too slow for the proc's "exit" event
fork.process.kill();
fork.killed = true;
};

View file

@ -6,7 +6,7 @@ var babelOpts = _.defaults({
fromRoot('src'),
/[\\\/](node_modules|bower_components)[\\\/]/
]
}, require('../optimize/babelOptions'));
}, require('../optimize/babelOptions').node);
require('babel/register')(babelOpts);
require('./cli');

View file

@ -1,2 +1,2 @@
require('babel/register')(require('../optimize/babelOptions'));
require('babel/register')(require('../optimize/babelOptions').node);
require('./cli');

View file

@ -1,7 +1,6 @@
var expect = require('expect.js');
var sinon = require('sinon');
var nock = require('nock');
var glob = require('glob');
var rimraf = require('rimraf');
var fs = require('fs');
var { join } = require('path');

View file

@ -11,7 +11,7 @@ var CommonsChunkPlugin = require('webpack/lib/optimize/CommonsChunkPlugin');
let utils = require('requirefrom')('src/utils');
let fromRoot = utils('fromRoot');
let babelOptions = require('./babelOptions');
let babelExclude = [/[\/\\](node_modules|bower_components)[\/\\]/];
let babelExclude = [/[\/\\](webpackShims|node_modules|bower_components)[\/\\]/];
class BaseOptimizer {
constructor(opts) {
@ -78,8 +78,6 @@ class BaseOptimizer {
devtoolModuleFilenameTemplate: '[absolute-resource-path]'
},
recordsPath: resolve(this.env.workingDir, 'webpack.records'),
plugins: [
new webpack.ResolverPlugin([
new DirectoryNameAsMain()
@ -114,7 +112,7 @@ class BaseOptimizer {
test: /\.js$/,
exclude: babelExclude.concat(this.env.noParse),
loader: 'babel',
query: babelOptions
query: babelOptions.webpack
},
{
test: /\.jsx$/,
@ -122,16 +120,17 @@ class BaseOptimizer {
loader: 'babel',
query: defaults({
nonStandard: true,
}, babelOptions)
}, babelOptions.webpack)
}
].concat(this.env.loaders),
postLoaders: this.env.postLoaders || [],
noParse: this.env.noParse,
},
resolve: {
extensions: ['.babel.js', '.js', '.less', ''],
extensions: ['.js', '.jsx', '.less', ''],
postfixes: [''],
modulesDirectories: ['node_modules'],
modulesDirectories: ['webpackShims', 'node_modules'],
loaderPostfixes: ['-loader', ''],
root: fromRoot('.'),
alias: this.env.aliases,

View file

@ -1,4 +1,7 @@
module.exports = {
exports.webpack = {
stage: 1,
nonStandard: false
nonStandard: false,
optional: ['runtime']
};
exports.node = Object.assign({}, exports.webpack);

View file

@ -1,28 +0,0 @@
module.exports = function (kibana) {
let _ = require('lodash');
let fromRoot = require('../../utils/fromRoot');
let { readdirSync } = require('fs');
let { resolve, basename } = require('path');
let modules = {
moment$: fromRoot('node_modules/moment/min/moment.min.js')
};
let metaLibs = resolve(__dirname, 'metaLibs');
readdirSync(metaLibs).forEach(function (file) {
if (file[0] === '.') return;
let name = basename(file, '.js') + '$';
modules[name] = resolve(metaLibs, file);
});
return new kibana.Plugin({
init: false,
uiExports: {
modules: modules,
noParse: [
/node_modules[\/\\](angular|elasticsearch-browser)[\/\\]/,
/node_modules[\/\\](angular-nvd3|mocha|moment)[\/\\]/
]
}
});
};

View file

@ -1,5 +0,0 @@
require('d3');
require('nvd3/build/nv.d3.css');
require('nvd3/build/nv.d3.js');
require('angular-nvd3/dist/angular-nvd3.min.js');
module.exports = window.nv;

View file

@ -1,4 +0,0 @@
{
"name": "bundledLibs",
"version": "1.0.0"
}

View file

@ -35,6 +35,7 @@ module.exports = function (kibana) {
exposeClient(server);
createProxy(server, 'GET', '/{paths*}');
createProxy(server, 'POST', '/_mget');
createProxy(server, 'POST', '/{index}/_search');
createProxy(server, 'POST', '/_msearch');
function noBulkCheck(request, reply) {

View file

@ -1,30 +1,32 @@
var url = require('url');
var fs = require('fs');
var _ = require('lodash');
var readFile = _.partialRight(require('fs').readFileSync, 'utf8');
var http = require('http');
var agentOptions;
module.exports = function (server) {
var https = require('https');
module.exports = _.memoize(function (server) {
var config = server.config();
var target = url.parse(config.get('elasticsearch.url'));
if (!agentOptions) {
agentOptions = {
rejectUnauthorized: config.get('elasticsearch.ssl.verify')
};
if (!/^https/.test(target.protocol)) return new http.Agent();
var customCA;
if (/^https/.test(target.protocol) && config.get('elasticsearch.ssl.ca')) {
customCA = fs.readFileSync(config.get('elasticsearch.ssl.ca'), 'utf8');
agentOptions.ca = [customCA];
}
var agentOptions = {
rejectUnauthorized: config.get('elasticsearch.ssl.verify')
};
// Add client certificate and key if required by elasticsearch
if (/^https/.test(target.protocol) &&
config.get('elasticsearch.ssl.cert') &&
config.get('elasticsearch.ssl.key')) {
agentOptions.crt = fs.readFileSync(config.get('elasticsearch.ssl.cert'), 'utf8');
agentOptions.key = fs.readFileSync(config.get('elasticsearch.ssl.key'), 'utf8');
}
if (config.get('elasticsearch.ssl.ca')) {
agentOptions.ca = [readFile(config.get('elasticsearch.ssl.ca'))];
}
return new http.Agent(agentOptions);
};
// Add client certificate and key if required by elasticsearch
if (config.get('elasticsearch.ssl.cert') && config.get('elasticsearch.ssl.key')) {
agentOptions.cert = readFile(config.get('elasticsearch.ssl.cert'));
agentOptions.key = readFile(config.get('elasticsearch.ssl.key'));
}
return new https.Agent(agentOptions);
});
// See https://lodash.com/docs#memoize: We use a Map() instead of the default, because we want the keys in the cache
// to be the server objects, and by default these would be coerced to strings as keys (which wouldn't be useful)
module.exports.cache = new Map();

View file

@ -1,6 +1,14 @@
<div class="panel panel-default" ng-switch on="panel.type" ng-if="savedObj || error">
<div class="panel-heading">
<span class="panel-title">{{savedObj.title}}</span>
<span class="panel-title">
<i
class="fa"
ng-class="savedObj.vis.type.icon"
aria-label="{{savedObj.vis.type.title}} Icon"
title="{{savedObj.vis.type.title}}">
</i>
{{savedObj.title}}
</span>
<div class="btn-group">
<a aria-label="Edit" ng-show="chrome.getVisible() && editUrl" ng-href="{{editUrl}}">
<i aria-hidden="true" class="fa fa-pencil"></i>

View file

@ -76,6 +76,12 @@ dashboard-grid {
.ellipsis();
flex: 1 1 auto;
i {
opacity: 0.3;
font-size: 1.2em;
margin-right: 4px;
}
}
a {

View file

@ -5,7 +5,7 @@
(Default: <i>{{conf.defVal == undefined ? 'null' : conf.defVal}}</i>)
</span>
<br>
<span class="smaller">{{conf.description}}</span>
<span class="smaller" ng-bind-html="conf.description | trustAsHtml"></span>
</td>
<td class="value">

View file

@ -5,6 +5,7 @@ define(function (require) {
require('plugins/kibana/settings/sections/indices/index'),
require('plugins/kibana/settings/sections/advanced/index'),
require('plugins/kibana/settings/sections/objects/index'),
require('plugins/kibana/settings/sections/status/index'),
require('plugins/kibana/settings/sections/about/index')
];
});

View file

@ -129,6 +129,8 @@ define(function (require) {
if (_.contains(loadedEditors, editor)) return;
loadedEditors.push(editor);
editor.$blockScrolling = Infinity;
var session = editor.getSession();
var fieldName = editor.container.id;

View file

@ -0,0 +1,10 @@
define(function (require) {
var _ = require('lodash');
return {
order: 3,
name: 'status',
display: 'Status',
url: '/status'
};
});

View file

@ -130,7 +130,8 @@
<div class="vis-editor-canvas" ng-class="{ embedded: !chrome.getVisible() }">
<div class="visualize-info" ng-if="savedVis.id">
<div class="visualize-info-tab">
<div class="visualize-info-tab" title="{{savedVis.vis.type.title}}">
<i class="fa" aria-label="{{savedVis.vis.type.title}} Icon" ng-class="savedVis.vis.type.icon"></i>
<span bindonce bo-bind="savedVis.title"></span>
</div>
</div>

View file

@ -9,10 +9,10 @@ module.exports = function formatNumber(num, which) {
case 'time':
return moment(num).format('HH:mm:ss');
case 'byte':
format += 'b';
format += ' b';
break;
case 'ms':
postfix = 'ms';
postfix = ' ms';
break;
}
return numeral(num).format(format) + postfix;

View file

@ -1,48 +1,39 @@
<div class="container">
<div class="container state_default state_{{ui.serverState}}">
<header>
<h1>
<strong>Kibana</strong>&nbsp;Status Page
Status: <span class="state_color">{{ ui.serverStateMessage }}</span>
<i class="fa state_color state_icon" />
</h1>
</header>
<section class="section">
<h4>What is this page?</h4>
<p>This page is your sanity check, and your savior. You can check for potential problems</p>
<p>Here is the status of your Kibana instance and the plugins you have installed, along with some statistics to assess potential problems.</p>
</section>
<div class="system_status_wrapper state_default state_{{ui.serverState}}">
<h3 class="title">
<b>System Status</b> {{ ui.serverStateMessage }}
</h3>
<div class="row metrics_wrapper">
<div ng-repeat="(name, data) in ui.metrics">
<status-page-metric name="{{name}}" data="data"></status-page-metric>
</div>
</div>
<div class="row plugin_status_wrapper">
<h3>Installed Plugins</h3>
<div ng-if="!ui.statuses && ui.loading" class="loading_statuses">
<span class="spinner"></span>
</div>
<h4 ng-if="!ui.statuses && !ui.loading" class="missing_statuses">
No status information available
No plugin status information available
</h4>
<table class="status_breakdown" ng-if="ui.statuses">
<table class="plugin_status_breakdown row" ng-if="ui.statuses">
<tr>
<th class="col-xs-1">Name</th>
<th class="col-xs-11">Description</th>
<th class="col-xs-11">Status</th>
</tr>
<tr ng-repeat="status in ui.statuses" class="status_row state_default state_{{status.state}}">
<tr ng-repeat="status in ui.statuses" class="status_row plugin_state_default plugin_state_{{status.state}}">
<td class="col-xs-1 status_name">{{status.name}}</td>
<td class="col-xs-11 status_message">{{status.message}}</td>
<td class="col-xs-11 status_message">
<i class="fa plugin_state_color plugin_state_icon" />
{{status.message}}
</td>
</tr>
</table>
</div>
<h2>Server Metrics</h2>
<p>Interval of 5 seconds, with a max history of 5 minutes.</p>
<div id="chart_cont" class="row">
<div ng-repeat="(name, data) in ui.metrics">
<status-page-metric name="{{name}}" data="data"></status-page-metric>
</div>
</div>
</div>

View file

@ -9,7 +9,8 @@ require('ui/chrome')
.setTabs([
{
id: '',
title: 'Server Status'
title: 'Server Status',
activeIndicatorColor: '#EFF0F2'
}
])
.setRootTemplate(require('plugins/statusPage/statusPage.html'))
@ -24,6 +25,7 @@ require('ui/chrome')
return $http
.get('/api/status')
.then(function (resp) {
if (ui.fetchError) {
ui.fetchError.clear();
ui.fetchError = null;
@ -36,7 +38,7 @@ require('ui/chrome')
var overall = data.status.overall;
if (!ui.serverState || (ui.serverState !== overall.state)) {
ui.serverState = overall.state;
ui.serverStateMessage = overall.nickname || overall.title;
ui.serverStateMessage = overall.title;
}
})
.catch(function () {
@ -50,9 +52,4 @@ require('ui/chrome')
};
ui.refresh();
// let the browser decide when to slow down requests
setInterval(function () {
$scope.$eval(ui.refresh);
}, 5000);
});

View file

@ -1,126 +1,178 @@
@import "~font-awesome/less/font-awesome";
@status-bg: #eff0f2;
@status-metric-bg: #fff;
@status-metric-border: #aaa;
@status-metric-title-color: #666;
@status-plugins-bg: #fff;
@status-plugins-border: #bbb;
@status-plugins-headings-color: #666;
@status-default: #7c7c7c;
@status-green: #94c63d;
@status-yellow: #edb800;
@status-red: #da1e04;
@icon-default: @fa-var-clock-o;
@icon-green: @fa-var-check;
@icon-yellow: @fa-var-exclamation-circle;
@icon-red: @fa-var-exclamation-triangle;
// background of main page
.content {
background-color: @status-bg;
}
.section {
margin-bottom:15px;
}
.status_breakdown {
margin:0 15px 15px 15px;
// metrics section
.metrics_wrapper {
margin-top: 25px;
.status_metric_wrapper {
padding: 10px;
border: 0;
.status_row {
height:30px;
line-height:30px;
+ .status_row {
border-top:1px solid #ebebeb;
.content {
text-align: right;
padding: 15px;
padding-right: 20px;
background-color: @status-metric-bg;
border-top: 2px solid;
border-top-color: @status-metric-border;
.title {
color: @status-metric-title-color;
margin: 0 0 5px 0;
}
.average {
font-size: 42px;
line-height:45px;
font-weight: normal;
margin:0;
}
}
}
th {
font-size:10px;
color:#a9a9a9;
height:25px;
line-height:25px;
}
.status_name {
font-weight:bold;
padding:0px 5px;
}
.status_message {
border-left:1px solid #ebebeb;
padding:0;
padding-left:15px;
}
}
.system_status_wrapper {
// plugin status table section
.plugin_status_wrapper {
margin-top: 25px;
margin-left: -5px;
margin-right: -5px;
border-top:2px solid;
background-color: @status-plugins-bg;
padding: 10px;
h3 {
margin-top: 3px;
margin-bottom: 3px;
}
.missing_statuses,
.loading_statuses {
padding: 20px;
text-align: center;
}
}
.status_chart_wrapper {
border-top:1px solid #ebebeb;
border-left:1px solid #ebebeb;
.average {
font-size: 42px;
line-height:45px;
margin-top:0;
font-weight:bold;
}
.title {
margin:0 0 5px 0;
text-transform:capitalize;
}
}
.plugin_status_breakdown {
margin-left: 0;
margin-right: 0;
#chart_cont {
margin-top:35px;
}
.status_row {
height:30px;
line-height:30px;
border-bottom:1px solid;
border-bottom-color: @status-plugins-border;
}
.status_chart_wrapper:nth-child(2), .status_chart_wrapper:nth-child(3) {
border-top:0 none transparent;
}
th {
color:@status-plugins-headings-color;
font-weight: normal;
height:25px;
line-height:25px;
border-bottom:1px solid;
border-bottom-color: @status-plugins-border;
}
.status_chart_wrapper:first-child {
border-top:0 none transparent;
border-left:0 none transparent;
}
.status_name {
padding:0px 5px;
border-left: 2px solid;
}
.status_chart_wrapper:nth-child(3n + 1) {
border-left:0 none transparent;
}
.status_chart_wrapper:nth-child(n + 4) {
padding-top:20px;
}
.nv-axis.nv-x .tick line {
display:none;
}
.state(@primary, @secondary) {
&.system_status_wrapper {
border:1px solid @primary;
border-radius:5px;
overflow: hidden;
.title {
color:#ffffff;
height:50px;
line-height:50px;
margin:0 0 10px 0;
padding:0 15px;
border-color:@primary;
background:@primary;
background:-moz-linear-gradient(left,@primary 0%,@secondary 100%);
background:-webkit-gradient(linear,left top,right top,color-stop(0%,@primary),color-stop(100%,@secondary));
background:-webkit-linear-gradient(left,@primary 0%,@secondary 100%);
background:-o-linear-gradient(left,@primary 0%,@secondary 100%);
background:-ms-linear-gradient(left,@primary 0%,@secondary 100%);
background:linear-gradient(to right,@primary 0%,@secondary 100%);
filter:progid:DXImageTransform.Microsoft.gradient(startColorstr=@primary,endColorstr=@secondary,GradientType=1);
.status_message {
padding:0;
padding-left:15px;
border-right: 2px solid;
}
}
}
&.status_row {
color: @primary;
//plugin state
.plugin_state(@color, @icon) {
.plugin_state_color {
color: @color;
}
.plugin_state_icon:before {
content: @icon;
}
.status_name {
border-left-color: @color !important;
}
.status_message {
border-right-color: @color !important;
}
}
.plugin_state_default {
.plugin_state(@status-default, @icon-default);
}
.plugin_state_green {
.plugin_state(@status-green, @icon-green);
}
.plugin_state_yellow {
.plugin_state(@status-yellow, @icon-yellow);
}
.plugin_state_red {
.plugin_state(@status-red, @icon-red);
}
//server state
.state(@color, @icon) {
.state_color {
color: @color;
}
.state_icon:before {
content: @icon;
}
.plugin_status_wrapper {
border-top-color: @color;
}
}
.state_default {
.state(#7C7C7C, #CFCFCF);
.state(@status-default, @icon-default);
}
.state_green {
.state(#0a8e03, #96f501);
.state(@status-green, @icon-green);
}
.state_yellow {
.state(#fdee00, #c16f00);
.state(@status-yellow, @icon-yellow);
}
.state_red {
.state(#da1e04, #ff730f);
.state(@status-red, @icon-red);
}

View file

@ -1,5 +1,6 @@
<div class="status_chart_wrapper col-md-4">
<h3 class="title">{{metric.title}}</h3>
<h4 class="average">{{ metric.averages.join(', ') }}</h4>
<nvd3 options="metric.chartOptions" data="metric.chartData"></nvd3>
<div class="status_metric_wrapper col-md-4">
<div class="content">
<h3 class="title">{{metric.extendedTitle}}</h3>
<h4 class="average">{{ metric.averages.join(', ') }}</h4>
</div>
</div>

View file

@ -5,7 +5,6 @@ require('angular-nvd3');
var toTitleCase = require('./lib/toTitleCase');
var formatNumber = require('./lib/formatNumber');
var getChartOptions = _.memoize(require('./lib/makeChartOptions'));
var readStatData = require('./lib/readStatData');
function calcAvg(metricList, metricNumberType) {
@ -33,17 +32,16 @@ require('ui/modules')
self.name = $scope.name;
self.title = toTitleCase(self.name);
self.extendedTitle = self.title;
self.numberType = 'precise';
self.seriesNames = [];
switch (self.name) {
case 'heapTotal':
case 'heapUsed':
case 'rss':
self.numberType = 'byte';
break;
case 'delay':
case 'responseTimeAvg':
case 'responseTimeMax':
self.numberType = 'ms';
@ -54,12 +52,22 @@ require('ui/modules')
break;
}
self.chartOptions = getChartOptions(self.numberType);
$scope.$watch('data', function (data) {
self.rawData = data;
self.chartData = readStatData(self.rawData, self.seriesNames);
self.averages = calcAvg(self.chartData, self.numberType);
var unit = '';
self.averages = self.averages.map(function (average) {
var parts = average.split(' ');
var value = parts.shift();
unit = parts.join(' ');
return value;
});
self.extendedTitle = self.title;
if (unit) {
self.extendedTitle = `${self.extendedTitle} (${unit})`;
}
});
}
};

View file

@ -1,10 +1,9 @@
let { chain, memoize } = require('lodash');
let { resolve } = require('path');
let { map, fromNode } = require('bluebird');
let fromRoot = require('./fromRoot');
let { Glob } = require('glob');
let fromRoot = require('../../utils/fromRoot');
let findSourceFiles = async (patterns, cwd = fromRoot('.')) => {
patterns = [].concat(patterns || []);

View file

@ -3,13 +3,20 @@ module.exports = (kibana) => {
let utils = require('requirefrom')('src/utils');
let fromRoot = utils('fromRoot');
let findSourceFiles = utils('findSourceFiles');
let findSourceFiles = require('./findSourceFiles');
return new kibana.Plugin({
config: (Joi) => {
return Joi.object({
enabled: Joi.boolean().default(true),
instrument: Joi.boolean().default(false)
}).default();
},
uiExports: {
bundle: async (UiBundle, env, apps) => {
let modules = [];
let config = kibana.config;
// add the modules from all of the apps
for (let app of apps) {
@ -23,6 +30,14 @@ module.exports = (kibana) => {
for (let f of testFiles) modules.push(f);
if (config.get('testsBundle.instrument')) {
env.addPostLoader({
test: /\.jsx?$/,
exclude: /[\/\\](__tests__|node_modules|bower_components|webpackShims)[\/\\]/,
loader: 'istanbul-instrumenter'
});
}
return new UiBundle({
id: 'tests',
modules: modules,

View file

@ -1,4 +1,4 @@
{
"name": "tests_bundle",
"name": "testsBundle",
"version": "0.0.0"
}

View file

@ -1,6 +1,14 @@
module.exports = function ({env, bundle}) {
module.exports = require('lodash').template(
`
let pluginSlug = env.pluginInfo.sort()
.map(p => ' * - ' + p)
.join('\n');
let requires = bundle.modules
.map(m => `require('${m}');`)
.join('\n');
return `
/**
* Test entry file
*
@ -8,14 +16,7 @@ module.exports = require('lodash').template(
*
* context: <%= JSON.stringify(env.context) %>
* includes code from:
<%
env.pluginInfo.sort().forEach(function (plugin, i) {
if (i > 0) print('\\n');
print(' * - ' + plugin);
});
%>
${pluginSlug}
*
*/
@ -27,15 +28,9 @@ window.__KBN__ = {
};
require('ui/testHarness');
<%
bundle.modules.forEach(function (id, i) {
if (i > 0) print('\\n');
print(\`require('\${id.replace(/\\\\/g, '\\\\\\\\')}');\`);
});
%>
${requires}
require('ui/testHarness').bootstrap(/* go! */);
`
);
`;
};

View file

@ -1,37 +1,61 @@
let Promise = require('bluebird');
let Joi = require('joi');
let _ = require('lodash');
let { zipObject } = require('lodash');
let override = require('./override');
let pkg = require('requirefrom')('src/utils')('packageJson');
const schema = Symbol('Joi Schema');
const schemaKeys = Symbol('Schema Extensions');
const vals = Symbol('config values');
const pendingSets = Symbol('Pending Settings');
module.exports = class Config {
constructor(schema, defaults) {
this.schema = Joi.object({}).default();
this.config = {};
this.unappliedDefaults = _.cloneDeep(defaults || {});
if (schema) this.extendSchema(schema);
constructor(initialSchema, initialSettings) {
this[schemaKeys] = new Map();
this[vals] = Object.create(null);
this[pendingSets] = new Map(_.pairs(_.cloneDeep(initialSettings || {})));
if (initialSchema) this.extendSchema(initialSchema);
}
extendSchema(key, schema) {
getPendingSets() {
return this[pendingSets];
}
extendSchema(key, extension) {
if (key && key.isJoi) {
return _.each(key._inner.children, function (child) {
return _.each(key._inner.children, (child) => {
this.extendSchema(child.key, child.schema);
}, this);
});
}
if (this.has(key)) {
throw new Error(`Config schema already has key ${key}`);
throw new Error(`Config schema already has key: ${key}`);
}
this.schema = this.schema.keys(_.set({}, key, schema));
this[schemaKeys].set(key, extension);
this[schema] = null;
if (this.unappliedDefaults[key]) {
this.set(key, this.unappliedDefaults[key]);
this.unappliedDefaults[key] = null;
let initialVals = this[pendingSets].get(key);
if (initialVals) {
this.set(key, initialVals);
this[pendingSets].delete(key);
} else {
this._commit(this.config);
this._commit(this[vals]);
}
}
removeSchema(key) {
if (!this[schemaKeys].has(key)) {
throw new TypeError(`Unknown schema key: ${key}`);
}
this[schema] = null;
this[schemaKeys].delete(key);
this[pendingSets].delete(key);
delete this[vals][key];
}
resetTo(obj) {
@ -40,7 +64,7 @@ module.exports = class Config {
set(key, value) {
// clone and modify the config
let config = _.cloneDeep(this.config);
let config = _.cloneDeep(this[vals]);
if (_.isPlainObject(key)) {
config = override(config, key);
} else {
@ -51,10 +75,10 @@ module.exports = class Config {
this._commit(config);
}
_commit(newConfig) {
_commit(newVals) {
// resolve the current environment
let env = newConfig.env;
delete newConfig.env;
let env = newVals.env;
delete newVals.env;
if (_.isObject(env)) env = env.name;
if (!env) env = process.env.NODE_ENV || 'production';
@ -79,23 +103,21 @@ module.exports = class Config {
);
}
let results = Joi.validate(newConfig, this.schema, {
context: context
});
let results = Joi.validate(newVals, this.getSchema(), { context });
if (results.error) {
throw results.error;
}
this.config = results.value;
this[vals] = results.value;
}
get(key) {
if (!key) {
return _.cloneDeep(this.config);
return _.cloneDeep(this[vals]);
}
let value = _.get(this.config, key);
let value = _.get(this[vals], key);
if (value === undefined) {
if (!this.has(key)) {
throw new Error('Unknown config key: ' + key);
@ -130,6 +152,15 @@ module.exports = class Config {
key = key.join('.');
}
return !!has(key, this.schema);
return !!has(key, this.getSchema());
}
getSchema() {
if (!this[schema]) {
let objKeys = zipObject([...this[schemaKeys]]);
this[schema] = Joi.object().keys(objKeys).default();
}
return this[schema];
}
};

View file

@ -208,6 +208,24 @@ describe('lib/config/config', function () {
});
describe('#removeSchema(key)', function () {
it('should completely remove the key', function () {
var config = new Config(Joi.object().keys({
a: Joi.number().default(1)
}));
expect(config.get('a')).to.be(1);
config.removeSchema('a');
expect(() => config.get('a')).to.throwException('Unknown config key');
});
it('only removes existing keys', function () {
var config = new Config(Joi.object());
expect(() => config.removeSchema('b')).to.throwException('Unknown schema');
});
});
});
});

View file

@ -1,16 +1,11 @@
module.exports = function (kbnServer, server, config) {
let _ = require('lodash');
server.decorate('server', 'config', function () {
return kbnServer.config;
});
_.forOwn(config.unappliedDefaults, function (val, key) {
if (val === null) return;
server.log(['warning', 'config'], {
tmpl: 'Settings for "<%= key %>" were not applied, check for spelling errors and ensure the plugin is loaded.',
key: key,
val: val
});
});
let tmpl = 'Settings for "<%= key %>" were not applied, check for spelling errors and ensure the plugin is loaded.';
for (let [key, val] of config.getPendingSets()) {
server.log(['warning', 'config'], { key, val, tmpl });
}
};

View file

@ -1,5 +1,6 @@
module.exports = function (kbnServer, server, config) {
let _ = require('lodash');
let fs = require('fs');
let Boom = require('boom');
let Hapi = require('hapi');
let parse = require('url').parse;
@ -10,13 +11,23 @@ module.exports = function (kbnServer, server, config) {
server = kbnServer.server = new Hapi.Server();
// Create a new connection
server.connection({
var connectionOptions = {
host: config.get('server.host'),
port: config.get('server.port'),
routes: {
cors: config.get('server.cors')
}
});
};
// enable tls if ssl key and cert are defined
if (config.get('server.ssl.key') && config.get('server.ssl.cert')) {
connectionOptions.tls = {
key: fs.readFileSync(config.get('server.ssl.key')),
cert: fs.readFileSync(config.get('server.ssl.cert'))
};
}
server.connection(connectionOptions);
// provide a simple way to expose static directories
server.decorate('server', 'exposeStaticDir', function (routePath, dirPath) {

View file

@ -14,7 +14,7 @@ module.exports = class KbnLogger {
this.dest = process.stdout;
} else {
this.dest = writeStr(config.dest, {
mode: 'a',
flags: 'a',
encoding: 'utf8'
});
}

View file

@ -33,10 +33,17 @@ module.exports = class Plugin {
};
}
async setupConfig() {
let { config } = this.kbnServer;
async readConfig() {
let schema = await this.getConfigSchema(Joi);
this.kbnServer.config.extendSchema(this.id, schema || defaultConfigSchema);
let { config } = this.kbnServer;
config.extendSchema(this.id, schema || defaultConfigSchema);
if (config.get([this.id, 'enabled'])) {
return true;
} else {
config.removeSchema(this.id);
return false;
}
}
async init() {

View file

@ -1,4 +1,4 @@
let _ = require('lodash');
let { get, indexBy } = require('lodash');
let inspect = require('util').inspect;
let PluginApi = require('./PluginApi');
@ -14,22 +14,32 @@ module.exports = class Plugins extends Collection {
}
async new(path) {
var api = new PluginApi(this.kbnServer, path);
let api = new PluginApi(this.kbnServer, path);
let output = [].concat(require(path)(api) || []);
let config = this.kbnServer.config;
if (!output.length) return;
// clear the byIdCache
this[byIdCache] = null;
for (let product of output) {
if (product instanceof api.Plugin) {
this[byIdCache] = null;
this.add(product);
await product.setupConfig();
} else {
throw new TypeError('unexpected plugin export ' + inspect(product));
let plugin = product;
this.add(plugin);
let enabled = await plugin.readConfig();
if (!enabled) this.delete(plugin);
continue;
}
throw new TypeError('unexpected plugin export ' + inspect(product));
}
}
get byId() {
return this[byIdCache] || (this[byIdCache] = _.indexBy([...this], 'id'));
return this[byIdCache] || (this[byIdCache] = indexBy([...this], 'id'));
}
};

View file

@ -7,18 +7,10 @@ module.exports = async function (kbnServer, server, config) {
}
let { plugins } = kbnServer;
let enabledPlugins = {};
// setup config and filter out disabled plugins
for (let plugin of plugins) {
if (config.get([plugin.id, 'enabled'])) {
enabledPlugins[plugin.id] = plugin;
}
}
let path = [];
let initialize = async id => {
let plugin = enabledPlugins[id];
async function initialize(id) {
let plugin = plugins.byId[id];
if (includes(path, id)) {
throw new Error(`circular dependencies found: "${path.concat(id).join(' -> ')}"`);
@ -27,13 +19,10 @@ module.exports = async function (kbnServer, server, config) {
path.push(id);
for (let reqId of plugin.requiredIds) {
if (!enabledPlugins[reqId]) {
if (plugins.byId[reqId]) {
throw new Error(`Requirement "${reqId}" for plugin "${plugin.id}" is disabled.`);
} else {
throw new Error(`Unmet requirement "${reqId}" for plugin "${plugin.id}"`);
}
if (!plugins.byId[reqId]) {
throw new Error(`Unmet requirement "${reqId}" for plugin "${id}"`);
}
await initialize(reqId);
}
@ -42,5 +31,7 @@ module.exports = async function (kbnServer, server, config) {
path.pop();
};
for (let id of keys(enabledPlugins)) await initialize(id);
for (let {id} of plugins) {
await initialize(id);
}
};

View file

@ -53,7 +53,6 @@ module.exports = async (kbnServer, server, config) => {
continue;
}
require(modulePath);
await plugins.new(path);
debug({ tmpl: 'Found plugin at <%= path %>', path: modulePath });
}

View file

@ -17,7 +17,7 @@ class Status extends EventEmitter {
tags.push(this.state === 'red' ? 'error' : 'info');
server.log(tags, {
tmpl: 'Status changed from <%= prevState %> to <%= state %><% message && print(` - ${message}`) %>',
tmpl: 'Status changed from <%= prevState %> to <%= state %><%= message ? " - " + message : "" %>',
name: name,
state: this.state,
message: this.message,

View file

@ -1,27 +1,12 @@
module.exports = function (kbnServer, server, config) {
var _ = require('lodash');
var Samples = require('./Samples');
var ServerStatus = require('./ServerStatus');
var { join } = require('path');
kbnServer.status = new ServerStatus(kbnServer.server);
kbnServer.metrics = new Samples(60);
if (server.plugins.good) {
server.plugins.good.monitor.on('ops', function (event) {
var port = config.get('server.port');
kbnServer.metrics.add({
rss: event.psmem.rss,
heapTotal: event.psmem.heapTotal,
heapUsed: event.psmem.heapUsed,
load: event.osload,
delay: event.psdelay,
concurrency: _.get(event, ['concurrents', port]),
responseTimeAvg: _.get(event, ['responseTimes', port, 'avg']),
responseTimeMax: _.get(event, ['responseTimes', port, 'max']),
requests: _.get(event, ['requests', port, 'total'], 0)
});
});
kbnServer.mixin(require('./metrics'));
}
server.route({

View file

@ -0,0 +1,27 @@
module.exports = function (kbnServer, server, config) {
var _ = require('lodash');
var Samples = require('./Samples');
let lastReport = Date.now();
kbnServer.metrics = new Samples(12);
server.plugins.good.monitor.on('ops', function (event) {
let now = Date.now();
let secSinceLast = (now - lastReport) / 1000;
lastReport = now;
var port = config.get('server.port');
let requests = _.get(event, ['requests', port, 'total'], 0);
let requestsPerSecond = requests / secSinceLast;
kbnServer.metrics.add({
heapTotal: _.get(event, 'psmem.heapTotal'),
heapUsed: _.get(event, 'psmem.heapUsed'),
load: event.osload,
responseTimeAvg: _.get(event, ['responseTimes', port, 'avg']),
responseTimeMax: _.get(event, ['responseTimes', port, 'max']),
requestsPerSecond: requestsPerSecond
});
});
};

View file

@ -38,7 +38,10 @@ module.exports = class UiBundlerEnv {
this.pluginInfo = [];
// regular expressions which will prevent webpack from parsing the file
this.noParse = [];
this.noParse = [
/node_modules[\/\\](angular|elasticsearch-browser)[\/\\]/,
/node_modules[\/\\](angular-nvd3|mocha|moment)[\/\\]/
];
// webpack aliases, like require paths, mapping a prefix to a directory
this.aliases = {
@ -51,6 +54,7 @@ module.exports = class UiBundlerEnv {
// webpack loaders map loader configuration to regexps
this.loaders = [];
this.postLoaders = [];
}
consumePlugin(plugin) {
@ -71,6 +75,11 @@ module.exports = class UiBundlerEnv {
for (let loader of arr(spec)) this.addLoader(loader);
};
case 'postLoaders':
return (plugin, spec) => {
for (let loader of arr(spec)) this.addPostLoader(loader);
};
case 'noParse':
return (plugin, spec) => {
for (let re of arr(spec)) this.addNoParse(re);
@ -91,6 +100,10 @@ module.exports = class UiBundlerEnv {
this.loaders.push(loader);
}
addPostLoader(loader) {
this.postLoaders.push(loader);
}
addNoParse(regExp) {
this.noParse.push(regExp);
}

View file

@ -1,37 +1,29 @@
module.exports = function ({env, bundle}) {
module.exports = require('lodash').template(
`
let pluginSlug = env.pluginInfo.sort()
.map(p => ' * - ' + p)
.join('\n');
let requires = bundle.modules
.map(m => `require('${m}');`)
.join('\n');
return `
/**
* Optimized application entry file
* Test entry file
*
* This is programmatically created and updated, do not modify
*
* context: <%= JSON.stringify(env.context) %>
* includes code from:
<%
env.pluginInfo.sort().forEach(function (plugin) {
print(\` * - \${plugin}\n\`);
});
%> *
${pluginSlug}
*
*/
require('ui/chrome');
<%
bundle.modules
.filter(function (id) {
return id !== 'ui/chrome';
})
.forEach(function (id, i) {
if (i > 0) print('\\n');
print(\`require('\${id}');\`);
});
%>
require('ui/chrome')
${requires}
require('ui/chrome').bootstrap(/* xoxo */);
`
);
`;
};

View file

@ -260,7 +260,8 @@ define(function (require) {
AggConfig.prototype.makeLabel = function () {
if (!this.type) return '';
return this.type.makeLabel(this);
var pre = (_.get(this.vis, 'params.mode') === 'percentage') ? 'Percentage of ' : '';
return pre += this.type.makeLabel(this);
};
AggConfig.prototype.field = function () {

View file

@ -223,7 +223,10 @@ describe('buildHierarchicalData', function () {
type: 'pie',
aggs: [
{ type: 'count', schema: 'metric' },
{ type: 'filters', schema: 'segment', params: {
{
type: 'filters',
schema: 'segment',
params: {
filters: [
{ input: { query: { query_string: { query: '_type:apache' } } } },
{ input: { query: { query_string: { query: '_type:nginx' } } } }
@ -256,7 +259,10 @@ describe('buildHierarchicalData', function () {
type: 'pie',
aggs: [
{ type: 'count', schema: 'metric' },
{ type: 'filters', schema: 'split', params: {
{
type: 'filters',
schema: 'split',
params: {
filters: [
{ input: { query: { query_string: { query: '_type:apache' } } } },
{ input: { query: { query_string: { query: '_type:nginx' } } } }

View file

@ -38,8 +38,7 @@ describe('AggConfig Filters', function () {
expect(filter.range).to.have.property('bytes');
expect(filter.range.bytes).to.have.property('gte', 2048);
expect(filter.range.bytes).to.have.property('lt', 3072);
expect(filter.meta).to.have.property('formattedValue', '2,048');
});
});
});

View file

@ -41,7 +41,7 @@ describe('AggConfig Filters', function () {
expect(filter.range).to.have.property('bytes');
expect(filter.range.bytes).to.have.property('gte', 1024.0);
expect(filter.range.bytes).to.have.property('lt', 2048.0);
expect(filter.meta).to.have.property('formattedValue', '1,024 to 2,048');
});
});
});

View file

@ -5,10 +5,12 @@ define(function (require) {
return function (aggConfig, key) {
var value = parseInt(key, 10);
return buildRangeFilter(aggConfig.params.field, {
gte: value,
lt: value + aggConfig.params.interval
}, aggConfig.vis.indexPattern);
return buildRangeFilter(
aggConfig.params.field,
{gte: value, lt: value + aggConfig.params.interval},
aggConfig.vis.indexPattern,
aggConfig.fieldFormatter()(key)
);
};
};
});

View file

@ -12,7 +12,7 @@ var parse = _.wrap(require('url').parse, function (parse, path) {
function TabCollection() {
var tabs = null;
var tabs = [];
var specs = null;
var defaults = null;
var activeTab = null;

View file

@ -31,8 +31,9 @@ define(function () {
']',
description: 'Values that define the format used in situations where timebased' +
' data is rendered in order, and formatted timestamps should adapt to the' +
' interval between measurements. Keys are ISO 8601 intervals:' +
' http://en.wikipedia.org/wiki/ISO_8601#Time_intervals'
' interval between measurements. Keys are' +
' <a href="http://en.wikipedia.org/wiki/ISO_8601#Time_intervals" target="_blank">' +
'ISO8601 intervals.</a>'
},
'defaultIndex': {
value: null,
@ -71,8 +72,10 @@ define(function () {
'visualization:tileMap:maxPrecision': {
value: 7,
description: 'The maximum geoHash precision displayed on tile maps: 7 is high, 10 is very high, ' +
'12 is the max. Explanation of cell dimensions: http://www.elastic.co/guide/en/elasticsearch/reference/current/' +
'search-aggregations-bucket-geohashgrid-aggregation.html#_cell_dimensions_at_the_equator',
'12 is the max. ' +
'<a href="http://www.elastic.co/guide/en/elasticsearch/reference/current/' +
'search-aggregations-bucket-geohashgrid-aggregation.html#_cell_dimensions_at_the_equator" target="_blank">' +
'Explanation of cell dimensions.</a>',
},
'csv:separator': {
value: ',',

View file

@ -62,6 +62,7 @@
readonly
ui-ace="{
useWrapMode: true,
onLoad: aceLoaded,
advanced: {
highlightActiveLine: false
},

View file

@ -17,24 +17,31 @@ define(function (require) {
filter: '=?',
columns: '=?'
},
link: function ($scope, $el, attr) {
// If a field isn't in the mapping, use this
$scope.mode = 'table';
$scope.mapping = $scope.indexPattern.fields.byName;
$scope.flattened = $scope.indexPattern.flattenHit($scope.hit);
$scope.hitJson = angular.toJson($scope.hit, true);
$scope.formatted = $scope.indexPattern.formatHit($scope.hit);
$scope.fields = _.keys($scope.flattened).sort();
link: {
pre($scope) {
$scope.aceLoaded = (editor) => {
editor.$blockScrolling = Infinity;
};
},
$scope.toggleColumn = function (fieldName) {
_.toggleInOut($scope.columns, fieldName);
};
post($scope, $el, attr) {
// If a field isn't in the mapping, use this
$scope.mode = 'table';
$scope.mapping = $scope.indexPattern.fields.byName;
$scope.flattened = $scope.indexPattern.flattenHit($scope.hit);
$scope.hitJson = angular.toJson($scope.hit, true);
$scope.formatted = $scope.indexPattern.formatHit($scope.hit);
$scope.fields = _.keys($scope.flattened).sort();
$scope.showArrayInObjectsWarning = function (row, field) {
var value = $scope.flattened[field];
return _.isArray(value) && typeof value[0] === 'object';
};
$scope.toggleColumn = function (fieldName) {
_.toggleInOut($scope.columns, fieldName);
};
$scope.showArrayInObjectsWarning = function (row, field) {
var value = $scope.flattened[field];
return _.isArray(value) && typeof value[0] === 'object';
};
}
}
};
});

View file

@ -42,5 +42,26 @@ describe('Filter Bar Directive', function () {
$rootScope.$apply();
});
it('should return a value for a range/histogram filter from a scripted field', (done) => {
let filter = {
meta: {
index: 'logstash-*',
formattedValue: '1,000.00 to 2,000.00',
field: 'script number'
},
script: {
params: {
gte: 1000,
lt: 2000,
value: '>=1,000.00 <2,000.00'
}
}
};
mapScript(filter).then((result) => {
expect(result).to.have.property('value', filter.meta.formattedValue);
done();
});
$rootScope.$apply();
});
});
});

View file

@ -70,11 +70,11 @@ module.exports = function (grunt) {
grunt.log.ok(`downloading ${platform.name} - ${mb} mb`);
};
grunt.registerTask('_build:downloadNodes:start', function () {
grunt.registerTask('_build:downloadNodeBuilds:start', function () {
map(platforms, start).nodeify(this.async());
});
grunt.registerTask('_build:downloadNodes:finish', function () {
grunt.registerTask('_build:downloadNodeBuilds:finish', function () {
map(activeDownloads, async (platform) => {
await platform.downloadPromise;
grunt.log.ok(`${platform.name} download complete`);

View file

@ -2,23 +2,24 @@ module.exports = function (grunt) {
let { flatten } = require('lodash');
grunt.registerTask('build', flatten([
'_build:shrinkwrap:ensureExists:true',
'_build:getProps',
'clean:build',
'clean:target',
'_build:downloadNodes:start',
'_build:downloadNodeBuilds:start',
'copy:devSource',
'babel:build',
'_build:cliIndex',
'_build:installedPlugins',
'_build:packageJson',
'_build:readme',
'_build:shrinkwrap:copyToBuild',
'_build:shrinkwrap:cleanup',
'_build:installNpmDeps',
'clean:testsFromModules',
'clean:deepModuleBins',
'clean:deepModules',
'run:optimizeBuild',
'stop:optimizeBuild',
'_build:downloadNodes:finish',
'_build:downloadNodeBuilds:finish',
'_build:versionedLinks',
'_build:archives',
!grunt.option('os-packages') ? [] : [

View file

@ -2,7 +2,6 @@ module.exports = function (grunt) {
let { defaults } = require('lodash');
let pkg = grunt.config.get('pkg');
let deepModules = grunt.config.get('deepModules');
grunt.registerTask('_build:packageJson', function () {
@ -18,7 +17,7 @@ module.exports = function (grunt) {
sha: grunt.config.get('buildSha')
},
repository: pkg.repository,
dependencies: defaults({}, pkg.dependencies, deepModules)
dependencies: pkg.dependencies
}, null, ' ')
);
});

41
tasks/build/shrinkwrap.js Normal file
View file

@ -0,0 +1,41 @@
// Grunt tasks that manage npm-shrinkwrap.json around a release build:
// ensure the file exists, produce a production-only copy inside the build
// directory, and clean up any file these tasks themselves created.
module.exports = function (grunt) {
let { config } = grunt;
let { statSync } = require('fs');
let { join } = require('path');
// run a shell command from the repo root via the project exec helper
let exec = (...args) => require('../utils/exec')(...args, { cwd: config.get('root') });
// files created by ensureExists, so :cleanup removes only what we made
let newFiles = [];
let shrinkwrapFile = join(config.get('root'), 'npm-shrinkwrap.json');
// Verify npm-shrinkwrap.json exists. When `createIfMissing` is truthy,
// generate it (including devDependencies); otherwise fail the build.
grunt.registerTask('_build:shrinkwrap:ensureExists', function (createIfMissing) {
try {
statSync(shrinkwrapFile);
} catch (e) {
// anything other than "file not found" is a real error — rethrow
if (e.code !== 'ENOENT') throw e;
if (createIfMissing) {
exec('npm', ['shrinkwrap', '--dev', '--logLevel', 'error']);
newFiles.push(shrinkwrapFile); // remember so :cleanup can delete it
}
else grunt.fail.warn('Releases require an npm-shrinkwrap.json file to exist');
}
});
// Copy shrinkwrap files into build/kibana: keep the dev shrinkwrap as
// npm-shrinkwrap.build.json, ship a production-only npm-shrinkwrap.json,
// then restore the original dev shrinkwrap in the repo root.
grunt.registerTask('_build:shrinkwrap:copyToBuild', function () {
// this.requires(['_build:shrinkwrap:ensureExists', 'copy:devSource']);
// backup shrinkwrap and copy to build
exec('cp', ['npm-shrinkwrap.json', 'npm-shrinkwrap.dev']);
exec('cp', ['npm-shrinkwrap.json', join(config.get('root'), 'build', 'kibana', 'npm-shrinkwrap.build.json')]);
// create shrinkwrap without dev dependencies and copy to build
exec('npm', ['shrinkwrap', '--logLevel', 'error']);
exec('cp', ['npm-shrinkwrap.json', join(config.get('root'), 'build', 'kibana', 'npm-shrinkwrap.json')]);
// restore the dev shrinkwrap
exec('mv', ['npm-shrinkwrap.dev', 'npm-shrinkwrap.json']);
});
// Remove any shrinkwrap file that ensureExists generated (no-op otherwise).
grunt.registerTask('_build:shrinkwrap:cleanup', function () {
if (newFiles.length) exec('rm', newFiles.splice(0));
});
};

View file

@ -3,9 +3,7 @@ let babelOptions = require('requirefrom')('src')('optimize/babelOptions');
module.exports = {
build: {
options: defaults({
optional: ['runtime']
}, babelOptions),
options: babelOptions.node,
src: [
'build/kibana/**/*.js',
'!**/public/**',

View file

@ -1,10 +1,7 @@
module.exports = function (grunt) {
let modules = Object.keys(grunt.config.get('deepModules'));
return {
build: 'build',
target: 'target',
testsFromModules: 'build/kibana/node_modules/**/*test*/**',
deepModuleBins: 'build/kibana/node_modules/*/node_modules/**/.bin/{' + modules.join(',') + '}',
deepModules: 'build/kibana/node_modules/*/node_modules/**/{' + modules.join(',') + '}/',
testsFromModules: 'build/kibana/node_modules/**/*test*/**'
};
};

View file

@ -5,6 +5,7 @@ module.exports = function (grunt) {
src: [
'src/**',
'bin/**',
'webpackShims/**',
'config/kibana.yml',
'!src/**/__tests__/**',
'!src/testUtils/**',

View file

@ -40,5 +40,15 @@ module.exports = function (grunt) {
dev: { singleRun: false },
unit: { singleRun: true },
coverage: {
singleRun: true,
reporters: ['coverage'],
coverageReporter: {
reporters: [
{ type: 'html', dir: 'coverage' },
{ type: 'text-summary' },
]
}
}
};
};

View file

@ -45,7 +45,6 @@ module.exports = function (grunt) {
'inherits@1.0.0': ['ISC'],
'jsonpointer@1.1.0': ['MIT'],
'leaflet@0.7.2': ['BSD-2-Clause'],
'moment-timezone@0.0.6': ['MIT'],
'Nonsense@0.1.2': ['Public-Domain'],
'pkginfo@0.2.3': ['MIT'],
'uglify-js@2.2.5': ['BSD'],

View file

@ -4,7 +4,7 @@ module.exports = function (grunt) {
let version = grunt.config.get('pkg.version');
let nodeVersion = grunt.config.get('nodeVersion');
let rootPath = grunt.config.get('root');
let baseUri = `https://iojs.org/dist/v${nodeVersion}`;
let baseUri = `https://nodejs.org/dist/v${nodeVersion}`;
return [
'darwin-x64',
@ -14,7 +14,7 @@ module.exports = function (grunt) {
].map(function (name) {
let win = name === 'windows';
let nodeUrl = win ? `${baseUri}/win-x86/iojs.exe` : `${baseUri}/iojs-v${nodeVersion}-${name}.tar.gz`;
let nodeUrl = win ? `${baseUri}/node.exe` : `${baseUri}/node-v${nodeVersion}-${name}.tar.gz`;
let nodeDir = resolve(rootPath, `.node_binaries/${nodeVersion}/${name}`);
let buildName = `kibana-${version}-${name}`;

View file

@ -1,4 +1,5 @@
module.exports = function (grunt) {
let platform = require('os').platform();
let {resolve} = require('path');
let root = p => resolve(__dirname, '../../', p);
@ -10,7 +11,7 @@ module.exports = function (grunt) {
quiet: false,
failOnError: false
},
cmd: './bin/kibana',
cmd: /^win/.test(platform) ? '.\\bin\\kibana.bat' : './bin/kibana',
args: [
'--server.port=5610',
'--env.name=development',
@ -20,6 +21,24 @@ module.exports = function (grunt) {
]
},
testCoverageServer: {
options: {
wait: false,
ready: /Server running/,
quiet: false,
failOnError: false
},
cmd: /^win/.test(platform) ? '.\\bin\\kibana.bat' : './bin/kibana',
args: [
'--server.port=5610',
'--env.name=development',
'--logging.json=false',
'--optimize.bundleFilter=tests',
'--plugins.initialize=false',
'--testsBundle.instrument=true'
]
},
devTestServer: {
options: {
wait: false,

View file

@ -1,9 +1,9 @@
module.exports = function (grunt) {
var readline = require('readline');
// build, then zip and upload to s3
grunt.registerTask('release', [
'_build:shrinkwrap:ensureExists',
'_release:confirmUpload',
'_release:loadS3Config',
'build',

View file

@ -2,6 +2,7 @@ var _ = require('lodash');
module.exports = function (grunt) {
grunt.registerTask('test:server', [ 'simplemocha:all' ]);
grunt.registerTask('test:browser', [ 'run:testServer', 'karma:unit' ]);
grunt.registerTask('test:coverage', [ 'run:testCoverageServer', 'karma:coverage' ]);
grunt.registerTask('test:quick', [
'test:server',

5
webpackShims/angular-nvd3.js vendored Normal file
View file

@ -0,0 +1,5 @@
// webpack shim for angular-nvd3: load d3 plus nvd3's CSS and JS before the
// angular directive bundle, then export the `nv` global that nv.d3.js
// attaches to `window` so `require('angular-nvd3')` yields the nvd3 API.
require('d3');
require('@spalger/nvd3/build/nv.d3.css');
require('@spalger/nvd3/build/nv.d3.js');
require('@spalger/angular-nvd3/dist/angular-nvd3.min.js');
module.exports = window.nv;

1
webpackShims/moment.js Normal file
View file

@ -0,0 +1 @@
module.exports = require('../node_modules/moment/min/moment.min.js');

4
webpackShims/ng-clip.js Normal file
View file

@ -0,0 +1,4 @@
// webpack shim for ng-clip: ensure angular and the zeroclipboard shim are
// loaded first, pull in ngClip's source, then register its `ngClipboard`
// angular module as a dependency of the `kibana` module.
require('angular');
require('zeroclipboard');
require('node_modules/ng-clip/src/ngClip');
require('ui/modules').get('kibana', ['ngClipboard']);

View file

@ -1,10 +1,9 @@
require('angular');
// ng-clip expects ZeroClipboard to be global, but it's UMD, so it never is
window.ZeroClipboard = require('node_modules/zeroclipboard/dist/ZeroClipboard.js');
window.ZeroClipboard.SWF_URL = require('file!node_modules/zeroclipboard/dist/ZeroClipboard.swf');
require('node_modules/ng-clip/src/ngClip');
require('ui/modules').get('kibana', ['ngClipboard']);
window.ZeroClipboard.config({
swfPath: window.ZeroClipboard.SWF_URL,
});
module.exports = window.ZeroClipboard;