Merge branch 'master' of github.com:elastic/kibana

This commit is contained in:
LeeDr 2018-08-22 17:01:49 -05:00
commit a20d71b491
887 changed files with 18980 additions and 13389 deletions

3
.browserslistrc Normal file
View file

@ -0,0 +1,3 @@
last 2 versions
> 5%
Safari 7

2
.gitignore vendored
View file

@ -42,4 +42,4 @@ package-lock.json
.vscode
npm-debug.log*
.tern-project
index.css
**/public/index.css

14
.i18nrc.json Normal file
View file

@ -0,0 +1,14 @@
{
"paths": {
"kbn": "src/core_plugins/kibana",
"common.server": "src/server",
"common.ui": "src/ui",
"xpack.idxMgmt": "xpack/plugins/index_management"
},
"exclude": [
"src/ui/ui_render/bootstrap/app_bootstrap.js",
"src/ui/ui_render/ui_render_mixin.js",
"x-pack/plugins/monitoring/public/components/cluster/overview/alerts_panel.js",
"x-pack/plugins/monitoring/public/directives/alerts/index.js"
]
}

View file

@ -1 +1 @@
8.11.3
8.11.4

2
.nvmrc
View file

@ -1 +1 @@
8.11.3
8.11.4

View file

@ -103,6 +103,33 @@ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
---
This product bundles childnode-remove which is available under a
"MIT" license.
The MIT License (MIT)
Copyright (c) 2016-present, jszhou
https://github.com/jserz/js_piece
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
---
This product bundles geohash.js which is available under a
"MIT" license. For details, see src/ui/public/utils/decode_geo_hash.js.

View file

@ -27,10 +27,14 @@ entirely.
[float]
== APIs
* <<role-management-api>>
* <<saved-objects-api>>
* <<logstash-configuration-management-api>>
* <<url-shortening-api>>
--
include::api/role-management.asciidoc[]
include::api/saved-objects.asciidoc[]
include::api/logstash-configuration-management.asciidoc[]
include::api/url-shortening.asciidoc[]

View file

@ -0,0 +1,18 @@
[role="xpack"]
[[role-management-api]]
== Kibana Role Management API
experimental[This API is *experimental* and may be changed or removed completely in a future release. The underlying mechanism of enforcing role based access control is stable, but the APIs for managing the roles are currently experimental.]
The role management API allows people to manage roles that grant <<kibana-privileges>>.
It is *not* supported to do so using the
{ref}/security-api.html#security-role-apis[{es} role management APIs], and doing
so will likely cause {kib}'s authorization to behave unexpectedly.
* <<role-management-api-put>>
* <<role-management-api-get>>
* <<role-management-api-delete>>
include::role-management/put.asciidoc[]
include::role-management/get.asciidoc[]
include::role-management/delete.asciidoc[]

View file

@ -0,0 +1,24 @@
[[role-management-api-delete]]
=== Delete role
experimental[This API is experimental and may be changed or removed completely in a future release. Although the underlying mechanism of enforcing role-based access control is stable, the APIs for managing the roles are currently experimental.]
==== Authorization
To use this API, you must have at least the `manage_security` cluster privilege.
==== Request
To delete a role, submit a DELETE request to the `/api/security/role/<rolename>`
endpoint:
[source,js]
--------------------------------------------------
DELETE /api/security/role/my_admin_role
--------------------------------------------------
// KIBANA
==== Response
If the role is successfully deleted, the response code is `204`; otherwise, the response
code is `404`.

View file

@ -0,0 +1,111 @@
[[role-management-api-get]]
=== Get Role
experimental[This API is experimental and may be changed or removed completely in a future release. Although the underlying mechanism of enforcing role-based access control is stable, the APIs for managing the roles are currently experimental.]
Retrieves all {kib} roles, or a specific role.
==== Authorization
To use this API, you must have at least the `manage_security` cluster privilege.
==== Get all {kib} roles
===== Request
To retrieve all roles, issue a GET request to the
/api/security/role endpoint.
[source,js]
--------------------------------------------------
GET /api/security/role
--------------------------------------------------
// KIBANA
===== Response
A successful call returns a response code of `200` and a response body containing a JSON
representation of the roles.
[source,js]
--------------------------------------------------
[
{
"name": "my_kibana_role",
"metadata" : {
"version" : 1
},
"transient_metadata": {
"enabled": true
},
"elasticsearch": {
"indices": [ ],
"cluster": [ ],
"run_as": [ ]
},
"kibana": [ {
"privileges": [ "all" ]
} ]
},
{
"name": "my_admin_role",
"metadata" : {
"version" : 1
},
"transient_metadata": {
"enabled": true
},
"elasticsearch": {
"cluster" : [ "all" ],
"indices" : [ {
"names" : [ "index1", "index2" ],
"privileges" : [ "all" ],
"field_security" : {
"grant" : [ "title", "body" ]
},
"query" : "{\"match\": {\"title\": \"foo\"}}"
} ]
},
"kibana": [ ]
}
]
--------------------------------------------------
==== Get a specific role
===== Request
To retrieve a specific role, issue a GET request to
the `/api/security/role/<rolename>` endpoint:
[source,js]
--------------------------------------------------
GET /api/security/role/my_kibana_role
--------------------------------------------------
// KIBANA
===== Response
A successful call returns a response code of `200` and a response body containing a JSON
representation of the role.
[source,js]
--------------------------------------------------
{
"name": "my_kibana_role",
"metadata" : {
"version" : 1
},
"transient_metadata": {
"enabled": true
},
"elasticsearch": {
"cluster": [ ],
"indices": [ ],
"run_as": [ ]
},
"kibana": [ {
"privileges": [ "all" ]
} ]
}
--------------------------------------------------

View file

@ -0,0 +1,64 @@
[[role-management-api-put]]
=== Create or Update Role
experimental[This API is experimental and may be changed or removed completely in a future release. Although the underlying mechanism of enforcing role-based access control is stable, the APIs for managing the roles are currently experimental.]
Creates a new {kib} role or updates the attributes of an existing role. {kib} roles are stored in the
{es} native realm.
==== Authorization
To use this API, you must have at least the `manage_security` cluster privilege.
==== Request
To create or update a role, issue a PUT request to the
`/api/security/role/<rolename>` endpoint.
[source,js]
--------------------------------------------------
PUT /api/security/role/my_kibana_role
--------------------------------------------------
==== Request Body
The following parameters can be specified in the body of a PUT request to add or update a role:
`metadata`:: (object) Optional meta-data. Within the `metadata` object, keys
that begin with `_` are reserved for system usage.
`elasticsearch`:: (object) Optional {es} cluster and index privileges, valid keys are
`cluster`, `indices` and `run_as`. For more information, see {xpack-ref}/defining-roles.html[Defining Roles].
`kibana`:: (list) A list of objects that specify the <<kibana-privileges>>.
===== Example
[source,js]
--------------------------------------------------
PUT /api/security/role/my_kibana_role
{
"metadata" : {
"version" : 1
},
"elasticsearch": {
"cluster" : [ "all" ],
"indices" : [ {
"names" : [ "index1", "index2" ],
"privileges" : [ "all" ],
"field_security" : {
"grant" : [ "title", "body" ]
},
"query" : "{\"match\": {\"title\": \"foo\"}}"
} ]
},
"kibana": [ {
"privileges": [ "all" ]
} ]
}
--------------------------------------------------
// KIBANA
==== Response
A successful call returns a response code of `204` and no response body.

View file

@ -0,0 +1,11 @@
[[url-shortening-api]]
== URL Shortening API
Kibana URLs contain the state of the application making them very long and cumbersome.
Internet Explorer has URL length restrictions, and some wiki and markup parsers don't do well with the full-length version of the Kibana URL.
The short URLs enabled by this API are designed to make sharing Kibana URLs easier.
* <<url-shortening-api-api-shorten-url>>
include::url_shortening/shorten_url.asciidoc[]

View file

@ -0,0 +1,46 @@
[[url-shortening-api-api-shorten-url]]
=== Shorten URL
The Shorten URL API allows for converting a Kibana URL into a token.
==== Request
`POST /api/shorten_url`
==== Request Body
The request body must be a JSON object containing the following properties:
`url` (required)::
(string) Kibana URL, relative to `/app/kibana`, to be shortened.
==== Response body
The response body will have a top level `urlId` property that contains
the shortened URL token for the provided request body.
==== Examples
[source,js]
--------------------------------------------------
POST api/shorten_url
{
"url": "/app/kibana#/dashboard?_g=()&_a=(description:'',filters:!(),fullScreenMode:!f,options:(darkTheme:!f,hidePanelTitles:!f,useMargins:!t),panels:!((embeddableConfig:(),gridData:(h:15,i:'1',w:24,x:0,y:0),id:'8f4d0c00-4c86-11e8-b3d7-01146121b73d',panelIndex:'1',type:visualization,version:'7.0.0-alpha1')),query:(language:lucene,query:''),timeRestore:!f,title:'New%20Dashboard',viewMode:edit)"
}
--------------------------------------------------
// KIBANA
A successful call returns a response code of `200` and a response body
containing a JSON structure similar to the following example:
[source,js]
--------------------------------------------------
{
"urlId": "f73b295ff92718b26bc94edac766d8e3"
}
--------------------------------------------------
A shortened Kibana URL can then be constructed for easier sharing.
`http://localhost:5601/goto/f73b295ff92718b26bc94edac766d8e3`

View file

@ -119,4 +119,4 @@ TIP: You can create a link to a dashboard by title by doing this: +
TIP: When sharing a link to a dashboard snapshot, use the *Short URL*. Snapshot
URLs are long and can be problematic for Internet Explorer and other
tools. To create a short URL, you must have write access to `.kibana`.
tools. To create a short URL, you must have write access to {kib}.

View file

@ -15,4 +15,7 @@ include::development/core-development.asciidoc[]
include::development/plugin-development.asciidoc[]
include::development/security/index.asciidoc[]
include::development/pr-review.asciidoc[]

View file

@ -64,7 +64,10 @@ node scripts/functional_test_runner
[float]
===== More about `node scripts/functional_test_runner`
When run without any arguments the `FunctionalTestRunner` automatically loads the configuration in the standard location, but you can override that behavior with the `--config` flag.
When run without any arguments the `FunctionalTestRunner` automatically loads the configuration in the standard location, but you can override that behavior with the `--config` flag. List configs with multiple --config arguments.
* `--config test/functional/config.js` starts Elasticsearch and Kibana servers with the selenium tests configuration.
* `--config test/api_integration/config.js` starts Elasticsearch and Kibana servers with the api integration tests configuration.
There are also command line flags for `--bail` and `--grep`, which behave just like their mocha counterparts. For instance, use `--grep=foo` to run only tests that match a regular expression.

View file

@ -0,0 +1,12 @@
[[development-security]]
== Security
Kibana has generally been able to implement security transparently to core and plugin developers, and this largely remains the case. {kib} relies on two methods that the <<development-elasticsearch, elasticsearch plugin>>'s `Cluster` provides: `callWithRequest` and `callWithInternalUser`.
`callWithRequest` executes requests against Elasticsearch using the authentication credentials of the Kibana end-user. So, if you log into Kibana with the user of `foo` when `callWithRequest` is used, {kib} executes the request against Elasticsearch as the user `foo`. Historically, `callWithRequest` has been used extensively to perform actions that are initiated at the request of Kibana end-users.
`callWithInternalUser` executes requests against Elasticsearch using the internal Kibana server user, and has historically been used for performing actions that aren't initiated by Kibana end users; for example, creating the initial `.kibana` index or performing health checks against Elasticsearch.
However, with the changes that role-based access control (RBAC) introduces, this is no longer cut and dry. {kib} now requires all access to the `.kibana` index goes through the `SavedObjectsClient`. This used to be a best practice, as the `SavedObjectsClient` was responsible for translating the documents stored in Elasticsearch to and from Saved Objects, but RBAC is now taking advantage of this abstraction to implement access control and determine when to use `callWithRequest` versus `callWithInternalUser`.
include::rbac.asciidoc[]

View file

@ -0,0 +1,174 @@
[[development-security-rbac]]
=== Role-based access control
Role-based access control (RBAC) in {kib} relies upon the {ref}/security-api-privileges.html[privilege APIs] that Elasticsearch exposes. This allows {kib} to define the privileges that {kib} wishes to grant to users, assign them to the relevant users using roles, and then authorize the user to perform a specific action. This is handled within a secured instance of the `SavedObjectsClient` and available transparently to consumers when using `request.getSavedObjectsClient()` or `savedObjects.getScopedSavedObjectsClient()`.
[[development-rbac-privileges]]
==== {kib} Privileges
When {kib} first starts up, it executes the following `POST` request against {es}. This synchronizes the definition of the privileges with various `actions` which are later used to authorize a user:
[source,js]
----------------------------------
POST /_xpack/security/privilege
Content-Type: application/json
Authorization: Basic kibana changeme
{
"kibana-.kibana":{
"all":{
"application":"kibana-.kibana",
"name":"all",
"actions":[
"version:7.0.0-alpha1-SNAPSHOT",
"action:login",
"action:*"
],
"metadata":{}
},
"read":{
"application":"kibana-.kibana",
"name":"read",
"actions":[
"version:7.0.0-alpha1-SNAPSHOT",
"action:login",
"action:saved_objects/dashboard/get",
"action:saved_objects/dashboard/bulk_get",
"action:saved_objects/dashboard/find",
...
],"metadata":{}}
}
}
----------------------------------
[NOTE]
==============================================
The application is created by concatenating the prefix of `kibana-` with the value of `kibana.index` from the `kibana.yml`, so different {kib} tenants are isolated from one another.
==============================================
[[development-rbac-assigning-privileges]]
==== Assigning {kib} Privileges
{kib} privileges are assigned to specific roles using the `applications` element. For example, the following role assigns the <<kibana-privileges-all, all>> privilege at `*` `resources` (which will in the future be used to secure spaces) to the default {kib} `application`:
[source,js]
----------------------------------
"new_kibana_user": {
"applications": [
{
"application": "kibana-.kibana",
"privileges": [
"all"
],
"resources": [
"*"
]
}
]
}
----------------------------------
Roles that grant <<kibana-privileges>> should be managed using the <<role-management-api>> or the *Management* / *Security* / *Roles* page, not directly using the {es} {ref}/security-api.html#security-role-apis[role management API]. This role can then be assigned to users using the {es}
{ref}/security-api.html#security-user-apis[user management APIs].
[[development-rbac-authorization]]
==== Authorization
The {es} {ref}/security-api-privileges.html#security-api-privileges[has privileges API] determines whether the user is authorized to perform a specific action:
[source,js]
----------------------------------
POST /_xpack/security/user/_has_privileges
Content-Type: application/json
Authorization: Basic foo_read_only_user password
{
"applications":[
{
"application":"kibana-.kibana",
"resources":["*"],
"privileges":[
"action:saved_objects/dashboard/save"
]
}
]
}
----------------------------------
{es} checks if the user is granted a specific action. If the user is assigned a role that grants a privilege, {es} uses the <<development-rbac-privileges, {kib} privileges>> definition to associate this with the actions, which makes authorizing users more intuitive and flexible programmatically.
Once we have authorized the user to perform a specific action, we can execute the request using `callWithInternalUser`.
[[development-rbac-legacy-fallback]]
==== Legacy Fallback
Users have existing roles that rely on index privileges to the `.kibana` index. The legacy fallback uses the `callWithRequest` method when the user doesn't have any application privileges. This relies on the user having index privileges on `.kibana`. The legacy fallback will be available until 7.0.
Within the secured instance of the `SavedObjectsClient` the `_has_privileges` check determines if the user has any index privileges on the `.kibana` index:
[source,js]
----------------------------------
POST /_xpack/security/user/_has_privileges
Content-Type: application/json
Authorization: Basic foo_legacy_user password
{
"applications":[
{
"application":"kibana-.kibana",
"resources":["*"],
"privileges":[
"action:saved_objects/dashboard/save"
]
}
],
"index": [
{
"names": ".kibana",
"privileges": ["create", "delete", "read", "view_index_metadata"]
}
]
}
----------------------------------
Here is an example response if the user does not have application privileges, but does have privileges on the `.kibana` index:
[source,js]
----------------------------------
{
"username": "foo_legacy_user",
"has_all_requested": false,
"cluster": {},
"index": {
".kibana": {
"read": true,
"view_index_metadata": true,
"create": true,
"delete": true
}
},
"application": {
"kibana-.kibana": {
"*": {
"action:saved_objects/dashboard/save": false
}
}
}
}
----------------------------------
{kib} automatically detects that the request could be executed against `.kibana` using `callWithRequest` and does so.
When the user first logs into {kib}, if they have no application privileges and will have to rely on the legacy fallback, {kib} logs a deprecation warning similar to the following:
[source,js]
----------------------------------
${username} relies on index privileges on the {kib} index. This is deprecated and will be removed in {kib} 7.0
----------------------------------
[[development-rbac-reserved-roles]]
==== Reserved roles
Ideally, the `kibana_user` and `kibana_dashboard_only_user` roles should only use application privileges, and no longer have index privileges on the `.kibana` index. However, making this switch forces the user to incur downtime if Elasticsearch is upgraded to >= 6.4, and {kib} is running < 6.4. To mitigate this downtime, for the 6.x releases the `kibana_user` and `kibana_dashboard_only_user` roles have both application privileges and index privileges. When {kib} is running >= 6.4 it uses the application privileges to authorize the user, but when {kib} is running < 6.4 {kib} relies on the direct index privileges.

View file

@ -3,14 +3,17 @@
[partintro]
--
Ready to get some hands-on experience with Kibana?
This tutorial shows you how to:
* Load a sample data set into Elasticsearch
* Define an index pattern
* Discover and explore the sample data
* Visualize the data
* Assemble visualizations into a dashboard
Ready to get some hands-on experience with {kib}? There are two ways to start:
* <<tutorial-sample-data, Explore {kib} using the Flights dashboard>>
+
Load the Flights sample data and dashboard with one click and start
interacting with {kib} visualizations in seconds.
* <<tutorial-build-dashboard, Build your own dashboard>>
+
Manually load a data set and build your own visualizations and dashboard.
Before you begin, make sure you've <<install, installed Kibana>> and established
a {kibana-ref}/connect-to-elasticsearch.html[connection to Elasticsearch].
@ -22,6 +25,22 @@ If you are running our https://cloud.elastic.co[hosted Elasticsearch Service]
on Elastic Cloud, you can access Kibana with a single click.
--
include::getting-started/tutorial-sample-data.asciidoc[]
include::getting-started/tutorial-sample-filter.asciidoc[]
include::getting-started/tutorial-sample-query.asciidoc[]
include::getting-started/tutorial-sample-discover.asciidoc[]
include::getting-started/tutorial-sample-edit.asciidoc[]
include::getting-started/tutorial-sample-inspect.asciidoc[]
include::getting-started/tutorial-sample-remove.asciidoc[]
include::getting-started/tutorial-full-experience.asciidoc[]
include::getting-started/tutorial-load-dataset.asciidoc[]
include::getting-started/tutorial-define-index.asciidoc[]
@ -32,4 +51,6 @@ include::getting-started/tutorial-visualizing.asciidoc[]
include::getting-started/tutorial-dashboard.asciidoc[]
include::getting-started/tutorial-inspect.asciidoc[]
include::getting-started/wrapping-up.asciidoc[]

View file

@ -1,14 +1,14 @@
[[tutorial-dashboard]]
== Putting it Together in a Dashboard
=== Displaying your visualizations in a dashboard
A dashboard is a collection of visualizations that you can arrange and share.
Here you'll build a dashboard that contains the visualizations you saved during
You'll build a dashboard that contains the visualizations you saved during
this tutorial.
. Open *Dashboard*.
. Click *Create new dashboard*.
. Click *Add*.
. Click *Bar Example*, *Map Example*, *Markdown Example*, and *Pie Example*.
. Add *Bar Example*, *Map Example*, *Markdown Example*, and *Pie Example*.
Your sample dashboard look like this:

View file

@ -1,5 +1,5 @@
[[tutorial-define-index]]
== Defining Your Index Patterns
=== Defining your index patterns
Index patterns tell Kibana which Elasticsearch indices you want to explore.
An index pattern can match the name of a single index, or include a wildcard
@ -10,7 +10,7 @@ series of indices in the format `logstash-YYYY.MMM.DD`. To explore all
of the log data from May 2018, you could specify the index pattern
`logstash-2018.05*`.
Create patterns for the Shakespeare data set, which has an
You'll create patterns for the Shakespeare data set, which has an
index named `shakespeare,` and the accounts data set, which has an index named
`bank.` These data sets don't contain time-series data.

View file

@ -1,12 +1,16 @@
[[tutorial-discovering]]
== Discovering Your Data
=== Discovering your data
Using the Discover application, you can enter
an {ref}/query-dsl-query-string-query.html#query-string-syntax[Elasticsearch
query] to search your data and filter the results.
. Open *Discover*. The `shakes*` pattern is the current index pattern.
. Click the caret to the right of `shakes*`, and select `ba*`.
. Open *Discover*.
+
The current index pattern appears below the filter bar, in this case `shakes*`.
You might need to click *New* in the menu bar to refresh the data.
. Click the caret to the right of the current index pattern, and select `ba*`.
. In the search field, enter the following string:
+
[source,text]
@ -19,8 +23,8 @@ excess of 47,500. It returns results for account numbers 8, 32, 78, 85, and 97.
image::images/tutorial-discover-2.png[]
By default, all fields are shown for each matching document. To choose which
fields to display, hover the mouse over the the list of *Available Fields*
and then click *add* next to each field you want include.
fields to display, hover the pointer over the list of *Available Fields*
and then click *add* next to each field you want to include as a column in the table.
For example, if you add the `account_number` field, the display changes to a list of five
account numbers.

View file

@ -0,0 +1,12 @@
[[tutorial-build-dashboard]]
== Building your own dashboard
Ready to load some data and build a dashboard? This tutorial shows you how to:
* Load a data set into Elasticsearch
* Define an index pattern
* Discover and explore the data
* Visualize the data
* Add visualizations to a dashboard
* Inspect the data behind a visualization

View file

@ -0,0 +1,24 @@
[[tutorial-inspect]]
=== Inspecting the data
Seeing visualizations of your data is great,
but sometimes you need to look at the actual data to
understand what's really going on. You can inspect the data behind any visualization
and view the {es} query used to retrieve it.
. In the dashboard, hover the pointer over the pie chart.
. Click the icon in the upper right.
. From the *Options* menu, select *Inspect*.
+
[role="screenshot"]
image::images/tutorial-full-inspect1.png[]
You can also look at the query used to fetch the data for the visualization.
. Open the *View:Data* menu and select *Requests*.
. Click the tabs to look at the request statistics, the Elasticsearch request,
and the response in JSON.
. To close the Inspector, click X in the upper right.
+
[role="screenshot"]
image::images/tutorial-full-inspect2.png[]

View file

@ -1,5 +1,5 @@
[[tutorial-load-dataset]]
== Loading Sample Data
=== Loading sample data
This tutorial requires three data sets:
@ -16,6 +16,8 @@ Two of the data sets are compressed. To extract the files, use these commands:
unzip accounts.zip
gunzip logs.jsonl.gz
==== Structure of the data sets
The Shakespeare data set has this structure:
[source,json]
@ -54,11 +56,18 @@ The logs data set has dozens of different fields. Here are the notable fields fo
"@timestamp": "date"
}
==== Set up mappings
Before you load the Shakespeare and logs data sets, you must set up {ref}/mapping.html[_mappings_] for the fields.
Mappings divide the documents in the index into logical groups and specify the characteristics
of the fields. These characteristics include the searchability of the field
and whether it's _tokenized_, or broken up into separate words.
NOTE: If security is enabled, you must have the `all` Kibana privilege to run this tutorial.
You must also have the `create`, `manage` `read`, `write,` and `delete`
index privileges. See {xpack-ref}/security-privileges.html[Security Privileges]
for more information.
In Kibana *Dev Tools > Console*, set up a mapping for the Shakespeare data set:
[source,js]
@ -149,6 +158,8 @@ PUT /logstash-2015.05.20
The accounts data set doesn't require any mappings.
==== Load the data sets
At this point, you're ready to use the Elasticsearch {ref}/docs-bulk.html[bulk]
API to load the data sets:

View file

@ -0,0 +1,32 @@
[[tutorial-sample-data]]
== Explore {kib} using the Flight dashboard
You're new to {kib} and want to try it out. With one click, you can install
the Flights sample data and start interacting with Kibana.
The Flights data set contains data for four airlines.
You can load the data and preconfigured dashboard from the {kib} home page.
. On the home page, click the link next to *Sample data*.
. On the *Sample flight data* card, click *Add*.
. Click *View data*.
You're taken to the *Global Flight* dashboard, a collection of charts, graphs,
maps, and other visualizations of the data in the `kibana_sample_data_flights` index.
[role="screenshot"]
image::images/tutorial-sample-dashboard.png[]
In this tutorial, you'll learn to:
* Filter the data
* Query the data
* Discover the data
* Edit a visualization
* Inspect the data behind the scenes
NOTE: If security is enabled, you must have the `all` Kibana privilege.
You must also have access to the `kibana_sample_data_flights` index with
the `read`, `write,` and `manage` privileges. See {xpack-ref}/security-privileges.html[Security Privileges]
for more information.

View file

@ -0,0 +1,27 @@
[[tutorial-sample-discover]]
=== Using Discover
In the Discover application, the Flight data is presented in a table. You can
interactively explore the data, including searching and filtering.
* In the side navigation, select *Discover*.
The current index pattern appears below the filter bar. An
<<index-patterns, index pattern>> tells {kib} which {es} indices you want to
explore.
The `kibana_sample_data_flights` index contains a time field. A histogram
shows the distribution of documents over time.
[role="screenshot"]
image::images/tutorial-sample-discover1.png[]
By default, all fields are shown for each matching document. To choose which fields to display,
hover the pointer over the list of *Available Fields* and then click *add* next
to each field you want to include as a column in the table.
For example, if you add the `DestAirportID` and `DestWeather` fields,
the display includes columns for those two fields:
[role="screenshot"]
image::images/tutorial-sample-discover2.png[]

View file

@ -0,0 +1,45 @@
[[tutorial-sample-edit]]
=== Editing a visualization
You have edit permissions for the *Global Flight* dashboard so you can change
the appearance and behavior of the visualizations. For example, you might want
to see which airline has the lowest average fares.
. Go to the *Global Flight* dashboard.
. In the menu bar, click *Edit*.
. In the *Average Ticket Price* visualization, click the gear icon in
the upper right.
. From the *Options* menu, select *Edit visualization*.
==== Edit a metric visualization
*Average Ticket Price* is a metric visualization.
To specify which groups to display
in this visualization, you use an {es} {ref}/search-aggregations.html[bucket aggregation].
This aggregation sorts the documents that match your search criteria into different
categories, or buckets.
. In the *Buckets* pane, select *Split Group*.
. In the *Aggregation* dropdown menu, select *Terms*.
. In the *Field* dropdown, select *Carrier*.
. Set *Descending* to four.
. Click *Apply changes* image:images/apply-changes-button.png[].
You now see the average ticket price for all four airlines.
[role="screenshot"]
image::images/tutorial-sample-edit1.png[]
==== Save the changes
. In the menu bar, click *Save*.
. Leave the visualization name unchanged and click *Save*.
. Go to the *Global Flight* dashboard.
. Resize the panel for the *Average Ticket Price* visualization by dragging the
handle in the lower right.
You can also rearrange the visualizations by clicking the header and dragging.
. In the menu bar, click *Save* and then confirm the save.
+
[role="screenshot"]
image::images/tutorial-sample-edit2.png[]

View file

@ -0,0 +1,23 @@
[[tutorial-sample-filter]]
=== Filtering the data
Many visualizations in the *Global Flight* dashboard are interactive. You can
apply filters to modify the view of the data across all visualizations.
. In the *Controls* visualization, set an *Origin City* and a *Destination City*.
. Click *Apply changes*.
+
The `OriginCityName` and the `DestCityName` fields are filtered to match
the data you specified.
+
For example, this dashboard shows the data for flights from London to Newark
and Pittsburgh.
+
[role="screenshot"]
image::images/tutorial-sample-filter.png[]
+
. To remove the filters, in the *Controls* visualization, click *Clear form*, and then
*Apply changes*.
You can also add filters manually. In the filter bar, click *Add a Filter*
and specify the data you want to view.

View file

@ -0,0 +1,24 @@
[[tutorial-sample-inspect]]
=== Inspecting the data
Seeing visualizations of your data is great,
but sometimes you need to look at the actual data to
understand what's really going on. You can inspect the data behind any visualization
and view the {es} query used to retrieve it.
. Hover the pointer over the *Flight Count and Average Ticket Price* visualization.
. Click the icon in the upper right.
. From the *Options* menu, select *Inspect*.
+
[role="screenshot"]
image::images/tutorial-sample-inspect1.png[]
You can also look at the query used to fetch the data for the visualization.
. Open the *View: Data* menu and select *Requests*.
. Click the tabs to look at the request statistics, the Elasticsearch request,
and the response in JSON.
. To close the editor, click X in the upper right.
+
[role="screenshot"]
image::images/tutorial-sample-inspect2.png[]

View file

@ -0,0 +1,30 @@
[[tutorial-sample-query]]
=== Querying the data
You can enter an {es} query to narrow the view of the data.
. To find all flights out of Rome, submit this query:
+
[source,text]
OriginCityName:Rome
. For a more complex query with AND and OR, try this:
+
[source,text]
OriginCityName:Rome AND (Carrier:JetBeats OR "Kibana Airlines")
+
The dashboard updates to show data for the flights out of Rome on JetBeats and
{kib} Airlines.
+
[role="screenshot"]
image::images/tutorial-sample-query.png[]
. When you are finished exploring the dashboard, remove the query by
clearing the contents in the query bar and pressing Enter.
In general, filters are faster than queries. For more information, see {ref}/query-filter-context.html[Query and filter context].
TIP: {kib} has an experimental autocomplete feature that can
help jumpstart your queries. To turn on this feature, click *Options* on the
right of the query bar and opt in. With autocomplete enabled,
search suggestions are displayed when you start typing your query.

View file

@ -0,0 +1,18 @@
[[tutorial-sample-remove]]
=== Wrapping up
When you're done experimenting with the sample data set, you can remove it.
. Go to the {kib} home page and click the link next to *Sample data*.
. On the *Sample flight data* card, click *Remove*.
Now that you have a handle on the {kib} basics, you might be interested in:
* <<tutorial-build-dashboard, Building your own dashboard>>. You'll learn how to load your own
data, define an index pattern, and create visualizations and dashboards.
* <<visualize>>. You'll find information about all the visualization types
{kib} has to offer.
* <<dashboard>>. You have the ability to share a dashboard, or embed the dashboard in a web page.
* <<discover>>. You'll learn more about searching data and filtering by field.

View file

@ -1,5 +1,5 @@
[[tutorial-visualizing]]
== Visualizing Your Data
=== Visualizing your data
In the Visualize application, you can shape your data using a variety
of charts, tables, and maps, and more. You'll create four
@ -19,7 +19,7 @@ gain insight into the account balances in the bank account data.
[role="screenshot"]
image::images/tutorial-visualize-wizard-step-2.png[]
=== Pie Chart
=== Pie chart
Initially, the pie contains a single "slice."
That's because the default search matched all documents.
@ -73,15 +73,17 @@ in a ring around the balance ranges.
[role="screenshot"]
image::images/tutorial-visualize-pie-3.png[]
To save this chart so you can use it later, click *Save* in the top menu bar
and enter `Pie Example`.
To save this chart so you can use it later:
=== Bar Chart
* Click *Save* in the top menu bar and enter `Pie Example`.
=== Bar chart
You'll use a bar chart to look at the Shakespeare data set and compare
the number of speaking parts in the plays.
Create a *Vertical Bar* chart and set the search source to `shakes*`.
* Create a *Vertical Bar* chart and set the search source to `shakes*`.
Initially, the chart is a single bar that shows the total count
of documents that match the default wildcard query.
@ -120,32 +122,12 @@ that play.
Notice how the individual play names show up as whole phrases, instead of
broken into individual words. This is the result of the mapping
you did at the beginning of the tutorial, when your marked the `play_name` field
you did at the beginning of the tutorial, when you marked the `play_name` field
as `not analyzed`.
////
You might
also be curious to see which plays make the greatest demands on an
individual actor. Let's show the maximum number of speeches for a given part.
. Click *Add metrics* to add a Y-axis aggregation.
. Set *Aggregation* to `Max` and *Field* to `speech_number`.
. Click *Metrics & Axes* and then change *Mode* from `stacked` to `normal`.
. Click *Apply changes* image:images/apply-changes-button.png[].
[role="screenshot"]
image::images/tutorial-visualize-bar-3.png[]
The play Love's Labours Lost has an unusually high maximum speech number compared to the other plays.
Note how the *Number of speaking parts* Y-axis starts at zero, but the bars don't begin to differentiate until 18. To
make the differences stand out, starting the Y-axis at a value closer to the minimum, go to Options and select
*Scale Y-Axis to data bounds*.
////
*Save* this chart with the name `Bar Example`.
=== Coordinate Map
=== Coordinate map
Using a coordinate map, you can visualize geographic information in the log file sample data.
@ -175,18 +157,6 @@ You can navigate the map by clicking and dragging. The controls
on the top left of the map enable you to zoom the map and set filters.
Give them a try.
////
- Zoom image:images/viz-zoom.png[] buttons,
- *Fit Data Bounds*
image:images/viz-fit-bounds.png[] button to zoom to the lowest level that
includes all the points.
- Include or exclude a rectangular area
by clicking the *Latitude/Longitude Filter* image:images/viz-lat-long-filter.png[]
button and drawing a bounding box on the map. Applied filters are displayed
below the query bar. Hovering over a filter displays controls to toggle,
pin, invert, or delete the filter.
////
[role="screenshot"]
image::images/tutorial-visualize-map-3.png[]

View file

@ -1,5 +1,5 @@
[[wrapping-up]]
== Wrapping Up
=== Wrapping up
Now that you have a handle on the basics, you're ready to start exploring
your own data with Kibana.

Binary file not shown.

After

Width:  |  Height:  |  Size: 204 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 222 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 378 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 947 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 386 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 191 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 335 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 302 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 268 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 230 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 309 KiB

View file

@ -3,7 +3,7 @@
=== Advanced Configuration for Dashboard Only Mode
If {security} is enabled, Kibana has a built-in `kibana_dashboard_only_user`
role that grants read only access to the `.kibana` index. This role is sufficient
role that grants read-only access to {kib}. This role is sufficient
for most use cases. However, if your setup requires a custom {kib} index, you can create
your own roles and tag them as *Dashboard only mode*.
@ -15,8 +15,8 @@ Here you can add as many roles as you like.
image:management/dashboard_only_mode/images/advanced_dashboard_mode_role_setup.png["Advanced dashboard mode role setup"]
By default, a *dashboard only mode* user doesn't have access to any data indices.
To grant read only access to your custom {kib}
index, you must assign `view_index_metadata` and `read` privileges.
To grant read-only access to your custom {kib} instance,
you must assign the read <<kibana-privileges, Kibana privilege>>.
These privileges are available under *Management > Security > Roles*.
For more information on roles and privileges, see {xpack-ref}/authorization.html[User Authorization].

Binary file not shown.

Before

Width:  |  Height:  |  Size: 145 KiB

After

Width:  |  Height:  |  Size: 281 KiB

Before After
Before After

View file

@ -18,10 +18,10 @@ that grant the user appropriate data access. For information on roles
and privileges, see {xpack-ref}/authorization.html[User Authorization].
The `kibana_dashboard_only_user` role is
preconfigured with read only permissions on the `.kibana` index.
preconfigured with read-only permissions to {kib}.
IMPORTANT: If you assign users the `kibana_dashboard_only_user` role, along with a role
with write permissions on the `.kibana` index, they *will* have write access,
with write permissions to {kib}, they *will* have write access,
even though the controls remain hidden in the {kib} UI.
IMPORTANT: If you also assign users the reserved `superuser` role, they will be able to see

View file

@ -49,4 +49,10 @@ to Kibana's advanced setting.
*Impact:* This setting is no longer necessary. If you enable {monitoring} across the Elastic Stack, a monitoring agent runs
on each Elasticsearch node, Logstash node, Kibana instance, and Beat to collect and index metrics. Each node and instance is
considered unique based on its persistent UUID, which is written to the path.data directory when the node or instance starts.
considered unique based on its persistent UUID, which is written to the path.data directory when the node or instance starts.
[float]
=== Deprecated API `/shorten` has been removed
*Details:* The `/shorten` API has been deprecated since 6.5, when it was replaced by the `/api/shorten_url` API.
*Impact:* The '/shorten' API has been removed. Use the '/api/shorten_url' API instead.

View file

@ -0,0 +1,36 @@
[role="xpack"]
[[xpack-security-audit-logging]]
=== Audit Logging
You can enable auditing to keep track of security-related events such as
authorization success and failures. Logging these events enables you
to monitor {kib} for suspicious activity and provides evidence in the
event of an attack.
Use the {kib} audit logs in conjunction with {es}'s
audit logging to get a holistic view of all security-related events.
{kib} defers to {es}'s security model for authentication, data
index authorization, and features that are driven by cluster-wide privileges.
For more information on enabling audit logging in {es}, see
{stack-ov}/auditing.html[Auditing Security Events].
[IMPORTANT]
============================================================================
Audit logs are **disabled** by default. To enable this functionality, you
must set `xpack.security.audit.enabled` to `true` in `kibana.yml`.
============================================================================
Audit logging uses the standard {kib} logging output, which can be configured
in the `kibana.yml` and is discussed in <<settings>>.
==== Audit event types
When you are auditing security events, each request can generate
multiple audit events. The following is a list of the events that can be generated:
|======
| `saved_objects_authorization_success` | Logged when a user is authorized to access saved
objects when using a role with <<kibana-privileges>>
| `saved_objects_authorization_failure` | Logged when a user isn't authorized to access saved
objects when using a role with <<kibana-privileges>>
|======

View file

@ -0,0 +1,31 @@
[role="xpack"]
[[xpack-security-authorization]]
=== Authorization
Authorizing users to use {kib} in most configurations is as simple as assigning the user
either the `kibana_user` or `kibana_dashboard_only_user` reserved role. If you're running
a single tenant of {kib} against your {es} cluster, this is sufficient and no other
action is required.
==== Multi-tenant {kib}
When running multiple tenants of {kib}, and changing the `kibana.index` in your `kibana.yml`, you
must create custom roles that authorize the user for that specific tenant. You can use
either the *Management / Security / Roles* page in {kib} or the <<role-management-api>>
to assign a specific <<kibana-privileges, Kibana privilege>> at that tenant. After creating the
custom role, you should assign this role to the user(s) that you wish to have access.
==== Legacy roles
Prior to {kib} 6.4, {kib} users required index privileges to the `kibana.index`
in {es}. This approach is deprecated starting in 6.4, and you will need to switch to using
<<kibana-privileges>> before 7.0. When a user logs into {kib} and they're using
a legacy role, the following is logged to your {kib} logs:
[source,js]
----------------------------------
<username> relies on index privileges on the Kibana index. This is deprecated and will be removed in Kibana 7.0
----------------------------------
To disable legacy roles from being authorized in {kib}, set `xpack.security.authorization.legacyFallback` to `false`
in your `kibana.yml`.

View file

@ -0,0 +1,15 @@
[role="xpack"]
[[kibana-privileges]]
=== Kibana privileges
This section lists the Kibana privileges that you can assign to a role.
[horizontal]
[[kibana-privileges-all]]
`all`::
All Kibana privileges. Can read, write, and delete saved searches, dashboards, visualizations,
short URLs, Timelion sheets, graph workspaces, index patterns, and advanced settings.
`read`::
Can read saved searches, dashboards, visualizations, short URLs, Timelion sheets, graph workspaces,
index patterns, and advanced settings.

View file

@ -6,7 +6,7 @@
password-protect your data as well as implement more advanced security measures
such as encrypting communications, role-based access control, IP filtering, and
auditing. For more information, see
{xpack-ref}/xpack-security.html[Securing {es} and {kib}] and
{xpack-ref}/elasticsearch-security.html[Securing {es} and {kib}] and
<<using-kibana-with-security,Configuring Security in {kib}>>.
[float]
@ -20,6 +20,19 @@ authentication and built-in users, see
[float]
=== Roles
You can manage roles on the *Management* / *Security* / *Roles* page. For more
information, see
{xpack-ref}/authorization.html[Configuring Role-based Access Control].
You can manage roles on the *Management* / *Security* / *Roles* page, or use
{kib}'s <<role-management-api>>. For more information on configuring roles for
{kib} see <<xpack-security-authorization, {kib} Authorization>>.
For a more holistic overview of configuring roles for the entire stack,
see {xpack-ref}/authorization.html[Configuring Role-based Access Control].
[NOTE]
============================================================================
Managing roles that grant <<kibana-privileges>> using the {es}
{ref}/security-api.html#security-role-apis[role management APIs] is not supported. Doing so will likely
cause Kibana's authorization to behave unexpectedly.
============================================================================
include::authorization/index.asciidoc[]
include::authorization/kibana-privileges.asciidoc[]

View file

@ -85,7 +85,7 @@ You can manage privileges on the *Management / Security / Roles* page in {kib}.
If you're using the native realm with Basic Authentication, you can assign roles
using the *Management / Security / Users* page in {kib} or the
{ref}/security-api-users.html[User Management API]. For example, the following
{ref}/security-api.html#security-user-apis[user management APIs]. For example, the following
creates a user named `jacknich` and assigns it the `kibana_user` role:
[source,js]
@ -124,4 +124,5 @@ NOTE: This must be a user who has been assigned the `kibana_user` role.
include::authentication/index.asciidoc[]
include::securing-communications/index.asciidoc[]
include::audit-logging.asciidoc[]
include::{kib-repo-dir}/settings/security-settings.asciidoc[]

View file

@ -17,12 +17,12 @@ xpack.apm.enabled:: Set to `false` to disable the APM plugin in {kib}. Defaults to
xpack.apm.ui.enabled:: Set to `false` to hide the APM plugin {kib} from the menu. Defaults to
`true`.
apm_oss.indexPattern:: Index pattern is used for integrations with Machine Learning and Kuery Bar. It must match all apm indices. Defaults to `apm-&#42;`.
apm_oss.indexPattern:: Index pattern is used for integrations with Machine Learning and Kuery Bar. It must match all apm indices. Defaults to `apm-*`.
apm_oss.errorIndices:: Matcher for indices containing error documents. Defaults to `apm-&#42;-error-&#42;`.
apm_oss.errorIndices:: Matcher for indices containing error documents. Defaults to `apm-\*-error-*`.
apm_oss.onboardingIndices:: Matcher for indices containing onboarding documents. Defaults to `apm-&#42;-onboarding-&#42;`.
apm_oss.onboardingIndices:: Matcher for indices containing onboarding documents. Defaults to `apm-\*-onboarding-*`.
apm_oss.spanIndices:: Matcher for indices containing span documents. Defaults to `apm-&#42;-span-&#42;`.
apm_oss.spanIndices:: Matcher for indices containing span documents. Defaults to `apm-\*-span-*`.
apm_oss.transactionIndices:: Matcher for indices containing transaction documents. Defaults to `apm-&#42;-transaction-&#42;`.
apm_oss.transactionIndices:: Matcher for indices containing transaction documents. Defaults to `apm-\*-transaction-*`.

View file

@ -14,11 +14,19 @@ It is enabled by default.
`xpack.security.enabled`::
Set to `true` (default) to enable {security}. +
+
If set to `false` in `kibana.yml`, the user and role management options are
hidden in this {kib} instance. If `xpack.security.enabled` is set to `true` in
`elasticsearch.yml`, however, you can still use the {security} APIs. To disable
{security} entirely, see the
{ref}/security-settings.html[{es} Security Settings].
Do not set this to `false`. To disable {security} entirely, see
{ref}/security-settings.html[{es} Security Settings]. +
+
If set to `false` in `kibana.yml`, the login form, user and role management screens, and
authorization using <<kibana-privileges>> are disabled. +
+
`xpack.security.audit.enabled`::
Set to `true` to enable audit logging for security events. This is set to `false` by default.
For more details see <<xpack-security-audit-logging>>.
`xpack.security.authorization.legacyFallback`::
Set to `true` (default) to enable the legacy fallback. See <<xpack-security-authorization>>
for more details.
[float]
[[security-ui-settings]]

View file

@ -4,7 +4,7 @@
[float]
=== Hosted Kibana
If you are running our https://cloud.elastic.co[hosted Elasticsearch Service]
If you are running our https://cloud.elastic.co[hosted Elasticsearch Service]
on Elastic Cloud, you can access Kibana with a single click.
[float]
@ -48,7 +48,7 @@ downloaded from the Elastic Docker Registry.
<<docker,Running Kibana on Docker>>
IMPORTANT: If your Elasticsearch installation is protected by
{xpack-ref}/xpack-security.html[{security}] see
{xpack-ref}/elasticsearch-security.html[{security}] see
{kibana-ref}/using-kibana-with-security.html[Configuring Security in Kibana] for
additional setup instructions.

View file

@ -21,7 +21,7 @@ and an Elasticsearch client node on the same machine. For more information, see
[[configuring-kibana-shield]]
=== Using Kibana with {security}
You can use {stack-ov}/xpack-security.html[{security}] to control what
You can use {stack-ov}/elasticsearch-security.html[{security}] to control what
Elasticsearch data users can access through Kibana.
When {security} is enabled, Kibana users have to log in. They need to
@ -40,7 +40,7 @@ For information about setting up Kibana users, see
[[enabling-ssl]]
=== Enabling SSL
See <<configuring-tls>>.
See <<configuring-tls>>.
[float]
[[load-balancing]]

View file

@ -157,7 +157,7 @@ The minimum value is 100.
`server.ssl.redirectHttpFromPort:`:: Kibana will bind to this port and redirect all http requests to https over the port configured as `server.port`.
`server.ssl.supportedProtocols:`:: *Default: TLSv1, TLSv1.1, TLSv1.2* Supported protocols with versions. Valid protocols: `TLSv1`, `TLSv1.1`, `TLSv1.2`
`server.ssl.supportedProtocols:`:: *Default: TLSv1, TLSv1.1, TLSv1.2* An array of supported protocols with versions. Valid protocols: `TLSv1`, `TLSv1.1`, `TLSv1.2`
`status.allowAnonymous:`:: *Default: false* If authentication is enabled, setting this to `true` allows
unauthenticated users to access the Kibana server status API and status page.
unauthenticated users to access the Kibana server status API and status page.

View file

@ -14,6 +14,9 @@ numeric field. Select a field from the drop-down.
the number of unique values in a field. Select a field from the drop-down.
*Standard Deviation*:: The {ref}/search-aggregations-metrics-extendedstats-aggregation.html[_extended stats_]
aggregation returns the standard deviation of data in a numeric field. Select a field from the drop-down.
*Top Hit*:: The {ref}/search-aggregations-metrics-top-hits-aggregation.html[_top hits_]
aggregation returns one or more of the top values from a specific field in your documents. Select a field from the drop-down,
how you want to sort the documents and choose the top fields, and how many values should be returned.
*Percentiles*:: The {ref}/search-aggregations-metrics-percentile-aggregation.html[_percentile_] aggregation divides the
values in a numeric field into percentile bands that you specify. Select a field from the drop-down, then specify one
or more ranges in the *Percentiles* fields. Click the *X* to remove a percentile field. Click *+ Add* to add a

View file

@ -23,23 +23,6 @@
},
"license": "Apache-2.0",
"author": "Rashid Khan <rashid.khan@elastic.co>",
"contributors": [
"Chris Cowan <chris.cowan@elastic.co>",
"Court Ewing <court@elastic.co>",
"Jim Unger <jim.unger@elastic.co>",
"Joe Fleming <joe.fleming@elastic.co>",
"Jon Budzenski <jonathan.budzenski@elastic.co>",
"Juan Thomassie <juan.thomassie@elastic.co>",
"Khalah Jones-Golden <khalah.jones@elastic.co>",
"Lee Drengenberg <lee.drengenberg@elastic.co>",
"Lukas Olson <lukas.olson@elastic.co>",
"Matt Bargar <matt.bargar@elastic.co>",
"Nicolás Bevacqua <nico@elastic.co>",
"Shelby Sturgis <shelby@elastic.co>",
"Spencer Alger <spencer.alger@elastic.co>",
"Tim Sullivan <tim@elastic.co>",
"Yuri Astrakhan <yuri@elastic.co>"
],
"scripts": {
"preinstall": "node ./preinstall_check",
"kbn": "node scripts/kbn",
@ -77,7 +60,7 @@
"url": "https://github.com/elastic/kibana.git"
},
"dependencies": {
"@elastic/eui": "3.2.1",
"@elastic/eui": "3.6.1",
"@elastic/filesaver": "1.1.2",
"@elastic/numeral": "2.3.2",
"@elastic/ui-ace": "0.2.3",
@ -87,7 +70,6 @@
"@kbn/pm": "link:packages/kbn-pm",
"@kbn/test-subj-selector": "link:packages/kbn-test-subj-selector",
"@kbn/ui-framework": "link:packages/kbn-ui-framework",
"JSONStream": "1.1.1",
"abortcontroller-polyfill": "^1.1.9",
"angular": "1.6.9",
"angular-aria": "1.6.6",
@ -96,19 +78,16 @@
"angular-route": "1.4.7",
"angular-sanitize": "1.5.7",
"angular-sortable-view": "0.0.15",
"autoprefixer": "6.5.4",
"autoprefixer": "^9.1.0",
"babel-core": "6.21.0",
"babel-loader": "7.1.2",
"babel-polyfill": "6.20.0",
"babel-register": "6.18.0",
"bluebird": "2.9.34",
"body-parser": "1.12.0",
"boom": "5.2.0",
"brace": "0.11.1",
"bunyan": "1.7.1",
"cache-loader": "1.0.3",
"chalk": "^2.4.1",
"check-hash": "1.0.1",
"color": "1.0.3",
"commander": "2.8.1",
"compare-versions": "3.1.0",
@ -125,7 +104,6 @@
"execa": "^0.10.0",
"expiry-js": "0.1.7",
"extract-text-webpack-plugin": "3.0.1",
"fetch-mock": "^5.13.1",
"file-loader": "1.1.4",
"font-awesome": "4.4.0",
"glob": "^7.1.2",
@ -197,7 +175,6 @@
"script-loader": "0.7.2",
"semver": "^5.5.0",
"style-loader": "0.19.0",
"symbol-observable": "^1.2.0",
"tar": "2.2.0",
"tinygradient": "0.3.0",
"tinymath": "0.2.1",
@ -205,7 +182,7 @@
"trunc-html": "1.0.2",
"trunc-text": "1.0.2",
"type-detect": "^4.0.8",
"uglifyjs-webpack-plugin": "0.4.6",
"uglifyjs-webpack-plugin": "^1.2.7",
"ui-select": "0.19.6",
"url-loader": "0.5.9",
"uuid": "3.0.1",
@ -238,6 +215,7 @@
"@types/bluebird": "^3.1.1",
"@types/chance": "^1.0.0",
"@types/classnames": "^2.2.3",
"@types/dedent": "^0.7.0",
"@types/enzyme": "^3.1.12",
"@types/eslint": "^4.16.2",
"@types/execa": "^0.9.0",
@ -257,6 +235,7 @@
"@types/prop-types": "^15.5.3",
"@types/react": "^16.3.14",
"@types/react-dom": "^16.0.5",
"@types/react-redux": "^5.0.6",
"@types/redux": "^3.6.31",
"@types/redux-actions": "^2.2.1",
"@types/sinon": "^5.0.0",
@ -271,7 +250,7 @@
"chance": "1.0.10",
"cheerio": "0.22.0",
"chokidar": "1.6.0",
"chromedriver": "2.36",
"chromedriver": "2.41.0",
"classnames": "2.2.5",
"dedent": "^0.7.0",
"enzyme": "3.2.0",
@ -290,6 +269,7 @@
"event-stream": "3.3.2",
"expect.js": "0.3.1",
"faker": "1.1.0",
"fetch-mock": "^5.13.1",
"geckodriver": "1.11.0",
"getopts": "2.0.0",
"globby": "^8.0.1",
@ -324,21 +304,19 @@
"listr": "^0.14.1",
"load-grunt-config": "0.19.2",
"makelogs": "^4.1.0",
"marked-text-renderer": "0.1.0",
"mocha": "3.3.0",
"mock-fs": "^4.4.2",
"murmurhash3js": "3.0.1",
"mutation-observer": "^1.0.3",
"ncp": "2.0.0",
"nock": "8.0.0",
"node-sass": "^4.9.0",
"normalize-path": "^3.0.0",
"pixelmatch": "4.0.2",
"postcss": "^7.0.2",
"prettier": "^1.14.0",
"proxyquire": "1.7.11",
"simple-git": "1.37.0",
"sinon": "^5.0.7",
"source-map": "0.5.6",
"source-map-support": "0.2.10",
"strip-ansi": "^3.0.1",
"supertest": "3.0.0",
"supertest-as-promised": "4.0.2",
@ -356,7 +334,7 @@
"zlib": "^1.0.5"
},
"engines": {
"node": "8.11.3",
"node": "8.11.4",
"yarn": "^1.6.0"
}
}

View file

@ -17,21 +17,4 @@
* under the License.
*/
import { Readable } from 'stream';
type LogLevel = 'silent' | 'error' | 'warning' | 'info' | 'debug' | 'verbose';
export class ToolingLog extends Readable {
public verbose(...args: any[]): void;
public debug(...args: any[]): void;
public info(...args: any[]): void;
public success(...args: any[]): void;
public warning(...args: any[]): void;
public error(errOrMsg: string | Error): void;
public write(...args: any[]): void;
public indent(spaces: number): void;
public getLevel(): LogLevel;
public setLevel(level: LogLevel): void;
}
export function createToolingLog(level?: LogLevel): ToolingLog;
export * from './src/tooling_log';

View file

@ -18,4 +18,4 @@
*/
export { withProcRunner } from './proc_runner';
export { createToolingLog, pickLevelFromFlags } from './tooling_log';
export { ToolingLog, pickLevelFromFlags } from './tooling_log';

View file

@ -17,7 +17,7 @@
* under the License.
*/
import { createToolingLog } from '../../tooling_log';
import { ToolingLog } from '../../tooling_log';
import { withProcRunner } from '../with_proc_runner';
describe('proc runner', () => {
@ -34,7 +34,7 @@ describe('proc runner', () => {
}
it('passes procs to a function', async () => {
await withProcRunner(createToolingLog(), async procs => {
await withProcRunner(new ToolingLog(), async procs => {
await runProc({ procs });
await procs.stop('proc');
});

View file

@ -82,7 +82,7 @@ export function createProc(name, { cmd, args, cwd, env, stdin, log }) {
name = name;
lines$ = Rx.merge(observeLines(childProcess.stdout), observeLines(childProcess.stderr)).pipe(
tap(line => log.write(` ${gray('proc')} [${gray(name)}] ${line}`)),
tap(line => log.write(` ${gray('proc')} [${gray(name)}] ${line}`)),
share()
);

View file

@ -0,0 +1,91 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`parses valid log levels correctly: debug 1`] = `
Object {
"flags": Object {
"debug": true,
"error": true,
"info": true,
"silent": true,
"verbose": false,
"warning": true,
},
"name": "debug",
}
`;
exports[`parses valid log levels correctly: error 1`] = `
Object {
"flags": Object {
"debug": false,
"error": true,
"info": false,
"silent": true,
"verbose": false,
"warning": false,
},
"name": "error",
}
`;
exports[`parses valid log levels correctly: info 1`] = `
Object {
"flags": Object {
"debug": false,
"error": true,
"info": true,
"silent": true,
"verbose": false,
"warning": true,
},
"name": "info",
}
`;
exports[`parses valid log levels correctly: silent 1`] = `
Object {
"flags": Object {
"debug": false,
"error": false,
"info": false,
"silent": true,
"verbose": false,
"warning": false,
},
"name": "silent",
}
`;
exports[`parses valid log levels correctly: verbose 1`] = `
Object {
"flags": Object {
"debug": true,
"error": true,
"info": true,
"silent": true,
"verbose": true,
"warning": true,
},
"name": "verbose",
}
`;
exports[`parses valid log levels correctly: warning 1`] = `
Object {
"flags": Object {
"debug": false,
"error": true,
"info": false,
"silent": true,
"verbose": false,
"warning": true,
},
"name": "warning",
}
`;
exports[`throws error for invalid levels: bar 1`] = `"Invalid log level \\"bar\\" (expected one of silent,error,warning,info,debug,verbose)"`;
exports[`throws error for invalid levels: foo 1`] = `"Invalid log level \\"foo\\" (expected one of silent,error,warning,info,debug,verbose)"`;
exports[`throws error for invalid levels: warn 1`] = `"Invalid log level \\"warn\\" (expected one of silent,error,warning,info,debug,verbose)"`;

View file

@ -0,0 +1,226 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`#debug() sends a msg of type "debug" to each writer with indent and arguments 1`] = `
Array [
Array [
Object {
"args": Array [
"foo",
"bar",
"baz",
],
"indent": 0,
"type": "debug",
},
],
]
`;
exports[`#error() sends a msg of type "error" to each writer with indent and arguments 1`] = `
Array [
Array [
Object {
"args": Array [
[Error: error message],
],
"indent": 0,
"type": "error",
},
],
Array [
Object {
"args": Array [
"string message",
],
"indent": 0,
"type": "error",
},
],
]
`;
exports[`#getWritten$() does not emit msg if all writers return false 1`] = `Array []`;
exports[`#getWritten$() does not emit msg when no writers 1`] = `Array []`;
exports[`#getWritten$() emits msg if all writers return true 1`] = `
Array [
Object {
"args": Array [
"foo",
],
"indent": 0,
"type": "debug",
},
Object {
"args": Array [
"bar",
],
"indent": 0,
"type": "info",
},
Object {
"args": Array [
"baz",
],
"indent": 0,
"type": "verbose",
},
]
`;
exports[`#getWritten$() emits msg if some writers return true 1`] = `
Array [
Object {
"args": Array [
"foo",
],
"indent": 0,
"type": "debug",
},
Object {
"args": Array [
"bar",
],
"indent": 0,
"type": "info",
},
Object {
"args": Array [
"baz",
],
"indent": 0,
"type": "verbose",
},
]
`;
exports[`#indent() changes the indent on each written msg 1`] = `
Array [
Array [
Object {
"args": Array [
"foo",
],
"indent": 1,
"type": "debug",
},
],
Array [
Object {
"args": Array [
"bar",
],
"indent": 3,
"type": "debug",
},
],
Array [
Object {
"args": Array [
"baz",
],
"indent": 6,
"type": "debug",
},
],
Array [
Object {
"args": Array [
"box",
],
"indent": 4,
"type": "debug",
},
],
Array [
Object {
"args": Array [
"foo",
],
"indent": 0,
"type": "debug",
},
],
]
`;
exports[`#info() sends a msg of type "info" to each writer with indent and arguments 1`] = `
Array [
Array [
Object {
"args": Array [
"foo",
"bar",
"baz",
],
"indent": 0,
"type": "info",
},
],
]
`;
exports[`#success() sends a msg of type "success" to each writer with indent and arguments 1`] = `
Array [
Array [
Object {
"args": Array [
"foo",
"bar",
"baz",
],
"indent": 0,
"type": "success",
},
],
]
`;
exports[`#verbose() sends a msg of type "verbose" to each writer with indent and arguments 1`] = `
Array [
Array [
Object {
"args": Array [
"foo",
"bar",
"baz",
],
"indent": 0,
"type": "verbose",
},
],
]
`;
exports[`#warning() sends a msg of type "warning" to each writer with indent and arguments 1`] = `
Array [
Array [
Object {
"args": Array [
"foo",
"bar",
"baz",
],
"indent": 0,
"type": "warning",
},
],
]
`;
exports[`#write() sends a msg of type "write" to each writer with indent and arguments 1`] = `
Array [
Array [
Object {
"args": Array [
"foo",
"bar",
"baz",
],
"indent": 0,
"type": "write",
},
],
]
`;

View file

@ -0,0 +1,178 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`formats %s patterns and indents multi-line messages correctly 1`] = `
" │ succ foo bar
│ { foo: { bar: { '1': [Array] } },
│ bar: { bar: { '1': [Array] } } }
│ Infinity
"
`;
exports[`level:debug/type:debug snapshots: is written 1`] = `true`;
exports[`level:debug/type:debug snapshots: output 1`] = `
" debg foo
"
`;
exports[`level:debug/type:error snapshots: is written 1`] = `true`;
exports[`level:debug/type:error snapshots: output 1`] = `
"ERROR foo
"
`;
exports[`level:debug/type:info snapshots: is written 1`] = `true`;
exports[`level:debug/type:info snapshots: output 1`] = `
" info foo
"
`;
exports[`level:debug/type:success snapshots: is written 1`] = `true`;
exports[`level:debug/type:success snapshots: output 1`] = `
" succ foo
"
`;
exports[`level:debug/type:verbose snapshots: is written 1`] = `false`;
exports[`level:debug/type:warning snapshots: is written 1`] = `true`;
exports[`level:debug/type:warning snapshots: output 1`] = `
" warn foo
"
`;
exports[`level:error/type:debug snapshots: is written 1`] = `false`;
exports[`level:error/type:error snapshots: is written 1`] = `true`;
exports[`level:error/type:error snapshots: output 1`] = `
"ERROR foo
"
`;
exports[`level:error/type:info snapshots: is written 1`] = `false`;
exports[`level:error/type:success snapshots: is written 1`] = `false`;
exports[`level:error/type:verbose snapshots: is written 1`] = `false`;
exports[`level:error/type:warning snapshots: is written 1`] = `false`;
exports[`level:info/type:debug snapshots: is written 1`] = `false`;
exports[`level:info/type:error snapshots: is written 1`] = `true`;
exports[`level:info/type:error snapshots: output 1`] = `
"ERROR foo
"
`;
exports[`level:info/type:info snapshots: is written 1`] = `true`;
exports[`level:info/type:info snapshots: output 1`] = `
" info foo
"
`;
exports[`level:info/type:success snapshots: is written 1`] = `true`;
exports[`level:info/type:success snapshots: output 1`] = `
" succ foo
"
`;
exports[`level:info/type:verbose snapshots: is written 1`] = `false`;
exports[`level:info/type:warning snapshots: is written 1`] = `true`;
exports[`level:info/type:warning snapshots: output 1`] = `
" warn foo
"
`;
exports[`level:silent/type:debug snapshots: is written 1`] = `false`;
exports[`level:silent/type:error snapshots: is written 1`] = `false`;
exports[`level:silent/type:info snapshots: is written 1`] = `false`;
exports[`level:silent/type:success snapshots: is written 1`] = `false`;
exports[`level:silent/type:verbose snapshots: is written 1`] = `false`;
exports[`level:silent/type:warning snapshots: is written 1`] = `false`;
exports[`level:verbose/type:debug snapshots: is written 1`] = `true`;
exports[`level:verbose/type:debug snapshots: output 1`] = `
" debg foo
"
`;
exports[`level:verbose/type:error snapshots: is written 1`] = `true`;
exports[`level:verbose/type:error snapshots: output 1`] = `
"ERROR foo
"
`;
exports[`level:verbose/type:info snapshots: is written 1`] = `true`;
exports[`level:verbose/type:info snapshots: output 1`] = `
" info foo
"
`;
exports[`level:verbose/type:success snapshots: is written 1`] = `true`;
exports[`level:verbose/type:success snapshots: output 1`] = `
" succ foo
"
`;
exports[`level:verbose/type:verbose snapshots: is written 1`] = `true`;
exports[`level:verbose/type:verbose snapshots: output 1`] = `
" sill foo
"
`;
exports[`level:verbose/type:warning snapshots: is written 1`] = `true`;
exports[`level:verbose/type:warning snapshots: output 1`] = `
" warn foo
"
`;
exports[`level:warning/type:debug snapshots: is written 1`] = `false`;
exports[`level:warning/type:error snapshots: is written 1`] = `true`;
exports[`level:warning/type:error snapshots: output 1`] = `
"ERROR foo
"
`;
exports[`level:warning/type:info snapshots: is written 1`] = `false`;
exports[`level:warning/type:success snapshots: is written 1`] = `false`;
exports[`level:warning/type:verbose snapshots: is written 1`] = `false`;
exports[`level:warning/type:warning snapshots: is written 1`] = `true`;
exports[`level:warning/type:warning snapshots: output 1`] = `
" warn foo
"
`;
exports[`throws error if created with invalid level 1`] = `"Invalid log level \\"foo\\" (expected one of silent,error,warning,info,debug,verbose)"`;
exports[`throws error if writeTo config is not defined or doesn't have a write method 1`] = `"ToolingLogTextWriter requires the \`writeTo\` option be set to a stream (like process.stdout)"`;
exports[`throws error if writeTo config is not defined or doesn't have a write method 2`] = `"ToolingLogTextWriter requires the \`writeTo\` option be set to a stream (like process.stdout)"`;

View file

@ -1,99 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import expect from 'expect.js';
import Chance from 'chance';
import { createConcatStream, createPromiseFromStreams } from '../../streams';
import { createToolingLog } from '../tooling_log';
const chance = new Chance();
// Run `block` against a fresh tooling log configured at `level`, then end
// the stream and resolve with everything the log wrote as a single string.
const capture = (level, block) => {
const log = createToolingLog(level);
block(log);
log.end();
// log is a readable stream; concat all of its chunks into one string
return createPromiseFromStreams([log, createConcatStream('')]);
};
// Shared spec: calling `log[method](...)` at `logLevel` must produce no output.
const nothingTest = (logLevel, method) => {
describe(`#${method}(...any)`, () => {
it('logs nothing', async () => {
const output = await capture(logLevel, log => log[method]('foo'));
expect(output).to.be('');
});
});
};
// Shared spec: calling `log[method](...)` at `logLevel` must write the
// formatted message to the output stream.
const somethingTest = (logLevel, method) => {
describe(`#${method}(...any)`, () => {
it('logs to output stream', async () => {
const output = await capture(logLevel, log => log[method]('foo'));
expect(output).to.contain('foo');
});
});
};
describe('utils: createToolingLog(logLevel, output)', () => {
it('is a readable stream', async () => {
const log = createToolingLog('debug');
log.info('Foo');
log.info('Bar');
log.info('Baz');
log.end();
const output = await createPromiseFromStreams([log, createConcatStream('')]);
expect(output).to.contain('Foo');
expect(output).to.contain('Bar');
expect(output).to.contain('Baz');
});
describe('log level', () => {
describe('logLevel=silent', () => {
nothingTest('silent', 'debug');
nothingTest('silent', 'info');
nothingTest('silent', 'error');
});
describe('logLevel=error', () => {
nothingTest('error', 'debug');
nothingTest('error', 'info');
somethingTest('error', 'error');
});
describe('logLevel=info', () => {
nothingTest('info', 'debug');
somethingTest('info', 'info');
somethingTest('info', 'error');
});
describe('logLevel=debug', () => {
somethingTest('debug', 'debug');
somethingTest('debug', 'info');
somethingTest('debug', 'error');
});
describe('invalid logLevel', () => {
it('throw error', () => {
// avoid the impossibility that a valid level is generated
// by specifying a long length
const level = chance.word({ length: 10 });
expect(() => createToolingLog(level)).to.throwError(level);
});
});
});
});

View file

@ -1,114 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import expect from 'expect.js';
import Chance from 'chance';
import { parseLogLevel } from '../log_levels';
const chance = new Chance();
describe('parseLogLevel(logLevel).flags', () => {
describe('logLevel=silent', () => {
it('produces correct map', () => {
expect(parseLogLevel('silent').flags).to.eql({
silent: true,
error: false,
warning: false,
info: false,
debug: false,
verbose: false,
});
});
});
describe('logLevel=error', () => {
it('produces correct map', () => {
expect(parseLogLevel('error').flags).to.eql({
silent: true,
error: true,
warning: false,
info: false,
debug: false,
verbose: false,
});
});
});
describe('logLevel=warning', () => {
it('produces correct map', () => {
expect(parseLogLevel('warning').flags).to.eql({
silent: true,
error: true,
warning: true,
info: false,
debug: false,
verbose: false,
});
});
});
describe('logLevel=info', () => {
it('produces correct map', () => {
expect(parseLogLevel('info').flags).to.eql({
silent: true,
error: true,
warning: true,
info: true,
debug: false,
verbose: false,
});
});
});
describe('logLevel=debug', () => {
it('produces correct map', () => {
expect(parseLogLevel('debug').flags).to.eql({
silent: true,
error: true,
warning: true,
info: true,
debug: true,
verbose: false,
});
});
});
describe('logLevel=verbose', () => {
it('produces correct map', () => {
expect(parseLogLevel('verbose').flags).to.eql({
silent: true,
error: true,
warning: true,
info: true,
debug: true,
verbose: true,
});
});
});
describe('invalid logLevel', () => {
it('throws error', () => {
// avoid the impossibility that a valid level is generated
// by specifying a long length
const level = chance.word({ length: 10 });
expect(() => parseLogLevel(level)).to.throwError(level);
});
});
});

View file

@ -17,10 +17,6 @@
* under the License.
*/
import { Observable } from '../observable';
export function $error<E extends Error>(error: E) {
return new Observable(observer => {
observer.error(error);
});
}
export { ToolingLog } from './tooling_log';
export { ToolingLogTextWriter, WriterConfig } from './tooling_log_text_writer';
export { pickLevelFromFlags, LogLevel } from './log_levels';

View file

@ -17,5 +17,6 @@
* under the License.
*/
export { createToolingLog } from './tooling_log';
export { ToolingLog } from './tooling_log';
export { ToolingLogTextWriter } from './tooling_log_text_writer';
export { pickLevelFromFlags } from './log_levels';

View file

@ -17,16 +17,13 @@
* under the License.
*/
import { Observable, ObservableInput } from '../observable';
export type LogLevel = 'silent' | 'error' | 'warning' | 'info' | 'debug' | 'verbose';
/**
* Alias for `Observable.from`
*
* If you need to handle:
*
* - promises, use `$fromPromise`
* - functions, use `$fromCallback`
*/
export function $from<T>(x: ObservableInput<T>): Observable<T> {
return Observable.from(x);
export interface ParsedLogLevel {
name: LogLevel;
flags: { [key in LogLevel]: boolean };
}
export function pickLevelFromFlags(flags: { [key: string]: any }): LogLevel;
export function parseLogLevel(level: LogLevel): ParsedLogLevel;

View file

@ -17,22 +17,19 @@
* under the License.
*/
import { map, toArray, toPromise } from '../';
import { $from } from '../../factories';
import { k$ } from '../../k';
import { Observable } from '../../observable';
import { parseLogLevel } from './log_levels';
const number$ = $from([1, 2, 3]);
const collect = <T>(source: Observable<T>) => k$(source)(toArray(), toPromise());
test('returns the modified value', async () => {
const numbers = await collect(k$(number$)(map(n => n * 1000)));
expect(numbers).toEqual([1000, 2000, 3000]);
it('parses valid log levels correctly', () => {
expect(parseLogLevel('silent')).toMatchSnapshot('silent');
expect(parseLogLevel('error')).toMatchSnapshot('error');
expect(parseLogLevel('warning')).toMatchSnapshot('warning');
expect(parseLogLevel('info')).toMatchSnapshot('info');
expect(parseLogLevel('debug')).toMatchSnapshot('debug');
expect(parseLogLevel('verbose')).toMatchSnapshot('verbose');
});
test('sends the index as arg 2', async () => {
const numbers = await collect(k$(number$)(map((n, i) => i)));
expect(numbers).toEqual([0, 1, 2]);
it('throws error for invalid levels', () => {
expect(() => parseLogLevel('warn')).toThrowErrorMatchingSnapshot('warn');
expect(() => parseLogLevel('foo')).toThrowErrorMatchingSnapshot('foo');
expect(() => parseLogLevel('bar')).toThrowErrorMatchingSnapshot('bar');
});

View file

@ -0,0 +1,45 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
// tslint:disable max-classes-per-file
import * as Rx from 'rxjs';
import { ToolingLogWriter, WriterConfig } from './tooling_log_text_writer';
/** Shape of every message a ToolingLog hands to its writers. */
export interface LogMessage {
type: 'verbose' | 'debug' | 'info' | 'success' | 'warning' | 'error' | 'write';
/** indentation level (number of spaces) in effect when the message was logged */
indent: number;
/** printf-style arguments passed to the logging method */
args: any[];
}
/**
 * Logger that fans messages out to a configurable list of writers.
 * Constructing with a WriterConfig creates a single text writer; without
 * one, the log starts with zero writers.
 */
export class ToolingLog {
constructor(config?: WriterConfig);
public verbose(...args: any[]): void;
public debug(...args: any[]): void;
public info(...args: any[]): void;
public success(...args: any[]): void;
public warning(...args: any[]): void;
public error(errOrMsg: string | Error): void;
public write(...args: any[]): void;
// NOTE(review): the JS implementation returns the new indent level —
// consider declaring the return type as `number` instead of `void`.
public indent(spaces: number): void;
public getWriters(): ToolingLogWriter[];
/** replaces the current writer list wholesale */
public setWriters(reporters: ToolingLogWriter[]): void;
/** stream of every message accepted by at least one writer */
public getWritten$(): Rx.Observable<LogMessage>;
}

View file

@ -17,87 +17,85 @@
* under the License.
*/
import { format } from 'util';
import { PassThrough } from 'stream';
import * as Rx from 'rxjs';
import { EventEmitter } from 'events';
import { magentaBright, yellow, red, blue, green, dim } from 'chalk';
import { ToolingLogTextWriter } from './tooling_log_text_writer';
import { parseLogLevel } from './log_levels';
export class ToolingLog extends EventEmitter {
/**
* Create a ToolingLog object
* @param {WriterConfig} writerConfig
*/
constructor(writerConfig) {
super();
export function createToolingLog(initialLogLevelName = 'silent') {
// current log level (see logLevel.name and logLevel.flags) changed
// with ToolingLog#setLevel(newLogLevelName);
let logLevel = parseLogLevel(initialLogLevelName);
// current indentation level, changed with ToolingLog#indent(delta)
let indentString = '';
class ToolingLog extends PassThrough {
constructor() {
super({ objectMode: true });
}
verbose(...args) {
if (!logLevel.flags.verbose) return;
this.write(' %s ', magentaBright('sill'), format(...args));
}
debug(...args) {
if (!logLevel.flags.debug) return;
this.write(' %s ', dim('debg'), format(...args));
}
info(...args) {
if (!logLevel.flags.info) return;
this.write(' %s ', blue('info'), format(...args));
}
success(...args) {
if (!logLevel.flags.info) return;
this.write(' %s ', green('succ'), format(...args));
}
warning(...args) {
if (!logLevel.flags.warning) return;
this.write(' %s ', yellow('warn'), format(...args));
}
error(err) {
if (!logLevel.flags.error) return;
if (typeof err !== 'string' && !(err instanceof Error)) {
err = new Error(`"${err}" thrown`);
}
this.write('%s ', red('ERROR'), err.stack || err.message || err);
}
indent(delta = 0) {
const width = Math.max(0, indentString.length + delta);
indentString = ' '.repeat(width);
return indentString.length;
}
getLevel() {
return logLevel.name;
}
setLevel(newLogLevelName) {
logLevel = parseLogLevel(newLogLevelName);
}
write(...args) {
format(...args)
.split('\n')
.forEach((line, i) => {
const subLineIndent = i === 0 ? '' : ' ';
const indent = !indentString
? ''
: indentString.slice(0, -1) + (i === 0 && line[0] === '-' ? '└' : '│');
super.write(`${indent}${subLineIndent}${line}\n`);
});
}
this._indent = 0;
this._writers = writerConfig ? [new ToolingLogTextWriter(writerConfig)] : [];
this._written$ = new Rx.Subject();
}
return new ToolingLog();
indent(delta = 0) {
this._indent = Math.max(this._indent + delta, 0);
return this._indent;
}
verbose(...args) {
this._write('verbose', args);
}
debug(...args) {
this._write('debug', args);
}
info(...args) {
this._write('info', args);
}
success(...args) {
this._write('success', args);
}
warning(...args) {
this._write('warning', args);
}
error(error) {
this._write('error', [error]);
}
write(...args) {
this._write('write', args);
}
getWriters() {
return this._writers.slice(0);
}
setWriters(writers) {
this._writers = [...writers];
}
getWritten$() {
return this._written$.asObservable();
}
_write(type, args) {
const msg = {
type,
indent: this._indent,
args,
};
let written = false;
for (const writer of this._writers) {
if (writer.write(msg)) {
written = true;
}
}
if (written) {
this._written$.next(msg);
}
}
}

View file

@ -0,0 +1,143 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import * as Rx from 'rxjs';
import { toArray, takeUntil } from 'rxjs/operators';
import { ToolingLog } from './tooling_log';
import { ToolingLogTextWriter } from './tooling_log_text_writer';
it('creates zero writers without a config', () => {
const log = new ToolingLog();
expect(log.getWriters()).toHaveLength(0);
});
it('creates a single writer with a single object', () => {
const log = new ToolingLog({ level: 'warning', writeTo: process.stdout });
expect(log.getWriters()).toHaveLength(1);
const [writer] = log.getWriters();
expect(writer.level).toHaveProperty('name', 'warning');
expect(writer.writeTo).toBe(process.stdout);
});
describe('#get/setWriters()', () => {
it('returns/replaces the current writers', () => {
const log = new ToolingLog();
expect(log.getWriters()).toHaveLength(0);
log.setWriters([
new ToolingLogTextWriter({
level: 'verbose',
writeTo: process.stdout,
}),
new ToolingLogTextWriter({
level: 'verbose',
writeTo: process.stdout,
}),
]);
expect(log.getWriters()).toHaveLength(2);
log.setWriters([]);
expect(log.getWriters()).toHaveLength(0);
});
});
describe('#indent()', () => {
it('changes the indent on each written msg', () => {
const log = new ToolingLog();
const write = jest.fn();
log.setWriters([{ write }]);
log.indent(1);
log.debug('foo');
log.indent(2);
log.debug('bar');
log.indent(3);
log.debug('baz');
log.indent(-2);
log.debug('box');
log.indent(-Infinity);
log.debug('foo');
expect(write.mock.calls).toMatchSnapshot();
});
});
['verbose', 'debug', 'info', 'success', 'warning', 'error', 'write'].forEach(method => {
describe(`#${method}()`, () => {
it(`sends a msg of type "${method}" to each writer with indent and arguments`, () => {
const log = new ToolingLog();
const writeA = jest.fn();
const writeB = jest.fn();
log.setWriters([{ write: writeA }, { write: writeB }]);
if (method === 'error') {
const error = new Error('error message');
error.stack = '... stack trace ...';
log.error(error);
log.error('string message');
} else {
log[method]('foo', 'bar', 'baz');
}
expect(writeA.mock.calls).toMatchSnapshot();
expect(writeA.mock.calls).toEqual(writeB.mock.calls);
});
});
});
describe('#getWritten$()', () => {
// Drive a log configured with `writers`, collect everything emitted on
// getWritten$() until done$ fires, then snapshot the collected messages.
async function testWrittenMsgs(writers) {
const log = new ToolingLog();
log.setWriters(writers);
const done$ = new Rx.Subject();
const promise = log
.getWritten$()
.pipe(
takeUntil(done$),
toArray()
)
.toPromise();
// one message per type; the writers decide whether each counts as "written"
log.debug('foo');
log.info('bar');
log.verbose('baz');
done$.next();
expect(await promise).toMatchSnapshot();
}
it('does not emit msg when no writers', async () => {
await testWrittenMsgs([]);
});
it('emits msg if all writers return true', async () => {
await testWrittenMsgs([{ write: jest.fn(() => true) }, { write: jest.fn(() => true) }]);
});
it('emits msg if some writers return true', async () => {
await testWrittenMsgs([{ write: jest.fn(() => true) }, { write: jest.fn(() => false) }]);
});
it('does not emit msg if all writers return false', async () => {
await testWrittenMsgs([{ write: jest.fn(() => false) }, { write: jest.fn(() => false) }]);
});
});

View file

@ -17,20 +17,26 @@
* under the License.
*/
import { OperatorFunction } from '../interfaces';
import { Observable } from '../observable';
import { reduce } from './reduce';
import { LogLevel, ParsedLogLevel } from './log_levels';
import { LogMessage } from './tooling_log';
function concat<T>(source: Observable<T>) {
return reduce<T, T[]>((acc, item) => acc.concat([item]), [] as T[])(source);
export interface ToolingLogWriter {
write(msg: LogMessage): boolean;
}
/**
* Modify a stream to produce a single array containing all of the items emitted
* by source.
*/
export function toArray<T>(): OperatorFunction<T, T[]> {
return function toArrayOperation(source) {
return concat(source);
};
export interface WriteTarget {
write(chunk: string): void;
}
export interface WriterConfig {
level: LogLevel;
writeTo: WriteTarget;
}
export class ToolingLogTextWriter implements ToolingLogTextWriter {
public level: ParsedLogLevel;
public writeTo: WriteTarget;
constructor(config: WriterConfig);
public write(msg: LogMessage): boolean;
}

View file

@ -0,0 +1,92 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { format } from 'util';
import { magentaBright, yellow, red, blue, green, dim } from 'chalk';
import { parseLogLevel } from './log_levels';
// width of the colored prefixes below; used to align continuation lines
// of multi-line messages under the first line's text
const PREFIX_INDENT = ' '.repeat(6);
// per-message-type colored prefix; the "write" type deliberately has none
const MSG_PREFIXES = {
  verbose: ` ${magentaBright('sill')} `,
  debug: ` ${dim('debg')} `,
  info: ` ${blue('info')} `,
  success: ` ${green('succ')} `,
  warning: ` ${yellow('warn')} `,
  error: `${red('ERROR')} `,
};
/**
 * Decide whether a message of `type` passes the writer's log-level filter.
 * "write" messages are unconditional; "success" messages are gated by the
 * "info" flag; every other type is gated by its own flag.
 */
function shouldWriteType(level, type) {
  if (type === 'write') {
    return true;
  }

  const flagName = type === 'success' ? 'info' : type;
  return Boolean(level.flags[flagName]);
}
/**
 * Convert an arbitrary thrown value into printable text. Strings pass
 * through unchanged, Errors prefer their stack (falling back to message),
 * and anything else is wrapped in an Error so it gains a stack trace.
 */
function stringifyError(error) {
  const printable = typeof error === 'string' || error instanceof Error;
  const err = printable ? error : new Error(`"${error}" thrown`);

  if (typeof err === 'string') {
    return err;
  }
  return err.stack || err.message || err;
}
/**
 * Writer that renders ToolingLog messages as text and sends them to a
 * stream-like target (anything with a `write(chunk)` method).
 */
export class ToolingLogTextWriter {
  /**
   * @param {Object} config
   * @param {string} config.level - log level name, parsed with parseLogLevel()
   * @param {{ write: Function }} config.writeTo - target stream (e.g. process.stdout)
   * @throws {Error} when `writeTo` is missing or lacks a write() method
   */
  constructor(config) {
    this.level = parseLogLevel(config.level);
    this.writeTo = config.writeTo;
    if (!this.writeTo || typeof this.writeTo.write !== 'function') {
      throw new Error(
        'ToolingLogTextWriter requires the `writeTo` option be set to a stream (like process.stdout)'
      );
    }
  }
  /**
   * Render one log message, honoring the writer's level filter.
   * @param {{ type: string, indent: number, args: any[] }} msg
   * @returns {boolean} true if the message was written, false if filtered out
   */
  write({ type, indent, args }) {
    if (!shouldWriteType(this.level, type)) {
      return false;
    }
    // "error" messages carry the error as their only arg; everything else
    // is printf-style formatted via util.format
    const txt = type === 'error' ? stringifyError(args[0]) : format(...args);
    const prefix = MSG_PREFIXES[type] || '';
    (prefix + txt).split('\n').forEach((line, i) => {
      let lineIndent = '';
      if (indent > 0) {
        // if we are indenting write some spaces followed by a symbol
        lineIndent += ' '.repeat(indent - 1);
        lineIndent += line.startsWith('-') ? '└' : '│';
      }
      if (line && prefix && i > 0) {
        // apply additional indentation to lines after
        // the first if this message gets a prefix
        lineIndent += PREFIX_INDENT;
      }
      this.writeTo.write(`${lineIndent}${line}\n`);
    });
    return true;
  }
}

View file

@ -0,0 +1,98 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { ToolingLogTextWriter } from './tooling_log_text_writer';
it('throws error if created with invalid level', () => {
expect(
() =>
new ToolingLogTextWriter({
level: 'foo',
})
).toThrowErrorMatchingSnapshot();
});
it("throws error if writeTo config is not defined or doesn't have a write method", () => {
expect(() => {
new ToolingLogTextWriter({
level: 'verbose',
writeTo: null,
});
}).toThrowErrorMatchingSnapshot();
expect(() => {
new ToolingLogTextWriter({
level: 'verbose',
writeTo: 'foo',
});
}).toThrowErrorMatchingSnapshot();
});
const levels = ['silent', 'verbose', 'debug', 'info', 'warning', 'error'];
const types = ['verbose', 'debug', 'info', 'warning', 'error', 'success'];
for (const level of levels) {
for (const type of types) {
it(`level:${level}/type:${type} snapshots`, () => {
const write = jest.fn();
const writer = new ToolingLogTextWriter({
level,
writeTo: {
write,
},
});
const written = writer.write({
type: type,
indent: 0,
args: ['foo'],
});
expect(written).toMatchSnapshot('is written');
if (written) {
const output = write.mock.calls.reduce((acc, chunk) => `${acc}${chunk}`, '');
expect(output).toMatchSnapshot('output');
}
});
}
}
it('formats %s patterns and indents multi-line messages correctly', () => {
const write = jest.fn();
const writer = new ToolingLogTextWriter({
level: 'debug',
writeTo: {
write,
},
});
writer.write({
type: 'success',
indent: 10,
args: [
'%s\n%O\n\n%d',
'foo bar',
{ foo: { bar: { 1: [1, 2, 3] } }, bar: { bar: { 1: [1, 2, 3] } } },
Infinity,
],
});
const output = write.mock.calls.reduce((acc, chunk) => `${acc}${chunk}`, '');
expect(output).toMatchSnapshot();
});

View file

@ -1,6 +1,7 @@
{
"extends": "../../tsconfig.json",
"include": [
"index.d.ts"
"index.d.ts",
"src/**/*.d.ts"
],
}

View file

@ -17,7 +17,7 @@
* under the License.
*/
const { createToolingLog } = require('@kbn/dev-utils');
const { ToolingLog } = require('@kbn/dev-utils');
const execa = require('execa');
const { Cluster } = require('../cluster');
const { installSource, installSnapshot, installArchive } = require('../install');
@ -30,9 +30,7 @@ jest.mock('../install', () => ({
jest.mock('execa', () => jest.fn());
const log = createToolingLog('verbose');
log.onData = jest.fn();
log.on('data', log.onData);
const log = new ToolingLog();
function sleep(ms) {
return new Promise(resolve => setTimeout(resolve, ms));

View file

@ -38,6 +38,7 @@ function decompressTarball(archive, dirPath) {
}
function decompressZip(input, output) {
mkdirp.sync(output);
return new Promise((resolve, reject) => {
yauzl.open(input, { lazyEntries: true }, (err, zipfile) => {
if (err) {
@ -62,6 +63,7 @@ function decompressZip(input, output) {
const fileName = path.resolve(output, zipPath);
if (/\/$/.test(entry.fileName)) {
mkdirp.sync(fileName);
zipfile.readEntry();
} else {
// file entry
@ -74,9 +76,7 @@ function decompressZip(input, output) {
zipfile.readEntry();
});
mkdirp(path.dirname(fileName), () => {
readStream.pipe(fs.createWriteStream(fileName));
});
readStream.pipe(fs.createWriteStream(fileName));
});
}
});

View file

@ -17,9 +17,11 @@
* under the License.
*/
const { createToolingLog } = require('@kbn/dev-utils');
const { ToolingLog } = require('@kbn/dev-utils');
const log = createToolingLog('verbose');
log.pipe(process.stdout);
const log = new ToolingLog({
level: 'verbose',
writeTo: process.stdout,
});
exports.log = log;

View file

@ -0,0 +1,330 @@
# I18n Guideline
## All Localizers need to know
### Message types
The message ids chosen for message keys are descriptive of the string, and its role in the interface (button, label, header, etc.). Each message id ends with a descriptive type. Types are defined at the end of message id by combining to the last segment using camel case.
The following types are supported:
- header
- label
- button
- dropDown
- placeholder
- tooltip
- aria
- errorMessage
- toggleSwitch
- link, etc.
There is one more complex case, when we have to divide a single expression into different labels.
For example the message before translation looks like:
```js
<p>
The following deprecated languages are in use: {deprecatedLangsInUse.join(', ')}. Support for these languages will be removed in the next major version of Kibana and Elasticsearch. Convert your scripted fields to <EuiLink href={painlessDocLink}>Painless</EuiLink> to avoid any problems.
</p>
```
This phrase contains a variable, which represents languages list, and a link (`Painless`). For such cases we divide the message into two parts: the main message, which contains placeholders, and additional message, which represents inner message.
The following message id naming structure is used:
1) the main message id has the type on the penultimate position, thereby identifying a divided phrase, and the last segment ends with `Detail`.
```js
{
'kbn.management.editIndexPattern.scripted.deprecationLangLabel.deprecationLangDetail': 'The following deprecated languages are in use: {deprecatedLangsInUse}. Support for these languages will be removed in the next major version of Kibana and Elasticsearch. Convert your scripted fields to {link} to avoid any problems.'
}
```
2) The inner message id has the type on the penultimate position and the name of the variable from the placeholder in the main message (in this case `link`) as the last segment that ends with its own type.
For example:
```js
{
'kbn.management.editIndexPattern.scripted.deprecationLangLabel.painlessLink': 'Painless'
}
```
### Attribute with variables interpolation
Messages can contain placeholders for embedding a value of a variable. For example:
```js
{
'kbn.management.editIndexPattern.scripted.deleteFieldLabel': "Delete scripted field '{fieldName}'?"
'kbn.management.editIndexPattern.scripted.noFieldLabel': "'{indexPatternTitle}' index pattern doesn't have a scripted field called '{fieldName}'"
}
```
Such placeholders usually have meaningful names that reflect their content.
### Pluralization
I18n engine supports proper plural forms. It uses the [ICU Message syntax](http://userguide.icu-project.org/formatparse/messages) to define a message that has a plural label and works for all [CLDR languages](http://cldr.unicode.org/) which have pluralization rules defined. The numeric input is mapped to a plural category, some subset of "zero", "one", "two", "few", "many", and "other" depending on the locale and the type of plural.
For example:
```js
{
'kbn.management.createIndexPattern.step.status.successLabel.strongIndicesLabel': '{indicesLength, plural, one {# index} other {# indices}}'
}
```
When `indicesLength` has the value 1, the result string will be "`1 index`". When `indicesLength` has the value 2 or more, the result string will be "`2 indices`".
## Best practices
### Naming convention
The message ids chosen for message keys should always be descriptive of the string, and its role in the interface (button label, title, etc.). Think of them as long variable names. When you have to change a message id, adding a progressive number to the existing key should always be used as a last resort.
- Message id should start with a namespace (`kbn`, `common.ui`, etc.).
For example:
```js
'kbn.management.createIndexPattern.stepTime.options.patternHeader'
'common.ui.indexPattern.warningLabel'
```
- Use camelCase for naming segments, comprising several words.
- Each message id should end with a type. For example:
```js
'kbn.management.editIndexPattern.createIndexButton'
'kbn.management.editIndexPattern.mappingConflictHeader'
'kbn.management.editIndexPattern.mappingConflictLabel'
'kbn.management.editIndexPattern.fields.filterAria'
'kbn.management.editIndexPattern.fields.filterPlaceholder'
'kbn.management.editIndexPattern.refreshTooltip'
'kbn.management.editIndexPattern.fields.allTypesDropDown'
'kbn.management.createIndexPattern.includeSystemIndicesToggleSwitch'
'kbn.management.editIndexPattern.wrongTypeErrorMessage'
'kbn.management.editIndexPattern.scripted.table.nameDescription'
```
- For complex messages, which are divided into several parts, use the following approach:
- the main message id should have the type on the penultimate position, thereby identifying a divided phrase, and the last segment should end with `Detail`,
- the inner message id should have the type on the penultimate position and the name of the variable from the placeholder in the main message as the last segment that ends with its own type.
For example, before the translation there was a message:
```js
<strong>Success!</strong>
Your index pattern matches <strong>{exactMatchedIndices.length} {exactMatchedIndices.length === 1 ? 'index' : 'indices'}</strong>.
```
After translation we get the following structure:
```js
<FormattedMessage
id="kbn.management.createIndexPattern.step.status.successLabel.successDetail"
defaultMessage="{strongSuccess} Your index pattern matches {strongIndices}."
values={{
strongSuccess: (
<strong>
<FormattedMessage
id="kbn.management.createIndexPattern.step.status.successLabel.strongSuccessLabel"
defaultMessage="Success!"
/>
</strong>),
strongIndices: (
<strong>
<FormattedMessage
id="kbn.management.createIndexPattern.step.status.successLabel.strongIndicesLabel"
defaultMessage="{indicesLength, plural, one {# index} other {# indices}}"
values={{ indicesLength: exactMatchedIndices.length }}
/>
</strong>)
}}
/>
```
### Defining type for message
Each message id should end with a type of the message.
| type | example message id |
| --- | --- |
| header | `kbn.management.createIndexPatternHeader` |
| label | `kbn.management.createIndexPatternLabel ` |
| button | `kbn.management.editIndexPattern.scripted.addFieldButton` |
| drop down | `kbn.management.editIndexPattern.fields.allTypesDropDown` |
| placeholder | `kbn.management.createIndexPattern.stepTime.options.patternPlaceholder` |
| `aria-label` attribute | `kbn.management.editIndexPattern.removeAria` |
| tooltip | `kbn.management.editIndexPattern.removeTooltip` |
| error message | `kbn.management.createIndexPattern.step.invalidCharactersErrorMessage` |
| toggleSwitch | `kbn.management.createIndexPattern.includeSystemIndicesToggleSwitch` |
For example:
- for header:
```js
<h1>
<FormattedMessage
id="kbn.management.createIndexPatternHeader"
defaultMessage="Create index pattern"
/>
</h1>
```
- for label:
```js
<EuiTextColor color="subdued">
<FormattedMessage
id="kbn.management.createIndexPatternLabel"
defaultMessage="Kibana uses index patterns to retrieve data from Elasticsearch indices for things like visualizations."
/>
</EuiTextColor>
```
- for button:
```js
<EuiButton data-test-subj="addScriptedFieldLink" href={addScriptedFieldUrl}>
<FormattedMessage id="kbn.management.editIndexPattern.scripted.addFieldButton" defaultMessage="Add scripted field"/>
</EuiButton>
```
- for dropDown:
```js
<select ng-model="indexedFieldTypeFilter" ng-options="o for o in indexedFieldTypes">
<option value=""
i18n-id="kbn.management.editIndexPattern.fields.allTypesDropDown"
i18n-default-message="All field types"></option>
</select>
```
- for placeholder:
```js
<EuiFieldText
name="indexPatternId"
placeholder={intl.formatMessage({
id: 'kbn.management.createIndexPattern.stepTime.options.patternPlaceholder',
defaultMessage: 'custom-index-pattern-id' })}
/>
```
- for `aria-label` attribute and tooltip
```js
<button
aria-label="{{'kbn.management.editIndexPattern.removeAria' | i18n: {defaultMessage: 'Remove index pattern'} }}"
tooltip="{{'kbn.management.editIndexPattern.removeTooltip' | i18n: {defaultMessage: 'Remove index pattern'} }}"
>
</button>
```
- for errorMessage:
```js
errors.push(
intl.formatMessage(
{
id: 'kbn.management.createIndexPattern.step.invalidCharactersErrorMessage',
defaultMessage: 'An index pattern cannot contain spaces or the characters: {characterList}'
},
{ characterList }
));
```
- for toggleSwitch
```js
<EuiSwitch
label={<FormattedMessage
id="kbn.management.createIndexPattern.includeSystemIndicesToggleSwitch"
defaultMessage="Include system indices"
/>}
/>
```
### Text with plurals
The numeric input is mapped to a plural category, some subset of "zero", "one", "two", "few", "many", and "other" depending on the locale and the type of plural. There are languages with multiple plural forms [Language Plural Rules](http://www.unicode.org/cldr/charts/latest/supplemental/language_plural_rules.html).
Here is an example of message translation depending on a plural category:
```js
<span i18n-id="kbn.management.editIndexPattern.mappingConflictLabel"
i18n-default-message="{conflictFieldsLength, plural, one {A field is} other {# fields are}} defined as several types (string, integer, etc) across the indices that match this pattern."
i18n-values="{ conflictFieldsLength: conflictFields.length }"></span>
```
When `conflictFieldsLength` equals 1, the result string will be `"A field is defined as several types (string, integer, etc) across the indices that match this pattern."`. When `conflictFieldsLength` has a value of 2 or more, the result string will be `"2 fields are defined as several types (string, integer, etc) across the indices that match this pattern."`.
### Splitting
Splitting sentences into several keys often inadvertently presumes a grammar, a sentence structure, and such composite strings are often very difficult to translate.
- Do not divide a single sentence into different labels unless you have absolutely no other choice.
- Do not divide sentences that belong together into separate labels.
For example:
`The following dialogue box indicates progress. You can close it and the process will continue to run in the background.`
If this group of sentences is separated, it's possible that the context of the `'it'` in `'close it'` will be lost.
### Unit tests
When testing React component that use the injectI18n higher-order component, use the shallowWithIntl helper function defined in test_utils/enzyme_helpers to render the component. This will shallow render the component with Enzyme and inject the necessary context and props to use the intl mock defined in test_utils/mocks/intl.
For example, there is a component that is wrapped by `injectI18n`, like in the `AddFilter` component:
```js
// ...
export class AddFilterComponent extends Component {
// ...
render() {
const { filter } = this.state;
return (
<EuiFlexGroup>
<EuiFlexItem grow={10}>
<EuiFieldText
fullWidth
value={filter}
onChange={e => this.setState({ filter: e.target.value.trim() })}
placeholder={this.props.intl.formatMessage({
id: 'kbn.management.indexPattern.edit.source.placeholder',
defaultMessage: 'source filter, accepts wildcards (e.g., `user*` to filter fields starting with \'user\')'
})}
/>
</EuiFlexItem>
</EuiFlexGroup>
);
}
}
export const AddFilter = injectI18n(AddFilterComponent);
```
To test the `AddFilterComponent` component it is needed to render it using `shallowWithIntl` function to pass `intl` object into the `props`.
```js
// ...
it('should render normally', async () => {
const component = shallowWithIntl(
<AddFilterComponent onAddFilter={() => {}}/>
);
expect(component).toMatchSnapshot();
});
// ...
```

View file

@ -26150,6 +26150,9 @@ var subscribeTo = function (result) {
}
};
}
else if (result && typeof result[__WEBPACK_IMPORTED_MODULE_9__symbol_observable__["a" /* observable */]] === 'function') {
return Object(__WEBPACK_IMPORTED_MODULE_4__subscribeToObservable__["a" /* subscribeToObservable */])(result);
}
else if (Object(__WEBPACK_IMPORTED_MODULE_5__isArrayLike__["a" /* isArrayLike */])(result)) {
return Object(__WEBPACK_IMPORTED_MODULE_1__subscribeToArray__["a" /* subscribeToArray */])(result);
}
@ -26159,9 +26162,6 @@ var subscribeTo = function (result) {
else if (result && typeof result[__WEBPACK_IMPORTED_MODULE_8__symbol_iterator__["a" /* iterator */]] === 'function') {
return Object(__WEBPACK_IMPORTED_MODULE_3__subscribeToIterable__["a" /* subscribeToIterable */])(result);
}
else if (result && typeof result[__WEBPACK_IMPORTED_MODULE_9__symbol_observable__["a" /* observable */]] === 'function') {
return Object(__WEBPACK_IMPORTED_MODULE_4__subscribeToObservable__["a" /* subscribeToObservable */])(result);
}
else {
var value = Object(__WEBPACK_IMPORTED_MODULE_7__isObject__["a" /* isObject */])(result) ? 'an invalid object' : "'" + result + "'";
var msg = "You provided " + value + " where a stream was expected."

View file

@ -3,9 +3,6 @@
"version": "1.0.0",
"private": true,
"main": "./target/index.js",
"dependencies": {
"lodash": "4.17.4"
},
"devDependencies": {
"babel-cli": "^6.26.0",
"babel-preset-env": "^1.6.1"

View file

@ -4,8 +4,7 @@
"private": true,
"main": "./target/index.js",
"dependencies": {
"@elastic/bar": "link:../bar",
"lodash": "4.17.4"
"@elastic/bar": "link:../bar"
},
"devDependencies": {
"babel-cli": "^6.26.0",

View file

@ -19,9 +19,6 @@ Array [
exports[`kbn-pm production builds and copies projects for production: packages/bar/package.json 1`] = `
Object {
"dependencies": Object {
"lodash": "4.17.4",
},
"devDependencies": Object {
"babel-cli": "^6.26.0",
"babel-preset-env": "^1.6.1",
@ -48,7 +45,6 @@ exports[`kbn-pm production builds and copies projects for production: packages/f
Object {
"dependencies": Object {
"@elastic/bar": "file:../bar",
"lodash": "4.17.4",
},
"devDependencies": Object {
"babel-cli": "^6.26.0",

View file

@ -1,9 +1,7 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`run tests CLI options accepts help option even if invalid options passed 1`] = `
Array [
Array [
"Run Functional Tests
"Run Functional Tests
Usage:
node scripts/functional_tests --help
@ -21,55 +19,48 @@ Options:
--verbose Log everything.
--debug Run in debug mode.
--quiet Only log errors.
--silent Log nothing.",
],
]
--silent Log nothing.
"
`;
exports[`run tests CLI options rejects boolean config value 1`] = `
Array [
Array [
"Error: functional_tests: invalid argument [true] to option [config]",
],
]
"
functional_tests: invalid argument [true] to option [config]
...stack trace...
"
`;
exports[`run tests CLI options rejects boolean value for kibana-install-dir 1`] = `
Array [
Array [
"Error: functional_tests: invalid argument [true] to option [kibana-install-dir]",
],
]
"
functional_tests: invalid argument [true] to option [kibana-install-dir]
...stack trace...
"
`;
exports[`run tests CLI options rejects empty config value if no default passed 1`] = `
Array [
Array [
"Error: functional_tests: config is required",
],
]
"
functional_tests: config is required
...stack trace...
"
`;
exports[`run tests CLI options rejects invalid options even if valid options exist 1`] = `
Array [
Array [
"Error: functional_tests: invalid option [aintnothang]",
],
]
"
functional_tests: invalid option [aintnothang]
...stack trace...
"
`;
exports[`run tests CLI options rejects non-boolean value for bail 1`] = `
Array [
Array [
"Error: functional_tests: invalid argument [peanut] to option [bail]",
],
]
"
functional_tests: invalid argument [peanut] to option [bail]
...stack trace...
"
`;
exports[`run tests CLI options rejects non-enum value for esFrom 1`] = `
Array [
Array [
"Error: functional_tests: invalid argument [butter] to option [esFrom]",
],
]
"
functional_tests: invalid argument [butter] to option [esFrom]
...stack trace...
"
`;

View file

@ -18,7 +18,7 @@
*/
import dedent from 'dedent';
import { createToolingLog, pickLevelFromFlags } from '@kbn/dev-utils';
import { ToolingLog, pickLevelFromFlags } from '@kbn/dev-utils';
const options = {
help: { desc: 'Display this menu and exit.' },
@ -99,9 +99,10 @@ export function processOptions(userOptions, defaultConfigPaths) {
}
function createLogger() {
const log = createToolingLog(pickLevelFromFlags(userOptions));
log.pipe(process.stdout);
return log;
return new ToolingLog({
level: pickLevelFromFlags(userOptions),
writeTo: process.stdout,
});
}
return {

View file

@ -17,9 +17,8 @@
* under the License.
*/
import chalk from 'chalk';
import getopts from 'getopts';
import { runTests } from '../../tasks';
import { runCli } from '../../lib';
import { processOptions, displayHelp } from './args';
/**
@ -31,17 +30,8 @@ import { processOptions, displayHelp } from './args';
* if no config option is passed
*/
export async function runTestsCli(defaultConfigPaths) {
try {
const userOptions = getopts(process.argv.slice(2)) || {};
if (userOptions.help) {
console.log(displayHelp());
return undefined;
}
await runCli(displayHelp, async userOptions => {
const options = processOptions(userOptions, defaultConfigPaths);
await runTests(options);
} catch (err) {
console.log(chalk.red(err));
process.exit(1);
}
});
}

View file

@ -18,6 +18,7 @@
*/
import { runTestsCli } from './cli';
import { checkMockConsoleLogSnapshot } from '../../test_helpers';
// Note: Stub the runTests function to keep testing only around the cli
// method and arguments.
@ -62,7 +63,7 @@ describe('run tests CLI', () => {
await runTestsCli();
expect(exitMock).toHaveBeenCalledWith(1);
expect(logMock.mock.calls).toMatchSnapshot();
checkMockConsoleLogSnapshot(logMock);
});
it('rejects empty config value if no default passed', async () => {
@ -71,7 +72,7 @@ describe('run tests CLI', () => {
await runTestsCli();
expect(exitMock).toHaveBeenCalledWith(1);
expect(logMock.mock.calls).toMatchSnapshot();
checkMockConsoleLogSnapshot(logMock);
});
it('accepts empty config value if default passed', async () => {
@ -88,7 +89,7 @@ describe('run tests CLI', () => {
await runTestsCli(['foo']);
expect(exitMock).toHaveBeenCalledWith(1);
expect(logMock.mock.calls).toMatchSnapshot();
checkMockConsoleLogSnapshot(logMock);
});
it('accepts string value for kibana-install-dir', async () => {
@ -105,7 +106,7 @@ describe('run tests CLI', () => {
await runTestsCli(['foo']);
expect(exitMock).toHaveBeenCalledWith(1);
expect(logMock.mock.calls).toMatchSnapshot();
checkMockConsoleLogSnapshot(logMock);
});
it('accepts boolean value for updateBaselines', async () => {
@ -130,7 +131,7 @@ describe('run tests CLI', () => {
await runTestsCli(['foo']);
expect(exitMock).toHaveBeenCalledWith(1);
expect(logMock.mock.calls).toMatchSnapshot();
checkMockConsoleLogSnapshot(logMock);
});
it('accepts value for grep', async () => {
@ -187,7 +188,7 @@ describe('run tests CLI', () => {
await runTestsCli(['foo']);
expect(exitMock).not.toHaveBeenCalledWith(1);
expect(logMock.mock.calls).toMatchSnapshot();
checkMockConsoleLogSnapshot(logMock);
});
it('rejects invalid options even if valid options exist', async () => {
@ -196,7 +197,7 @@ describe('run tests CLI', () => {
await runTestsCli(['foo']);
expect(exitMock).toHaveBeenCalledWith(1);
expect(logMock.mock.calls).toMatchSnapshot();
checkMockConsoleLogSnapshot(logMock);
});
});
});

View file

@ -1,57 +1,50 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`start servers CLI options accepts boolean value for updateBaselines 1`] = `
Array [
Array [
"Error: functional_tests_server: invalid option [updateBaselines]",
],
]
"
functional_tests_server: invalid option [updateBaselines]
...stack trace...
"
`;
exports[`start servers CLI options rejects bail 1`] = `
Array [
Array [
"Error: functional_tests_server: invalid option [bail]",
],
]
"
functional_tests_server: invalid option [bail]
...stack trace...
"
`;
exports[`start servers CLI options rejects boolean config value 1`] = `
Array [
Array [
"Error: functional_tests_server: invalid argument [true] to option [config]",
],
]
"
functional_tests_server: invalid argument [true] to option [config]
...stack trace...
"
`;
exports[`start servers CLI options rejects boolean value for kibana-install-dir 1`] = `
Array [
Array [
"Error: functional_tests_server: invalid argument [true] to option [kibana-install-dir]",
],
]
"
functional_tests_server: invalid argument [true] to option [kibana-install-dir]
...stack trace...
"
`;
exports[`start servers CLI options rejects empty config value if no default passed 1`] = `
Array [
Array [
"Error: functional_tests_server: config is required",
],
]
"
functional_tests_server: config is required
...stack trace...
"
`;
exports[`start servers CLI options rejects invalid options even if valid options exist 1`] = `
Array [
Array [
"Error: functional_tests_server: invalid option [grep]",
],
]
"
functional_tests_server: invalid option [grep]
...stack trace...
"
`;
exports[`start servers CLI options rejects non-enum value for esFrom 1`] = `
Array [
Array [
"Error: functional_tests_server: invalid argument [butter] to option [esFrom]",
],
]
"
functional_tests_server: invalid argument [butter] to option [esFrom]
...stack trace...
"
`;

View file

@ -18,7 +18,7 @@
*/
import dedent from 'dedent';
import { createToolingLog, pickLevelFromFlags } from '@kbn/dev-utils';
import { ToolingLog, pickLevelFromFlags } from '@kbn/dev-utils';
const options = {
help: { desc: 'Display this menu and exit.' },
@ -86,9 +86,10 @@ export function processOptions(userOptions, defaultConfigPath) {
}
function createLogger() {
const log = createToolingLog(pickLevelFromFlags(userOptions));
log.pipe(process.stdout);
return log;
return new ToolingLog({
level: pickLevelFromFlags(userOptions),
writeTo: process.stdout,
});
}
return {

View file

@ -17,9 +17,8 @@
* under the License.
*/
import chalk from 'chalk';
import getopts from 'getopts';
import { startServers } from '../../tasks';
import { runCli } from '../../lib';
import { processOptions, displayHelp } from './args';
/**
@ -28,17 +27,8 @@ import { processOptions, displayHelp } from './args';
* if no config option is passed
*/
export async function startServersCli(defaultConfigPath) {
try {
const userOptions = getopts(process.argv.slice(2)) || {};
if (userOptions.help) {
console.log(displayHelp());
return undefined;
}
await runCli(displayHelp, async userOptions => {
const options = processOptions(userOptions, defaultConfigPath);
await startServers(options);
} catch (err) {
console.log(chalk.red(err));
process.exit(1);
}
});
}

View file

@ -18,6 +18,7 @@
*/
import { startServersCli } from './cli';
import { checkMockConsoleLogSnapshot } from '../../test_helpers';
// Note: Stub the startServers function to keep testing only around the cli
// method and arguments.
@ -62,7 +63,7 @@ describe('start servers CLI', () => {
await startServersCli();
expect(exitMock).toHaveBeenCalledWith(1);
expect(logMock.mock.calls).toMatchSnapshot();
checkMockConsoleLogSnapshot(logMock);
});
it('rejects empty config value if no default passed', async () => {
@ -71,7 +72,7 @@ describe('start servers CLI', () => {
await startServersCli();
expect(exitMock).toHaveBeenCalledWith(1);
expect(logMock.mock.calls).toMatchSnapshot();
checkMockConsoleLogSnapshot(logMock);
});
it('accepts empty config value if default passed', async () => {
@ -88,7 +89,7 @@ describe('start servers CLI', () => {
await startServersCli('foo');
expect(exitMock).toHaveBeenCalledWith(1);
expect(logMock.mock.calls).toMatchSnapshot();
checkMockConsoleLogSnapshot(logMock);
});
it('accepts string value for kibana-install-dir', async () => {
@ -105,7 +106,7 @@ describe('start servers CLI', () => {
await startServersCli('foo');
expect(exitMock).toHaveBeenCalledWith(1);
expect(logMock.mock.calls).toMatchSnapshot();
checkMockConsoleLogSnapshot(logMock);
});
it('accepts boolean value for updateBaselines', async () => {
@ -114,7 +115,7 @@ describe('start servers CLI', () => {
await startServersCli('foo');
expect(exitMock).toHaveBeenCalledWith(1);
expect(logMock.mock.calls).toMatchSnapshot();
checkMockConsoleLogSnapshot(logMock);
});
it('accepts source value for esFrom', async () => {
@ -131,7 +132,7 @@ describe('start servers CLI', () => {
await startServersCli('foo');
expect(exitMock).toHaveBeenCalledWith(1);
expect(logMock.mock.calls).toMatchSnapshot();
checkMockConsoleLogSnapshot(logMock);
});
it('accepts debug option', async () => {
@ -188,7 +189,7 @@ describe('start servers CLI', () => {
await startServersCli('foo');
expect(exitMock).toHaveBeenCalledWith(1);
expect(logMock.mock.calls).toMatchSnapshot();
checkMockConsoleLogSnapshot(logMock);
});
});
});

View file

@ -0,0 +1,26 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`does right thing when non-error is thrown 1`] = `
"
'foo bar' thrown!
...stack trace...
"
`;
exports[`logs no stack trace then exits when stack missing 1`] = `
"
foo error
(no stack trace)
"
`;
exports[`logs the stack then exits when run function throws an error 1`] = `
"
foo error
stack 1
stack 2
stack 3
"
`;

View file

@ -21,3 +21,4 @@ export { runKibanaServer } from './run_kibana_server';
export { runElasticsearch } from './run_elasticsearch';
export { runFtr } from './run_ftr';
export { KIBANA_ROOT, KIBANA_FTR_SCRIPT, FUNCTIONAL_CONFIG_PATH, API_CONFIG_PATH } from './paths';
export { runCli } from './run_cli';

View file

@ -0,0 +1,71 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { inspect } from 'util';
import chalk from 'chalk';
import getopts from 'getopts';
/**
 * Error type for failures that are an expected part of CLI usage
 * (invalid flags, missing required config, and so on). `runCli`
 * recognizes this class and skips stack-trace logging, then exits
 * the process with `exitCode`.
 */
export class CliError extends Error {
  constructor(message, exitCode = 1) {
    super(message);

    // hide the CliError constructor frame from the captured stack
    Error.captureStackTrace(this, CliError);

    this.exitCode = exitCode;
  }
}
/**
 * Shared entry point for the functional test CLIs.
 *
 * Parses `process.argv` with getopts, prints `getHelpText()` and returns
 * early when `--help` is passed, and otherwise invokes `run(userOptions)`.
 * Any failure is reported in red and the process exits with the error's
 * `exitCode` (defaulting to 1).
 *
 * @param {Function} getHelpText returns the help text printed for --help
 * @param {Function} run async callback that receives the parsed argv options
 */
export async function runCli(getHelpText, run) {
  try {
    const userOptions = getopts(process.argv.slice(2)) || {};

    if (userOptions.help) {
      console.log(getHelpText());
      return;
    }

    await run(userOptions);
  } catch (rawError) {
    // normalize non-Error throwables so we always have message/stack fields
    const error =
      rawError instanceof Error ? rawError : new Error(`${inspect(rawError)} thrown!`);

    console.log();
    console.log(chalk.red(error.message));

    // CliError is a special error class that indicates that the error is produced as a part
    // of using the CLI, and does not need a stack trace to make sense, so we skip the stack
    // trace logging if the error thrown is an instance of this class
    if (!(error instanceof CliError)) {
      if (error.stack) {
        // drop the first stack line: it repeats the message we just printed
        console.log(
          error.stack
            .split('\n')
            .slice(1)
            .join('\n')
        );
      } else {
        console.log(' (no stack trace)');
      }
    }

    console.log();
    process.exit(error.exitCode || 1);
  }
}

Some files were not shown because too many files have changed in this diff Show more