Merge branch 'master' of github.com:elastic/kibana into feature-secops

Xavier Mouligneau 2019-01-25 11:46:32 -05:00
commit 3b51264eb3
1775 changed files with 50522 additions and 28098 deletions

View file

@ -5,7 +5,7 @@ bower_components
/.es
/plugins
/optimize
/dlls
/built_assets
/src/fixtures/vislib/mock_data
/src/ui/public/angular-bootstrap
/src/ui/public/flot-charts

View file

@ -203,6 +203,34 @@ module.exports = {
},
},
/**
* Files that run in the browser with only node-level transpilation
*/
{
files: [
'test/functional/services/lib/leadfoot_element_wrapper/scroll_into_view_if_necessary.js',
],
rules: {
'prefer-object-spread/prefer-object-spread': 'off',
'no-var': 'off',
'prefer-const': 'off',
'prefer-destructuring': 'off',
'no-restricted-syntax': [
'error',
'ArrowFunctionExpression',
'AwaitExpression',
'ClassDeclaration',
'RestElement',
'SpreadElement',
'YieldExpression',
'VariableDeclaration[kind="const"]',
'VariableDeclaration[kind="let"]',
'VariableDeclarator[id.type="ArrayPattern"]',
'VariableDeclarator[id.type="ObjectPattern"]',
],
},
},
/**
* Files that run AFTER node version check
* and are not also transpiled with babel

.github/CODEOWNERS (24 changes)
View file

@ -1,9 +1,31 @@
# GitHub CODEOWNERS definition
# See: https://help.github.com/articles/about-codeowners/
# Identify which groups will be pinged by changes to different parts of the codebase.
# For more info, see https://help.github.com/articles/about-codeowners/
# APM
/x-pack/plugins/apm/ @elastic/apm-ui
# Beats
/x-pack/plugins/beats_management/ @elastic/beats
# Canvas
/x-pack/plugins/canvas/ @elastic/kibana-canvas
# Security
/x-pack/plugins/security/ @elastic/kibana-security
/x-pack/plugins/spaces/ @elastic/kibana-security
# Design
**/*.scss @elastic/kibana-design
# Elasticsearch UI
/src/legacy/core_plugins/console/ @elastic/es-ui
/x-pack/plugins/console_extensions/ @elastic/es-ui
/x-pack/plugins/cross_cluster_replication/ @elastic/es-ui
/x-pack/plugins/index_lifecycle_management/ @elastic/es-ui
/x-pack/plugins/index_management/ @elastic/es-ui
/x-pack/plugins/license_management/ @elastic/es-ui
/x-pack/plugins/remote_clusters/ @elastic/es-ui
/x-pack/plugins/rollup/ @elastic/es-ui
/x-pack/plugins/searchprofiler/ @elastic/es-ui
/x-pack/plugins/watcher/ @elastic/es-ui

.gitignore (3 changes)
View file

@ -9,7 +9,7 @@ node_modules
!/src/dev/notice/__fixtures__/node_modules
trash
/optimize
/dlls
/built_assets
target
/build
.jruby
@ -44,4 +44,3 @@ package-lock.json
*.sublime-*
npm-debug.log*
.tern-project
**/public/index.css

View file

@ -4,6 +4,8 @@
"server": "src/server",
"console": "src/legacy/core_plugins/console",
"inputControl": "src/legacy/core_plugins/input_control_vis",
"inspectorViews": "src/legacy/core_plugins/inspector_views",
"interpreter": "src/legacy/core_plugins/interpreter",
"kbn": "src/legacy/core_plugins/kibana",
"kbnVislibVisTypes": "src/legacy/core_plugins/kbn_vislib_vis_types",
"markdownVis": "src/legacy/core_plugins/markdown_vis",
@ -19,13 +21,16 @@
"xpack.apm": "x-pack/plugins/apm",
"xpack.beatsManagement": "x-pack/plugins/beats_management",
"xpack.crossClusterReplication": "x-pack/plugins/cross_cluster_replication",
"xpack.dashboardMode": "x-pack/plugins/dashboard_mode",
"xpack.graph": "x-pack/plugins/graph",
"xpack.grokDebugger": "x-pack/plugins/grokdebugger",
"xpack.idxMgmt": "x-pack/plugins/index_management",
"xpack.indexLifecycleMgmt": "x-pack/plugins/index_lifecycle_management",
"xpack.infra": "x-pack/plugins/infra",
"xpack.licenseMgmt": "x-pack/plugins/license_management",
"xpack.ml": "x-pack/plugins/ml",
"xpack.logstash": "x-pack/plugins/logstash",
"xpack.main": "x-pack/plugins/xpack_main",
"xpack.monitoring": "x-pack/plugins/monitoring",
"xpack.remoteClusters": "x-pack/plugins/remote_clusters",
"xpack.reporting": "x-pack/plugins/reporting",
@ -35,6 +40,7 @@
"xpack.security": "x-pack/plugins/security",
"xpack.spaces": "x-pack/plugins/spaces",
"xpack.upgradeAssistant": "x-pack/plugins/upgrade_assistant",
"xpack.uptime": "x-pack/plugins/uptime",
"xpack.watcher": "x-pack/plugins/watcher"
},
"exclude": [

View file

@ -34,6 +34,32 @@ THE SOFTWARE.
This product uses Noto fonts that are licensed under the SIL Open
Font License, Version 1.1.
---
Based on the scroll-into-view-if-necessary module from npm
https://github.com/stipsan/compute-scroll-into-view/blob/master/src/index.ts#L269-L340
MIT License
Copyright (c) 2018 Cody Olsen
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
---
Pretty handling of logarithmic axes.
Copyright (c) 2007-2014 IOLA and Ole Laursen.

File diff suppressed because it is too large.

View file

@ -80,7 +80,7 @@ POST api/kibana/dashboards/import?exclude=index-pattern
"hits": 0,
"description": "",
"panelsJSON": "[{\"gridData\":{\"w\":24,\"h\":15,\"x\":0,\"y\":0,\"i\":\"1\"},\"version\":\"7.0.0-alpha1\",\"panelIndex\":\"1\",\"type\":\"visualization\",\"id\":\"80b956f0-b2cd-11e8-ad8e-85441f0c2e5c\",\"embeddableConfig\":{}}]",
"optionsJSON": "{\"darkTheme\":false,\"useMargins\":true,\"hidePanelTitles\":false}",
"optionsJSON": "{\"useMargins\":true,\"hidePanelTitles\":false}",
"version": 1,
"timeRestore": false,
"kibanaSavedObjectMeta": {

View file

@ -28,7 +28,7 @@ the shortened URL token for the provided request body.
--------------------------------------------------
POST api/shorten_url
{
"url": "/app/kibana#/dashboard?_g=()&_a=(description:'',filters:!(),fullScreenMode:!f,options:(darkTheme:!f,hidePanelTitles:!f,useMargins:!t),panels:!((embeddableConfig:(),gridData:(h:15,i:'1',w:24,x:0,y:0),id:'8f4d0c00-4c86-11e8-b3d7-01146121b73d',panelIndex:'1',type:visualization,version:'7.0.0-alpha1')),query:(language:lucene,query:''),timeRestore:!f,title:'New%20Dashboard',viewMode:edit)",
"url": "/app/kibana#/dashboard?_g=()&_a=(description:'',filters:!(),fullScreenMode:!f,options:(hidePanelTitles:!f,useMargins:!t),panels:!((embeddableConfig:(),gridData:(h:15,i:'1',w:24,x:0,y:0),id:'8f4d0c00-4c86-11e8-b3d7-01146121b73d',panelIndex:'1',type:visualization,version:'7.0.0-alpha1')),query:(language:lucene,query:''),timeRestore:!f,title:'New%20Dashboard',viewMode:edit)",
}
--------------------------------------------------
// KIBANA

View file

@ -55,7 +55,6 @@ The list of common parameters:
- *title*: title of your visualization as displayed in kibana
- *icon*: <string> the https://elastic.github.io/eui/#/display/icons[EUI icon] type to use for this visualization
- *image*: instead of an icon you can provide a SVG image (imported)
- *legacyIcon*: (DEPRECATED) <string> provide a class name (e.g. for a font awesome icon)
- *description*: description of your visualization as shown in kibana
- *hidden*: <bool> if set to true, will hide the type from showing up in the visualization wizard
- *visConfig*: object holding visualization parameters

Five binary image files added (not shown; 90-189 KiB each).

View file

@ -17,9 +17,14 @@ image::infrastructure/images/infra-sysmon.jpg[Infrastructure Overview in Kibana]
[float]
== Add data sources
Kibana provides step-by-step instructions to help you add your data sources.
The {infra-guide}[Infrastructure Monitoring Guide] is good source for more detailed
The {infra-guide}[Infrastructure Monitoring Guide] is a good source for more detailed
instructions and information.
[float]
== Configure data sources
By default the Infrastructure UI uses the `metricbeat-*` index pattern to query the data. If you configured Metricbeat to export data to a different set of indices, you will need to set `xpack.infra.sources.default.metricAlias` in `config/kibana.yml` to match your index pattern. You can also configure the timestamp field by overriding `xpack.infra.sources.default.fields.timestamp`. See <<infrastructure-ui-settings-kb>> for a complete list.
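For example, a minimal `config/kibana.yml` sketch for Metricbeat data stored under a custom index pattern (the pattern below is illustrative, not a default):

[source,yaml]
--------------------------------------------------
xpack.infra.sources.default.metricAlias: "custom-metricbeat-*"
xpack.infra.sources.default.fields.timestamp: "@timestamp"
--------------------------------------------------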
--
include::monitor.asciidoc[]

View file

@ -17,9 +17,13 @@ image::logs/images/logs-console.png[Log Console in Kibana]
== Add data sources
Kibana provides step-by-step instructions to help you add your data sources.
The {infra-guide}[Infrastructure Monitoring Guide] is good source for more detailed information and
The {infra-guide}[Infrastructure Monitoring Guide] is a good source for more detailed information and
instructions.
[float]
== Configure data sources
By default the Logs UI uses the `filebeat-*` index pattern to query the data. If your logs are located in a different set of indices, you will need to set `xpack.infra.sources.default.logAlias` in `config/kibana.yml` to match your logs' index pattern. You can also configure the timestamp field by overriding `xpack.infra.sources.default.fields.timestamp`, which defaults to `@timestamp`. See <<logs-ui-settings-kb>> for a complete list.
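For example, a minimal `config/kibana.yml` sketch for logs shipped to a custom index pattern (the pattern below is illustrative, not a default):

[source,yaml]
--------------------------------------------------
xpack.infra.sources.default.logAlias: "custom-filebeat-*"
xpack.infra.sources.default.fields.timestamp: "@timestamp"
--------------------------------------------------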
--

View file

@ -2,7 +2,7 @@
[[logs-ui]]
== Using the Logs UI
Customize the Infrastructure UI to focus on the data you want to see and control how you see it.
Customize the Logs UI to focus on the data you want to see and control how you see it.
[role="screenshot"]
image::logs/images/logs-console.png[Log Console in Kibana]

View file

@ -19,6 +19,16 @@ include::management/rollups/create_and_manage_rollups.asciidoc[]
include::management/rollups/visualize_rollup_data.asciidoc[]
include::management/index-lifecycle-policies/intro-to-lifecycle-policies.asciidoc[]
include::management/index-lifecycle-policies/create-policy.asciidoc[]
include::management/index-lifecycle-policies/manage-policy.asciidoc[]
include::management/index-lifecycle-policies/add-policy-to-index.asciidoc[]
include::management/index-lifecycle-policies/example-index-lifecycle-policy.asciidoc[]
include::management/managing-fields.asciidoc[]
include::management/managing-indices.asciidoc[]
@ -28,3 +38,8 @@ include::management/advanced-options.asciidoc[]
include::management/managing-saved-objects.asciidoc[]
include::management/managing-beats.asciidoc[]
include::management/managing-remote-clusters.asciidoc[]

View file

@ -39,6 +39,7 @@ document.
`discover:sort:defaultOrder`:: Controls the default sort direction for time based index patterns in the Discover app.
`doc_table:highlight`:: Highlight results in Discover and Saved Searches Dashboard. Highlighting makes request slow when
working on big documents. Set this property to `false` to disable highlighting.
`doc_table:hideTimeColumn`:: Hide the 'Time' column in Discover and in all Saved Searches on Dashboards.
`search:includeFrozen`:: Will include {ref}/frozen-indices.html[frozen indices] in results if enabled. Searching through frozen indices
might increase the search time.
`courier:maxSegmentCount`:: Kibana splits requests in the Discover app into segments to limit the size of requests sent to
@ -76,7 +77,6 @@ mentioned use "_default_".
`timepicker:timeDefaults`:: The default time filter selection.
`timepicker:refreshIntervalDefaults`:: The time filter's default refresh interval.
`timepicker:quickRanges`:: The list of ranges to show in the Quick section of the time picker. This should be an array of objects, with each object containing `from`, `to` (see {ref}/common-options.html#date-math[accepted formats]), `display` (the title to be displayed), and `section` (which column to put the option in).
`dashboard:defaultDarkTheme`:: Set this property to `true` to make new dashboards use the dark theme by default.
`filters:pinnedByDefault`:: Set this property to `true` to make filters have a global state by default.
`filterEditor:suggestValues`:: Set this property to `false` to prevent the filter editor from suggesting values for fields.
`notifications:banner`:: You can specify a custom banner to display temporary notices to all users. This field supports

View file

@ -0,0 +1,17 @@
[[adding-policy-to-index]]
=== Adding a policy to an index
You can add a lifecycle policy to an index and view the status for indices
managed by a policy in *Management > {es} > Index Management*. This page lists your
{es} indices, which you can filter by lifecycle status and lifecycle phase.
To add a policy, select the index name and then select *Manage > Add lifecycle policy*.
You'll see the policy name, the phase the index is in, the current
action, and whether any errors occurred while performing that action.
To remove a policy from an index, select *Manage > Remove lifecycle policy*.
[role="screenshot"]
image::images/index_management_add_policy.png[][UI for adding a policy to an index]

View file

@ -0,0 +1,57 @@
[[creating-index-lifecycle-policies]]
=== Creating an index lifecycle policy
An index lifecycle policy enables you to define rules over when to perform
certain actions, such as a rollover or force merge, on an index. Index lifecycle
management automates execution of those actions at the right time.
When you create an index lifecycle policy, consider the tradeoffs between
performance and availability. As you move your index through the lifecycle,
you're likely moving your data to less performant hardware and reducing the
number of shards and replicas. It's important to ensure that the index
continues to have enough replicas to prevent data loss in the event of failures.
*Index Lifecycle Policies* is automatically enabled in {kib}. Go to
*Management > {es} > Index Lifecycle Policies*.
NOTE: If you don't want to use this feature, you can disable it by setting
`xpack.ilm.enabled` to false in your `kibana.yml` configuration file. If you
disable *Index Management*, then *Index Lifecycle Policies* is also disabled.
[role="screenshot"]
image::images/index-lifecycle-policies-create.png[][UI for creating an index lifecycle policy]
You can define up to four phases in the index lifecycle. For each phase, you
can enable actions to optimize performance for that phase. Transitioning
between phases is based on the age of the index.
The four phases of an index lifecycle policy are:
* *Hot.* The index is actively being queried and written to. You can optionally
roll over to a new index when the
original index reaches a specified size or age. When a rollover occurs, a new
index is created, added to the index alias, and designated as the new “hot”
index. You can still query the previous indices, but you only ever write to
the “hot” index. See {ref}/indices-rollover-index.html[Rollover index] for more information.
* *Warm.* The index is typically searched at a lower rate than when the data is
hot. The index is no longer used for storing new data, but might occasionally receive
late-arriving data, for example, from a Beat that had a network problem that's now fixed.
You can optionally shrink the number of replicas and move the shards to a
different set of nodes with smaller or less performant hardware. You can also
reduce the number of primary shards and force merge the index into
smaller {ref}/indices-segments.html[segments].
* *Cold.* The index is no longer being updated and is seldom queried, but is
still searchable. If you have a big deployment, you can move it to even
less performant hardware. You might also reduce the number of replicas because
you expect the data to be queried less frequently.
* *Delete.* The index is no longer relevant. You can define when it is safe to
delete it.
The index lifecycle always includes an active hot phase. The warm, cold, and
delete phases are optional. For example, you might define all four phases for
one policy and only a hot and delete phase for another. See {ref}/_actions.html[Actions]
for more information on the actions available in each phase.

View file

@ -0,0 +1,22 @@
[[example-using-index-lifecycle-policy]]
=== Example of using an index lifecycle policy
A common use case for managing index lifecycle policies is when you're using
{beats-ref}/beats-reference.html[Beats] to continually send time-series data,
such as metrics and log data, to {es}. When you create the Beats packages, an
index template is installed. The template includes a default policy to apply
when new indices are created.
You can edit the policy in {kib}'s *Index Lifecycle Policies*. For example, you might:
* Roll over the index when it reaches 50 GB in size or is 30 days old. These
settings are the default for the Beats lifecycle policy. This avoids
having thousands of tiny indices. When a rollover occurs, a new “hot” index is
created and added to the index alias.
* Move the index into the warm phase, shrink the index down to a single shard,
and force merge to a single segment.
* After 60 days, move the index into the cold phase and onto less expensive hardware.
* Delete the index after 90 days.
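A lifecycle policy expressing the rules above might look roughly like the following {es} API sketch. The policy name and the cold-phase allocation attribute are illustrative and not part of the Beats default.

[source,js]
--------------------------------------------------
PUT _ilm/policy/beats-default-policy
{
  "policy": {
    "phases": {
      "hot": {
        "actions": {
          "rollover": { "max_size": "50gb", "max_age": "30d" }
        }
      },
      "warm": {
        "actions": {
          "shrink": { "number_of_shards": 1 },
          "forcemerge": { "max_num_segments": 1 }
        }
      },
      "cold": {
        "min_age": "60d",
        "actions": {
          "allocate": { "require": { "data": "cold" } }
        }
      },
      "delete": {
        "min_age": "90d",
        "actions": { "delete": {} }
      }
    }
  }
}
--------------------------------------------------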

View file

@ -0,0 +1,29 @@
[[index-lifecycle-policies]]
== Index lifecycle policies
If you're working with time series data, you don't want to continually dump
everything into a single index. Instead, you might periodically roll over the
data to a new index to keep it from growing so big it's slow and expensive.
As the index ages and you query it less frequently, you'll likely move it to
less expensive hardware and reduce the number of shards and replicas.
To automatically move an index through its lifecycle, you can create a policy
to define actions to perform on the index as it ages. Index lifecycle policies
are especially useful when working with {beats-ref}/beats-reference.html[Beats]
data shippers, which continually
send operational data, such as metrics and logs, to Elasticsearch. You can
automate a rollover to a new index when the existing index reaches a specified
size or age. This ensures that all indices have a similar size instead of having
daily indices where size can vary based on the number of Beats and the number
of events sent.
{kib}'s *Index Lifecycle Policies* walks you through the process for creating
and configuring a policy. Before using this feature, you should be familiar
with index lifecycle management:
* For an introduction, see
{ref}/getting-started-index-lifecycle-management.html[Getting started with index
lifecycle management].
* To dig into the concepts and technical details, see
{ref}/index-lifecycle-management.html[Managing the index lifecycle].
* To check out the APIs, see {ref}/index-lifecycle-management-api.html[Index lifecycle management API].

View file

@ -0,0 +1,27 @@
[[managing-index-lifecycle-policies]]
=== Managing index lifecycle policies
Your configured policies appear on the *Index lifecycle policies* page.
You can update an existing index lifecycle policy to fix errors or change
strategies for newly created indices. To edit a policy, select its name.
[role="screenshot"]
image::images/index_lifecycle_policies_options.png[][UI for viewing and editing an index lifecycle policy]
In addition, you can:
* *View indices linked to the policy.* This is important when editing a policy.
Any changes you make affect all indices attached to the policy. The settings
for the current phase are cached, so the update doesn't affect that phase. This
prevents conflicts when you're modifying a phase that is currently executing on
an index. The changes take effect when the next phase in the index lifecycle begins.
* *Add the policy to an index template.* When an index is automatically
created using the index template, the policy is applied. If the index is rolled
over, the policies for any matching index templates are applied to the newly
created index. For more information, see {ref}/indices-templates.html[Index templates].
* *Delete a policy.* You can't delete a policy that is currently in use or
recover a deleted index.

View file

@ -94,13 +94,13 @@ https://www.elastic.co/blog/using-painless-kibana-scripted-fields[Using Painless
=== Creating a Scripted Field
To create a scripted field:
. Go to *Settings > Indices*
. Go to *Management > Kibana > Index Patterns*
. Select the index pattern you want to add a scripted field to.
. Go to the pattern's *Scripted Fields* tab.
. Click *Add Scripted Field*.
. Go to the pattern's *Scripted fields* tab.
. Click *Add scripted field*.
. Enter a name for the scripted field.
. Enter the expression that you want to use to compute a value on the fly from your index data.
. Click *Save Scripted Field*.
. Click *Create field*.
For more information about scripted fields in Elasticsearch, see
{ref}/modules-scripting.html[Scripting].
@ -110,9 +110,10 @@ For more information about scripted fields in Elasticsearch, see
=== Updating a Scripted Field
To modify a scripted field:
. Go to *Settings > Indices*
. Go to *Management > Kibana > Index Patterns*
. Click the index pattern's *Scripted fields* tab.
. Click the *Edit* button for the scripted field you want to change.
. Make your changes and then click *Save Scripted Field* to update the field.
. Make your changes and then click *Save field* to update the field.
WARNING: Keep in mind that there's no built-in validation of a scripted field. If your scripts are buggy, you'll get
exceptions whenever you try to view the dynamically generated data.
@ -122,6 +123,7 @@ exceptions whenever you try to view the dynamically generated data.
=== Deleting a Scripted Field
To delete a scripted field:
. Go to *Settings > Indices*
. Go to *Management > Kibana > Index Patterns*
. Click the index pattern's *Scripted fields* tab.
. Click the *Delete* button for the scripted field you want to remove.
. Confirm that you really want to delete the field.
. Click *Delete* in the confirmation window.

View file

@ -0,0 +1,26 @@
[[managing-remote-clusters]]
== Managing Remote Clusters
{kib} *Management* provides two user interfaces for working with data from remote
clusters.
*Remote Clusters* helps you manage remote clusters for use with
{ref}/modules-cross-cluster-search.html[cross cluster search] and
{xpack-ref}/xpack-ccr.html[cross cluster replication]. You can add and remove remote
clusters and check their connectivity.
Go to *Management > Elasticsearch > Remote Clusters* to get started.
[role="screenshot"]
image::images/add_remote_cluster.png[][UI for adding a remote cluster]
*Cross Cluster Replication* includes tools to help you create and manage the remote
replication process. You can follow an index pattern on the remote cluster for
auto-discovery and then replicate new indices in the local cluster that match the
auto-follow pattern.
Go to *Management > Elasticsearch > Cross Cluster Replication* to get started.
[role="screenshot"]
image::images/auto_follow_pattern.png[][UI for adding an auto-follow pattern]

View file

@ -9,6 +9,13 @@ your application to Kibana 7.0.
See also <<release-highlights>> and <<release-notes>>.
[float]
=== Removed support for users relying on direct index privileges to the Kibana index in Elasticsearch
*Details:* With the introduction of Kibana RBAC in 6.4, users no longer require privileges to the Kibana index in Elasticsearch. Instead, users
should be granted <<kibana-privileges>>. Prior to 7.0, when a user that relies upon direct index privileges logs into Kibana, a deprecation warning is logged. If you are using the `kibana_user` or `kibana_dashboard_only_user` role to grant access to Kibana, or a custom role using <<kibana-privileges>>, no changes are required.
*Impact:* You must change any roles which grant access to Kibana using index privileges to instead use <<kibana-privileges>>. Watcher jobs using the Reporting attachment type must be updated as well.
[float]
=== Removed support for tribe nodes
*Details:* Elasticsearch 7.0 removes the tribe node feature, so Kibana removes it as well.
@ -103,3 +110,16 @@ The port is now protocol dependent: https ports will use 443, and http ports wil
`server.ssl.supportedProtocols`
*Impact:* Users relying upon TLSv1 will be unable to use Kibana unless `server.ssl.supportedProtocols` is explicitly set.
[float]
=== kibana.yml setting `server.ssl.cert` is no longer valid
*Details:* This deprecated setting has been removed and `server.ssl.certificate` should be used instead.
*Impact:* Users with `server.ssl.cert` set should use `server.ssl.certificate` instead.
[float]
=== kibana.yml `server.ssl.enabled` must be set to `true` to enable SSL
*Details:* Previously, if `server.ssl.certificate` and `server.ssl.key` were set, SSL would automatically be enabled.
It's now required that the user sets `server.ssl.enabled` to true for this to occur.
*Impact:* Users with both `server.ssl.certificate` and `server.ssl.key` set must now also set `server.ssl.enabled` to enable SSL.
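For example, a minimal `kibana.yml` sketch that enables SSL under the new rules (the certificate and key paths are illustrative):

[source,yaml]
--------------------------------------------------
server.ssl.enabled: true
server.ssl.certificate: /path/to/your/server.crt
server.ssl.key: /path/to/your/server.key
--------------------------------------------------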

View file

@ -20,19 +20,28 @@ than running multiple jobs against the same data.
A _population job_ detects activity that is unusual compared to the behavior of
the population. For more information, see
{stack-ov}/ml-configuring-pop.html[Performing Population Analysis].
{stack-ov}/ml-configuring-pop.html[Performing population analysis].
An _advanced job_ can contain multiple detectors and enables you to configure all
job settings.
{kib} can also recognize certain types of data and provide specialized wizards
for that context. For example, if you use {filebeat-ref}/index.html[Filebeat]
for that context. For example, if you use {filebeat-ref}/index.html[{filebeat}]
to ship access logs from your
http://nginx.org/[Nginx] and https://httpd.apache.org/[Apache] HTTP servers to
{es}, the following wizards appear:
[role="screenshot"]
image::ml/images/ml-data-recognizer.jpg[A screenshot of the Apache and NGINX job creation wizards]
image::ml/images/ml-data-recognizer-filebeat.jpg[A screenshot of the {filebeat} job creation wizards]
Likewise, if you use {auditbeat-ref}/index.html[{auditbeat}] to audit process
activity on your systems, the following wizards appear:
[role="screenshot"]
image::ml/images/ml-data-recognizer-auditbeat.jpg[A screenshot of the {auditbeat} job creation wizards]
These wizards create {ml} jobs, dashboards, searches, and visualizations that
are customized to help you analyze your {auditbeat} and {filebeat} data.
If you are not certain which type of job to create, you can use the
*Data Visualizer* to learn more about your data and to identify possible fields

Several binary image files added, removed, or updated (not shown).

View file

@ -46,6 +46,16 @@ The *Anomaly Explorer* and *Single Metric Viewer* display the results of your
[role="screenshot"]
image::ml/images/ml-single-metric-viewer.jpg[Single Metric Viewer]
You can optionally add annotations by drag-selecting a period of time in
the *Single Metric Viewer* and adding a description. For example, you can add an
explanation for anomalies in that time period or provide notes about what is
occurring in your operational environment at that time:
[role="screenshot"]
image::ml/images/ml-annotations-list.jpg[Single Metric Viewer with annotations]
The *Job Management* pane shows the full list of annotations for each job.
NOTE: The {kib} {ml-features} use pop-ups. You must configure your
web browser so that it does not block pop-up windows or create an exception for
your {kib} URL.

View file

@ -5,8 +5,6 @@
<titleabbrev>Collecting monitoring data with {metricbeat}</titleabbrev>
++++
beta[]
In 6.4 and later, you can use {metricbeat} to collect data about {kib}
and ship it to the monitoring cluster, rather than routing it through the
production cluster as described in <<monitoring-kibana>>.

View file

@ -0,0 +1,17 @@
`xpack.infra.enabled`:: Set to `false` to disable the Logs and Infrastructure UI plugin in {kib}. Defaults to `true`.
`xpack.infra.sources.default.logAlias`:: Index pattern for matching indices that contain log data. Defaults to `filebeat-*`.
`xpack.infra.sources.default.metricAlias`:: Index pattern for matching indices that contain Metricbeat data. Defaults to `metricbeat-*`.
`xpack.infra.sources.default.fields.timestamp`:: Timestamp used to sort log entries. Defaults to `@timestamp`.
`xpack.infra.sources.default.fields.message`:: Fields used to display messages in the Logs UI. Defaults to `['message', '@message']`.
`xpack.infra.sources.default.fields.tiebreaker`:: Field used to break ties between two entries with the same timestamp. Defaults to `_doc`.
`xpack.infra.sources.default.fields.host`:: Field used to identify hosts. Defaults to `beat.hostname`.
`xpack.infra.sources.default.fields.container`:: Field used to identify Docker containers. Defaults to `docker.container.name`.
`xpack.infra.sources.default.fields.pod`:: Field used to identify Kubernetes pods. Defaults to `kubernetes.pod.name`.

View file

@ -0,0 +1,14 @@
[role="xpack"]
[[infrastructure-ui-settings-kb]]
=== Infrastructure UI Settings in Kibana
++++
<titleabbrev>Infrastructure UI Settings</titleabbrev>
++++
You do not need to configure any settings to use the Infrastructure UI. It is enabled by default.
[float]
[[general-infra-ui-settings-kb]]
==== General Infrastructure UI Settings
include::general-infra-logs-ui-settings.asciidoc[]

View file

@ -0,0 +1,14 @@
[role="xpack"]
[[logs-ui-settings-kb]]
=== Logs UI Settings in Kibana
++++
<titleabbrev>Logs UI Settings</titleabbrev>
++++
You do not need to configure any settings to use the Logs UI. It is enabled by default.
[float]
[[general-logs-ui-settings-kb]]
==== General Logs UI Settings
include::general-infra-logs-ui-settings.asciidoc[]

View file

@ -12,6 +12,8 @@ For more {kib} configuration settings, see <<settings>>.
include::apm-settings.asciidoc[]
include::dev-settings.asciidoc[]
include::graph-settings.asciidoc[]
include::infrastructure-ui-settings.asciidoc[]
include::logs-ui-settings.asciidoc[]
include::ml-settings.asciidoc[]
include::reporting-settings.asciidoc[]
include::spaces-settings.asciidoc[]

View file

@ -172,3 +172,6 @@ The minimum value is 100.
`status.allowAnonymous:`:: *Default: false* If authentication is enabled, setting this to `true` allows
unauthenticated users to access the Kibana server status API and status page.
`rollup.enabled:`:: *Default: true* Set this value to false to disable the Rollup user interface.
`license_management.enabled`:: *Default: true* Set this value to false to disable the License Management user interface.

View file

@ -1,8 +1,6 @@
[[time-series-visual-builder]]
== Time Series Visual Builder
*Experimental Feature*
Time Series Visual Builder is a time series data visualizer with an emphasis
on allowing you to use the full power of the Elasticsearch aggregation framework.
Time Series Visual Builder allows you to combine an infinite number of

kibana.d.ts (3 changes)
View file

@ -32,9 +32,12 @@ import * as LegacyKibanaServer from './src/server/kbn_server';
*/
// tslint:disable:no-namespace
export namespace Legacy {
export type IndexPatternsService = LegacyKibanaServer.IndexPatternsService;
export type KibanaConfig = LegacyKibanaServer.KibanaConfig;
export type Request = LegacyKibanaServer.Request;
export type ResponseToolkit = LegacyKibanaServer.ResponseToolkit;
export type SavedObjectsClient = LegacyKibanaServer.SavedObjectsClient;
export type SavedObjectsService = LegacyKibanaServer.SavedObjectsService;
export type Server = LegacyKibanaServer.Server;
export namespace Plugins {

View file

@ -27,7 +27,7 @@
"extraPatterns": [
"build",
"optimize",
"dlls",
"built_assets",
".eslintcache"
]
}
@ -95,7 +95,7 @@
},
"dependencies": {
"@elastic/datemath": "5.0.2",
"@elastic/eui": "6.0.1",
"@elastic/eui": "6.7.0",
"@elastic/filesaver": "1.1.2",
"@elastic/good": "8.1.1-kibana2",
"@elastic/numeral": "2.3.2",
@ -123,7 +123,7 @@
"babel-loader": "7.1.5",
"babel-polyfill": "6.26.0",
"babel-register": "6.26.0",
"bluebird": "2.9.34",
"bluebird": "3.5.3",
"boom": "^7.2.0",
"brace": "0.11.1",
"cache-loader": "1.2.2",
@ -190,11 +190,11 @@
"pug": "^2.0.3",
"querystring-browser": "1.0.4",
"raw-loader": "0.5.1",
"react": "^16.3.0",
"react": "^16.6.0",
"react-addons-shallow-compare": "15.6.2",
"react-anything-sortable": "^1.7.4",
"react-color": "^2.13.8",
"react-dom": "^16.3.0",
"react-dom": "^16.6.0",
"react-grid-layout": "^0.16.2",
"react-input-range": "^1.3.0",
"react-markdown": "^3.1.4",
@ -226,7 +226,6 @@
"topojson-client": "3.0.0",
"trunc-html": "1.0.2",
"trunc-text": "1.0.2",
"ts-optchain": "^0.1.1",
"tslib": "^1.9.3",
"type-detect": "^4.0.8",
"ui-select": "0.19.6",
@ -295,7 +294,7 @@
"@types/opn": "^5.1.0",
"@types/podium": "^1.0.0",
"@types/prop-types": "^15.5.3",
"@types/puppeteer": "^1.6.2",
"@types/puppeteer-core": "^1.9.0",
"@types/react": "16.3.14",
"@types/react-dom": "^16.0.5",
"@types/react-redux": "^6.0.6",
@ -303,6 +302,7 @@
"@types/react-virtualized": "^9.18.7",
"@types/redux": "^3.6.31",
"@types/redux-actions": "^2.2.1",
"@types/rimraf": "^2.0.2",
"@types/semver": "^5.5.0",
"@types/sinon": "^5.0.1",
"@types/strip-ansi": "^3.0.0",
@ -324,9 +324,10 @@
"classnames": "2.2.5",
"dedent": "^0.7.0",
"delete-empty": "^2.0.0",
"enzyme": "3.2.0",
"enzyme-adapter-react-16": "^1.1.1",
"enzyme-to-json": "3.3.1",
"enzyme": "^3.7.0",
"enzyme-adapter-react-16": "^1.6.0",
"enzyme-adapter-utils": "^1.8.1",
"enzyme-to-json": "^3.3.4",
"eslint": "^5.6.0",
"eslint-config-prettier": "^3.1.0",
"eslint-plugin-babel": "^5.2.0",
@ -360,7 +361,7 @@
"jest-raw-loader": "^1.0.1",
"jimp": "0.2.28",
"json5": "^1.0.1",
"karma": "1.7.0",
"karma": "3.1.4",
"karma-chrome-launcher": "2.1.1",
"karma-coverage": "1.1.1",
"karma-firefox-launcher": "1.0.1",

View file

@ -33,6 +33,7 @@ exports.help = (defaults = {}) => {
--version Version of ES to download [default: ${defaults.version}]
--base-path Path containing cache/installations [default: ${basePath}]
--install-path Installation path, defaults to 'source' within base-path
--data-archive Path to zip or tarball containing an ES data directory to seed the cluster with.
--password Sets password for elastic user [default: ${password}]
-E Additional key=value settings to pass to Elasticsearch
--download-only Download the snapshot but don't actually start it
@ -49,6 +50,7 @@ exports.run = async (defaults = {}) => {
alias: {
basePath: 'base-path',
installPath: 'install-path',
dataArchive: 'data-archive',
esArgs: 'E',
},
@ -62,6 +64,11 @@ exports.run = async (defaults = {}) => {
await cluster.downloadSnapshot(options);
} else {
const { installPath } = await cluster.installSnapshot(options);
if (options.dataArchive) {
await cluster.extractDataDirectory(installPath, options.dataArchive);
}
await cluster.run(installPath, { esArgs: options.esArgs });
}
};

View file

@ -33,6 +33,7 @@ exports.help = (defaults = {}) => {
--source-path Path to ES source [default: ${defaults['source-path']}]
--base-path Path containing cache/installations [default: ${basePath}]
--install-path Installation path, defaults to 'source' within base-path
--data-archive Path to zip or tarball containing an ES data directory to seed the cluster with.
--password Sets password for elastic user [default: ${password}]
-E Additional key=value settings to pass to Elasticsearch
@ -49,6 +50,7 @@ exports.run = async (defaults = {}) => {
basePath: 'base-path',
installPath: 'install-path',
sourcePath: 'source-path',
dataArchive: 'data-archive',
esArgs: 'E',
},
@ -57,5 +59,10 @@ exports.run = async (defaults = {}) => {
const cluster = new Cluster();
const { installPath } = await cluster.installSource(options);
if (options.dataArchive) {
await cluster.extractDataDirectory(installPath, options.dataArchive);
}
await cluster.run(installPath, { esArgs: options.esArgs });
};
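As a rough usage sketch, the new `--data-archive` handling maps onto the `Cluster` API like this (the import path, snapshot version, and archive path are assumptions for illustration):

```js
const { Cluster } = require('@kbn/es'); // assumed export location of the Cluster class

(async () => {
  const cluster = new Cluster();
  const { installPath } = await cluster.installSnapshot({ version: '7.0.0-SNAPSHOT' });
  // New in this change: seed the cluster with a pre-built ES data directory.
  await cluster.extractDataDirectory(installPath, './es_data_archive.zip');
  await cluster.run(installPath, { esArgs: [] });
})();
```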

View file

@ -19,9 +19,10 @@
const execa = require('execa');
const chalk = require('chalk');
const path = require('path');
const { downloadSnapshot, installSnapshot, installSource, installArchive } = require('./install');
const { ES_BIN } = require('./paths');
const { log: defaultLog, parseEsLog, extractConfigFiles } = require('./utils');
const { log: defaultLog, parseEsLog, extractConfigFiles, decompress } = require('./utils');
const { createCliError } = require('./errors');
const { promisify } = require('util');
const treeKillAsync = promisify(require('tree-kill'));
@ -116,6 +117,28 @@ exports.Cluster = class Cluster {
return { installPath };
}
/**
* Unpacks a tar or zip file containing the data directory for an
* ES cluster.
*
* @param {String} installPath
* @param {String} archivePath
*/
async extractDataDirectory(installPath, archivePath) {
this._log.info(chalk.bold(`Extracting data directory`));
this._log.indent(4);
// decompress excludes the root directory as that is how our archives are
// structured. This works in our favor as we can explicitly extract into the data dir
const extractPath = path.resolve(installPath, 'data');
this._log.info(`Data archive: ${archivePath}`);
this._log.info(`Extract path: ${extractPath}`);
await decompress(archivePath, extractPath);
this._log.indent(-4);
}
/**
* Starts ES and returns resolved promise once started
*

View file

@ -45,7 +45,7 @@ exports.downloadSnapshot = async function installSnapshot({
log = defaultLog,
}) {
const fileName = getFilename(license, version);
const url = `https://snapshots.elastic.co/downloads/elasticsearch/${fileName}`;
const url = getUrl(fileName);
const dest = path.resolve(basePath, 'cache', fileName);
log.info('version: %s', chalk.bold(version));
@ -150,3 +150,13 @@ function getFilename(license, version) {
return `${basename}-SNAPSHOT.${extension}`;
}
function getUrl(fileName) {
if (process.env.TEST_ES_SNAPSHOT_VERSION) {
return `https://snapshots.elastic.co/${
process.env.TEST_ES_SNAPSHOT_VERSION
}/downloads/elasticsearch/${fileName}`;
} else {
return `https://snapshots.elastic.co/downloads/elasticsearch/${fileName}`;
}
}

View file

@ -46,9 +46,13 @@ themselves, and those messages will always be in English, so we don't have to ke
defined inline.
__Note:__ locale defined in `i18n.locale` and the one used for translation files should
match exactly, e.g. `i18n.locale: zn` and `.../translations/zh_CN.json` won't match and
default English translations will be used, but `i18n.locale: zh_CN` and`.../translations/zh_CN.json`
or `i18n.locale: zn` and `.../translations/zn.json` will work as expected.
match exactly, e.g. `i18n.locale: zh` and `.../translations/zh-CN.json` won't match and
default English translations will be used, but `i18n.locale: zh-CN` and`.../translations/zh-CN.json`
or `i18n.locale: zh` and `.../translations/zh.json` will work as expected.
__Note:__ locale should look like `zh-CN`, where `zh` is a lowercase two-letter or three-letter ISO-639 code
and `CN` is an uppercase two-letter ISO-3166 code (optional).
[ISO-639](https://www.iso.org/iso-639-language-codes.html) and [ISO-3166](https://www.iso.org/iso-3166-country-codes.html) codes should be separated with a `-` character.
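For example, to run Kibana with Simplified Chinese translations (assuming a matching `zh-CN.json` translation file is registered), the `kibana.yml` setting would be:

```yaml
i18n.locale: "zh-CN"
```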
## I18n engine

View file

@ -153,8 +153,7 @@ describe('I18n engine', () => {
});
test('should add messages with normalized passed locale', () => {
const locale = 'en-us';
i18n.setLocale(locale);
i18n.setLocale('en-US');
i18n.addTranslation(
{
@ -162,10 +161,10 @@ describe('I18n engine', () => {
['a.b.c']: 'bar',
},
},
'en_US'
'en-us'
);
expect(i18n.getLocale()).toBe(locale);
expect(i18n.getLocale()).toBe('en-us');
expect(i18n.getTranslation()).toEqual({
messages: {
['a.b.c']: 'bar',
@ -234,7 +233,7 @@ describe('I18n engine', () => {
});
test('should normalize passed locale', () => {
i18n.setLocale('en_US');
i18n.setLocale('en-US');
expect(i18n.getLocale()).toBe('en-us');
});
});
@ -267,7 +266,7 @@ describe('I18n engine', () => {
});
test('should normalize passed locale', () => {
i18n.setDefaultLocale('en_US');
i18n.setDefaultLocale('en-US');
expect(i18n.getDefaultLocale()).toBe('en-us');
});
@ -418,16 +417,16 @@ describe('I18n engine', () => {
describe('translate', () => {
test('should throw error if id is not a non-empty string', () => {
expect(() => i18n.translate(undefined as any)).toThrowErrorMatchingSnapshot();
expect(() => i18n.translate(null as any)).toThrowErrorMatchingSnapshot();
expect(() => i18n.translate(true as any)).toThrowErrorMatchingSnapshot();
expect(() => i18n.translate(5 as any)).toThrowErrorMatchingSnapshot();
expect(() => i18n.translate({} as any)).toThrowErrorMatchingSnapshot();
expect(() => i18n.translate('')).toThrowErrorMatchingSnapshot();
expect(() => i18n.translate(undefined as any, {} as any)).toThrowErrorMatchingSnapshot();
expect(() => i18n.translate(null as any, {} as any)).toThrowErrorMatchingSnapshot();
expect(() => i18n.translate(true as any, {} as any)).toThrowErrorMatchingSnapshot();
expect(() => i18n.translate(5 as any, {} as any)).toThrowErrorMatchingSnapshot();
expect(() => i18n.translate({} as any, {} as any)).toThrowErrorMatchingSnapshot();
expect(() => i18n.translate('', {} as any)).toThrowErrorMatchingSnapshot();
});
test('should throw error if translation message and defaultMessage are not provided', () => {
expect(() => i18n.translate('foo')).toThrowErrorMatchingSnapshot();
expect(() => i18n.translate('foo', {} as any)).toThrowErrorMatchingSnapshot();
});
test('should return message as is if values are not provided', () => {
@ -438,7 +437,7 @@ describe('I18n engine', () => {
},
});
expect(i18n.translate('a.b.c')).toBe('foo');
expect(i18n.translate('a.b.c', {} as any)).toBe('foo');
});
test('should return default message as is if values are not provided', () => {
@ -467,10 +466,10 @@ describe('I18n engine', () => {
expect(
i18n.translate('a.b.c', {
values: { a: 1, b: 2, c: 3 },
})
} as any)
).toBe('foo 1, 2, 3 bar');
expect(i18n.translate('d.e.f', { values: { foo: 'bar' } })).toBe('bar');
expect(i18n.translate('d.e.f', { values: { foo: 'bar' } } as any)).toBe('bar');
});
test('should interpolate variables for default messages', () => {
@ -494,9 +493,13 @@ describe('I18n engine', () => {
},
});
expect(i18n.translate('a.b.c', { values: { numPhotos: 0 } })).toBe('You have no photos.');
expect(i18n.translate('a.b.c', { values: { numPhotos: 1 } })).toBe('You have one photo.');
expect(i18n.translate('a.b.c', { values: { numPhotos: 1000 } })).toBe(
expect(i18n.translate('a.b.c', { values: { numPhotos: 0 } } as any)).toBe(
'You have no photos.'
);
expect(i18n.translate('a.b.c', { values: { numPhotos: 1 } } as any)).toBe(
'You have one photo.'
);
expect(i18n.translate('a.b.c', { values: { numPhotos: 1000 } } as any)).toBe(
'You have 1,000 photos.'
);
});
@ -551,7 +554,9 @@ describe('I18n engine', () => {
});
i18n.setDefaultLocale('en');
expect(() => i18n.translate('a.b.c', { values: { foo: 0 } })).toThrowErrorMatchingSnapshot();
expect(() =>
i18n.translate('a.b.c', { values: { foo: 0 } } as any)
).toThrowErrorMatchingSnapshot();
expect(() =>
i18n.translate('d.e.f', {
@ -574,7 +579,7 @@ describe('I18n engine', () => {
});
i18n.setDefaultLocale('en');
expect(i18n.translate('a.b.c', { values: { result: 0.15 } })).toBe('Result: 15%');
expect(i18n.translate('a.b.c', { values: { result: 0.15 } } as any)).toBe('Result: 15%');
expect(
i18n.translate('d.e.f', {
@ -598,25 +603,25 @@ describe('I18n engine', () => {
expect(
i18n.translate('a.short', {
values: { start: new Date(2018, 5, 20) },
})
} as any)
).toBe('Sale begins 6/20/18');
expect(
i18n.translate('a.medium', {
values: { start: new Date(2018, 5, 20) },
})
} as any)
).toBe('Sale begins Jun 20, 2018');
expect(
i18n.translate('a.long', {
values: { start: new Date(2018, 5, 20) },
})
} as any)
).toBe('Sale begins June 20, 2018');
expect(
i18n.translate('a.full', {
values: { start: new Date(2018, 5, 20) },
})
} as any)
).toBe('Sale begins Wednesday, June 20, 2018');
});
@ -664,13 +669,13 @@ describe('I18n engine', () => {
expect(
i18n.translate('a.short', {
values: { expires: new Date(2018, 5, 20, 18, 40, 30, 50) },
})
} as any)
).toBe('Coupon expires at 6:40 PM');
expect(
i18n.translate('a.medium', {
values: { expires: new Date(2018, 5, 20, 18, 40, 30, 50) },
})
} as any)
).toBe('Coupon expires at 6:40:30 PM');
});
@ -706,7 +711,9 @@ describe('I18n engine', () => {
},
});
expect(i18n.translate('a.b.c', { values: { total: 1000 } })).toBe('Your total is $1,000.00');
expect(i18n.translate('a.b.c', { values: { total: 1000 } } as any)).toBe(
'Your total is $1,000.00'
);
i18n.setFormats({
number: {
@ -714,9 +721,13 @@ describe('I18n engine', () => {
},
});
expect(i18n.translate('a.b.c', { values: { total: 1000 } })).toBe('Your total is $1,000.00');
expect(i18n.translate('a.b.c', { values: { total: 1000 } } as any)).toBe(
'Your total is $1,000.00'
);
expect(i18n.translate('d.e.f', { values: { total: 1000 } })).toBe('Your total is €1,000.00');
expect(i18n.translate('d.e.f', { values: { total: 1000 } } as any)).toBe(
'Your total is €1,000.00'
);
});
test('should format default message with a custom format', () => {
@ -768,7 +779,9 @@ describe('I18n engine', () => {
});
i18n.setDefaultLocale('en');
expect(i18n.translate('a.b.c', { values: { total: 1000 } })).toBe('Your total is 1,000');
expect(i18n.translate('a.b.c', { values: { total: 1000 } } as any)).toBe(
'Your total is 1,000'
);
expect(
i18n.translate('d.e.f', {
@ -788,7 +801,7 @@ describe('I18n engine', () => {
i18n.setDefaultLocale('en');
expect(() =>
i18n.translate('a.b.c', { values: { total: 1 } })
i18n.translate('a.b.c', { values: { total: 1 } } as any)
).toThrowErrorMatchingSnapshot();
expect(() =>

View file

@ -30,7 +30,6 @@ import { isPseudoLocale, translateUsingPseudoLocale } from './pseudo_locale';
import './locales.js';
const EN_LOCALE = 'en';
const LOCALE_DELIMITER = '-';
const translationsForLocale: Record<string, Translation> = {};
const getMessageFormat = memoizeIntlConstructor(IntlMessageFormat);
@ -55,7 +54,7 @@ function getMessageById(id: string): string | undefined {
* @param locale
*/
function normalizeLocale(locale: string) {
return locale.toLowerCase().replace('_', LOCALE_DELIMITER);
return locale.toLowerCase();
}
/**
@ -165,8 +164,8 @@ export function getRegisteredLocales() {
}
interface TranslateArguments {
values?: { [key: string]: string | number | Date };
defaultMessage?: string;
values?: Record<string, string | number | boolean | Date | null | undefined>;
defaultMessage: string;
description?: string;
}
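A minimal sketch of calling the tightened `translate` signature, where `defaultMessage` is now required (the message id and values below are illustrative):

```js
import { i18n } from '@kbn/i18n';

const label = i18n.translate('myPlugin.documentCountLabel', {
  defaultMessage: 'You have {count} new documents',
  values: { count: 5 },
});
```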
@ -177,13 +176,7 @@ interface TranslateArguments {
* @param [options.values] - values to pass into translation
* @param [options.defaultMessage] - will be used unless translation was successful
*/
export function translate(
id: string,
{ values = {}, defaultMessage = '' }: TranslateArguments = {
values: {},
defaultMessage: '',
}
) {
export function translate(id: string, { values = {}, defaultMessage }: TranslateArguments) {
const shouldUsePseudoLocale = isPseudoLocale(currentLocale);
if (!id || !isString(id)) {

View file

@ -21,7 +21,7 @@ declare module 'intl-format-cache' {
import IntlMessageFormat from 'intl-messageformat';
interface Message {
format: (values: { [key: string]: string | number | Date }) => string;
format: (values: Record<string, string | number | boolean | Date | null | undefined>) => string;
}
function memoizeIntlConstructor(

View file

@ -17,8 +17,8 @@
* under the License.
*/
export { functionsRegistry } from './lib/functions_registry';
export { typesRegistry } from './lib/types_registry';
export { FunctionsRegistry } from './lib/functions_registry';
export { TypesRegistry } from './lib/types_registry';
export { createError } from './interpreter/create_error';
export { interpretProvider } from './interpreter/interpret';
export { serializeProvider } from './lib/serialize';

View file

@ -22,7 +22,6 @@ import { each, keys, last, mapValues, reduce, zipObject } from 'lodash';
import { getType } from '../lib/get_type';
import { fromExpression } from '../lib/ast';
import { getByAlias } from '../lib/get_by_alias';
import { typesRegistry } from '../lib/types_registry';
import { castProvider } from './cast';
import { createError } from './create_error';
@ -103,7 +102,7 @@ export function interpretProvider(config) {
}
// Validate the function output against the type definition's validate function
const type = typesRegistry.get(fnDef.type);
const type = handlers.types[fnDef.type];
if (type && type.validate) {
try {
type.validate(fnOutput);

View file

@ -20,10 +20,8 @@
import { Registry } from './registry';
import { Fn } from './fn';
class FunctionsRegistry extends Registry {
export class FunctionsRegistry extends Registry {
wrapper(obj) {
return new Fn(obj);
}
}
export const functionsRegistry = new FunctionsRegistry();

View file

@ -20,10 +20,8 @@
import { Registry } from './registry';
import { Type } from './type';
class TypesRegistry extends Registry {
export class TypesRegistry extends Registry {
wrapper(obj) {
return new Type(obj);
}
}
export const typesRegistry = new TypesRegistry();
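Because the shared singletons are removed, each consumer now constructs its own registries. A minimal sketch, assuming the class exports above are re-exported from `@kbn/interpreter/common` (the type-spec import path is hypothetical):

```js
import { FunctionsRegistry, TypesRegistry } from '@kbn/interpreter/common';
import { typeSpecs } from './types'; // hypothetical path to the type specs

const functionsRegistry = new FunctionsRegistry();
const typesRegistry = new TypesRegistry();
typeSpecs.forEach(typeSpec => typesRegistry.register(typeSpec));
```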

View file

@ -19,6 +19,6 @@
import { clog } from './clog';
export const commonFunctions = [
export const browserFunctions = [
clog,
];

View file

@ -17,4 +17,7 @@
* under the License.
*/
import '../common/register';
import { browserFunctions } from './index';
// eslint-disable-next-line no-undef
browserFunctions.forEach(canvas.register);

View file

@ -17,7 +17,4 @@
* under the License.
*/
import { commonFunctions } from './index';
// eslint-disable-next-line no-undef
commonFunctions.forEach(canvas.register);
import '../common/register';

View file

@ -17,7 +17,7 @@
* under the License.
*/
import { map, zipObject } from 'lodash';
import { map, pick, zipObject } from 'lodash';
export const datatable = () => ({
name: 'datatable',
@ -78,5 +78,32 @@ export const datatable = () => ({
},
};
},
pointseries: datatable => {
// datatable columns are an array that looks like [{ name: "one", type: "string" }, { name: "two", type: "string" }]
// rows look like [{ one: 1, two: 2}, { one: 3, two: 4}, ...]
const validFields = ['x', 'y', 'color', 'size', 'text'];
const columns = datatable.columns.filter(column => validFields.includes(column.name));
const rows = datatable.rows.map(row => pick(row, validFields));
return {
type: 'pointseries',
columns: columns.reduce((acc, column) => {
/* pointseries columns are an object that looks like this
* {
* x: { type: "string", expression: "x", role: "dimension" },
* y: { type: "string", expression: "y", role: "dimension" }
* }
*/
acc[column.name] = {
type: column.type,
expression: column.name,
role: 'dimension',
};
return acc;
}, {}),
rows,
};
},
},
});
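To illustrate the new `to.pointseries` conversion above, here is a hypothetical input datatable and the shape it produces (column names and values are made up):

```js
const input = {
  type: 'datatable',
  columns: [
    { name: 'x', type: 'number' },
    { name: 'y', type: 'number' },
    { name: 'label', type: 'string' }, // not a valid pointseries field, so it is dropped
  ],
  rows: [{ x: 1, y: 2, label: 'a' }, { x: 3, y: 4, label: 'b' }],
};

// Converting `input` with the `to.pointseries` handler yields:
// {
//   type: 'pointseries',
//   columns: {
//     x: { type: 'number', expression: 'x', role: 'dimension' },
//     y: { type: 'number', expression: 'y', role: 'dimension' },
//   },
//   rows: [{ x: 1, y: 2 }, { x: 3, y: 4 }],
// }
```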

View file

@ -29,7 +29,6 @@ import { render } from './render';
import { shape } from './shape';
import { string } from './string';
import { style } from './style';
import { kibanaTable } from './kibana_table';
import { kibanaContext } from './kibana_context';
export const typeSpecs = [
@ -45,6 +44,5 @@ export const typeSpecs = [
shape,
string,
style,
kibanaTable,
kibanaContext,
];

View file

@ -24,7 +24,7 @@ export const pointseries = () => ({
return {
type: 'pointseries',
rows: [],
columns: [],
columns: {},
};
},
},

View file

@ -20,18 +20,7 @@
import { i18n } from '@kbn/i18n';
import $script from 'scriptjs';
let resolvePromise = null;
let called = false;
let populatePromise = new Promise(_resolve => {
resolvePromise = _resolve;
});
export const getBrowserRegistries = () => {
return populatePromise;
};
const loadBrowserRegistries = (registries, basePath) => {
export const loadBrowserRegistries = (registries, basePath) => {
const remainingTypes = Object.keys(registries);
const populatedTypes = {};
@ -58,27 +47,3 @@ const loadBrowserRegistries = (registries, basePath) => {
loadType();
});
};
export const populateBrowserRegistries = (registries, basePath) => {
if (called) {
const oldPromise = populatePromise;
let newResolve;
populatePromise = new Promise(_resolve => {
newResolve = _resolve;
});
oldPromise.then(oldTypes => {
loadBrowserRegistries(registries, basePath).then(newTypes => {
newResolve({
...oldTypes,
...newTypes,
});
});
});
return populatePromise;
}
called = true;
loadBrowserRegistries(registries, basePath).then(registries => {
resolvePromise(registries);
});
return populatePromise;
};

View file

@ -17,6 +17,7 @@
* under the License.
*/
export { populateBrowserRegistries, getBrowserRegistries } from './browser_registries';
export { loadBrowserRegistries } from './browser_registries';
export { createSocket } from './socket';
export { initializeInterpreter, interpretAst, getInitializedFunctions } from './interpreter';
export { initializeInterpreter } from './interpreter';
export { RenderFunctionsRegistry } from './render_functions_registry';

View file

@ -19,18 +19,28 @@
import { socketInterpreterProvider } from '../common/interpreter/socket_interpret';
import { serializeProvider } from '../common/lib/serialize';
import { getSocket } from './socket';
import { typesRegistry } from '../common/lib/types_registry';
import { createHandlers } from './create_handlers';
import { functionsRegistry } from '../common/lib/functions_registry';
import { getBrowserRegistries } from './browser_registries';
let socket;
let resolve;
const functionList = new Promise(_resolve => (resolve = _resolve));
export async function initializeInterpreter(socket, typesRegistry, functionsRegistry) {
let resolve;
const functionList = new Promise(_resolve => (resolve = _resolve));
export async function initializeInterpreter() {
socket = getSocket();
const getInitializedFunctions = async () => {
return functionList;
};
const interpretAst = async (ast, context, handlers) => {
// Load plugins before attempting to get functions, otherwise this gets racey
const serverFunctionList = await functionList;
const interpretFn = await socketInterpreterProvider({
types: typesRegistry.toJS(),
handlers: { ...handlers, ...createHandlers(socket) },
functions: functionsRegistry.toJS(),
referableFunctions: serverFunctionList,
socket: socket,
});
return interpretFn(ast, context);
};
// Listen for interpreter runs
socket.on('run', ({ ast, context, id }) => {
@ -42,27 +52,20 @@ export async function initializeInterpreter() {
});
// Create the function list
socket.emit('getFunctionList');
socket.once('functionList', resolve);
return functionList;
let gotFunctionList = false;
socket.once('functionList', (fl) => {
gotFunctionList = true;
resolve(fl);
});
const interval = setInterval(() => {
if (gotFunctionList) {
clearInterval(interval);
return;
}
socket.emit('getFunctionList');
}, 1000);
return { getInitializedFunctions, interpretAst };
}
export async function getInitializedFunctions() {
return functionList;
}
// Use the above promise to seed the interpreter with the functions it can defer to
export async function interpretAst(ast, context, handlers) {
// Load plugins before attempting to get functions, otherwise this gets racey
return Promise.all([functionList, getBrowserRegistries()])
.then(([serverFunctionList]) => {
return socketInterpreterProvider({
types: typesRegistry.toJS(),
handlers: { ...handlers, ...createHandlers(socket) },
functions: functionsRegistry.toJS(),
referableFunctions: serverFunctionList,
socket: socket,
});
})
.then(interpretFn => interpretFn(ast, context));
}
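Putting the refactor together, a hedged sketch of the new initialization flow (the registries are the caller-constructed instances described earlier; `basePath` comes from `chrome.getBasePath()` as in the plugin code below):

```js
import { createSocket, initializeInterpreter } from '@kbn/interpreter/public';

async function setupInterpreter(basePath, typesRegistry, functionsRegistry) {
  // The socket now answers 'getFunctionList' from the registry it is given.
  const socket = await createSocket(basePath, functionsRegistry);
  // initializeInterpreter no longer uses module-level singletons; it receives
  // the socket and registries and returns the interpreter helpers.
  const { interpretAst, getInitializedFunctions } = await initializeInterpreter(
    socket,
    typesRegistry,
    functionsRegistry
  );
  return { interpretAst, getInitializedFunctions };
}
```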

View file

@ -17,25 +17,26 @@
* under the License.
*/
import chrome from 'ui/chrome';
import { populateBrowserRegistries, createSocket, initializeInterpreter } from '@kbn/interpreter/public';
import { typesRegistry, functionsRegistry } from '@kbn/interpreter/common';
import { functions } from './functions';
export function RenderFunction(config) {
// This must match the name of the function that is used to create the `type: render` object
this.name = config.name;
const basePath = chrome.getBasePath();
// Use this to set a more friendly name
this.displayName = config.displayName || this.name;
const types = {
browserFunctions: functionsRegistry,
types: typesRegistry
};
// A sentence or few about what this element does
this.help = config.help;
function addFunction(fnDef) {
functionsRegistry.register(fnDef);
// used to validate the data before calling the render function
this.validate = config.validate || function validate() {};
// tell the renderer if the dom node should be reused, it's recreated each time by default
this.reuseDomNode = Boolean(config.reuseDomNode);
// the function called to render the data
this.render =
config.render ||
function render(domNode, data, done) {
done();
};
}
functions.forEach(addFunction);
createSocket(basePath).then(async () => {
await populateBrowserRegistries(types, basePath);
await initializeInterpreter();
});
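
Because the +/- markers are lost in this dump, the old and new plugin wiring are interleaved above. A rough sketch of how the entry point lines up with the new signatures from the other hunks in this commit; the exact sequence is an assumption, not something the visible diff confirms:

// Assumed wiring only; combines the new createSocket / initializeInterpreter signatures.
import chrome from 'ui/chrome';
import { createSocket, initializeInterpreter } from '@kbn/interpreter/public';
import { typesRegistry, functionsRegistry } from '@kbn/interpreter/common';
import { functions } from './functions';

const basePath = chrome.getBasePath();

// Register browser-side functions before the server asks for the function list.
functions.forEach(fnDef => functionsRegistry.register(fnDef));

createSocket(basePath, functionsRegistry).then(async socket => {
  await initializeInterpreter(socket, typesRegistry, functionsRegistry);
});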

View file

@ -0,0 +1,29 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { Registry } from '../common';
import { RenderFunction } from './render_function';
class RenderFunctionsRegistry extends Registry {
wrapper(obj) {
return new RenderFunction(obj);
}
}
export { RenderFunctionsRegistry };

View file

@ -18,22 +18,13 @@
*/
import io from 'socket.io-client';
import { functionsRegistry } from '../common/lib/functions_registry';
import { getBrowserRegistries } from './browser_registries';
const SOCKET_CONNECTION_TIMEOUT = 5000; // timeout in ms
let socket;
export async function createSocket(basePath) {
if (socket != null) return socket;
export async function createSocket(basePath, functionsRegistry) {
return new Promise((resolve, rej) => {
const reject = p => {
socket = null; // reset the socket on errors
rej(p);
};
socket = io({
return new Promise((resolve, reject) => {
const socket = io({
path: `${basePath}/socket.io`,
transports: ['polling', 'websocket'],
transportOptions: {
@ -49,12 +40,11 @@ export async function createSocket(basePath) {
});
socket.on('getFunctionList', () => {
const pluginsLoaded = getBrowserRegistries();
pluginsLoaded.then(() => socket.emit('functionList', functionsRegistry.toJS()));
socket.emit('functionList', functionsRegistry.toJS());
});
socket.on('connect', () => {
resolve();
resolve(socket);
socket.off('connectionFailed', errorHandler);
socket.off('connect_error', errorHandler);
socket.off('connect_timeout', errorHandler);
@ -71,8 +61,3 @@ export async function createSocket(basePath) {
socket.on('connect_timeout', errorHandler);
});
}
export function getSocket() {
if (!socket) throw new Error('getSocket failed, socket has not been created');
return socket;
}
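
The socket module drops its singleton here: createSocket now takes the functions registry, resolves with the socket itself, and getSocket is removed. A hypothetical caller, showing how connection failures surface as a rejected promise rather than a poisoned shared socket; nothing in this block is part of the commit itself:

// Hypothetical caller; the error handling is an assumption based on the reject paths above.
async function connectInterpreterSocket(basePath, functionsRegistry) {
  try {
    const socket = await createSocket(basePath, functionsRegistry);
    return socket; // callers keep their own reference instead of calling getSocket()
  } catch (err) {
    // connect_error / connect_timeout reject the promise after SOCKET_CONNECTION_TIMEOUT.
    throw new Error(`interpreter socket failed to connect: ${err}`);
  }
}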

View file

@ -17,6 +17,6 @@
* under the License.
*/
export { populateServerRegistries, getServerRegistries } from './server_registries';
export { populateServerRegistries } from './server_registries';
export { getPluginPaths } from './get_plugin_paths';
export { pluginPaths } from './plugin_paths';

View file

@ -18,56 +18,39 @@
*/
import { i18n } from '@kbn/i18n';
import { typesRegistry } from '../common/lib/types_registry';
import { functionsRegistry as serverFunctions } from '../common/lib/functions_registry';
import { getPluginPaths } from './get_plugin_paths';
const registries = {
serverFunctions: serverFunctions,
commonFunctions: serverFunctions,
types: typesRegistry,
};
let resolve = null;
let called = false;
export const populateServerRegistries = registries => {
if (!registries) throw new Error('registries are required');
const populatePromise = new Promise(_resolve => {
resolve = _resolve;
});
return new Promise(resolve => {
const remainingTypes = Object.keys(registries);
const populatedTypes = {};
export const getServerRegistries = () => {
return populatePromise;
};
const loadType = () => {
const type = remainingTypes.pop();
getPluginPaths(type).then(paths => {
global.canvas = global.canvas || {};
global.canvas.register = d => registries[type].register(d);
global.canvas.i18n = i18n;
export const populateServerRegistries = types => {
if (called) {
return populatePromise;
}
called = true;
if (!types || !types.length) throw new Error('types is required');
paths.forEach(path => {
require(path); // eslint-disable-line import/no-dynamic-require
});
const remainingTypes = types;
const populatedTypes = {};
delete global.canvas;
const loadType = () => {
const type = remainingTypes.pop();
getPluginPaths(type).then(paths => {
global.canvas = global.canvas || {};
global.canvas.register = d => registries[type].register(d);
global.canvas.i18n = i18n;
paths.forEach(path => {
require(path); // eslint-disable-line import/no-dynamic-require
populatedTypes[type] = registries[type];
if (remainingTypes.length) {
loadType();
}
else {
resolve(populatedTypes);
}
});
};
delete global.canvas;
populatedTypes[type] = registries[type];
if (remainingTypes.length) loadType();
else resolve(populatedTypes);
});
};
if (remainingTypes.length) loadType();
return populatePromise;
if (remainingTypes.length) loadType();
});
};
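
As on the browser side, populateServerRegistries now receives the registries to fill instead of importing them, and resolves with the populated map. A sketch under that assumption; the server-side import path is a guess, only the call shape comes from the diff:

// Sketch: registries are owned by the caller and injected.
const { populateServerRegistries } = require('@kbn/interpreter/server'); // path assumed
const { typesRegistry, functionsRegistry } = require('@kbn/interpreter/common');

populateServerRegistries({
  serverFunctions: functionsRegistry,
  types: typesRegistry,
}).then(populated => {
  // Each key of the input map comes back pointing at its now-populated registry.
  Object.keys(populated).forEach(type => console.log(`${type} plugins loaded`));
});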

View file

@ -36,6 +36,7 @@ module.exports = function ({ sourceMaps }, { watch }) {
entry: {
'types/all': resolve(PLUGIN_SOURCE_DIR, 'types/register.js'),
'functions/browser/all': resolve(PLUGIN_SOURCE_DIR, 'functions/browser/register.js'),
'functions/browser/common': resolve(PLUGIN_SOURCE_DIR, 'functions/common/register.js'),
},
// there were problems with the node and web targets since this code is actually

View file

@ -36,6 +36,7 @@ export function createEsTestCluster(options = {}) {
log,
basePath = resolve(KIBANA_ROOT, '.es'),
esFrom = esTestConfig.getBuildFrom(),
dataArchive,
} = options;
const randomHash = Math.random()
@ -74,6 +75,10 @@ export function createEsTestCluster(options = {}) {
throw new Error(`unknown option esFrom "${esFrom}"`);
}
if (dataArchive) {
await cluster.extractDataDirectory(installPath, dataArchive);
}
await cluster.start(installPath, {
esArgs: [
`cluster.name=${clusterName}`,
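
The new dataArchive option lets a test cluster start from a pre-built data directory: the archive is extracted into the install path before the node is started. A hypothetical functional-test setup; the archive path and the log/config objects are placeholders from the test harness, not values in this commit:

// Hypothetical usage of createEsTestCluster with the new option.
const cluster = createEsTestCluster({
  log,
  basePath: resolve(KIBANA_ROOT, '.es'),
  dataArchive: resolve(KIBANA_ROOT, 'test/fixtures/data_archive.zip'),
});

// Indices from the archive are already on disk when the node boots.
await cluster.start(config.get('esTestCluster.serverArgs'));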

View file

@ -20,12 +20,12 @@
import { format as formatUrl } from 'url';
import request from 'request';
import { delay, fromNode as fcb } from 'bluebird';
import { delay } from 'bluebird';
export const DEFAULT_SUPERUSER_PASS = 'iamsuperuser';
async function updateCredentials(port, auth, username, password, retries = 10) {
const result = await fcb(cb =>
const result = await new Promise((resolve, reject) =>
request(
{
method: 'PUT',
@ -40,13 +40,15 @@ async function updateCredentials(port, auth, username, password, retries = 10) {
body: { password },
},
(err, httpResponse, body) => {
cb(err, { httpResponse, body });
if (err) return reject(err);
resolve({ httpResponse, body });
}
)
);
const { body, httpResponse } = result;
const { statusCode } = httpResponse;
if (statusCode === 200) {
return;
}
@ -59,21 +61,61 @@ async function updateCredentials(port, auth, username, password, retries = 10) {
throw new Error(`${statusCode} response, expected 200 -- ${JSON.stringify(body)}`);
}
export async function setupUsers(log, config) {
const esPort = config.get('servers.elasticsearch.port');
export async function setupUsers(log, esPort, updates) {
// track the current credentials for the `elastic` user as
// they will likely change as we apply updates
let auth = `elastic:${DEFAULT_SUPERUSER_PASS}`;
// list of updates we need to apply
const updates = [config.get('servers.elasticsearch'), config.get('servers.kibana')];
for (const { username, password, roles } of updates) {
// If working with a built-in user, just change the password
if (['logstash_system', 'elastic', 'kibana'].includes(username)) {
await updateCredentials(esPort, auth, username, password);
log.info('setting %j user password to %j', username, password);
// If not a builtin user, add them
} else {
await insertUser(esPort, auth, username, password, roles);
log.info('Added %j user with password %j', username, password);
}
for (const { username, password } of updates) {
log.info('setting %j user password to %j', username, password);
await updateCredentials(esPort, auth, username, password);
if (username === 'elastic') {
auth = `elastic:${password}`;
}
}
}
async function insertUser(port, auth, username, password, roles = [], retries = 10) {
const result = await new Promise((resolve, reject) =>
request(
{
method: 'POST',
uri: formatUrl({
protocol: 'http:',
auth,
hostname: 'localhost',
port,
pathname: `/_xpack/security/user/${username}`,
}),
json: true,
body: { password, roles },
},
(err, httpResponse, body) => {
if (err) return reject(err);
resolve({ httpResponse, body });
}
)
);
const { body, httpResponse } = result;
const { statusCode } = httpResponse;
if (statusCode === 200) {
return;
}
if (retries > 0) {
await delay(2500);
return await insertUser(port, auth, username, password, retries - 1);
}
throw new Error(`${statusCode} response, expected 200 -- ${JSON.stringify(body)}`);
}
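
setupUsers now takes the Elasticsearch port and an explicit list of credential updates: built-in users (elastic, kibana, logstash_system) get a password change, anything else is inserted with its roles via insertUser. A sketch of a call site; the third entry is a purely illustrative custom user, not part of this commit:

// Sketch only -- mirrors the runElasticsearch call shown later in this diff.
await setupUsers(log, config.get('servers.elasticsearch.port'), [
  config.get('servers.elasticsearch'), // built-in 'elastic': password updated in place
  config.get('servers.kibana'),        // built-in 'kibana': password updated in place
  { username: 'test_writer', password: 'changeme', roles: ['kibana_user'] }, // hypothetical: added via insertUser
]);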

View file

@ -37,6 +37,7 @@ export async function runElasticsearch({ config, options }) {
log,
basePath: resolve(KIBANA_ROOT, '.es'),
esFrom: esFrom || config.get('esTestCluster.from'),
dataArchive: config.get('esTestCluster.dataArchive'),
});
const esArgs = config.get('esTestCluster.serverArgs');
@ -44,7 +45,10 @@ export async function runElasticsearch({ config, options }) {
await cluster.start(esArgs);
if (isTrialLicense) {
await setupUsers(log, config);
await setupUsers(log, config.get('servers.elasticsearch.port'), [
config.get('servers.elasticsearch'),
config.get('servers.kibana'),
]);
}
return cluster;

View file

@ -26,3 +26,5 @@ export { OPTIMIZE_BUNDLE_DIR, KIBANA_ROOT } from './functional_tests/lib/paths';
export { esTestConfig, createEsTestCluster } from './es';
export { kbnTestConfig, kibanaServerTestUser, kibanaTestUser, adminTestUser } from './kbn';
export { setupUsers, DEFAULT_SUPERUSER_PASS } from './functional_tests/lib/auth';

View file

@ -104,9 +104,14 @@ module.exports = function (grunt) {
Promise.all([uiFrameworkWatch(), uiFrameworkServerStart()]).then(done);
});
grunt.registerTask('compileCss', function () {
grunt.registerTask('compileCssLight', function () {
const done = this.async();
uiFrameworkCompile().then(done);
uiFrameworkCompileLight().then(done);
});
grunt.registerTask('compileCssDark', function () {
const done = this.async();
uiFrameworkCompileDark().then(done);
});
function uiFrameworkServerStart() {
@ -141,9 +146,36 @@ module.exports = function (grunt) {
});
}
function uiFrameworkCompile() {
const src = 'src/index.scss';
const dest = 'dist/ui_framework.css';
function uiFrameworkCompileLight() {
const src = 'src/kui_light.scss';
const dest = 'dist/kui_light.css';
return new Promise(resolve => {
sass.render({
file: src,
}, function (error, result) {
if (error) {
grunt.log.error(error);
}
postcss([postcssConfig])
.process(result.css, { from: src, to: dest })
.then(result => {
grunt.file.write(dest, result.css);
if (result.map) {
grunt.file.write(`${dest}.map`, result.map);
}
resolve();
});
});
});
}
function uiFrameworkCompileDark() {
const src = 'src/kui_dark.scss';
const dest = 'dist/kui_dark.css';
return new Promise(resolve => {
sass.render({
@ -175,7 +207,8 @@ module.exports = function (grunt) {
grunt.util.spawn({
cmd: isPlatformWindows ? '.\\node_modules\\.bin\\grunt.cmd' : './node_modules/.bin/grunt',
args: [
'compileCss',
'compileCssLight',
'compileCssDark',
],
}, (error, result) => {
if (error) {

File diff suppressed because it is too large

View file

@ -4,7 +4,7 @@ html {
.guideBody {
height: 100%;
background-color: #000000;
background-color: $euiColorFullShade;
margin: 0;
min-width: $guideMinWidth;
}
@ -61,7 +61,7 @@ html {
display: flex;
justify-content: center;
flex: 1 0 auto;
background-color: #ffffff;
background-color: $euiColorEmptyShade;
}
.guideContentPage__hint {
@ -74,7 +74,7 @@ html {
padding: 30px;
margin: 20px;
border-radius: 4px;
background-color: #e8e8e8;
background-color: $euiColorLightestShade;
line-height: $guideLineHeight;
}
@ -96,11 +96,11 @@ html {
}
.guideWarning {
border-left: 5px solid #e8488b;
border-left: 5px solid $euiColorAccent;
margin-top: 19px;
padding: 0 14px;
line-height: 21px;
color: #e8488b;
color: $euiColorAccent;
}
.guideBreak {

View file

@ -1,6 +1,6 @@
.guideCode {
padding: 2px 4px;
font-family: 'Ubuntu Mono', monospace;
background-color: #e8e8e8;
color: #565656;
background-color: $euiColorLightestShade;
color: $euiColorDarkShade;
}

View file

@ -5,11 +5,11 @@
bottom: 0;
width: $guideCodeViewerWidth;
padding: 6px 0 40px;
background-color: white;
background-color: $euiColorEmptyShade;
transform: translateX($guideCodeViewerWidth);
transition: transform $guideCodeViewerTransition;
overflow: auto;
border-left: 1px solid #d6d6d6;
border-left: $euiBorderThin;
@include scrollbar;
@ -25,7 +25,7 @@
.guideCodeViewer__header {
padding: 0 20px 6px;
line-height: $guideLineHeight;
border-bottom: 1px solid #d6d6d6;
border-bottom: $euiBorderThin;
font-size: 14px;
font-weight: 700;
margin-bottom: 10px;
@ -47,7 +47,7 @@
.guideCodeViewer__title {
padding: 0 20px 6px;
border-bottom: 1px solid #d6d6d6;
border-bottom: $euiBorderThin;
line-height: $guideLineHeight;
font-size: 14px;
}
@ -61,7 +61,7 @@
.hljs {
display: block;
padding: 15px 20px;
color: #637c84;
color: $euiColorDarkShade;
font-size: 14px;
line-height: 1.3;
font-family: 'Ubuntu Mono', monospace;

View file

@ -1,5 +1,5 @@
$guideVerticalRhythm: 20px;
$guideLineHeight: 24px;
$guideVerticalRhythm: $euiSize;
$guideLineHeight: $euiSizeL;
$guideNavHeight: 60px;
$guideSideNavWidth: 400px;
$guideSideNavSmallWidth: 220px;
@ -9,11 +9,11 @@ $guideCodeViewerTransition: 0.2s ease;
$guideChromeTransition: 0.3s ease;
// Colors
$guideBaseBackgroundColor: #f7f7f7;
$guidePanelBackgroundColor: #ffffff;
$guideTextColor: #444;
$guideLinkColor: #00a9e5;
$guideLinkHoverColor: #00a9e5;
$guideBaseBackgroundColor: $euiColorLightestShade;
$guidePanelBackgroundColor: $euiColorEmptyShade;
$guideTextColor: $euiColorDarkestShade;
$guideLinkColor: $euiColorPrimary;
$guideLinkHoverColor: darken($euiColorPrimary, 10%);
// Breakpoints
$guideMinWidth: 840px;
@ -31,7 +31,7 @@ $guideNormalBreakpoint: 1900px;
}
}
@mixin scrollbar($color: rgba(#454D58, 0.4)) {
@mixin scrollbar($color: $euiBorderColor) {
&::-webkit-scrollbar {
width: 16px;
height: 16px;

View file

@ -8,21 +8,16 @@
margin-top: 0;
}
.guideDemo--darkTheme {
background-color: #272727;
padding: 10px;
}
.guideDemo__highlightGrid {
.kuiFlexItem {
background: transparentize(#0096CC, .9);
background: transparentize($euiColorPrimary, .9);
padding: 16px;
}
}
.guideDemo__highlightGridWrap {
.kuiFlexItem div {
background: transparentize(#0096CC, .9);
background: transparentize($euiColorPrimary, .9);
padding: 16px;
}
}

View file

@ -55,7 +55,6 @@ export class GuideDemo extends Component {
render() {
const {
isFullScreen,
isDarkTheme,
children,
className,
js, // eslint-disable-line no-unused-vars
@ -66,8 +65,6 @@ export class GuideDemo extends Component {
const classes = classNames('guideDemo', className, {
'guideDemo--fullScreen': isFullScreen,
'guideDemo--darkTheme': isDarkTheme,
'theme-dark': isDarkTheme,
});
return (
@ -88,7 +85,6 @@ GuideDemo.propTypes = {
html: PropTypes.string.isRequired,
css: PropTypes.string.isRequired,
isFullScreen: PropTypes.bool.isRequired,
isDarkTheme: PropTypes.bool.isRequired,
};
GuideDemo.defaultProps = {
@ -96,5 +92,4 @@ GuideDemo.defaultProps = {
html: '',
css: '',
isFullScreen: false,
isDarkTheme: false,
};

View file

@ -8,7 +8,7 @@
right: 0;
min-width: $guideMinWidth;
height: $guideNavHeight;
border-bottom: 1px solid #CCCCCC;
border-bottom: $euiBorderThin;
color: $guideTextColor;
background-color: $guidePanelBackgroundColor;
transition:
@ -16,11 +16,11 @@
height 0.3s ease,
box-shadow 0.3s linear;
overflow: hidden;
box-shadow: 0 0 0 rgba(black, 0.3);
@include euiBottomShadowMedium;
&.is-guide-nav-open {
height: 100%;
box-shadow: 0 40px 200px rgba(black, 0.05);
@include euiBottomShadow;
}
&.is-chrome-hidden {
@ -182,14 +182,14 @@
line-height: 10px;
padding: 4px 20px;
color: $guideLinkHoverColor;
background-color: #fff;
background-color: $euiColorEmptyShade;
border: 1px solid $guideLinkHoverColor;
border-radius: 3px;
cursor: pointer;
&:hover,
&:active {
background-color: #e6f7fc;
background-color: $euiColorLightestShade;
}
&.guideNavPaginationButton-isDisabled {

View file

@ -11,7 +11,7 @@
$scrollBarWidth: 20px;
background-color: $guidePanelBackgroundColor;
border: 1px solid #CCCCCC;
border: $euiBorderThin;
border-radius: 4px;
flex: 1 1 auto;
padding: 40px 60px;
@ -34,3 +34,9 @@
}
}
}
.guidePageKillScreen {
background-color: tintOrShade($euiColorDanger, 90%, 70%);
padding: $euiSizeL;
margin-bottom: $euiSizeL;
}

View file

@ -62,7 +62,7 @@ export class GuidePage extends Component {
</GuidePageSideNav>
<div className="guidePageBody">
<div style={{ marginBottom: 40, backgroundColor: '#ffec9d', padding: 20 }}>
<div className="guidePageKillScreen">
<h1 className="guideTitle">
The Kibana UI Framework has been DEPRECATED.
</h1>

View file

@ -21,7 +21,7 @@
line-height: 10px;
padding: 4px 10px;
color: $guideLinkHoverColor;
background-color: #fff;
background-color: $euiColorEmptyShade;
border: 1px solid $guideLinkHoverColor;
border-radius: 3px;
cursor: pointer;
@ -30,7 +30,7 @@
&:hover,
&:active {
background-color: #e6f7fc;
background-color: $euiColorLightestShade;
}
.is-chrome-hidden & {

View file

@ -1,3 +1,8 @@
@import "~@elastic/eui/src/global_styling/index";
@import "../../dist/ui_framework.css";
@import '~@elastic/eui/src/themes/k6/k6_globals';
@import '~@elastic/eui/src/themes/k6/k6_colors_dark';
@import '~@elastic/eui/src/global_styling/functions/index';
@import '~@elastic/eui/src/global_styling/variables/index';
@import '~@elastic/eui/src/global_styling/mixins/index';
@import '~@elastic/eui/src/global_styling/reset/index';
@import "../../dist/kui_dark.css";
@import "./components/guide_components";

View file

@ -94,10 +94,6 @@ export default props => (
<GuideDemo>
<Basic />
</GuideDemo>
<GuideDemo isDarkTheme={true}>
<Basic />
</GuideDemo>
</GuideSection>
<GuideSection

View file

@ -103,10 +103,6 @@ export default props => (
<GuideDemo>
<TextInput/>
</GuideDemo>
<GuideDemo isDarkTheme={true}>
<TextInput/>
</GuideDemo>
</GuideSection>
<GuideSection
@ -126,11 +122,6 @@ export default props => (
<GuideDemo
html={assistedInputHtml}
/>
<GuideDemo
html={assistedInputHtml}
isDarkTheme
/>
</GuideSection>
<GuideSection
@ -143,11 +134,6 @@ export default props => (
<GuideDemo
html={searchInputHtml}
/>
<GuideDemo
html={searchInputHtml}
isDarkTheme
/>
</GuideSection>
<GuideSection
@ -179,10 +165,6 @@ export default props => (
<GuideDemo>
<TextArea/>
</GuideDemo>
<GuideDemo isDarkTheme={true}>
<TextArea/>
</GuideDemo>
</GuideSection>
<GuideSection
@ -213,10 +195,6 @@ export default props => (
<GuideDemo>
<CheckBox/>
</GuideDemo>
<GuideDemo isDarkTheme={true}>
<CheckBox/>
</GuideDemo>
</GuideSection>
<GuideSection
@ -233,9 +211,6 @@ export default props => (
<Select/>
</GuideDemo>
<GuideDemo isDarkTheme={true}>
<Select/>
</GuideDemo>
</GuideSection>
</GuidePage>
);

View file

@ -40,11 +40,6 @@ export default props => (
<GuideDemo
html={linkHtml}
/>
<GuideDemo
html={linkHtml}
isDarkTheme={true}
/>
</GuideSection>
</GuidePage>
);

View file

@ -86,10 +86,6 @@ export default props => (
<GuideDemo>
<SimpleLocalNav />
</GuideDemo>
<GuideDemo isDarkTheme={true}>
<SimpleLocalNav />
</GuideDemo>
</GuideSection>
<GuideSection
@ -112,10 +108,6 @@ export default props => (
<GuideDemo>
<LocalNavWithBreadcrumbs />
</GuideDemo>
<GuideDemo isDarkTheme={true}>
<LocalNavWithBreadcrumbs />
</GuideDemo>
</GuideSection>
<GuideSection
@ -139,9 +131,6 @@ export default props => (
<LocalNavWithSearch />
</GuideDemo>
<GuideDemo isDarkTheme={true}>
<LocalNavWithSearch />
</GuideDemo>
</GuideSection>
<GuideSection
@ -160,10 +149,6 @@ export default props => (
<GuideDemo>
<LocalNavWithSearchError />
</GuideDemo>
<GuideDemo isDarkTheme={true}>
<LocalNavWithSearchError />
</GuideDemo>
</GuideSection>
<GuideSection
@ -190,10 +175,6 @@ export default props => (
<GuideDemo>
<LocalNavWithMenuItemStates />
</GuideDemo>
<GuideDemo isDarkTheme={true}>
<LocalNavWithMenuItemStates />
</GuideDemo>
</GuideSection>
<GuideSection
@ -216,10 +197,6 @@ export default props => (
<GuideDemo>
<LocalNavWithDropdown />
</GuideDemo>
<GuideDemo isDarkTheme={true}>
<LocalNavWithDropdown />
</GuideDemo>
</GuideSection>
<GuideSection
@ -242,10 +219,6 @@ export default props => (
<GuideDemo>
<LocalNavWithDropdownPanels />
</GuideDemo>
<GuideDemo isDarkTheme={true}>
<LocalNavWithDropdownPanels />
</GuideDemo>
</GuideSection>
<GuideSection
@ -268,10 +241,6 @@ export default props => (
<GuideDemo>
<LocalNavWithTabs />
</GuideDemo>
<GuideDemo isDarkTheme={true}>
<LocalNavWithTabs />
</GuideDemo>
</GuideSection>
<GuideSection
@ -284,11 +253,6 @@ export default props => (
<GuideDemo
html={datePickerHtml}
/>
<GuideDemo
html={datePickerHtml}
isDarkTheme={true}
/>
</GuideSection>
</GuidePage>
);

View file

@ -59,10 +59,6 @@ export default props => (
<GuideDemo>
<ModalExample />
</GuideDemo>
<GuideDemo isDarkTheme>
<ModalExample />
</GuideDemo>
</GuideSection>
<GuideSection

Some files were not shown because too many files have changed in this diff