Mirror of https://github.com/elastic/kibana.git (synced 2025-04-24 17:59:23 -04:00)

commit d2f208bcba
Merge remote-tracking branch 'upstream/6.7' into 6.7

50 changed files with 508 additions and 172 deletions

@@ -6,6 +6,7 @@ set -e
 cd "$(dirname "$0")/.."
 
 source src/dev/ci_setup/setup.sh
+source src/dev/ci_setup/checkout_sibling_es.sh
 
 case "$JOB" in
   kibana-intake)
@@ -41,10 +41,6 @@ module.exports = {
         forceNode: true,
       },
     },
 
-    react: {
-      version: '16.3',
-    },
   },
 
   rules: {
@@ -17,6 +17,8 @@
 
 This section summarizes the changes in each release.
 
+* <<release-notes-6.6.2>>
+* <<release-notes-6.6.1>>
 * <<release-notes-6.6.0>>
 * <<release-notes-6.5.4>>
 * <<release-notes-6.5.3>>
@@ -84,6 +86,92 @@ This section summarizes the changes in each release.
 //=== Known Issues
 ////
 
+[[release-notes-6.6.2]]
+== {kib} 6.6.2
+
+See <<breaking-changes-6.0, breaking changes>> for the changes to be aware of
+when migrating your application from one version of Kibana to another.
+
+[float]
+[[bug-6.6.2]]
+=== Bug fixes
+APM::
+* Avoids crashing the transaction details page if trace duration is 0 {pull}31799[#31799]
+Canvas::
+* Provides a valid `axisconfig` position default {pull}32335[#32335]
+Kibana App::
+* Removes the use of 's' regex flag in {kib} query language {pull}31292[#31292]
+* Fixes vislib legend filters {pull}29592[#29592]
+Machine Learning::
+* Sets default number of shards to 1 when creating a new index for File data visualizer {pull}31567[#31567]
+* Adds missing error handling to annotations request {pull}32384[#32384]
+Management::
+* Fixes bug where rollup job search would display an empty prompt if no jobs matched the search {pull}31642[#31642]
+Monitoring::
+* Ensures that bulk uploader only starts once {pull}31307[#31307]
+* Addresses some UI regressions with shard allocation {pull}29757[#29757]
+Operations::
+* Bumps Node to 10.15.2 {pull}32200[#32200]
+Visualizations::
+* Formats Time Series Visual Builder error message {pull}31569[#31569]
+
+
+[[release-notes-6.6.1]]
+== {kib} 6.6.1
+
+See <<breaking-changes-6.0, breaking changes>> for the changes to be aware of
+when migrating your application from one version of Kibana to another.
+
+[float]
+[[bug-6.6.1]]
+=== Bug fixes
+Canvas::
+* Wraps URL check in `retry.try` {pull}29536[#29536]
+* Avoids server crash when no value is found {pull}29069[#29069]
+* Identifies Canvas for metrics collection {pull}29078[#29078]
+* Removes text selection on writeable change {pull}28887[#28887]
+* Prevents sort in pie function {pull}27076[#27076]
+* Maintains element selection when using up/down layering operations {pull}29634[#29634]
+* Uses `server.basePath` to create socket connection from interpreter {pull}29393[#29393]
+Kibana App::
+* Renames `defaultSpaceBasePath` to `serverBasePath` {pull}29431[#29431]
+Machine Learning::
+* Fixes race condition related to view by swimlane update {pull}28990[#28990]
+* Adds an integrity check to creating, updating, and deleting annotations {pull}29969[#29969]
+* Removes key attribute from annotation before indexing {pull}30183[#30183]
+* Makes index pattern related links optional {pull}29332[#29332]
+* Fixes unmounting jobs list React component on page change {pull}29980[#29980]
+* Uses intl.formatMessage for File Data Visualizer file picker {pull}29251[#29251]
+Management::
+* Adds Webhook Action type on client {pull}29818[#29818]
+Monitoring::
+* Fixes Logstash date format {pull}29637[#29637]
+* Fixes UI regressions with shard allocation {pull}29757[#29757]
+Operations::
+* Fixes plugin deprecations {pull}29737[#29737]
+* Changes Node version to 10.15.1 {pull}27918[#27918]
+* Fixes Chrome EUI icons on status page {pull}29131[#29131]
+Querying & Filtering::
+* Adds support for matching field names with newlines {pull}29539[#29539]
+Reporting::
+* Fixes date formatting on server for CSV export {pull}29977[#29977]
+Security::
+* Adds missing cluster privileges to role management screen {pull}28692[#28692]
+* Fixes an issue with a cross-site scripting (XSS) vulnerability (CVE-2019-7608). See https://www.elastic.co/community/security[Security issues].
+* Fixes an arbitrary code execution flaw in the Timelion visualizer (CVE-2019-7609). See https://www.elastic.co/community/security[Security issues].
+* Fixes an arbitrary code execution flaw in the security audit logger (CVE-2019-7610). See https://www.elastic.co/community/security[Security issues].
+Visualizations::
+* Fixes standard deviation aggregation to prevent crash of Time Series Visual Builder {pull}30798[#30798]
+* Fixes Time Series Visual Builder flot chart render after index pattern change {pull}29949[#29949]
+* Enables `orderBy` and `orderAgg` in visualization editor for rollup visualizations {pull}29894[#29894]
+
 [[release-notes-6.6.0]]
 == {kib} 6.6.0
@@ -1,31 +1,30 @@
 [[management-cross-cluster-search]]
-=== Cross Cluster Search
+=== {ccs-cap}
 
-Elasticsearch supports the ability to run search and aggregation requests across multiple
-clusters using a module called _cross cluster search_.
+{es} supports the ability to run search and aggregation requests across multiple
+clusters using a module called _{ccs}_.
 
-In order to take advantage of cross cluster search, you must configure your Elasticsearch
-clusters accordingly. Review the corresponding Elasticsearch
-{ref}/modules-cross-cluster-search.html[documentation] before attempting to use cross cluster
-search in Kibana.
+In order to take advantage of {ccs}, you must configure your {es}
+clusters accordingly. Review the corresponding {es}
+{ref}/modules-cross-cluster-search.html[documentation] before attempting to use {ccs} in {kib}.
 
-Once your Elasticsearch clusters are configured for cross cluster search, you can create
-specific index patterns in Kibana to search across the clusters of your choosing. Using the
-same syntax that you'd use in a raw cross cluster search request in Elasticsearch, create your
-index pattern in Kibana with the convention `<cluster-names>:<pattern>`.
+Once your {es} clusters are configured for {ccs}, you can create
+specific index patterns in {kib} to search across the clusters of your choosing. Using the
+same syntax that you'd use in a raw {ccs} request in {es}, create your
+index pattern in {kib} with the convention `<cluster-names>:<pattern>`.
 
-For example, if you want to query logstash indices across two of the Elasticsearch clusters
-that you set up for cross cluster search, which were named `cluster_one` and `cluster_two`,
-you would use `cluster_one:logstash-*,cluster_two:logstash-*` as your index pattern in Kibana.
+For example, if you want to query {ls} indices across two of the {es} clusters
+that you set up for {ccs}, which were named `cluster_one` and `cluster_two`,
+you would use `cluster_one:logstash-*,cluster_two:logstash-*` as your index pattern in {kib}.
 
-Just like in raw search requests in Elasticsearch, you can use wildcards in your cluster names
-to match any number of clusters, so if you wanted to search logstash indices across any
+Just like in raw search requests in {es}, you can use wildcards in your cluster names
+to match any number of clusters, so if you wanted to search {ls} indices across any
 clusters named `cluster_foo`, `cluster_bar`, and so on, you would use `cluster_*:logstash-*`
-as your index pattern in Kibana.
+as your index pattern in {kib}.
 
-If you want to query across all Elasticsearch clusters that have been configured for cross
-cluster search, then use a standalone wildcard for your cluster name in your Kibana index
+If you want to query across all {es} clusters that have been configured for {ccs},
+then use a standalone wildcard for your cluster name in your {kib} index
 pattern: `*:logstash-*`.
 
-Once an index pattern is configured using the cross cluster search syntax, all searches and
-aggregations using that index pattern in Kibana take advantage of cross cluster search.
+Once an index pattern is configured using the {ccs} syntax, all searches and
+aggregations using that index pattern in {kib} take advantage of {ccs}.
@@ -2,23 +2,22 @@
 == Working with remote clusters
 
 {kib} *Management* provides user interfaces for working with data from remote
-clusters and managing the cross cluster replication process. You can replicate indices from a
+clusters and managing the {ccr} process. You can replicate indices from a
 leader remote cluster to a follower index in a local cluster. The local follower indices
 can be used to provide remote backups for disaster recovery or for geo-proximate copies of data.
 
 Before using these features, you should be familiar with the following concepts:
 
-* {xpack-ref}/xpack-ccr.html[Cross cluster replication]
-* {ref}/modules-cross-cluster-search.html[Cross cluster search]
-* {xpack-ref}/cross-cluster-configuring.html[Cross cluster security requirements]
+* {stack-ov}/xpack-ccr.html[{ccr-cap}]
+* {ref}/modules-cross-cluster-search.html[{ccs-cap}]
+* {stack-ov}/cross-cluster-configuring.html[Cross-cluster security requirements]
 
 [float]
 [[managing-remote-clusters]]
 == Managing remote clusters
 
 *Remote clusters* helps you manage remote clusters for use with
-cross cluster search and cross cluster replication. You can add and remove remote
-clusters and check their connectivity.
+{ccs} and {ccr}. You can add and remove remote clusters and check their connectivity.
 
 Before you use this feature, you should be familiar with the concept of
 {ref}/modules-remote-clusters.html[remote clusters].
@@ -32,16 +31,16 @@ from the *Remote clusters* list view.
 [role="screenshot"]
 image::images/add_remote_cluster.png[][UI for adding a remote cluster]
 
-Once a remote cluster is registered, you can use the tools under *Cross Cluster Replication*
+Once a remote cluster is registered, you can use the tools under *{ccr-cap}*
 to add and manage follower indices on the local cluster, and replicate data from
 indices on the remote cluster based on an auto-follow index pattern.
 
 [float]
 [[managing-cross-cluster-replication]]
-== Managing cross cluster replication
+== [xpack]#Managing {ccr}#
 
-*Cross Cluster Replication* helps you create and manage the cross cluster
-replication process. If you want to replicate data from existing indices, or set up
+*{ccr-cap}* helps you create and manage the {ccr} process.
+If you want to replicate data from existing indices, or set up
 local followers on a case-by-case basis, go to *Follower indices*.
 If you want to automatically detect and follow new indices when they are created
 on a remote cluster, you can do so from *Auto-follow patterns*.
@@ -56,10 +55,10 @@ a given remote cluster, and monitor whether the replication is active.
 
 Before you use these features, you should be familiar with the following concepts:
 
-* {xpack-ref}/ccr-requirements.html[Requirements for leader indices]
-* {xpack-ref}/ccr-auto-follow.html[Automatically following indices]
+* {stack-ov}/ccr-requirements.html[Requirements for leader indices]
+* {stack-ov}/ccr-auto-follow.html[Automatically following indices]
 
-To get started, go to *Management > Elasticsearch > Cross Cluster Replication*.
+To get started, go to *Management > Elasticsearch > {ccr-cap}*.
 
 [role="screenshot"]
 image::images/auto_follow_pattern.png[][UI for adding an auto-follow pattern]
@@ -1,16 +1,16 @@
 [[cross-cluster-kibana]]
-==== Cross Cluster Search and Kibana
+==== {ccs-cap} and {kib}
 
-When Kibana is used to search across multiple clusters, a two-step authorization
+When {kib} is used to search across multiple clusters, a two-step authorization
 process determines whether or not the user can access indices on a remote
 cluster:
 
 * First, the local cluster determines if the user is authorized to access remote
-clusters. (The local cluster is the cluster Kibana is connected to.)
+clusters. (The local cluster is the cluster {kib} is connected to.)
 * If they are, the remote cluster then determines if the user has access
 to the specified indices.
 
-To grant Kibana users access to remote clusters, assign them a local role
+To grant {kib} users access to remote clusters, assign them a local role
 with read privileges to indices on the remote clusters. You specify remote
 cluster indices as `<remote_cluster_name>:<index_name>`.
 
@@ -18,10 +18,10 @@ To enable users to actually read the remote indices, you must create a matching
 role on the remote clusters that grants the `read_cross_cluster` privilege
 and access to the appropriate indices.
 
-For example, if Kibana is connected to the cluster where you're actively
-indexing Logstash data (your _local cluster_) and you're periodically
+For example, if {kib} is connected to the cluster where you're actively
+indexing {ls} data (your _local cluster_) and you're periodically
 offloading older time-based indices to an archive cluster
-(your _remote cluster_) and you want to enable Kibana users to search both
+(your _remote cluster_) and you want to enable {kib} users to search both
 clusters:
 
 . On the local cluster, create a `logstash_reader` role that grants
@@ -31,7 +31,7 @@ NOTE: If you configure the local cluster as another remote in {es}, the
 `logstash_reader` role on your local cluster also needs to grant the
 `read_cross_cluster` privilege.
 
-. Assign your Kibana users the `kibana_user` role and your `logstash_reader`
+. Assign your {kib} users the `kibana_user` role and your `logstash_reader`
 role.
 
 . On the remote cluster, create a `logstash_reader` role that grants the
@@ -1,15 +1,17 @@
 [role="xpack"]
 [[apm-settings-kb]]
-=== APM Settings in Kibana
+=== APM settings in Kibana
 ++++
-<titleabbrev>APM Settings</titleabbrev>
+<titleabbrev>APM settings</titleabbrev>
 ++++
 
 You do not need to configure any settings to use APM. It is enabled by default.
+If you'd like to change any of the default values,
+copy and paste the relevant settings below into your `kibana.yml` configuration file.
 
 [float]
 [[general-apm-settings-kb]]
-==== General APM Settings
+==== General APM settings
 
 xpack.apm.enabled:: Set to `false` to disable the APM plugin in {kib}. Defaults to
 `true`.
@@ -19,7 +21,7 @@ xpack.apm.ui.enabled:: Set to `false` to hide the APM plugin {kib} from the menu
 
 xpack.apm.ui.transactionGroupBucketSize:: Number of top transaction groups displayed in APM plugin in Kibana. Defaults to `100`.
 
-apm_oss.indexPattern:: Index pattern is used for integrations with Machine Learning and Kuery Bar. It must match all apm indices. Defaults to `apm-*`.
+apm_oss.indexPattern:: Index pattern is used for integrations with Machine Learning and the query bar. It must match all apm indices. Defaults to `apm-*`.
 
 apm_oss.errorIndices:: Matcher for indices containing error documents. Defaults to `apm-*`.
@@ -17,7 +17,7 @@ module.exports = {
 
   settings: {
     react: {
-      version: semver.coerce(PKG.dependencies.react),
+      version: semver.valid(semver.coerce(PKG.dependencies.react)),
    },
  },
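Note on the change above: `semver.coerce()` returns a `SemVer` object (or `null`), while the eslint-plugin-react `settings.react.version` value is expected to be a plain version string. Wrapping the result in `semver.valid()` normalizes it to a string. A minimal sketch using the `semver` package:

// Sketch: why semver.valid(semver.coerce(...)) helps — coerce() yields a SemVer
// object (or null); valid() turns either a SemVer or a string into the
// normalized version string, or null if it cannot.
const semver = require('semver');

const raw = '^16.3.2'; // shape of a typical PKG.dependencies.react value
console.log(typeof semver.coerce(raw));                    // 'object' (SemVer instance)
console.log(semver.valid(semver.coerce(raw)));             // '16.3.2' (plain string)
console.log(semver.valid(semver.coerce('not a version'))); // null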
@@ -11,7 +11,7 @@ Usage:
 Options:
   --help                      Display this menu and exit.
   --config <file>             Pass in a config. Can pass in multiple configs.
-  --esFrom <snapshot|source>  Build Elasticsearch from source or run from snapshot. Default: snapshot
+  --esFrom <snapshot|source>  Build Elasticsearch from source or run from snapshot. Default: $TEST_ES_FROM or snapshot
   --kibana-install-dir <dir>  Run Kibana from existing install directory instead of from source.
   --bail                      Stop the test run at the first failure.
   --grep <pattern>            Pattern to select which tests to run.
@@ -32,6 +32,7 @@ Object {
     <absolute path>/foo,
   ],
   "createLogger": [Function],
+  "esFrom": "snapshot",
   "extraKbnOpts": undefined,
   "suiteTags": Object {
     "exclude": Array [],
@@ -49,6 +50,7 @@ Object {
   ],
   "createLogger": [Function],
   "debug": true,
+  "esFrom": "snapshot",
   "extraKbnOpts": undefined,
   "suiteTags": Object {
     "exclude": Array [],
@@ -65,6 +67,7 @@ Object {
     <absolute path>/foo,
   ],
   "createLogger": [Function],
+  "esFrom": "snapshot",
   "extraKbnOpts": undefined,
   "suiteTags": Object {
     "exclude": Array [],
@@ -83,6 +86,7 @@ Object {
     <absolute path>/foo,
   ],
   "createLogger": [Function],
+  "esFrom": "snapshot",
   "extraKbnOpts": Object {
     "server.foo": "bar",
   },
@@ -100,6 +104,7 @@ Object {
     <absolute path>/foo,
   ],
   "createLogger": [Function],
+  "esFrom": "snapshot",
   "extraKbnOpts": undefined,
   "quiet": true,
   "suiteTags": Object {
@@ -116,6 +121,7 @@ Object {
     <absolute path>/foo,
   ],
   "createLogger": [Function],
+  "esFrom": "snapshot",
   "extraKbnOpts": undefined,
   "silent": true,
   "suiteTags": Object {
@@ -125,6 +131,22 @@ Object {
 }
 `;
 
+exports[`process options for run tests CLI accepts source value for $TEST_ES_FROM 1`] = `
+Object {
+  "assertNoneExcluded": false,
+  "configs": Array [
+    <absolute path>/foo,
+  ],
+  "createLogger": [Function],
+  "esFrom": "source",
+  "extraKbnOpts": undefined,
+  "suiteTags": Object {
+    "exclude": Array [],
+    "include": Array [],
+  },
+}
+`;
+
 exports[`process options for run tests CLI accepts source value for esFrom 1`] = `
 Object {
   "assertNoneExcluded": false,
@@ -148,6 +170,7 @@ Object {
     <absolute path>/foo,
   ],
   "createLogger": [Function],
+  "esFrom": "snapshot",
   "extraKbnOpts": undefined,
   "installDir": "foo",
   "suiteTags": Object {
@@ -164,6 +187,7 @@ Object {
     <absolute path>/foo,
   ],
   "createLogger": [Function],
+  "esFrom": "snapshot",
   "extraKbnOpts": undefined,
   "grep": "management",
   "suiteTags": Object {
@@ -180,6 +204,7 @@ Object {
     <absolute path>/foo,
   ],
   "createLogger": [Function],
+  "esFrom": "snapshot",
   "extraKbnOpts": undefined,
   "suiteTags": Object {
     "exclude": Array [],
@@ -188,3 +213,19 @@ Object {
   "verbose": true,
 }
 `;
+
+exports[`process options for run tests CLI prioritizes source flag over $TEST_ES_FROM 1`] = `
+Object {
+  "assertNoneExcluded": false,
+  "configs": Array [
+    <absolute path>/foo,
+  ],
+  "createLogger": [Function],
+  "esFrom": "snapshot",
+  "extraKbnOpts": undefined,
+  "suiteTags": Object {
+    "exclude": Array [],
+    "include": Array [],
+  },
+}
+`;
@@ -11,7 +11,7 @@ Usage:
 Options:
   --help                      Display this menu and exit.
   --config <file>             Pass in a config. Can pass in multiple configs.
-  --esFrom <snapshot|source>  Build Elasticsearch from source or run from snapshot. Default: snapshot
+  --esFrom <snapshot|source>  Build Elasticsearch from source or run from snapshot. Default: $TEST_ES_FROM or snapshot
   --kibana-install-dir <dir>  Run Kibana from existing install directory instead of from source.
   --bail                      Stop the test run at the first failure.
   --grep <pattern>            Pattern to select which tests to run.
@@ -32,7 +32,7 @@ const options = {
     arg: '<snapshot|source>',
     choices: ['snapshot', 'source'],
     desc: 'Build Elasticsearch from source or run from snapshot.',
-    default: 'snapshot',
+    defaultHelp: 'Default: $TEST_ES_FROM or snapshot',
   },
   'kibana-install-dir': {
     arg: '<dir>',
@@ -71,7 +71,7 @@ export function displayHelp() {
       return {
         ...option,
         usage: `${name} ${option.arg || ''}`,
-        default: option.default ? `Default: ${option.default}` : '',
+        default: option.defaultHelp || '',
       };
     })
     .map(option => {
@@ -106,6 +106,10 @@ export function processOptions(userOptions, defaultConfigPaths) {
     }
   }
 
+  if (!userOptions.esFrom) {
+    userOptions.esFrom = process.env.TEST_ES_FROM || 'snapshot';
+  }
+
   if (userOptions['kibana-install-dir']) {
     userOptions.installDir = userOptions['kibana-install-dir'];
     delete userOptions['kibana-install-dir'];
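The net lookup order after this change: an explicit `--esFrom` flag wins, then the `$TEST_ES_FROM` environment variable, then the `'snapshot'` default. A standalone sketch of that precedence (the `resolveEsFrom` helper is illustrative, not part of the source):

// Hypothetical helper illustrating the precedence implemented above:
// CLI flag > $TEST_ES_FROM environment variable > 'snapshot' default.
function resolveEsFrom(flagValue, env = process.env) {
  return flagValue || env.TEST_ES_FROM || 'snapshot';
}

console.log(resolveEsFrom('source'));                              // 'source'   (flag wins)
console.log(resolveEsFrom(undefined, { TEST_ES_FROM: 'source' })); // 'source'   (env fallback)
console.log(resolveEsFrom(undefined, {}));                         // 'snapshot' (default)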
@@ -22,6 +22,14 @@ import { createAbsolutePathSerializer } from '@kbn/dev-utils';
 
 expect.addSnapshotSerializer(createAbsolutePathSerializer(process.cwd()));
 
+const INITIAL_TEST_ES_FROM = process.env.TEST_ES_FROM;
+beforeEach(() => {
+  process.env.TEST_ES_FROM = 'snapshot';
+});
+afterEach(() => {
+  process.env.TEST_ES_FROM = INITIAL_TEST_ES_FROM;
+});
+
 describe('display help for run tests CLI', () => {
   it('displays as expected', () => {
     expect(displayHelp()).toMatchSnapshot();
@@ -73,6 +81,18 @@ describe('process options for run tests CLI', () => {
     expect(options).toMatchSnapshot();
   });
 
+  it('accepts source value for $TEST_ES_FROM', () => {
+    process.env.TEST_ES_FROM = 'source';
+    const options = processOptions({}, ['foo']);
+    expect(options).toMatchSnapshot();
+  });
+
+  it('prioritizes source flag over $TEST_ES_FROM', () => {
+    process.env.TEST_ES_FROM = 'source';
+    const options = processOptions({ esFrom: 'snapshot' }, ['foo']);
+    expect(options).toMatchSnapshot();
+  });
+
   it('rejects non-enum value for esFrom', () => {
     expect(() => {
       processOptions({ esFrom: 'butter' }, ['foo']);
@@ -30,7 +30,7 @@ jest.mock('../../tasks', () => ({
 
 describe('run tests CLI', () => {
   describe('options', () => {
-    const originalObjects = {};
+    const originalObjects = { process, console };
     const exitMock = jest.fn();
     const logMock = jest.fn(); // mock logging so we don't send output to the test results
     const argvMock = ['foo', 'foo'];
@@ -40,11 +40,13 @@ describe('run tests CLI', () => {
       argv: argvMock,
       stdout: new Writable(),
       cwd: jest.fn(),
+      env: {
+        ...originalObjects.process.env,
+        TEST_ES_FROM: 'snapshot',
+      },
     };
 
     beforeAll(() => {
-      originalObjects.process = process;
-      originalObjects.console = console;
       global.process = processMock;
       global.console = { log: logMock };
     });
@@ -56,6 +58,10 @@ describe('run tests CLI', () => {
 
     beforeEach(() => {
       global.process.argv = [...argvMock];
+      global.process.env = {
+        ...originalObjects.process.env,
+        TEST_ES_FROM: 'snapshot',
+      };
       jest.resetAllMocks();
     });
@@ -11,7 +11,7 @@ Usage:
 Options:
   --help                           Display this menu and exit.
   --config <file>                  Pass in a config
-  --esFrom <snapshot|source|path>  Build Elasticsearch from source, snapshot or path to existing install dir. Default: snapshot
+  --esFrom <snapshot|source|path>  Build Elasticsearch from source, snapshot or path to existing install dir. Default: $TEST_ES_FROM or snapshot
   --kibana-install-dir <dir>       Run Kibana from existing install directory instead of from source.
   --verbose                        Log everything.
   --debug                          Run in debug mode.
@@ -72,6 +72,15 @@ Object {
 }
 `;
 
+exports[`process options for start servers CLI accepts source value for $TEST_ES_FROM 1`] = `
+Object {
+  "config": <absolute path>/foo,
+  "createLogger": [Function],
+  "esFrom": "source",
+  "extraKbnOpts": undefined,
+}
+`;
+
 exports[`process options for start servers CLI accepts source value for esFrom 1`] = `
 Object {
   "config": <absolute path>/foo,
@@ -100,3 +109,12 @@ Object {
   "verbose": true,
 }
 `;
+
+exports[`process options for start servers CLI prioritizes source flag over $TEST_ES_FROM 1`] = `
+Object {
+  "config": <absolute path>/foo,
+  "createLogger": [Function],
+  "esFrom": "snapshot",
+  "extraKbnOpts": undefined,
+}
+`;
@@ -31,7 +31,7 @@ const options = {
   esFrom: {
     arg: '<snapshot|source|path>',
     desc: 'Build Elasticsearch from source, snapshot or path to existing install dir.',
-    default: 'snapshot',
+    defaultHelp: 'Default: $TEST_ES_FROM or snapshot',
   },
   'kibana-install-dir': {
     arg: '<dir>',
@@ -51,7 +51,7 @@ export function displayHelp() {
       return {
         ...option,
         usage: `${name} ${option.arg || ''}`,
-        default: option.default ? `Default: ${option.default}` : '',
+        default: option.defaultHelp || '',
       };
     })
     .map(option => {
@@ -82,7 +82,7 @@ export function processOptions(userOptions, defaultConfigPath) {
   }
 
   if (!userOptions.esFrom) {
-    userOptions.esFrom = 'snapshot';
+    userOptions.esFrom = process.env.TEST_ES_FROM || 'snapshot';
   }
 
   if (userOptions['kibana-install-dir']) {
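This mirrors the run-tests CLI change above: the explicit `--esFrom` flag still takes precedence, `$TEST_ES_FROM` fills in when the flag is absent, and `'snapshot'` remains the final fallback (see the `resolveEsFrom` sketch earlier).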
@@ -22,6 +22,14 @@ import { createAbsolutePathSerializer } from '@kbn/dev-utils';
 
 expect.addSnapshotSerializer(createAbsolutePathSerializer(process.cwd()));
 
+const INITIAL_TEST_ES_FROM = process.env.TEST_ES_FROM;
+beforeEach(() => {
+  process.env.TEST_ES_FROM = 'snapshot';
+});
+afterEach(() => {
+  process.env.TEST_ES_FROM = INITIAL_TEST_ES_FROM;
+});
+
 describe('display help for start servers CLI', () => {
   it('displays as expected', () => {
     expect(displayHelp()).toMatchSnapshot();
@@ -68,6 +76,18 @@ describe('process options for start servers CLI', () => {
     expect(options).toMatchSnapshot();
   });
 
+  it('accepts source value for $TEST_ES_FROM', () => {
+    process.env.TEST_ES_FROM = 'source';
+    const options = processOptions({}, 'foo');
+    expect(options).toMatchSnapshot();
+  });
+
+  it('prioritizes source flag over $TEST_ES_FROM', () => {
+    process.env.TEST_ES_FROM = 'source';
+    const options = processOptions({ esFrom: 'snapshot' }, 'foo');
+    expect(options).toMatchSnapshot();
+  });
+
   it('accepts debug option', () => {
     const options = processOptions({ debug: true }, 'foo');
     expect(options).toMatchSnapshot();
@@ -30,7 +30,7 @@ jest.mock('../../tasks', () => ({
 
 describe('start servers CLI', () => {
   describe('options', () => {
-    const originalObjects = {};
+    const originalObjects = { process, console };
     const exitMock = jest.fn();
     const logMock = jest.fn(); // mock logging so we don't send output to the test results
     const argvMock = ['foo', 'foo'];
@@ -40,11 +40,13 @@ describe('start servers CLI', () => {
       argv: argvMock,
       stdout: new Writable(),
       cwd: jest.fn(),
+      env: {
+        ...originalObjects.process.env,
+        TEST_ES_FROM: 'snapshot',
+      },
     };
 
     beforeAll(() => {
-      originalObjects.process = process;
-      originalObjects.console = console;
       global.process = processMock;
       global.console = { log: logMock };
     });
@@ -56,6 +58,10 @@ describe('start servers CLI', () => {
 
     beforeEach(() => {
       global.process.argv = [...argvMock];
+      global.process.env = {
+        ...originalObjects.process.env,
+        TEST_ES_FROM: 'snapshot',
+      };
       jest.resetAllMocks();
     });
@@ -48,12 +48,12 @@ function checkout_sibling {
       return 0
     fi
 
-    cloneBranch="${PR_TARGET_BRANCH:-master}"
+    cloneBranch="${PR_TARGET_BRANCH:-$KIBANA_PKG_BRANCH}"
     if clone_target_is_valid ; then
       return 0
     fi
 
-    cloneBranch="master"
+    cloneBranch="$KIBANA_PKG_BRANCH"
     if clone_target_is_valid; then
       return 0
     fi
@@ -64,13 +64,15 @@ function checkout_sibling {
 
   function checkout_clone_target {
     pick_clone_target
-    if [[ $cloneBranch = "6.7" && $cloneAuthor = "elastic" ]]; then
-      export TEST_ES_FROM=snapshot
+
+    if [[ "$cloneAuthor/$cloneBranch" != "elastic/$KIBANA_PKG_BRANCH" ]]; then
+      echo " -> Setting TEST_ES_FROM=source so that ES in tests will be built from $cloneAuthor/$cloneBranch"
+      export TEST_ES_FROM=source
     fi
 
     echo " -> checking out '${cloneBranch}' branch from ${cloneAuthor}/${project}..."
     git clone -b "$cloneBranch" "git@github.com:${cloneAuthor}/${project}.git" "$targetDir" --depth=1
-    echo " -> checked out ${project} revision: $(git -C ${targetDir} rev-parse HEAD)"
+    echo " -> checked out ${project} revision: $(git -C "${targetDir}" rev-parse HEAD)"
     echo
   }
 
@@ -86,6 +88,7 @@ function checkout_sibling {
 }
 
 checkout_sibling "elasticsearch" "${PARENT_DIR}/elasticsearch" "USE_EXISTING_ES"
+export TEST_ES_FROM=${TEST_ES_FROM:-snapshot}
 
 # Set the JAVA_HOME based on the Java property file in the ES repo
 # This assumes the naming convention used on CI (ex: ~/.java/java10)
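Net effect of these changes: whenever the sibling Elasticsearch checkout is anything other than `elastic/$KIBANA_PKG_BRANCH`, tests build Elasticsearch from source; otherwise the trailing `export TEST_ES_FROM=${TEST_ES_FROM:-snapshot}` leaves the snapshot default in place, so the CI job scripts below no longer need to export `TEST_ES_FROM` themselves.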
@@ -30,14 +30,22 @@ else
   exit 1
 fi
 
 
 export KIBANA_DIR="$dir"
 export XPACK_DIR="$KIBANA_DIR/x-pack"
-export PARENT_DIR="$(cd "$KIBANA_DIR/.."; pwd)"
 export NODE_OPTIONS="--max_old_space_size=2048"
-echo "-> KIBANA_DIR $KIBANA_DIR"
-echo "-> XPACK_DIR $XPACK_DIR"
-echo "-> PARENT_DIR $PARENT_DIR"
-echo "-> NODE_OPTIONS $NODE_OPTIONS"
+
+parentDir="$(cd "$KIBANA_DIR/.."; pwd)"
+export PARENT_DIR="$parentDir"
+
+kbnBranch="$(jq -r .branch "$KIBANA_DIR/package.json")"
+export KIBANA_PKG_BRANCH="$kbnBranch"
+
+echo " -- KIBANA_DIR='$KIBANA_DIR'"
+echo " -- XPACK_DIR='$XPACK_DIR'"
+echo " -- PARENT_DIR='$PARENT_DIR'"
+echo " -- NODE_OPTIONS='$NODE_OPTIONS'"
+echo " -- KIBANA_PKG_BRANCH='$KIBANA_PKG_BRANCH'"
 
 ###
 ### download node
@@ -81,7 +89,6 @@ else
   else
     curl --silent "$nodeUrl" | tar -xz -C "$nodeDir" --strip-components=1
   fi
-
 fi
 
 ###
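`jq -r .branch` pulls the `branch` field out of Kibana's `package.json`. For reference, the same lookup in Node (a sketch; assumes the working directory is the Kibana checkout root):

// Node equivalent of: jq -r .branch "$KIBANA_DIR/package.json"
// (illustrative only; the CI scripts use jq so they don't depend on Node being set up yet)
const { branch } = require('./package.json');
console.log(branch); // e.g. '6.7' on the 6.7 branch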
@@ -65,7 +65,6 @@ module.exports = function (grunt) {
     '--server.port=5610',
   ];
 
-  const esFrom = process.env.TEST_ES_FROM || 'source';
   return {
     // used by the test and jenkins:unit tasks
     // runs the eslint script to check for linting errors
@@ -195,7 +194,6 @@ module.exports = function (grunt) {
       args: [
        'scripts/functional_tests',
        '--config', 'test/api_integration/config.js',
-        '--esFrom', esFrom,
        '--bail',
        '--debug',
      ],
@@ -207,7 +205,6 @@ module.exports = function (grunt) {
        'scripts/functional_tests',
        '--config', 'test/server_integration/http/ssl/config.js',
        '--config', 'test/server_integration/http/ssl_redirect/config.js',
-        '--esFrom', esFrom,
        '--bail',
        '--debug',
        '--kibana-install-dir', KIBANA_INSTALL_DIR,
@@ -219,7 +216,6 @@ module.exports = function (grunt) {
      args: [
        'scripts/functional_tests',
        '--config', 'test/plugin_functional/config.js',
-        '--esFrom', esFrom,
        '--bail',
        '--debug',
        '--kibana-install-dir', KIBANA_INSTALL_DIR,
@@ -233,7 +229,6 @@ module.exports = function (grunt) {
      args: [
        'scripts/functional_tests',
        '--config', 'test/functional/config.js',
-        '--esFrom', esFrom,
        '--bail',
        '--debug',
        '--',
@@ -242,7 +237,6 @@ module.exports = function (grunt) {
    },
 
    ...getFunctionalTestGroupRunConfigs({
-      esFrom,
      kibanaInstallDir: KIBANA_INSTALL_DIR
    })
  };
@@ -39,7 +39,7 @@ const TEST_TAGS = [
   'ciGroup12'
 ];
 
-export function getFunctionalTestGroupRunConfigs({ esFrom, kibanaInstallDir } = {}) {
+export function getFunctionalTestGroupRunConfigs({ kibanaInstallDir } = {}) {
   return {
     // include a run task for each test group
     ...TEST_TAGS.reduce((acc, tag) => ({
@@ -50,7 +50,6 @@ export function getFunctionalTestGroupRunConfigs({ esFrom, kibanaInstallDir } =
         'scripts/functional_tests',
         '--include-tag', tag,
         '--config', 'test/functional/config.js',
-        '--esFrom', esFrom,
         '--bail',
         '--debug',
         '--kibana-install-dir', kibanaInstallDir,
@@ -13,17 +13,14 @@ function report {
 
 trap report EXIT
 
-source src/dev/ci_setup/checkout_sibling_es.sh
-
 "$(FORCE_COLOR=0 yarn bin)/grunt" functionalTests:ensureAllTestsInCiGroup;
 
 node scripts/build --debug --oss;
 
 export TEST_BROWSER_HEADLESS=1
-export TEST_ES_FROM=${TEST_ES_FROM:-source}
 
-"$(FORCE_COLOR=0 yarn bin)/grunt" "run:functionalTests_ciGroup${CI_GROUP}" --from=source;
+"$(FORCE_COLOR=0 yarn bin)/grunt" "run:functionalTests_ciGroup${CI_GROUP}";
 
 if [ "$CI_GROUP" == "1" ]; then
-  "$(FORCE_COLOR=0 yarn bin)/grunt" run:pluginFunctionalTestsRelease --from=source;
+  "$(FORCE_COLOR=0 yarn bin)/grunt" run:pluginFunctionalTestsRelease;
 fi
@@ -12,9 +12,6 @@ function report {
 
 trap report EXIT
 
-source src/dev/ci_setup/checkout_sibling_es.sh
-
 export TEST_BROWSER_HEADLESS=1
-export TEST_ES_FROM=${TEST_ES_FROM:-source}
 
-"$(FORCE_COLOR=0 yarn bin)/grunt" jenkins:unit --from=source --dev;
+"$(FORCE_COLOR=0 yarn bin)/grunt" jenkins:unit --dev;
@@ -13,8 +13,6 @@ function report {
 
 trap report EXIT
 
-source src/dev/ci_setup/checkout_sibling_es.sh
-
 export TEST_BROWSER_HEADLESS=1
 
 echo " -> Running mocha tests"
@@ -23,7 +21,6 @@ yarn test
 echo ""
 echo ""
 
-
 echo " -> Running jest tests"
 cd "$XPACK_DIR"
 node scripts/jest --ci --no-cache --verbose
@@ -13,8 +13,6 @@ function report {
 
 trap report EXIT
 
-source src/dev/ci_setup/checkout_sibling_es.sh
-
 export TEST_BROWSER_HEADLESS=1
 
 echo " -> Ensuring all functional tests are in a ciGroup"
@@ -35,7 +33,6 @@ installDir="$PARENT_DIR/install/kibana"
 mkdir -p "$installDir"
 tar -xzf "$linuxBuild" -C "$installDir" --strip=1
 
-export TEST_ES_FROM=${TEST_ES_FROM:-source}
 echo " -> Running functional and api tests"
 cd "$XPACK_DIR"
 node scripts/functional_tests --debug --bail --kibana-install-dir "$installDir" --include-tag "ciGroup$CI_GROUP"
@@ -7,7 +7,7 @@
 import expect from 'expect.js';
 import { mapColumn } from '../mapColumn';
 import { functionWrapper } from '../../../../__tests__/helpers/function_wrapper';
-import { testTable } from './fixtures/test_tables';
+import { testTable, emptyTable } from './fixtures/test_tables';
 
 const pricePlusTwo = datatable => Promise.resolve(datatable.rows[0].price + 2);
 
@@ -42,6 +42,16 @@ describe('mapColumn', () => {
     });
   });
 
+  it('adds a column to empty tables', () => {
+    return fn(emptyTable, { name: 'name', expression: pricePlusTwo }).then(result => {
+      expect(result.type).to.be('datatable');
+      expect(result.columns).to.have.length(1);
+      expect(result.columns[0])
+        .to.have.property('name', 'name')
+        .and.to.have.property('type', 'null');
+    });
+  });
+
   describe('expression', () => {
     it('maps null values to the new column', () => {
       return fn(testTable, { name: 'empty' }).then(result => {
@@ -7,7 +7,7 @@
 import expect from 'expect.js';
 import { staticColumn } from '../staticColumn';
 import { functionWrapper } from '../../../../__tests__/helpers/function_wrapper';
-import { testTable } from './fixtures/test_tables';
+import { testTable, emptyTable } from './fixtures/test_tables';
 
 describe('staticColumn', () => {
   const fn = functionWrapper(staticColumn);
@@ -37,4 +37,12 @@ describe('staticColumn', () => {
     expect(result.columns).to.eql([...testTable.columns, { name: 'empty', type: 'null' }]);
     expect(result.rows.every(row => row.empty === null)).to.be(true);
   });
+
+  it('adds a column to empty tables', () => {
+    const result = fn(emptyTable, { name: 'empty', value: 1 });
+
+    expect(result.type).to.be('datatable');
+    expect(result.columns).to.eql([{ name: 'empty', type: 'number' }]);
+    expect(result.rows.length).to.be(0);
+  });
 });
@@ -47,7 +47,7 @@ export const mapColumn = () => ({
 
     return Promise.all(rowPromises).then(rows => {
       const existingColumnIndex = columns.findIndex(({ name }) => name === args.name);
-      const type = getType(rows[0][args.name]);
+      const type = rows.length ? getType(rows[0][args.name]) : 'null';
       const newColumn = { name: args.name, type };
       if (existingColumnIndex === -1) {
         columns.push(newColumn);
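The guard above fixes a crash on empty tables: with zero rows, `rows[0]` is `undefined`, so the old `rows[0][args.name]` threw a TypeError. A standalone sketch of the failure mode (`getType` here is a stand-in, not the real helper):

// Stand-in for the interpreter's getType helper (maps a value to a type name).
const getType = value => (value === null ? 'null' : typeof value);

const rows = []; // an empty datatable has no rows
// Before: getType(rows[0]['price']) -> TypeError, because rows[0] is undefined.
// After: guard on rows.length and fall back to the 'null' type.
const type = rows.length ? getType(rows[0].price) : 'null';
console.log(type); // 'null'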
@@ -29,7 +29,7 @@ export const staticColumn = () => ({
   },
   fn: (context, args) => {
     const rows = context.rows.map(row => ({ ...row, [args.name]: args.value }));
-    const type = getType(rows[0][args.name]);
+    const type = getType(args.value);
     const columns = [...context.columns];
     const existingColumnIndex = columns.findIndex(({ name }) => name === args.name);
     const newColumn = { name: args.name, type };
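Same empty-table bug, simpler fix: `staticColumn` assigns the same `args.value` to every row, so the column type can be derived directly from the argument rather than from a (possibly absent) first row — exactly what the new "adds a column to empty tables" test above exercises.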
@@ -1,9 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-
-import { to } from './to';
-
-export const commonFunctions = [to];
@@ -5,14 +5,13 @@
 */
 
 import { routes } from './server/routes';
-import { commonFunctions } from './common/functions';
 import { registerCanvasUsageCollector } from './server/usage';
 import { functions } from './canvas_plugin_src/functions/server';
 import { loadSampleData } from './server/sample_data';
 
 export default async function(server /*options*/) {
   const { serverFunctions } = server.plugins.interpreter.register({
-    serverFunctions: commonFunctions.concat(functions),
+    serverFunctions: functions,
   });
 
   server.injectUiAppVars('canvas', async () => {
@@ -26,7 +26,6 @@ import { tagSpecs } from '../../../canvas_plugin_src/uis/tags';
 import { functions as browserFunctions } from '../../../canvas_plugin_src/functions/browser';
 import { functions as commonPluginFunctions } from '../../../canvas_plugin_src/functions/common';
 import { templateSpecs } from '../../../canvas_plugin_src/templates';
-import { commonFunctions } from '../../../common/functions';
 import { clientFunctions } from '../../functions';
 
 import {
@@ -69,10 +68,7 @@ register(registries, {
   viewUIs: viewSpecs,
   datasourceUIs: datasourceSpecs,
   argumentUIs: argSpecs,
-  browserFunctions: browserFunctions
-    .concat(commonFunctions)
-    .concat(clientFunctions)
-    .concat(commonPluginFunctions),
+  browserFunctions: browserFunctions.concat(clientFunctions).concat(commonPluginFunctions),
   templates: templateSpecs,
   tagUIs: tagSpecs,
 });
@@ -5,6 +5,11 @@ body.canvas-isFullscreen {
   left: 0;
 }

+// hide global loading indicator
+.kbnLoadingIndicator {
+  display: none;
+}
+
 // set the background color
 .canvasLayout {
   background: #000; // This hex is OK, we always want it black
@@ -14,7 +14,6 @@ import {
   EuiSpacer,
   EuiFlexGroup,
   EuiFlexItem,
-  EuiBetaBadge,
   EuiLink,
 } from '@elastic/eui';
 import { WorkpadLoader } from '../workpad_loader';
@@ -51,12 +50,6 @@ export const WorkpadManager = ({ onClose }) => {
         <EuiFlexItem grow={false}>
           <EuiModalHeaderTitle>Canvas workpads</EuiModalHeaderTitle>
         </EuiFlexItem>
-        <EuiFlexItem grow={false}>
-          <EuiBetaBadge
-            label="Beta"
-            tooltipContent="Canvas is still in beta. Please help us improve by reporting issues or bugs in the Kibana repo."
-          />
-        </EuiFlexItem>
         <EuiFlexItem grow={false}>
           <EuiLink href={documentationLinks.canvas} target="_blank">
             Docs
@@ -7,5 +7,6 @@
 import { asset } from './asset';
 import { filters } from './filters';
 import { timelion } from './timelion';
+import { to } from './to';

-export const clientFunctions = [asset, filters, timelion];
+export const clientFunctions = [asset, filters, timelion, to];
@@ -5,6 +5,7 @@
  */

 import { castProvider } from '@kbn/interpreter/common';
+import { registries } from '@kbn/interpreter/public';

 export const to = () => ({
   name: 'to',
@@ -19,11 +20,11 @@ export const to = () => ({
       multi: true,
     },
   },
-  fn: (context, args, { types }) => {
+  fn: (context, args) => {
     if (!args.type) {
       throw new Error('Must specify a casting type');
     }

-    return castProvider(types)(context, args.type);
+    return castProvider(registries.types.toJS())(context, args.type);
   },
 });
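For context on the hunks above: `to` no longer receives the interpreter's type map through its third handler argument; it now reads the registered types from `registries` at call time, so the function can run client-side. A minimal sketch of the assumed cast flow (the sample type name is illustrative, not from the commit):

    // Resolve a cast function against whatever types are currently registered.
    const cast = castProvider(registries.types.toJS());
    // Cast the piped-in context to the first compatible type listed in args.type.
    const result = cast(context, ['string']); // args.type is multi, so an array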
@@ -23,6 +23,21 @@ const EMPTY_FEATURE_COLLECTION = {
   features: []
 };

+
+const CLOSED_SHAPE_MB_FILTER = [
+  'any',
+  ['==', ['geometry-type'], 'Polygon'],
+  ['==', ['geometry-type'], 'MultiPolygon']
+];
+
+const ALL_SHAPE_MB_FILTER = [
+  'any',
+  ['==', ['geometry-type'], 'Polygon'],
+  ['==', ['geometry-type'], 'MultiPolygon'],
+  ['==', ['geometry-type'], 'LineString'],
+  ['==', ['geometry-type'], 'MultiLineString']
+];
+
 export class VectorLayer extends AbstractLayer {

   static type = 'VECTOR';
@@ -431,13 +446,7 @@ export class VectorLayer extends AbstractLayer {
         source: sourceId,
         paint: {}
       });
-      mbMap.setFilter(fillLayerId, [
-        'any',
-        ['==', ['geometry-type'], 'Polygon'],
-        ['==', ['geometry-type'], 'MultiPolygon'],
-        ['==', ['geometry-type'], 'LineString'],
-        ['==', ['geometry-type'], 'MultiLineString']
-      ]);
+      mbMap.setFilter(fillLayerId, CLOSED_SHAPE_MB_FILTER);
     }
     if (!mbMap.getLayer(lineLayerId)) {
       mbMap.addLayer({
@@ -446,13 +455,7 @@ export class VectorLayer extends AbstractLayer {
         source: sourceId,
         paint: {}
       });
-      mbMap.setFilter(lineLayerId, [
-        'any',
-        ['==', ['geometry-type'], 'Polygon'],
-        ['==', ['geometry-type'], 'MultiPolygon'],
-        ['==', ['geometry-type'], 'LineString'],
-        ['==', ['geometry-type'], 'MultiLineString']
-      ]);
+      mbMap.setFilter(lineLayerId, ALL_SHAPE_MB_FILTER);
     }
     this._style.setMBPaintProperties({
       alpha: this.getAlpha(),
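The two hunks above replace the inline filter arrays with the shared constants, and in doing so stop the fill layer from matching line geometries (previously fills and lines used the same filter). As a rough sketch of the Mapbox GL expression semantics relied on here, 'any' is a logical OR over the geometry-type checks (the layer id below is illustrative):

    // Only closed shapes pass this filter, so fills are never painted on lines.
    mbMap.setFilter('example_fill', [
      'any',
      ['==', ['geometry-type'], 'Polygon'],
      ['==', ['geometry-type'], 'MultiPolygon']
    ]);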
@@ -42,6 +42,7 @@ const indexPrivileges = [
   'read_cross_cluster',
   'manage_follow_index',
   'manage_ilm',
+  'manage_leader_index',
 ];

 export function getClusterPrivileges() {
@@ -46,6 +46,7 @@ module.constant('shieldPrivileges', {
     'read_cross_cluster',
     'manage_follow_index',
     'manage_ilm',
+    'manage_leader_index',
   ],
   applications: []
 });
@@ -141,6 +141,10 @@ exports[`it renders without crashing 1`] = `
         "isGroupLabelOption": false,
         "label": "manage_ilm",
       },
+      Object {
+        "isGroupLabelOption": false,
+        "label": "manage_leader_index",
+      },
     ]
   }
   selectedOptions={Array []}
@@ -22,11 +22,47 @@ describe('filterDeps', () => {
     expect(fd({ level: 'warning' } as DeprecationInfo)).toBe(false);
   });

-  test('filters on search', () => {
+  test('filters on title search', () => {
     const fd = filterDeps(LevelFilterOption.critical, 'wow');
     expect(fd({ level: 'critical', message: 'the wow error' } as DeprecationInfo)).toBe(true);
     expect(fd({ level: 'critical', message: 'other error' } as DeprecationInfo)).toBe(false);
   });

+  test('filters on index search', () => {
+    const fd = filterDeps(LevelFilterOption.critical, 'myIndex');
+    expect(
+      fd({
+        level: 'critical',
+        message: 'the wow error',
+        index: 'myIndex-2',
+      } as EnrichedDeprecationInfo)
+    ).toBe(true);
+    expect(
+      fd({
+        level: 'critical',
+        message: 'other error',
+        index: 'notIndex',
+      } as EnrichedDeprecationInfo)
+    ).toBe(false);
+  });
+
+  test('filters on node search', () => {
+    const fd = filterDeps(LevelFilterOption.critical, 'myNode');
+    expect(
+      fd({
+        level: 'critical',
+        message: 'the wow error',
+        index: 'myNode-123',
+      } as EnrichedDeprecationInfo)
+    ).toBe(true);
+    expect(
+      fd({
+        level: 'critical',
+        message: 'other error',
+        index: 'notNode',
+      } as EnrichedDeprecationInfo)
+    ).toBe(false);
+  });
 });

 describe('GroupedDeprecations', () => {
@@ -28,7 +28,7 @@ import { DeprecationList } from './list';

 // exported only for testing
 export const filterDeps = (level: LevelFilterOption, search: string = '') => {
-  const conditions: Array<(dep: DeprecationInfo) => boolean> = [];
+  const conditions: Array<(dep: EnrichedDeprecationInfo) => boolean> = [];

   if (level !== LevelFilterOption.all) {
     conditions.push((dep: DeprecationInfo) => dep.level === level);
@@ -41,7 +41,9 @@ export const filterDeps = (level: LevelFilterOption, search: string = '') => {
         const searchReg = new RegExp(search.toLowerCase());
         return Boolean(
           dep.message.toLowerCase().match(searchReg) ||
-            (dep.details && dep.details.match(searchReg))
+            (dep.details && dep.details.toLowerCase().match(searchReg)) ||
+            (dep.index && dep.index.toLowerCase().match(searchReg)) ||
+            (dep.node && dep.node.toLowerCase().match(searchReg))
         );
       } catch (e) {
         // ignore any regexp errors.
@@ -51,7 +53,7 @@ export const filterDeps = (level: LevelFilterOption, search: string = '') => {
   }

   // Return true if every condition function returns true (boolean AND)
-  return (dep: DeprecationInfo) => conditions.map(c => c(dep)).every(t => t);
+  return (dep: EnrichedDeprecationInfo) => conditions.map(c => c(dep)).every(t => t);
 };

 /**
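The filter above works by collecting independent predicates and AND-ing them over each deprecation. A stripped-down sketch of that composition (the sample predicates and data are hypothetical):

    const conditions = [
      dep => dep.level === 'critical',                 // level filter
      dep => /wow/.test(dep.message.toLowerCase()),    // search filter
    ];
    const matches = dep => conditions.every(c => c(dep));
    matches({ level: 'critical', message: 'the wow error' }); // true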
@@ -412,12 +412,9 @@ export const reindexServiceFactory = (
     const { count } = await callCluster('count', { index: reindexOp.attributes.indexName });

     if (taskResponse.task.status.created < count) {
-      if (taskResponse.response.failures && taskResponse.response.failures.length > 0) {
-        const failureExample = JSON.stringify(taskResponse.response.failures[0]);
-        throw Boom.badData(`Reindexing failed with failures like: ${failureExample}`);
-      } else {
-        throw Boom.badData('Reindexing failed due to new documents created in original index.');
-      }
+      // Include the entire task result in the error message. This should be guaranteed
+      // to be JSON-serializable since it just came back from Elasticsearch.
+      throw Boom.badData(`Reindexing failed: ${JSON.stringify(taskResponse)}`);
     }

     // Update the status
@@ -16,26 +16,58 @@ import { registerClusterCheckupRoutes } from './cluster_checkup';
 const MigrationApis = require('../lib/es_migration_apis');
 MigrationApis.getUpgradeAssistantStatus = jest.fn();

+function register(plugins = {}) {
+  const server = new Server();
+  server.plugins = {
+    elasticsearch: {
+      getCluster: () => ({ callWithRequest: jest.fn() } as any),
+    } as any,
+    ...plugins,
+  } as any;
+  server.config = () => ({ get: () => '' } as any);
+
+  registerClusterCheckupRoutes(server);
+
+  return server;
+}
+
 /**
  * Since these route callbacks are so thin, these serve simply as integration tests
  * to ensure they're wired up to the lib functions correctly. Business logic is tested
  * more thoroughly in the es_migration_apis test.
  */
 describe('cluster checkup API', () => {
-  const server = new Server();
-  server.plugins = {
-    elasticsearch: {
-      getCluster: () => ({ callWithRequest: jest.fn() } as any),
-    } as any,
-    cloud: {
-      config: { isCloudEnabled: false },
-    },
-  } as any;
-  server.config = () => ({ get: () => '' } as any);
+  const spy = jest.spyOn(MigrationApis, 'getUpgradeAssistantStatus');

-  registerClusterCheckupRoutes(server);
+  afterEach(() => jest.clearAllMocks());

+  describe('with cloud enabled', () => {
+    it('is provided to getUpgradeAssistantStatus', async () => {
+      const server = register({
+        cloud: {
+          config: {
+            isCloudEnabled: true,
+          },
+        },
+      });
+
+      MigrationApis.getUpgradeAssistantStatus.mockResolvedValue({
+        cluster: [],
+        indices: [],
+        nodes: [],
+      });
+      await server.inject({
+        method: 'GET',
+        url: '/api/upgrade_assistant/status',
+      });
+
+      expect(spy.mock.calls[0][2]).toBe(true);
+    });
+  });
+
   describe('GET /api/upgrade_assistant/reindex/{indexName}.json', () => {
+    const server = register();
+
     it('returns state', async () => {
       MigrationApis.getUpgradeAssistantStatus.mockResolvedValue({
         cluster: [],
@@ -6,13 +6,14 @@

 import Boom from 'boom';
 import { Legacy } from 'kibana';
+import _ from 'lodash';

 import { getUpgradeAssistantStatus } from '../lib/es_migration_apis';
 import { EsVersionPrecheck } from '../lib/es_version_precheck';

 export function registerClusterCheckupRoutes(server: Legacy.Server) {
   const { callWithRequest } = server.plugins.elasticsearch.getCluster('admin');
-  const { isCloudEnabled } = server.plugins.cloud.config;
+  const isCloudEnabled = _.get(server.plugins, 'cloud.config.isCloudEnabled', false);

   server.route({
     path: '/api/upgrade_assistant/status',
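The switch to `_.get` matters because `cloud` is now an optional plugin (see the `hapi.d.ts` hunk below): destructuring `server.plugins.cloud.config` throws when the plugin is absent, while `_.get` falls back to a default. A minimal illustration, assuming no cloud plugin is installed:

    const plugins = {}; // cloud plugin not installed
    // const { isCloudEnabled } = plugins.cloud.config;  // TypeError: cloud is undefined
    const isCloudEnabled = _.get(plugins, 'cloud.config.isCloudEnabled', false); // false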
@@ -8,7 +8,7 @@ export default function ({ getService, loadTestFile }) {
   const esArchiver = getService('esArchiver');
   const archive = 'uptime/full_heartbeat';

-  describe('graphql queries', () => {
+  describe('graphql', () => {
     before('load heartbeat data', async () => await esArchiver.load(archive));
     after('unload heartbeat index', async () => await esArchiver.unload(archive));
     // each of these test files imports a GQL query from
@@ -8,9 +8,9 @@ export default function ({ getService, loadTestFile }) {
   const es = getService('es');

   describe('uptime', () => {
-    beforeEach(() =>
+    before(() =>
       es.indices.delete({
-        index: 'heartbeat',
+        index: 'heartbeat*',
         ignore: [404],
       }));

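Two behaviors change in the hunk above: cleanup now runs once per suite (`before`) instead of before every test, and the pattern `heartbeat*` also matches date-suffixed indices the archive may create. A sketch of the assumed effect (index names are illustrative):

    // Deletes heartbeat, heartbeat-7.0.0-2019.02.28, etc.;
    // the 404 is ignored when nothing matches.
    await es.indices.delete({ index: 'heartbeat*', ignore: [404] });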
@@ -11,8 +11,11 @@ const EXPECTED_JOIN_VALUES = {
   alpha: 10,
   bravo: 3,
   charlie: 12,
+  tango: undefined
 };

+const VECTOR_SOURCE_ID = 'n1t6f';
+
 export default function ({ getPageObjects, getService }) {
   const PageObjects = getPageObjects(['maps']);
   const inspector = getService('inspector');
@@ -44,14 +47,41 @@ export default function ({ getPageObjects, getService }) {

     it('should decorate feature properties with join property', async () => {
       const mapboxStyle = await PageObjects.maps.getMapboxStyle();
-      expect(mapboxStyle.sources.n1t6f.data.features.length).to.equal(3);
+      expect(mapboxStyle.sources[VECTOR_SOURCE_ID].data.features.length).to.equal(4);

       mapboxStyle.sources.n1t6f.data.features.forEach(({ properties }) => {
-        expect(properties.hasOwnProperty(JOIN_PROPERTY_NAME)).to.be(true);
+        if (properties.name === 'tango') {
+          //left join, which means we won't rescale joins that do not match
+          expect(properties.hasOwnProperty(JOIN_PROPERTY_NAME)).to.be(false);
+        } else {
+          expect(properties.hasOwnProperty(JOIN_PROPERTY_NAME)).to.be(true);
+        }
         expect(properties[JOIN_PROPERTY_NAME]).to.be(EXPECTED_JOIN_VALUES[properties.name]);
       });
     });

+
+    it('should style fills, points and lines independently', async () => {
+      const mapboxStyle = await PageObjects.maps.getMapboxStyle();
+      const layersForVectorSource = mapboxStyle.layers.filter(mbLayer => {
+        return mbLayer.id.startsWith(VECTOR_SOURCE_ID);
+      });
+
+      //circle layer for points
+      // eslint-disable-next-line max-len
+      expect(layersForVectorSource[0]).to.eql({ 'id': 'n1t6f_circle', 'type': 'circle', 'source': 'n1t6f', 'minzoom': 0, 'maxzoom': 24, 'filter': ['any', ['==', ['geometry-type'], 'Point'], ['==', ['geometry-type'], 'MultiPoint']], 'paint': { 'circle-color': ['interpolate', ['linear'], ['coalesce', ['get', '__kbn__scaled(__kbnjoin__max_of_prop1_groupby_meta_for_geo_shapes*.shape_name)'], -1], -1, 'rgba(0,0,0,0)', 0, '#f7faff', 0.125, '#ddeaf7', 0.25, '#c5daee', 0.375, '#9dc9e0', 0.5, '#6aadd5', 0.625, '#4191c5', 0.75, '#2070b4', 0.875, '#072f6b'], 'circle-opacity': 0.75, 'circle-stroke-color': '#FFFFFF', 'circle-stroke-opacity': 0.75, 'circle-stroke-width': 1, 'circle-radius': 10 } });
+
+      //fill layer
+      // eslint-disable-next-line max-len
+      expect(layersForVectorSource[1]).to.eql({ 'id': 'n1t6f_fill', 'type': 'fill', 'source': 'n1t6f', 'minzoom': 0, 'maxzoom': 24, 'filter': ['any', ['==', ['geometry-type'], 'Polygon'], ['==', ['geometry-type'], 'MultiPolygon']], 'paint': { 'fill-color': ['interpolate', ['linear'], ['coalesce', ['get', '__kbn__scaled(__kbnjoin__max_of_prop1_groupby_meta_for_geo_shapes*.shape_name)'], -1], -1, 'rgba(0,0,0,0)', 0, '#f7faff', 0.125, '#ddeaf7', 0.25, '#c5daee', 0.375, '#9dc9e0', 0.5, '#6aadd5', 0.625, '#4191c5', 0.75, '#2070b4', 0.875, '#072f6b'], 'fill-opacity': 0.75 } }
+      );
+
+      //line layer for borders
+      // eslint-disable-next-line max-len
+      expect(layersForVectorSource[2]).to.eql({ 'id': 'n1t6f_line', 'type': 'line', 'source': 'n1t6f', 'minzoom': 0, 'maxzoom': 24, 'filter': ['any', ['==', ['geometry-type'], 'Polygon'], ['==', ['geometry-type'], 'MultiPolygon'], ['==', ['geometry-type'], 'LineString'], ['==', ['geometry-type'], 'MultiLineString']], 'paint': { 'line-color': '#FFFFFF', 'line-opacity': 0.75, 'line-width': 1 } });
+
+    });

     describe('inspector', () => {
       afterEach(async () => {
         await inspector.close();
@@ -59,6 +59,40 @@
   }
 }

+
+{
+  "type": "doc",
+  "value": {
+    "index": "geo_shapes",
+    "type": "doc",
+    "id": "4",
+    "source": {
+      "geometry": {
+        "type": "linestring",
+        "coordinates": [
+          [
+            40,
+            5
+          ],
+          [
+            35,
+            -5
+          ],
+          [
+            45,
+            -5
+          ],
+          [
+            40,
+            5
+          ]
+        ]
+      },
+      "name": "tango"
+    }
+  }
+}
+
 {
   "type": "doc",
   "value": {
x-pack/typings/hapi.d.ts (vendored): 2 changes
@@ -11,7 +11,7 @@ import { XPackMainPlugin } from 'x-pack/plugins/xpack_main/xpack_main';

 declare module 'hapi' {
   interface PluginProperties {
-    cloud: CloudPlugin;
+    cloud?: CloudPlugin;
     xpack_main: XPackMainPlugin;
   }
 }