Merge branch 'master' of github.com:elastic/kibana into feature-secops
.ci/jobs.yml
@@ -1,27 +1,26 @@
JOB:
  - kibana-intake
  - x-pack-intake
  # make sure all kibana-ciGRoups are listed in tasks/function_test_groups.js
  #- kibana-ciGroup1
  #- kibana-ciGroup2
  #- kibana-ciGroup3
  #- kibana-ciGroup4
  #- kibana-ciGroup5
  #- kibana-ciGroup6
  #- kibana-ciGroup7
  #- kibana-ciGroup8
  #- kibana-ciGroup9
  #- kibana-ciGroup10
  #- kibana-ciGroup11
  #- kibana-ciGroup12
  # - kibana-ciGroup1
  # - kibana-ciGroup2
  # - kibana-ciGroup3
  # - kibana-ciGroup4
  # - kibana-ciGroup5
  # - kibana-ciGroup6
  # - kibana-ciGroup7
  # - kibana-ciGroup8
  # - kibana-ciGroup9
  # - kibana-ciGroup10
  # - kibana-ciGroup11
  # - kibana-ciGroup12
  # make sure all x-pack-ciGroups are listed in test/scripts/jenkins_xpack_ci_group.sh
  #- x-pack-ciGroup1
  #- x-pack-ciGroup2
  #- x-pack-ciGroup3
  #- x-pack-ciGroup4
  - x-pack-ciGroup5
  #- x-pack-ciGroup6
  #- x-pack-ciGroup7
  # - x-pack-ciGroup1
  # - x-pack-ciGroup2
  # - x-pack-ciGroup3
  # - x-pack-ciGroup4
  # - x-pack-ciGroup5
  - x-pack-ciGroup6
  # - x-pack-ciGroup7

# `~` is yaml for `null`
exclude: ~
@@ -2,24 +2,50 @@
set -e

branch="$(git rev-parse --abbrev-ref HEAD 2> /dev/null)"

# run setup script that gives us node, yarn, and bootstraps the project
source src/dev/ci_setup/setup.sh;

# download es snapshots
node scripts/es snapshot --download-only;
node scripts/es snapshot --license=oss --download-only;

# download reporting browsers
cd "x-pack";
yarn gulp prepare;
cd -;

# cache the chromedriver bin
chromedriverDistVersion="$(node -e "console.log(require('chromedriver').version)")"
chromedriverPkgVersion="$(node -e "console.log(require('./package.json').devDependencies.chromedriver)")"
if [ -z "$chromedriverDistVersion" ] || [ -z "$chromedriverPkgVersion" ]; then
  echo "UNABLE TO DETERMINE CHROMEDRIVER VERSIONS"
  exit 1
fi

mkdir ".chromedriver"
curl "https://chromedriver.storage.googleapis.com/$chromedriverDistVersion/chromedriver_linux64.zip" > .chromedriver/chromedriver.zip
echo "$chromedriverPkgVersion" > .chromedriver/pkgVersion

# archive cacheable directories
mkdir -p "$HOME/.kibana/bootstrap_cache"
tar -cf "$HOME/.kibana/bootstrap_cache/master.tar" \
tar -cf "$HOME/.kibana/bootstrap_cache/$branch.tar" \
  node_modules \
  packages/*/node_modules \
  x-pack/node_modules \
  x-pack/plugins/*/node_modules \
  x-pack/plugins/reporting/.chromium \
  test/plugin_functional/plugins/*/node_modules \
  .es;
  .es \
  .chromedriver;

echo "created $HOME/.kibana/bootstrap_cache/$branch.tar"

if [ "$branch" == "master" ]; then
  echo "Creating bootstrap cache for 7.x";

  git clone https://github.com/elastic/kibana.git --branch 7.x --depth 1 /tmp/kibana-7.x
  (cd /tmp/kibana-7.x && ./.ci/packer_cache.sh);
  rm -rf /tmp/kibana-7.x;
fi
@@ -6,6 +6,7 @@ bower_components
/plugins
/optimize
/built_assets
/html_docs
/src/fixtures/vislib/mock_data
/src/legacy/ui/public/angular-bootstrap
/src/legacy/ui/public/flot-charts
.eslintrc.js
@@ -109,7 +109,7 @@ module.exports = {
    // instructs import/no-extraneous-dependencies to treat modules
    // in plugins/ or ui/ namespace as "core modules" so they don't
    // trigger failures for not being listed in package.json
    'import/core-modules': ['plugins', 'ui', 'uiExports'],
    'import/core-modules': ['plugins', 'legacy/ui', 'uiExports'],

    'import/resolver': {
      '@kbn/eslint-import-resolver-kibana': {

@@ -208,7 +208,8 @@ module.exports = {
   */
  {
    files: [
      'test/functional/services/lib/leadfoot_element_wrapper/scroll_into_view_if_necessary.js',
      'test/functional/services/lib/web_element_wrapper/scroll_into_view_if_necessary.js',
      '**/browser_exec_scripts/**/*',
    ],
    rules: {
      'prefer-object-spread/prefer-object-spread': 'off',

@@ -220,6 +221,7 @@ module.exports = {
      'ArrowFunctionExpression',
      'AwaitExpression',
      'ClassDeclaration',
      'ImportDeclaration',
      'RestElement',
      'SpreadElement',
      'YieldExpression',

@@ -284,16 +286,6 @@ module.exports = {
    },
  },

  /**
   * X-Pack global overrides
   */
  {
    files: ['x-pack/**/*'],
    rules: {
      quotes: 'off',
    },
  },

  /**
   * Files that require Elastic license headers instead of Apache 2.0 header
   */

@@ -366,7 +358,6 @@ module.exports = {
  {
    files: ['x-pack/plugins/ml/**/*'],
    rules: {
      quotes: 'error',
      'no-shadow': 'error',
    },
  },
.github/CODEOWNERS
@@ -14,6 +14,9 @@
# Machine Learning
/x-pack/plugins/ml/ @elastic/ml-ui

# Platform
/src/core/ @elastic/kibana-platform

# Security
/x-pack/plugins/security/ @elastic/kibana-security
/x-pack/plugins/spaces/ @elastic/kibana-security
@@ -31,6 +31,7 @@
    "xpack.infra": "x-pack/plugins/infra",
    "xpack.kueryAutocomplete": "x-pack/plugins/kuery_autocomplete",
    "xpack.licenseMgmt": "x-pack/plugins/license_management",
    "xpack.maps": "x-pack/plugins/maps",
    "xpack.ml": "x-pack/plugins/ml",
    "xpack.logstash": "x-pack/plugins/logstash",
    "xpack.main": "x-pack/plugins/xpack_main",

@@ -41,12 +42,14 @@
    "xpack.searchProfiler": "x-pack/plugins/searchprofiler",
    "xpack.secops": "x-pack/plugins/secops",
    "xpack.security": "x-pack/plugins/security",
    "xpack.server": "x-pack/server",
    "xpack.spaces": "x-pack/plugins/spaces",
    "xpack.upgradeAssistant": "x-pack/plugins/upgrade_assistant",
    "xpack.uptime": "x-pack/plugins/uptime",
    "xpack.watcher": "x-pack/plugins/watcher"
  },
  "exclude": [
    "src/legacy/ui/ui_render/ui_render_mixin.js",
    "src/core/public/fatal_errors/get_error_info.ts",
    "src/legacy/ui/ui_render/bootstrap/app_bootstrap.js",
    "src/legacy/ui/ui_render/ui_render_mixin.js",
@@ -1 +1 @@
10.15.1
10.15.2

.nvmrc
@@ -1 +1 @@
10.15.1
10.15.2
@@ -1,5 +1,9 @@
files:
  include:
    - 'src/legacy/core_plugins/metrics/**/*.s+(a|c)ss'
    - 'src/legacy/core_plugins/timelion/**/*.s+(a|c)ss'
    - 'src/legacy/ui/public/query_bar/**/*.s+(a|c)ss'
    - 'src/legacy/ui/public/vislib/**/*.s+(a|c)ss'
    - 'x-pack/plugins/rollup/**/*.s+(a|c)ss'
    - 'x-pack/plugins/security/**/*.s+(a|c)ss'
rules:
@@ -21,6 +21,7 @@ A high level overview of our contributing guidelines.
- [Customizing `config/kibana.dev.yml`](#customizing-configkibanadevyml)
- [Setting Up SSL](#setting-up-ssl)
- [Linting](#linting)
- [Internationalization](#internationalization)
- [Testing and Building](#testing-and-building)
- [Debugging server code](#debugging-server-code)
- [Debugging Unit Tests](#debugging-unit-tests)

@@ -278,6 +279,25 @@ IntelliJ | Settings » Languages & Frameworks » JavaScript » Code Quality To
Another tool we use for enforcing consistent coding style is EditorConfig, which can be set up by installing a plugin in your editor that dynamically updates its configuration. Take a look at the [EditorConfig](http://editorconfig.org/#download) site to find a plugin for your editor, and browse our [`.editorconfig`](https://github.com/elastic/kibana/blob/master/.editorconfig) file to see what config rules we set up.

### Internationalization

All user-facing labels and info texts in Kibana should be internationalized. Please take a look at the [readme](packages/kbn-i18n/README.md) and the [guideline](packages/kbn-i18n/GUIDELINE.md) of the i18n package on how to do so.

In order to enable translations in the React parts of the application, the topmost component of every `ReactDOM.render` call should be an `I18nContext`:
```jsx
import { I18nContext } from 'ui/i18n';

ReactDOM.render(
  <I18nContext>
    {myComponentTree}
  </I18nContext>,
  container
);
```

A number of tools have been created to support internationalization in Kibana; they let you validate internationalized labels, extract them to a `JSON` file, or integrate translations back into Kibana. To learn more, please read the corresponding [readme](src/dev/i18n/README.md) file.
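
For example, you can run the i18n checker from the repo root to validate your labels (a sketch; the exact script name and flags may vary by Kibana version):

```sh
node scripts/i18n_check
```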

### Testing and Building

To ensure that your changes will not break other functionality, please run the test suite and build process before submitting your Pull Request.
@@ -18,6 +18,10 @@
# default to `true` starting in Kibana 7.0.
#server.rewriteBasePath: false

# Specifies the default route when opening Kibana. You can use this setting to modify
# the landing page when opening Kibana.
#server.defaultRoute: /app/kibana

# The maximum payload size in bytes for incoming server requests.
#server.maxPayloadBytes: 1048576

@@ -36,9 +40,6 @@
# dashboards. Kibana creates a new index if the index doesn't already exist.
#kibana.index: ".kibana"

# The default application to load.
#kibana.defaultAppId: "home"

# If your Elasticsearch is protected with basic authentication, these settings provide
# the username and password that the Kibana server uses to perform maintenance on the Kibana
# index at startup. Your Kibana users still need to authenticate with Elasticsearch, which
@@ -34,6 +34,7 @@ NOTE: You cannot access these APIs via the Console in Kibana.
* <<dashboard-import-api>>
* <<logstash-configuration-management-api>>
* <<url-shortening-api>>
* <<upgrade-assistant-api>>
--

include::api/spaces-management.asciidoc[]

@@ -42,4 +43,4 @@ include::api/saved-objects.asciidoc[]
include::api/dashboard-import.asciidoc[]
include::api/logstash-configuration-management.asciidoc[]
include::api/url-shortening.asciidoc[]

include::api/upgrade-assistant.asciidoc[]
@@ -18,6 +18,9 @@ NOTE: You cannot access these endpoints via the Console in Kibana.
* <<saved-objects-api-bulk-create>>
* <<saved-objects-api-update>>
* <<saved-objects-api-delete>>
* <<saved-objects-api-export>>
* <<saved-objects-api-import>>
* <<saved-objects-api-resolve-import-conflicts>>

include::saved-objects/get.asciidoc[]
include::saved-objects/bulk_get.asciidoc[]

@@ -26,3 +29,6 @@ include::saved-objects/create.asciidoc[]
include::saved-objects/bulk_create.asciidoc[]
include::saved-objects/update.asciidoc[]
include::saved-objects/delete.asciidoc[]
include::saved-objects/export.asciidoc[]
include::saved-objects/import.asciidoc[]
include::saved-objects/resolve_import_conflicts.asciidoc[]
docs/api/saved-objects/export.asciidoc (new file)
@@ -0,0 +1,39 @@
[[saved-objects-api-export]]
=== Export Objects

experimental[This functionality is *experimental* and may be changed or removed completely in a future release.]

The export saved objects API enables you to retrieve a set of saved objects that can later be imported into Kibana.

Note: You cannot access this endpoint via the Console in Kibana.

==== Request

`POST /api/saved_objects/_export`

==== Request body

`type` (optional)::
(array|string) The saved object type(s) that the export should be limited to

`objects` (optional)::
(array) A list of objects to export

Note: At least `type` or `objects` must be passed in.

==== Response body

The response body is in newline-delimited JSON (NDJSON) format.
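
For illustration, an export of two objects might produce a response body like the following (mirroring the objects used in the import examples below):

[source,js]
--------------------------------------------------
{"type":"index-pattern","id":"my-pattern","attributes":{"title":"my-pattern-*"}}
{"type":"dashboard","id":"my-dashboard","attributes":{"title":"Look at my dashboard"}}
--------------------------------------------------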

==== Examples

The following example exports all index pattern saved objects.

[source,js]
--------------------------------------------------
POST api/saved_objects/_export
{
  "type": "index-pattern"
}
--------------------------------------------------
// KIBANA

A successful call returns a response code of `200` along with the exported objects as the response body.
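
To limit the export to specific objects, pass an `objects` array instead; a sketch (the object ID is hypothetical, and each entry is assumed to carry a `type` and `id`):

[source,js]
--------------------------------------------------
POST api/saved_objects/_export
{
  "objects": [
    {
      "type": "dashboard",
      "id": "my-dashboard"
    }
  ]
}
--------------------------------------------------
// KIBANA
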
docs/api/saved-objects/import.asciidoc (new file)
@@ -0,0 +1,96 @@
[[saved-objects-api-import]]
=== Import Objects

experimental[This functionality is *experimental* and may be changed or removed completely in a future release.]

The import saved objects API enables you to create a set of Kibana saved objects from a file created by the export API.

Note: You cannot access this endpoint via the Console in Kibana.

==== Request

`POST /api/saved_objects/_import`

==== Query parameters

`overwrite` (optional)::
(boolean) Overwrite saved objects if they already exist
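
For example, to overwrite existing objects with the same `id`, pass the flag as a query parameter (a sketch of the request line only):

[source,js]
--------------------------------------------------
POST api/saved_objects/_import?overwrite=true
--------------------------------------------------
// KIBANA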

==== Request body

The request body must be of type multipart/form-data.

`file`::
A file exported using the export API.

==== Response body

The response body has a top-level `success` property that indicates
whether the import was successful, as well as a `successCount` indicating how many records were successfully imported.
If the import was not successful, a top-level `errors` array contains the objects that failed to import.

==== Examples

The following example imports an index pattern and dashboard.

[source,js]
--------------------------------------------------
POST api/saved_objects/_import
Content-Type: multipart/form-data; boundary=EXAMPLE
--EXAMPLE
Content-Disposition: form-data; name="file"; filename="export.ndjson"
Content-Type: application/ndjson

{"type":"index-pattern","id":"my-pattern","attributes":{"title":"my-pattern-*"}}
{"type":"dashboard","id":"my-dashboard","attributes":{"title":"Look at my dashboard"}}
--EXAMPLE--
--------------------------------------------------
// KIBANA

A successful call returns a response code of `200` and a response body
containing a JSON structure similar to the following example:

[source,js]
--------------------------------------------------
{
  "success": true,
  "successCount": 2
}
--------------------------------------------------

The following example imports an index pattern and dashboard but has a conflict on the index pattern.

[source,js]
--------------------------------------------------
POST api/saved_objects/_import
Content-Type: multipart/form-data; boundary=EXAMPLE
--EXAMPLE
Content-Disposition: form-data; name="file"; filename="export.ndjson"
Content-Type: application/ndjson

{"type":"index-pattern","id":"my-pattern","attributes":{"title":"my-pattern-*"}}
{"type":"dashboard","id":"my-dashboard","attributes":{"title":"Look at my dashboard"}}
--EXAMPLE--
--------------------------------------------------
// KIBANA

The call returns a response code of `200` and a response body
containing a JSON structure similar to the following example:

[source,js]
--------------------------------------------------
{
  "success": false,
  "successCount": 1,
  "errors": [
    {
      "id": "my-pattern",
      "type": "index-pattern",
      "error": {
        "statusCode": 409,
        "message": "version conflict, document already exists"
      }
    }
  ]
}
--------------------------------------------------
docs/api/saved-objects/resolve_import_conflicts.asciidoc (new file)
@@ -0,0 +1,104 @@
[[saved-objects-api-resolve-import-conflicts]]
=== Resolve Import Conflicts

experimental[This functionality is *experimental* and may be changed or removed completely in a future release.]

The resolve import conflicts API enables you to resolve conflicts reported by the import API, either by overwriting specific saved objects or by changing references to point to a newly created object.

Note: You cannot access this endpoint via the Console in Kibana.

==== Request

`POST /api/saved_objects/_resolve_import_conflicts`

==== Request body

The request body must be of type multipart/form-data.

`file`::
(ndjson) The same newline-delimited JSON objects given to the import API.

`overwrites` (optional)::
(array) A list of `type` and `id` objects allowed to be overwritten on import.

`replaceReferences` (optional)::
(array) A list of `type`, `from`, and `to` objects used to rewrite references in the imported saved objects.

`skips` (optional)::
(array) A list of `type` and `id` objects to skip importing.

==== Response body

The response body has a top-level `success` property that indicates
whether the resolution was successful, as well as a `successCount` indicating how many records were successfully resolved.
If the import was not successful, a top-level `errors` array contains the objects that failed to import.

==== Examples

The following example resolves conflicts for an index pattern and dashboard, but indicates that the index pattern should be skipped.
This causes the index pattern to be left out of the system and the dashboard to overwrite the existing saved object.

[source,js]
--------------------------------------------------
POST api/saved_objects/_resolve_import_conflicts
Content-Type: multipart/form-data; boundary=EXAMPLE
--EXAMPLE
Content-Disposition: form-data; name="file"; filename="export.ndjson"
Content-Type: application/ndjson

{"type":"index-pattern","id":"my-pattern","attributes":{"title":"my-pattern-*"}}
{"type":"dashboard","id":"my-dashboard","attributes":{"title":"Look at my dashboard"}}
--EXAMPLE
Content-Disposition: form-data; name="skips"

[{"type":"index-pattern","id":"my-pattern"}]
--EXAMPLE
Content-Disposition: form-data; name="overwrites"

[{"type":"dashboard","id":"my-dashboard"}]
--EXAMPLE--
--------------------------------------------------
// KIBANA

A successful call returns a response code of `200` and a response body
containing a JSON structure similar to the following example:

[source,js]
--------------------------------------------------
{
  "success": true,
  "successCount": 1
}
--------------------------------------------------

The following example resolves conflicts for a visualization and dashboard, but indicates
that the dashboard's references should be replaced with another visualization.

[source,js]
--------------------------------------------------
POST api/saved_objects/_resolve_import_conflicts
Content-Type: multipart/form-data; boundary=EXAMPLE
--EXAMPLE
Content-Disposition: form-data; name="file"; filename="export.ndjson"
Content-Type: application/ndjson

{"type":"visualization","id":"my-vis","attributes":{"title":"Look at my visualization"}}
{"type":"dashboard","id":"my-dashboard","attributes":{"title":"Look at my dashboard"},"references":[{"name":"panel_0","type":"visualization","id":"my-vis"}]}
--EXAMPLE
Content-Disposition: form-data; name="replaceReferences"

[{"type":"visualization","from":"my-vis","to":"my-vis-2"}]
--EXAMPLE--
--------------------------------------------------
// KIBANA

A successful call returns a response code of `200` and a response body
containing a JSON structure similar to the following example:

[source,js]
--------------------------------------------------
{
  "success": true,
  "successCount": 1
}
--------------------------------------------------
docs/api/upgrade-assistant.asciidoc (new file)
@@ -0,0 +1,15 @@
[role="xpack"]
[[upgrade-assistant-api]]
== Upgrade Assistant API

experimental[This API is *experimental* and may be changed or removed completely in a future release. The underlying Upgrade Assistant concepts are stable, but the APIs for managing Upgrade Assistant are currently experimental.]

The Upgrade Assistant API allows you to check the upgrade status of your Elasticsearch cluster
and reindex indices that were created in the previous major version. The assistant helps you prepare
for the next major version of Elasticsearch.

* <<upgrade-assistant-api-status>>
* <<upgrade-assistant-api-reindexing>>

include::upgrade-assistant/status.asciidoc[]
include::upgrade-assistant/reindexing.asciidoc[]
docs/api/upgrade-assistant/reindexing.asciidoc (new file)
@@ -0,0 +1,154 @@
[[upgrade-assistant-api-reindexing]]
=== Reindex API

experimental[This API is *experimental* and may be changed or removed completely in a future release. The underlying Upgrade Assistant concepts are stable, but the APIs for managing Upgrade Assistant are currently experimental.]

When checking the upgrade status, some indices will have the `reindex` parameter set to `true`. You can use this Reindex API to reindex those indices.

==== Start or resume a reindex

===== Request

To start a new reindex or resume a paused reindex, submit a POST request to the `/api/upgrade_assistant/reindex/<indexName>` endpoint:

Note: You cannot access this endpoint via the Console in Kibana.

[source,js]
--------------------------------------------------
POST /api/upgrade_assistant/reindex/myIndex
--------------------------------------------------
// KIBANA

===== Response

A successful call returns a response code of `200` and a response body
containing a JSON structure similar to the following example:

[source,js]
--------------------------------------------------
{
  "indexName": ".ml-state",
  "newIndexName": ".reindexed-v7-ml-state",
  "status": 0,
  "lastCompletedStep": 0,
  "reindexTaskId": null,
  "reindexTaskPercComplete": null,
  "errorMessage": null
}
--------------------------------------------------

See the next section for an explanation of each field.

==== Check the status of a reindex

===== Request

Once a reindex is started, you can check the status of the reindex operation by submitting a GET request to the `/api/upgrade_assistant/reindex/<indexName>` endpoint:

Note: You cannot access this endpoint via the Console in Kibana.

[source,js]
--------------------------------------------------
GET /api/upgrade_assistant/reindex/myIndex
--------------------------------------------------
// KIBANA

===== Response

A successful call returns a response code of `200` and a response body
containing a JSON structure similar to the following example:

[source,js]
--------------------------------------------------
{
  "reindexOp": {
    "indexName": ".ml-state",
    "newIndexName": ".reindexed-v7-ml-state", <1>
    "status": 0, <2>
    "lastCompletedStep": 40, <3>
    "reindexTaskId": "QprwvTMzRQ2MLWOW22oQ4Q:11819", <4>
    "reindexTaskPercComplete": 0.3, <5>
    "errorMessage": null <6>
  },
  "warnings": [], <7>
  "hasRequiredPrivileges": true <8>
}
--------------------------------------------------

<1> Name of the new index that is being created.
<2> Current status of the reindex. See the <<status-code,status code table>> for details.
<3> Last successfully completed step of the reindex. See the <<step-code,step code table>> for details.
<4> Task ID of the reindex task in Elasticsearch. Only present if reindexing has started.
<5> Progress of the reindexing task in Elasticsearch, as a decimal from 0 to 1.
<6> Error that caused the reindex to fail, if it failed.
<7> An array of warning codes explaining what changes are required for this reindex. See the <<warning-code,warning code table>> for details.
<8> Whether the current user has the required privileges to reindex this index. Returns `true` if Security is not available or is disabled.

[[status-code]]
===== Status code

The `status` field corresponds to these statuses:

[horizontal]
0:: in progress
1:: completed
2:: failed
3:: paused
4:: cancelled

[[step-code]]
===== Step code

The `lastCompletedStep` field corresponds to these steps:

[horizontal]
0:: The reindex operation has been created in Kibana.
10:: Index group services have been stopped. Only applies to some system indices.
20:: The index has been set to read-only.
30:: The new destination index has been created.
40:: The reindex task in Elasticsearch has started.
50:: The reindex task in Elasticsearch has completed.
60:: Aliases have been created to point to the new index, and the old index has been deleted.
70:: Index group services have been resumed. Only applies to some system indices.

[[warning-code]]
===== Warning code

The `warnings` field corresponds to an array of integers for these warnings:

[horizontal]
0:: The `_all` meta field will be removed.
1:: Any coerced boolean values will be converted in the source document (example: `yes`, `1`, `off`).
2:: Documents will be converted to support Elastic Common Schema. Only applies to APM indices created in 6.x.

===== Paused reindexes

If the Kibana node that started the reindex is shut down or restarted, the reindex goes into a paused state after some time.
To resume the reindex, you must submit a new POST request to the `/api/upgrade_assistant/reindex/<indexName>` endpoint.

==== Cancel a reindex

===== Request

You can cancel reindexes that are waiting for the Elasticsearch reindex task to complete (`lastCompletedStep` set to `40`).
To cancel a reindex, submit a POST request to the `/api/upgrade_assistant/reindex/<indexName>/cancel` endpoint:

Note: You cannot access this endpoint via the Console in Kibana.

[source,js]
--------------------------------------------------
POST /api/upgrade_assistant/reindex/myIndex/cancel
--------------------------------------------------
// KIBANA

===== Response

A successful call returns a response code of `200` and a response body
containing a JSON structure similar to the following example:

[source,js]
--------------------------------------------------
{
  "acknowledged": true
}
--------------------------------------------------
docs/api/upgrade-assistant/status.asciidoc (new file)
@@ -0,0 +1,48 @@
[[upgrade-assistant-api-status]]
=== Upgrade Readiness Status

experimental[This API is *experimental* and may be changed or removed completely in a future release. The underlying Upgrade Assistant concepts are stable, but the APIs for managing Upgrade Assistant are currently experimental.]

==== Request

To check the status of your cluster, submit a GET request to the `/api/upgrade_assistant/status` endpoint:

Note: You cannot access this endpoint via the Console in Kibana.

[source,js]
--------------------------------------------------
GET /api/upgrade_assistant/status
--------------------------------------------------
// KIBANA

==== Response

A successful call returns a response code of `200` and a response body
containing a JSON structure similar to the following example:

[source,js]
--------------------------------------------------
{
  "readyForUpgrade": false,
  "cluster": [
    {
      "message": "Cluster deprecated issue",
      "details": "...",
      "level": "warning",
      "url": "https://docs.elastic.co/..."
    }
  ],
  "indices": [
    {
      "message": "Index was created before 6.0",
      "details": "...",
      "index": "myIndex",
      "level": "critical",
      "reindex": true, <1>
      "url": "https://docs.elastic.co/..."
    }
  ]
}
--------------------------------------------------

<1> You can fix indices with the `reindex` attribute set to `true` using the <<upgrade-assistant-api-reindexing,Reindex API>>.
|
|||
[float]
|
||||
==== Plugin Generator
|
||||
|
||||
It is recommended that you kick-start your plugin by generating it with the {repo}tree/{branch}/packages/kbn-plugin-generator[Kibana Plugin Generator]. Run the following within the Kibana repo and you will be asked a couple questions, see some progress bars, and have a freshly generated plugin ready for you to play within Kibana's sibling `kibana-extra` folder.
|
||||
It is recommended that you kick-start your plugin by generating it with the {repo}tree/{branch}/packages/kbn-plugin-generator[Kibana Plugin Generator]. Run the following within the Kibana repo and you will be asked a couple questions, see some progress bars, and have a freshly generated plugin ready for you to play within Kibana's `plugins` folder.
|
||||
|
||||
["source","shell"]
|
||||
-----------
|
||||
|
@ -31,14 +31,15 @@ node scripts/generate_plugin my_plugin_name # replace "my_plugin_name" with your
|
|||
[float]
|
||||
==== Directory structure for plugins
|
||||
|
||||
The Kibana directory must be named `kibana`, and your plugin directory must be located within the sibling `kibana-extra` folder, for example:
|
||||
The Kibana directory must be named `kibana`, and your plugin directory should be located in the root of `kibana` in a `plugins` directory, for example:
|
||||
|
||||
["source","shell"]
|
||||
-----------
|
||||
.
|
||||
├── kibana
|
||||
├── kibana-extra/foo-plugin
|
||||
└── kibana-extra/bar-plugin
|
||||
└── kibana
|
||||
└── plugins
|
||||
├── foo-plugin
|
||||
└── bar-plugin
|
||||
-----------
|
||||
|
||||
[float]
|
||||
|
|
|
@@ -25,6 +25,8 @@ If you are running our https://cloud.elastic.co[hosted Elasticsearch Service]
on Elastic Cloud, you can access Kibana with a single click.
--

include::getting-started/add-sample-data.asciidoc[]

include::getting-started/tutorial-sample-data.asciidoc[]

include::getting-started/tutorial-sample-filter.asciidoc[]
docs/getting-started/add-sample-data.asciidoc (new file)
@@ -0,0 +1,31 @@
[[add-sample-data]]
== Get up and running with sample data

{kib} has three sample data sets that you can use to explore {kib} before loading your own data
source. Each set is prepackaged with a dashboard of visualizations and a
{kibana-ref}/canvas-getting-started.html[Canvas workpad].

The sample data sets address common use cases:

* *eCommerce orders* includes visualizations for product-related information,
such as cost, revenue, and price.
* *Web logs* lets you analyze website traffic.
* *Flight data* enables you to view and interact with flight routes for four airlines.

To get started, go to the home page and click the link next to *Add sample data*.

Once you have loaded a data set, click *View data* to view visualizations in *Dashboard*.

*Note:* The timestamps in the sample data sets are relative to when they are installed.
If you uninstall and reinstall a data set, the timestamps change to reflect the most recent installation.

[role="screenshot"]
image::images/add-sample-data.png[]

[float]
==== Next steps

Play with the sample flight data in the {kibana-ref}/tutorial-sample-data.html[flight dashboard tutorial].

Learn how to load data, define index patterns, and build visualizations by {kibana-ref}/tutorial-build-dashboard.html[building your own dashboard].
BIN docs/images/add-sample-data.png (new file, 660 KiB)
BIN docs/images/management-upgrade-assistant-9.0.png (new executable file, 326 KiB)
BIN docs/images/management_index_labels.png (new file, 206 KiB)
|
@ -42,6 +42,8 @@ include::canvas.asciidoc[]
|
|||
|
||||
include::ml/index.asciidoc[]
|
||||
|
||||
include::maps/index.asciidoc[]
|
||||
|
||||
include::infrastructure/index.asciidoc[]
|
||||
|
||||
include::logs/index.asciidoc[]
|
||||
|
@ -82,4 +84,4 @@ include::migration.asciidoc[]
|
|||
|
||||
include::CHANGELOG.asciidoc[]
|
||||
|
||||
include::redirects.asciidoc[]
|
||||
include::redirects.asciidoc[]
|
||||
|
|
|
@@ -22,7 +22,7 @@ compatible with other configuration settings. Deleting a custom setting removes
[horizontal]
`query:queryString:options`:: Options for the Lucene query string parser.
`query:allowLeadingWildcards`:: When set, * is allowed as the first character in a query clause. Currently only applies when experimental query features are enabled in the query bar. To disallow leading wildcards in basic lucene queries, use `query:queryString:options`.
`search:queryLanguage`:: Default is `kuery`. Query language used by the query bar. Choose between the lucene query syntax and kuery, a new language built specifically for Kibana.
`search:queryLanguage`:: Default is `KQL`. Query language used by the query bar. Choose between the lucene query syntax and KQL, a new language built specifically for Kibana.
`sort:options`:: Options for the Elasticsearch {ref}/search-request-sort.html[sort] parameter.
`dateFormat`:: The format to use for displaying pretty-formatted dates.
`dateFormat:tz`:: The timezone that Kibana uses. The default value of `Browser` uses the timezone detected by the browser.

@@ -73,6 +73,7 @@ mentioned use "_default_".
`format:percent:defaultPattern`:: Default numeral format for the "percent" format.
`format:currency:defaultPattern`:: Default numeral format for the "currency" format.
`savedObjects:perPage`:: The number of objects shown on each page of the list of saved objects. The default value is 5.
`savedObjects:listingLimit`:: The total number of objects to query for lists of saved objects. The default value is 1000. Do not set above 10000.
`timepicker:timeDefaults`:: The default time filter selection.
`timepicker:refreshIntervalDefaults`:: The time filter's default refresh interval.
`timepicker:quickRanges`:: The list of ranges to show in the Quick section of the time picker. This should be an array of objects, with each object containing `from`, `to` (see {ref}/common-options.html#date-math[accepted formats]), `display` (the title to be displayed), and `section` (which column to put the option in).
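
For illustration, a `timepicker:quickRanges` entry might look like the following (values are hypothetical; `from` and `to` accept {es} date math):

[source,js]
--------------------------------------------------
[
  { "from": "now-7d/d", "to": "now", "display": "Last 7 days", "section": 1 }
]
--------------------------------------------------
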
@@ -21,11 +21,13 @@ disable *Index Management*, then *Index Lifecycle Policies* is also disabled.
[role="screenshot"]
image::images/index-lifecycle-policies-create.png["UI for creating an index lifecycle policy"]

==== Defining the phases of the index lifecycle

You can define up to four phases in the index lifecycle. For each phase, you
can enable actions to optimize performance for that phase. Transitioning
between phases is based on the age of the index.

The four phases of an index lifecycle policy are:
The four phases in the index lifecycle are:

* *Hot.* The index is actively being queried and written to. You can optionally
roll over to a new index when the

@@ -35,8 +37,8 @@ index. You can still query the previous indices, but you only ever write to
the “hot” index. See {ref}/indices-rollover-index.html[Rollover index] for more information.

* *Warm.* The index is typically searched at a lower rate than when the data is
hot. The index is not used for storing for new data, but might occasionally add
late-arriving data, for example, from a Beat that had a network problem that's now fixed.
hot. The index is not used for storing new data, but might occasionally add
late-arriving data, for example, from a Beat with a network problem that's now fixed.
You can optionally shrink the number of replicas and move the shards to a
different set of nodes with smaller or less performant hardware. You can also
reduce the number of primary shards and force merge the index into

@@ -45,7 +47,10 @@ smaller {ref}/indices-segments.html[segments].
* *Cold.* The index is no longer being updated and is seldom queried, but is
still searchable. If you have a big deployment, you can move it to even
less performant hardware. You might also reduce the number of replicas because
you expect the data to be queried less frequently.
you expect the data to be queried less frequently. To keep the index searchable
for a longer period, and reduce the hardware requirements, you can use the
{ref}/frozen-indices.html[freeze action]. Queries are slower on a frozen index because the index is
reloaded from the disk to RAM on demand.

* *Delete.* The index is no longer relevant. You can define when it is safe to
delete it.

@@ -55,3 +60,13 @@ delete phases are optional. For example, you might define all four phases for
one policy and only a hot and delete phase for another. See {ref}/_actions.html[Actions]
for more information on the actions available in each phase.

==== Setting the index priority

For the hot, warm, and cold phases, you can set a priority for recovering
indices after a node restart. Indices with higher priorities are recovered
before indices with lower priorities. By default, the index priority is set to
100 in the hot phase, 50 in the warm phase, and 0 in the cold phase.
If the cold phase of one index has data that
is more important than the data in the hot phase of another, you might increase
the index priority in the cold phase. See
{ref}/recovery-prioritization.html[Index recovery prioritization].
@@ -1,30 +1,59 @@
[[managing-indices]]
== Managing Indices

The *Index Management* UI enables you to view index settings,
mappings, and statistics and perform management operations.
These include refreshing, flushing, clearing the cache, merging segments,
and closing or deleting indices. The UI provides a convenient way to
*Index Management* enables you to view index settings,
mappings, and statistics and perform index-level operations.
These include refreshing, flushing, clearing the cache, force merging segments,
freezing indices, and more. *Index Management* provides a convenient way to
perform bulk operations on multiple indices.

To open the UI, select *Management > Index Management*. If security is enabled,
you must have the the `monitor` cluster privilege and the `view_index_metadata`
To access this feature, go to *Management > {es} > Index Management*.
If security is enabled,
you must have the `monitor` cluster privilege and the `view_index_metadata`
and `manage` index privileges to view the data. See
{xpack-ref}/security-privileges.html[Security Privileges] for more
information.

*Index Management* uses badges to make users aware when an index is {ref}/frozen-indices.html[frozen],
a {ref}/ccr-put-follow.html[follower index],
or a {ref}/rollup-get-rollup-index-caps.html[rollup index].
Clicking a badge filters for all indices of that type.

[role="screenshot"]
image::images/management-index-management.png[Index Management UI]
image::images/management_index_labels.png[Index Management UI]

Click the name of an index to display the index summary and access
the index settings, mapping, and statistics. The *Manage* menu in the
lower right of the index pane enables you to manage
the selected index.
Clicking the name of an index displays the index summary and provides access to
the index settings, mapping, and statistics.

To perform bulk operations, select the checkboxes of the indices you want to
modify and choose an operation from the *Manage indices* menu
next to the query bar. To select all
indices, select the checkbox in the *Name* header.
From the *Manage* menu, you can perform these index-level operations on either
a single index or multiple indices:

* *Close the index*. Blocks the index from read/write operations.
A closed index exists in the cluster, but doesn't consume resources
other than disk space. If you reopen a closed index, it goes through the
normal recovery process.

* *Force merge the index*. Reduces the number of segments in your shard by
merging smaller files and clearing deleted ones. Only force merge a read-only index.

* *Refresh the index*. Writes the operations in the indexing buffer to the
filesystem cache. This is automatically done once per second. Forcing a manual
refresh is useful during testing, but should not be routinely done in
production because it has a performance impact.

* *Clear the index cache*. Clears all caches associated with the index.

* *Flush the index*. Frees memory by syncing the filesystem cache to disk and
clearing the cache. Once the sync is complete, the internal transaction log is reset.

* *Freeze the index*. Makes the index read-only and reduces its memory footprint
by moving shards to disk. {ref}/frozen-indices.html[Frozen indices] remain
searchable, but queries take longer.

* *Delete the index*. Permanently removes the index and all of its documents.

* *Add a lifecycle policy*. Specifies a policy for managing the lifecycle of the
index.

For information about the available management operations,
see {ref}/indices.html[Indices APIs] in the Elasticsearch Reference.
see {ref}/indices.html[Indices APIs].
@@ -1,14 +1,47 @@
[[upgrade-assistant]]
== Upgrade Assistant

The Upgrade Assistant helps you prepare for your upgrade to {es} 8.0.
To access the assistant, go to *Management > 8.0 Upgrade Assistant*.
The Upgrade Assistant helps you prepare for your upgrade to {es} 9.0.
To access the assistant, go to *Management > 9.0 Upgrade Assistant*.

The assistant identifies the deprecated settings in your cluster and indices
and guides you through the process of resolving issues, including reindexing.

Before upgrading to Elasticsearch 8.0, make sure that you are using the final
7.x minor release to see the most up-to-date deprecation issues.
Before upgrading to Elasticsearch 9.0, make sure that you are using the final
8.x minor release to see the most up-to-date deprecation issues.

[float]
=== Reindexing

The *Indices* page lists the indices that are incompatible with the next
major version of {es}. You can initiate a reindex to resolve the issues.

[role="screenshot"]
image::images/management-upgrade-assistant-8.0.png[]
image::images/management-upgrade-assistant-9.0.png[]

For a preview of how the data will change during the reindex, select the
index name. A warning appears if the index requires destructive changes.
Back up your index, then proceed with the reindex by accepting each breaking change.

You can follow the progress as the Upgrade Assistant makes the index read-only,
creates a new index, reindexes the documents, and creates an alias that points
from the old index to the new one.

If the reindexing fails or is cancelled, the changes are rolled back, the
new index is deleted, and the original index becomes writable. An error
message explains the reason for the failure.

You can reindex multiple indices at a time, but keep an eye on the
{es} metrics, including CPU usage, memory pressure, and disk usage. If a
metric is so high that it affects query performance, cancel the reindex and
continue by reindexing fewer indices at a time.

Additional considerations:

* During a reindex of a Watcher (`.watches`) index, the Watcher process
pauses and no alerts are triggered.

* During a reindex of a Machine Learning (`.ml-state`) index, the Machine
Learning job pauses and models are not trained or updated.
docs/maps/heatmap-layer.asciidoc (new file)
@@ -0,0 +1,17 @@
[[heatmap-layer]]
== Heat map layer

In the heat map layer, point data is clustered to show locations with higher densities.

[role="screenshot"]
image::maps/images/heatmap_layer.png[]

You can create a heat map layer from the following data source:

*Grid aggregation*:: Geospatial data grouped in grids with metrics for each gridded cell.
Set *Show as* to *heat map*.
The index must contain at least one field mapped as {ref}/geo-point.html[geo_point].

NOTE: Only count and sum metric aggregations are available with the grid aggregation source and heat map layers.
Mean, median, min, and max are turned off because the heat map will blend nearby values.
Blending two average values would make the cluster more prominent, even though it might simply mean that these nearby areas are average.
BIN docs/maps/images/heatmap_layer.png (new file, 350 KiB)
BIN docs/maps/images/sample_data_ecommerce.png (new file, 1.2 MiB)
BIN docs/maps/images/terms_join.png (new file, 849 KiB)
BIN docs/maps/images/terms_join_metric_config.png (new file, 62 KiB)
BIN docs/maps/images/terms_join_shared_key_config.png (new file, 121 KiB)
BIN docs/maps/images/terms_join_tooltip.png (new file, 111 KiB)
BIN docs/maps/images/tile_layer.png (new file, 752 KiB)
BIN docs/maps/images/vector_layer.png (new file, 467 KiB)
BIN docs/maps/images/vector_style_dynamic.png (new file, 12 KiB)
BIN docs/maps/images/vector_style_static.png (new file, 8.9 KiB)
docs/maps/index.asciidoc (new file)
@@ -0,0 +1,18 @@
[[maps]]
= Maps

[partintro]
--

beta[]

The **Maps** application enables you to parse through your geographical data at scale, with speed, and in real time. With features like multiple layers and indices in a map, plotting of raw documents, dynamic client-side styling, and global search across multiple layers, you can understand and monitor your data with ease.

[role="screenshot"]
image::maps/images/sample_data_ecommerce.png[]

--

include::heatmap-layer.asciidoc[]
include::tile-layer.asciidoc[]
include::vector-layer.asciidoc[]
docs/maps/terms-join.asciidoc (new file)
@@ -0,0 +1,94 @@
[[terms-join]]
=== Terms join

Terms joins use a shared key to combine the results of an Elasticsearch terms aggregation and vector features.
You can augment vector features with property values that symbolize features and provide richer tooltip content.

[role="screenshot"]
image::maps/images/terms_join.png[]

Follow the example below to understand how *Terms joins* work.
This example uses Elastic Maps Service (EMS) World Countries as the vector source and
the Kibana sample data set "Sample web logs" as the Elasticsearch index.

Example feature from World Countries:
--------------------------------------------------
{
  geometry: {
    coordinates: [...],
    type: "Polygon"
  },
  properties: {
    name: "Sweden",
    iso2: "SE",
    iso3: "SWE"
  },
  type: "Feature"
}
--------------------------------------------------

Example documents from Sample web logs:
--------------------------------------------------
{
  bytes: 1837,
  geo: {
    src: "SE"
  },
  timestamp: "Feb 28, 2019 @ 07:23:08.754"
},
{
  bytes: 971,
  geo: {
    src: "SE"
  },
  timestamp: "Feb 27, 2019 @ 08:10:45.205"
},
{
  bytes: 4277,
  geo: {
    src: "SE"
  },
  timestamp: "Feb 21, 2019 @ 05:24:33.945"
},
{
  bytes: 5624,
  geo: {
    src: "SE"
  },
  timestamp: "Feb 21, 2019 @ 04:57:05.921"
}
--------------------------------------------------

The JOIN configuration links the vector source "World Countries" to the Elasticsearch index "kibana_sample_data_logs"
on the shared key *iso2 = geo.src*.

[role="screenshot"]
image::maps/images/terms_join_shared_key_config.png[]

The METRICS configuration defines two metric aggregations:
the count of all documents in the terms bucket and
the average of the field "bytes" for all documents in the terms bucket.

[role="screenshot"]
image::maps/images/terms_join_metric_config.png[]

Example terms aggregation response:
--------------------------------------------------
{
  aggregations: {
    join: {
      buckets: [
        {
          doc_count: 4,
          key: "SE",
          avg_of_bytes: {
            value: 3177.25
          }
        }
      ]
    }
  }
}
--------------------------------------------------

Finally, the terms aggregation response is joined with the vector features.

[role="screenshot"]
image::maps/images/terms_join_tooltip.png[]
docs/maps/tile-layer.asciidoc (new file)
@@ -0,0 +1,18 @@
[[tile-layer]]
== Tile layer

The tile layer displays image tiles served from a tile server.

[role="screenshot"]
image::maps/images/tile_layer.png[]

You can create a tile layer from the following data sources:

*Custom Tile Map Service*:: Map tiles configured in `kibana.yml`.
See `map.tilemap.url` in <<settings>> for details; a configuration sketch follows this list.

*Tiles*:: Map tiles from https://www.elastic.co/elastic-maps-service[Elastic Maps Service].

*Tile Map Service from URL*:: Map tiles from a URL that includes the XYZ coordinates.

*Web Map Service*:: Maps from OGC Standard WMS.
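
For illustration, a `kibana.yml` entry for a custom tile service might look like the following (the URL is a placeholder; `{x}`, `{y}`, and `{z}` are the standard XYZ tile coordinates):

[source,yaml]
--------------------------------------------------
map.tilemap.url: "https://tiles.example.com/{z}/{x}/{y}.png"
--------------------------------------------------
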
docs/maps/vector-layer.asciidoc (new file)
@@ -0,0 +1,24 @@
[[vector-layer]]
== Vector layer

The vector layer displays points, lines, and polygons.

[role="screenshot"]
image::maps/images/vector_layer.png[]

You can create a vector layer from the following sources:

*Custom vector shapes*:: Vector shapes from static files configured in `kibana.yml`.
See `map.regionmap.*` in <<settings>> for details; a configuration sketch follows this list.

*Documents*:: Geospatial data from a Kibana index pattern.
The index must contain at least one field mapped as {ref}/geo-point.html[geo_point] or {ref}/geo-shape.html[geo_shape].

*Grid aggregation*:: Geospatial data grouped in grids with metrics for each gridded cell.
Set *Show as* to *grid rectangles* or *points*.
The index must contain at least one field mapped as {ref}/geo-point.html[geo_point].

*Vector shapes*:: Vector shapes of administrative boundaries from https://www.elastic.co/elastic-maps-service[Elastic Maps Service].
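
As a rough sketch, a `map.regionmap` entry in `kibana.yml` for custom vector shapes might look like this (the layer name, URL, attribution, and field names are placeholders):

[source,yaml]
--------------------------------------------------
map.regionmap:
  includeElasticMapsService: false
  layers:
    - name: "Departments of France"
      url: "https://my.cors.enabled.server/france_departements.geojson"
      attribution: "INRAP"
      fields:
        - name: "department"
          description: "Full department name"
--------------------------------------------------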

include::terms-join.asciidoc[]
include::vector-style.asciidoc[]
docs/maps/vector-style.asciidoc (new file)
@@ -0,0 +1,20 @@
[[vector-style]]
=== Vector style

*Border color*:: Defines the border color of the vector features.

*Border width*:: Defines the border width of the vector features.

*Fill color*:: Defines the fill color of the vector features.

*Symbol size*:: Defines the symbol size of point features.

Click the *link* button to toggle between static styling and data-driven styling.

[role="screenshot"]
image::maps/images/vector_style_static.png[]

[role="screenshot"]
image::maps/images/vector_style_dynamic.png[]

NOTE: The *link* button is only available when your vector features contain numeric properties.
@@ -10,3 +10,12 @@ your application to Kibana 8.0.
coming[8.0.0]

See also <<release-highlights>> and <<release-notes>>.

////
The following section is re-used in the Installation and Upgrade Guide
[[breaking_80_notable]]
=== Notable breaking changes
////
// tag::notable-breaking-changes[]

// end::notable-breaking-changes[]
@@ -2,9 +2,19 @@
[[reporting-getting-started]]
== Getting Started

{reporting} is automatically enabled in {kib}.
[float]
=== System Setup

To manually generate a report:
{reporting} is automatically enabled in {kib}. The first time Kibana runs, it extracts a custom build of the Chromium web browser, which
runs on the server in headless mode to load Kibana and capture the rendered Kibana charts as images.

Chromium is an open-source project not related to Elastic, but the Chromium binary for Kibana has been custom-built by Elastic to ensure it
works with minimal setup. However, the Kibana server OS might still require additional dependencies for Chromium. See the
<<reporting-troubleshooting-system-dependencies, Reporting Troubleshooting>> section for more information about the system dependencies
for different operating systems.

[float]
=== Generating a Report Manually

. Open {kib} in your web browser and log in. If you are running {kib}
locally, go to `http://localhost:5601`. To access {kib} and generate

@@ -35,6 +45,9 @@ image:reporting/images/share-button.png["Reporting Button",link="share-button.pn
... Click the *Generate PDF* button.

[float]
=== Generating a Report Automatically

If you want to automatically generate reports from a script or with
{watcher}, use the displayed Generation URL. For more information, see
<<automating-report-generation, Automating Report Generation>>.
@ -9,10 +9,6 @@ visualizations, and saved searches. Dashboards and visualizations are
|
|||
exported as PDF documents, while saved searches in Discover
|
||||
are exported to CSV.
|
||||
|
||||
NOTE: On Linux, the `libfontconfig` and `libfreetype6` packages and system
|
||||
fonts are required to generate PDF reports. If no system fonts are available,
|
||||
labels are not rendered correctly in the reports.
|
||||
|
||||
Reporting is located in the share menu from the {kib} toolbar:
|
||||
|
||||
[role="screenshot"]
|
||||
|
|
|
@ -1,61 +1,102 @@
|
|||
[role="xpack"]
|
||||
[[reporting-troubleshooting]]
|
||||
== Reporting Troubleshooting
|
||||
++++
|
||||
<titleabbrev>Troubleshooting</titleabbrev>
|
||||
++++
|
||||
|
||||
Having trouble? Here are solutions to common problems you might encounter while using Reporting.
|
||||
|
||||
[float]
|
||||
=== Verbose Logging
|
||||
Kibana's server logs have a lot of useful information for troubleshooting and understanding how things work. If you're having any issues at
|
||||
all, the full logs from Reporting will be the first place to look. In `kibana.yml`:
|
||||
|
||||
[source,yaml]
|
||||
--------------------------------------------------------------------------------
|
||||
logging.verbose: true
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
For more information about logging, see <<logging-verbose,Kibana configuration settings>>.
|
||||
|
||||
[float]
|
||||
[[reporting-troubleshooting-system-dependencies]]
|
||||
=== System Dependencies
|
||||
Reporting launches a "headless" web browser called Chromium on the Kibana server. It is a custom build made by Elastic of an open source
|
||||
project, and it is intended to have minimal dependencies on OS libraries. However, the Kibana server OS might still require additional
|
||||
dependencies for Chromium.
|
||||
|
||||
Make sure Kibana server OS has the appropriate packages installed for the distribution.
|
||||
|
||||
[float]
|
||||
==== On CentOS/RHEL systems, the following packages should be installed:
|
||||
* `ipa-gothic-fonts`
|
||||
* `xorg-x11-fonts-100dpi`
|
||||
* `xorg-x11-fonts-75dpi`
|
||||
* `xorg-x11-utils`
|
||||
* `xorg-x11-fonts-cyrillic`
|
||||
* `xorg-x11-fonts-Type1`
|
||||
* `xorg-x11-fonts-misc`
|
||||
* `fontconfig`
|
||||
* `freetype`
|
||||
|
||||
[float]
|
||||
==== On Ubuntu/Debian systems, the following packages should be installed:
|
||||
* `fonts-liberation`
|
||||
* `libfontconfig1`
|
||||
|
||||
[float]
|
||||
=== Text is Not rendered correctly in generated reports
|
||||
|
||||
If a report label is rendered as an empty rectangle, no system fonts
|
||||
are available. Install at least one font package on the system.
|
||||
If a report label is rendered as an empty rectangle, no system fonts are available. Install at least one font package on the system.
|
||||
|
||||
If the report is missing certain Chinese, Japanese or Korean characters, ensure that a system font with
|
||||
those characters is installed.
|
||||
|
||||
[float]
|
||||
=== Error generating your report
|
||||
You might see "There was an error generating your report" or one of the following errors when you download your report. See below for
|
||||
an explanation of why the failure occurred and what you can do to fix it.
|
||||
If the report is missing certain Chinese, Japanese or Korean characters, ensure that a system font with those characters is installed.
|
||||
|
||||
[float]
|
||||
=== Data Table Visualization does not show all data in PDF reports
|
||||
There is currently a known limitation with the Data Table visualization that only the first page of data rows, which are the only data visible on the screen, are shown in PDF reports.
|
||||
There is currently a known limitation with the Data Table visualization that only the first page of data rows, which are the only data
|
||||
visible on the screen, are shown in PDF reports.
|
||||
|
||||
[float]
|
||||
==== `You must install fontconfig and freetype for Reporting to work'`
|
||||
Reporting uses a headless browser on the Kibana server, which relies on some
|
||||
system packages. Install the appropriate fontconfig and freetype packages for
|
||||
your distribution.
|
||||
=== File Permissions
|
||||
Ensure that the `headless_shell` binary located in your Kibana data directory is owned by the user who is running Kibana, that the
|
||||
user has the execute permission, and if applicable, that the filesystem is mounted with the `exec` option.
|
||||
|
||||
[NOTE]
|
||||
--
|
||||
The Chromium binary is located in the Kibana installation directory as `data/headless_shell-OS_TYPE/headless_shell`. The full path is logged
|
||||
the first time Kibana starts when verbose logging is enabled.
|
||||
--
|
||||
|
||||
[float]
|
||||
==== `Max attempts reached (3)`
|
||||
[[reporting-troubleshooting-error-messages]]
|
||||
=== Error Messages
|
||||
Whenever possible, a Reporting error message tries to be as self-explanatory as possible. Here are some error messages you might encounter,
|
||||
along with the solution.
|
||||
|
||||
[float]
|
||||
==== "Max attempts reached"
|
||||
There are two primary causes of this error:
|
||||
|
||||
. You're creating a PDF of a visualization or dashboard that spans a large amount of data and Kibana is hitting the `xpack.reporting.queue.timeout`
|
||||
|
||||
. Kibana is hosted behind a reverse-proxy, and the <<reporting-kibana-server-settings, Kibana server settings>> are not configured correctly
|
||||
|
||||
Create a Markdown visualization and then create a PDF report. If this succeeds, increase the `xpack.reporting.queue.timeout`
|
||||
setting. If the PDF report fails with "Max attempts reached (3)," check your <<reporting-kibana-server-settings, Kibana server settings>>.
|
||||
Create a Markdown visualization and then create a PDF report. If this succeeds, increase the `xpack.reporting.queue.timeout` setting. If the
|
||||
PDF report fails with "Max attempts reached," check your <<reporting-kibana-server-settings, Kibana server settings>>.
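
As a hedged example, the timeout (in milliseconds) can be raised in kibana.yml; the value below is illustrative, not a recommendation:

[source,yaml]
--------------------------------------------------------------------------------
# Give large, slow dashboards more time to render before the job fails.
xpack.reporting.queue.timeout: 300000
--------------------------------------------------------------------------------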

[float]
==== `You must install freetype and ttf-font for Reporting to work`
Reporting using the Chromium browser relies on system packages and at least one system font. Install the appropriate fontconfig and freetype
packages for your distribution and at least one system font.
[[reporting-troubleshooting-nss-dependency]]
==== "You must install nss for Reporting to work"
Reporting using the Chromium browser relies on the Network Security Service libraries (NSS). Install the appropriate nss package for your
distribution.

[float]
==== `You must install nss for Reporting to work`
Reporting using the Chromium browser relies on the Network Security Service libraries (NSS). Install the appropriate nss package for your distribution.

[float]
==== `Unable to use Chromium sandbox. This can be disabled at your own risk with 'xpack.reporting.capture.browser.chromium.disableSandbox'`
[[reporting-troubleshooting-sandbox-dependency]]
==== "Unable to use Chromium sandbox"
Chromium uses sandboxing techniques that are built on top of operating system primitives. The Linux sandbox depends on user namespaces,
which were introduced with the 3.8 Linux kernel. However, many distributions don't have user namespaces enabled by default, or they require
the CAP_SYS_ADMIN capability.

Elastic recommends that you research the feasibility of enabling unprivileged user namespaces before disabling the sandbox. An exception
is if you are running Kibana in Docker because the container runs in a user namespace with the built-in seccomp/bpf filters.
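
If, after that research, disabling the sandbox is acceptable in your environment, the setting named in the error message goes in kibana.yml:

[source,yaml]
--------------------------------------------------------------------------------
# Disables the Chromium sandbox at your own risk; prefer enabling
# unprivileged user namespaces on the host where possible.
xpack.reporting.capture.browser.chromium.disableSandbox: true
--------------------------------------------------------------------------------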

[float]
==== `Caught error spawning Chromium`
Ensure that the `headless_shell` binary located in your Kibana data directory is owned by the user who is running Kibana, that the user has the execute permission,
and if applicable, that the filesystem is mounted with the `exec` option.

@ -10,8 +10,8 @@

`xpack.infra.sources.default.fields.tiebreaker`:: Field used to break ties between two entries with the same timestamp. Defaults to `_doc`.

`xpack.infra.sources.default.fields.host`:: Field used to identify hosts. Defaults to `beat.hostname`.
`xpack.infra.sources.default.fields.host`:: Field used to identify hosts. Defaults to `host.name`.

`xpack.infra.sources.default.fields.container`:: Field used to identify Docker containers. Defaults to `docker.container.name`.
`xpack.infra.sources.default.fields.container`:: Field used to identify Docker containers. Defaults to `container.id`.

`xpack.infra.sources.default.fields.pod`:: Field used to identify Kubernetes pods. Defaults to `kubernetes.pod.name`.
`xpack.infra.sources.default.fields.pod`:: Field used to identify Kubernetes pods. Defaults to `kubernetes.pod.uid`.
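
As a sketch, overriding these fields in kibana.yml looks like the following; the values shown are just the documented defaults written out explicitly:

[source,yaml]
--------------------------------------------------------------------------------
xpack.infra.sources.default.fields.host: host.name
xpack.infra.sources.default.fields.container: container.id
xpack.infra.sources.default.fields.pod: kubernetes.pod.uid
--------------------------------------------------------------------------------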

@ -52,4 +52,5 @@ routing requests through a load balancer or proxy).

`xpack.security.sessionTimeout`::
Sets the session duration (in milliseconds). By default, sessions stay active
until the browser is closed.
until the browser is closed. When this is set to an explicit timeout, closing the
browser still requires the user to log back in to {kib}.
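
For example, a minimal kibana.yml sketch of a thirty-minute session (the value is illustrative and expressed in milliseconds):

[source,yaml]
--------------------------------------------------------------------------------
# 30 minutes * 60 seconds * 1000 milliseconds
xpack.security.sessionTimeout: 1800000
--------------------------------------------------------------------------------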

@ -3,43 +3,62 @@
experimental[]

You can build https://vega.github.io/vega/examples/[Vega] and
https://vega.github.io/vega-lite/examples/[VegaLite] data visualizations
https://vega.github.io/vega-lite/examples/[Vega-Lite] data visualizations
into Kibana, either standalone, or on top of a map. To see Vega in action,
watch this
https://www.youtube.com/watch?v=lQGCipY3th8[short introduction video].

Currently Vega version 4.3 and Vega-Lite version 2.6 are supported.

NOTE: In Vega it is possible to load data dynamically, e.g. by setting signals as data URLs. This is not supported in Kibana as all data is fetched at once prior to passing it to the Vega renderer.

[[vega-quick-demo]]
=== Getting Started with Vega

Follow these steps to create your first Vega visualization.

. In Kibana, choose Visualize, and add Vega visualization. You should see a default graph.
. Try changing `mark` from `line` to `point`, `area`, `bar`, `circle`,
`square`, ... (see
https://vega.github.io/vega-lite/docs/mark.html#mark-def[docs])
. Try other https://vega.github.io/vega/examples/[Vega] or
https://vega.github.io/vega-lite/examples/[VegaLite] visualizations. You
may need to make URLs absolute, e.g. replace
* To experiment using sample data, first click the {kib} logo in the upper left hand corner
and then click the link next to *Sample Data*.
* Once you have data loaded, go to *Visualize*, click *+*, and select *Vega* to see an example graph.
*Note*: The default graph is written in Vega-Lite, but you can build visualizations
in either language. See <<vega-vs-vegalite, Vega vs. Vega-Lite>> for more information.
* Try changing `mark` from `line` to `point`, `area`, `bar`, `circle`,
or `square`. Check out the
https://vega.github.io/vega-lite/docs/mark.html#mark-def[Vega-Lite docs] for more information.
* Explore other available https://vega.github.io/vega/examples/[Vega] or
https://vega.github.io/vega-lite/examples/[Vega-Lite] visualizations.
*Note*: You might need to make URLs absolute, for example, replace
`"url": "data/world-110m.json"` with
`"url": "https://vega.github.io/editor/data/world-110m.json"`. (see
link:#Using%20Vega%20and%20VegaLite%20examples[notes below])
. Using https://www.npmjs.com/package/makelogs[makelogs util], generate
some logstash data and try link:public/examples/logstash[logstash
examples]. *(Do not use makelogs on a production cluster.)*
`"url": "https://vega.github.io/editor/data/world-110m.json"`.
See <<vega-using-vega-and-vegalite-examples, Vega and Vega-Lite examples>>.
* For more information on getting started, check out this https://www.elastic.co/blog/getting-started-with-vega-visualizations-in-kibana[blog post].


[[vega-vs-vegalite]]
=== Vega vs VegaLite
=== Vega vs Vega-Lite

VegaLite is a simplified version of Vega, useful to quickly get started,
but has a number of limitations. VegaLite is automatically converted
into Vega before rendering. Compare
https://github.com/nyurik/kibana-vega-vis/blob/master/examples/logstash/logstash-simple_line-vega.json[logstash-simple_line-vega]
and
https://github.com/nyurik/kibana-vega-vis/blob/master/examples/logstash/logstash-simple_line-vegalite.json[logstash-simple_line-vegalite]
(both use the same Elasticsearch logstash data). You may use
https://vega.github.io/editor/[this editor] to convert VegaLite into
Vega.
The Vega visualization in {kib} supports both Vega and Vega-Lite. You can use the
`schema` value to define which language you would like to use and its minimum
required version.

For example:

* Vega-Lite v2: `$schema: https://vega.github.io/schema/vega-lite/v2.json`
* Vega v4: `$schema: https://vega.github.io/schema/vega/v4.json`

The `schema` URL is only used for identification, and does not need to be accessible by {kib}.

Vega-Lite is a simplified version of Vega; it automates some constructions and has
much shorter specifications than Vega. Vega-Lite is automatically converted into
Vega before rendering, but it has some limitations, and there are some visualizations
that can be expressed in Vega that cannot be expressed in Vega-Lite. You can learn more
in the https://vega.github.io/vega-lite/[Vega-Lite documentation].

You can use https://vega.github.io/editor/[this editor] to convert Vega-Lite into
Vega.

When you create a Vega visualization in {kib}, you can edit the `schema`
value in the dev tools to the left of the graph to define which of the two expression
languages you would like to use (see examples above).


[[vega-querying-elasticsearch]]

@ -243,12 +262,12 @@ positioning of the map.
Use browser debugging tools (e.g. F12 or Ctrl+Shift+J in Chrome) to
inspect the `VEGA_DEBUG` variable:
* `view` - access to the Vega View object. See https://vega.github.io/vega/docs/api/debugging/[Vega Debugging Guide]
on how to inspect data and signals at runtime. For VegaLite, `VEGA_DEBUG.view.data('source_0')` gets the main data set.
on how to inspect data and signals at runtime. For Vega-Lite, `VEGA_DEBUG.view.data('source_0')` gets the main data set.
For Vega, it uses the data name as defined in your Vega spec.
* `vega_spec` - Vega JSON graph specification after some modifications by Kibana. In case
of VegaLite, this is the output of the VegaLite compiler.
* `vegalite_spec` - If this is a VegaLite graph, JSON specification of the graph before
VegaLite compilation.
of Vega-Lite, this is the output of the Vega-Lite compiler.
* `vegalite_spec` - If this is a Vega-Lite graph, JSON specification of the graph before
Vega-Lite compilation.

[[vega-data]]
==== Data

@ -275,19 +294,28 @@ to your kibana.yml file.
[[vega-useful-links]]
=== Useful Links

* https://vega.github.io/editor/[Editor] - includes examples for Vega &
VegaLite, but does not support any Kibana-specific features like
Elasticsearch requests and interactive base maps.
* VegaLite
https://vega.github.io/vega-lite/tutorials/getting_started.html[Tutorials],
https://vega.github.io/vega-lite/docs/[docs], and
https://vega.github.io/vega-lite/examples/[examples]
* Vega https://vega.github.io/vega/tutorials/[Tutorial],
https://vega.github.io/vega/docs/[docs],
https://vega.github.io/vega/examples/[examples]
==== Vega Editor
The https://vega.github.io/editor/[Vega Editor] includes examples for Vega & Vega-Lite, but does not support any
{kib}-specific features like {es} requests and interactive base maps.

==== Vega-Lite resources
* https://vega.github.io/vega-lite/tutorials/getting_started.html[Tutorials]
* https://vega.github.io/vega-lite/docs/[Docs]
* https://vega.github.io/vega-lite/examples/[Examples]

==== Vega resources
* https://vega.github.io/vega/tutorials/[Tutorials]
* https://vega.github.io/vega/docs/[Docs]
* https://vega.github.io/vega/examples/[Examples]

==== Elastic blog posts
* https://www.elastic.co/blog/getting-started-with-vega-visualizations-in-kibana[Getting Started with Vega Visualizations in Kibana]
* https://www.elastic.co/blog/custom-vega-visualizations-in-kibana[Custom Vega Visualizations in Kibana]
* https://www.elastic.co/blog/sankey-visualization-with-vega-in-kibana[Sankey Visualization with Vega in Kibana]


[[vega-using-vega-and-vegalite-examples]]
==== Using Vega and VegaLite examples
==== Using Vega and Vega-Lite examples

When using https://vega.github.io/vega/examples/[Vega] and
https://vega.github.io/vega-lite/examples/[VegaLite] examples, you may

@ -328,16 +356,16 @@ additional configuration options.
==== Sizing and positioning

[[vega-and-vegalite]]
Vega and VegaLite
===== Vega and Vega-Lite

By default, Kibana Vega graphs will use
`autosize = { type: 'fit', contains: 'padding' }` layout model for Vega
and VegaLite graphs. The `fit` model uses all available space, ignores
and Vega-Lite graphs. The `fit` model uses all available space, ignores
`width` and `height` values, but respects the padding values. You may
override this behaviour by specifying a different `autosize` value.

[[vega-on-a-map]]
Vega on a map
===== Vega on a map

All Vega graphs will ignore `autosize`, `width`, `height`, and `padding`
values, using `fit` model with zero padding.

5
kibana.d.ts
vendored
@ -25,6 +25,7 @@ export * from './target/types/type_exports';
 * All exports from TS ambient definitions (where types are added for JS source in a .d.ts file).
 */
import * as LegacyElasticsearch from './src/legacy/core_plugins/elasticsearch';
import * as LegacyKibanaPluginSpec from './src/legacy/plugin_discovery/plugin_spec/plugin_spec_options';
import * as LegacyKibanaServer from './src/legacy/server/kbn_server';

/**

@ -40,6 +41,10 @@ export namespace Legacy {
  export type SavedObjectsService = LegacyKibanaServer.SavedObjectsService;
  export type Server = LegacyKibanaServer.Server;

  export type InitPluginFunction = LegacyKibanaPluginSpec.InitPluginFunction;
  export type UiExports = LegacyKibanaPluginSpec.UiExports;
  export type PluginSpecOptions = LegacyKibanaPluginSpec.PluginSpecOptions;

  export namespace Plugins {
    export namespace elasticsearch {
      export type Plugin = LegacyElasticsearch.ElasticsearchPlugin;

51
package.json
@ -74,8 +74,8 @@
    "url": "https://github.com/elastic/kibana.git"
  },
  "resolutions": {
    "**/@types/node": "10.12.12",
    "@types/react": "16.3.14"
    "**/@types/node": "10.12.27",
    "**/typescript": "^3.3.3333"
  },
  "workspaces": {
    "packages": [

@ -95,7 +95,7 @@
  },
  "dependencies": {
    "@elastic/datemath": "5.0.2",
    "@elastic/eui": "6.10.1",
    "@elastic/eui": "9.0.2",
    "@elastic/filesaver": "1.1.2",
    "@elastic/good": "8.1.1-kibana2",
    "@elastic/numeral": "2.3.2",

@ -109,6 +109,7 @@
    "@kbn/pm": "1.0.0",
    "@kbn/test-subj-selector": "0.2.1",
    "@kbn/ui-framework": "1.0.0",
    "@types/json-stable-stringify": "^1.0.32",
    "@types/lodash.clonedeep": "^4.5.4",
    "JSONStream": "1.1.1",
    "abortcontroller-polyfill": "^1.1.9",

@ -132,6 +133,7 @@
    "color": "1.0.3",
    "commander": "2.8.1",
    "compare-versions": "3.1.0",
    "core-js": "2.5.3",
    "css-loader": "1.0.0",
    "custom-event-polyfill": "^0.3.0",
    "d3": "3.5.6",

@ -151,7 +153,7 @@
    "globby": "^8.0.1",
    "good-squeeze": "2.1.0",
    "h2o2": "^8.1.2",
    "handlebars": "4.0.5",
    "handlebars": "4.0.13",
    "hapi": "^17.5.3",
    "hjson": "3.1.0",
    "hoek": "^5.0.4",

@ -161,6 +163,7 @@
    "joi": "^13.5.2",
    "jquery": "^3.3.1",
    "js-yaml": "3.4.1",
    "json-stable-stringify": "^1.0.1",
    "json-stringify-pretty-compact": "1.0.4",
    "json-stringify-safe": "5.0.1",
    "leaflet": "1.0.3",

@ -192,11 +195,11 @@
    "pug": "^2.0.3",
    "querystring-browser": "1.0.4",
    "raw-loader": "0.5.1",
    "react": "^16.6.0",
    "react": "^16.8.0",
    "react-addons-shallow-compare": "15.6.2",
    "react-anything-sortable": "^1.7.4",
    "react-color": "^2.13.8",
    "react-dom": "^16.6.0",
    "react-dom": "^16.8.0",
    "react-grid-layout": "^0.16.2",
    "react-input-range": "^1.3.0",
    "react-markdown": "^3.1.4",

@ -234,8 +237,8 @@
    "uuid": "3.0.1",
    "val-loader": "^1.1.1",
    "validate-npm-package-name": "2.2.2",
    "vega-lib": "^3.3.1",
    "vega-lite": "^2.4.0",
    "vega-lib": "4.3.0",
    "vega-lite": "^2.6.0",
    "vega-schema-url-parser": "1.0.0",
    "vega-tooltip": "^0.9.14",
    "vision": "^5.3.3",

@ -247,8 +250,8 @@
    "yauzl": "2.7.0"
  },
  "devDependencies": {
    "@babel/parser": "^7.1.0",
    "@babel/types": "^7.0.0",
    "@babel/parser": "^7.3.4",
    "@babel/types": "^7.3.4",
    "@elastic/eslint-config-kibana": "0.15.0",
    "@elastic/eslint-plugin-kibana-custom": "1.1.0",
    "@elastic/makelogs": "^4.4.0",

@ -265,6 +268,7 @@
    "@types/boom": "^7.2.0",
    "@types/chance": "^1.0.0",
    "@types/cheerio": "^0.22.10",
    "@types/chromedriver": "^2.38.0",
    "@types/classnames": "^2.2.3",
    "@types/d3": "^3.5.41",
    "@types/dedent": "^0.7.0",

@ -275,7 +279,6 @@
    "@types/eslint": "^4.16.2",
    "@types/execa": "^0.9.0",
    "@types/fetch-mock": "7.2.1",
    "@types/json5": "^0.0.30",
    "@types/getopts": "^2.0.0",
    "@types/glob": "^5.0.35",
    "@types/globby": "^8.0.0",

@ -284,22 +287,24 @@
    "@types/has-ansi": "^3.0.0",
    "@types/hoek": "^4.1.3",
    "@types/humps": "^1.1.2",
    "@types/jest": "^23.3.1",
    "@types/jest": "^24.0.9",
    "@types/joi": "^13.4.2",
    "@types/jquery": "^3.3.6",
    "@types/js-yaml": "^3.11.1",
    "@types/json5": "^0.0.30",
    "@types/listr": "^0.13.0",
    "@types/lodash": "^3.10.1",
    "@types/minimatch": "^2.0.29",
    "@types/mocha": "^5.2.6",
    "@types/moment-timezone": "^0.5.8",
    "@types/mustache": "^0.8.31",
    "@types/node": "^10.12.12",
    "@types/node": "^10.12.27",
    "@types/opn": "^5.1.0",
    "@types/podium": "^1.0.0",
    "@types/prop-types": "^15.5.3",
    "@types/puppeteer-core": "^1.9.0",
    "@types/react": "16.3.14",
    "@types/react-dom": "^16.0.5",
    "@types/react": "^16.8.0",
    "@types/react-dom": "^16.8.0",
    "@types/react-redux": "^6.0.6",
    "@types/react-router-dom": "^4.3.1",
    "@types/react-virtualized": "^9.18.7",

@ -328,8 +333,8 @@
    "dedent": "^0.7.0",
    "delete-empty": "^2.0.0",
    "enzyme": "^3.7.0",
    "enzyme-adapter-react-16": "^1.6.0",
    "enzyme-adapter-utils": "^1.8.1",
    "enzyme-adapter-react-16": "^1.9.0",
    "enzyme-adapter-utils": "^1.10.0",
    "enzyme-to-json": "^3.3.4",
    "eslint": "^5.6.0",
    "eslint-config-prettier": "^3.1.0",

@ -360,11 +365,10 @@
    "intl-messageformat-parser": "^1.4.0",
    "is-path-inside": "^2.0.0",
    "istanbul-instrumenter-loader": "3.0.1",
    "jest": "^23.6.0",
    "jest-cli": "^23.6.0",
    "jest": "^24.1.0",
    "jest-cli": "^24.1.0",
    "jest-raw-loader": "^1.0.1",
    "jimp": "0.2.28",
    "json-stable-stringify": "^1.0.1",
    "json5": "^1.0.1",
    "karma": "3.1.4",
    "karma-chrome-launcher": "2.1.1",

@ -374,11 +378,9 @@
    "karma-junit-reporter": "1.2.0",
    "karma-mocha": "1.3.0",
    "karma-safari-launcher": "1.0.0",
    "leadfoot": "1.7.5",
    "license-checker": "^16.0.0",
    "listr": "^0.14.1",
    "load-grunt-config": "0.19.2",
    "makelogs": "^4.3.0",
    "mocha": "3.3.0",
    "multistream": "^2.1.1",
    "murmurhash3js": "3.0.1",

@ -394,6 +396,7 @@
    "proxyquire": "1.7.11",
    "regenerate": "^1.4.0",
    "sass-lint": "^1.12.1",
    "selenium-webdriver": "^4.0.0-alpha.1",
    "simple-git": "1.37.0",
    "sinon": "^5.0.7",
    "strip-ansi": "^3.0.1",

@ -411,14 +414,14 @@
    "tslint-config-prettier": "^1.15.0",
    "tslint-microsoft-contrib": "^6.0.0",
    "tslint-plugin-prettier": "^2.0.0",
    "typescript": "^3.0.3",
    "typescript": "^3.3.3333",
    "vinyl-fs": "^3.0.2",
    "xml2js": "^0.4.19",
    "xmlbuilder": "9.0.4",
    "zlib": "^1.0.5"
  },
  "engines": {
    "node": "10.15.1",
    "node": "10.15.2",
    "yarn": "^1.10.1"
  }
}

@ -100,6 +100,7 @@ module.exports = {
    strict: [ 'error', 'never' ],
    'valid-typeof': 'error',
    'wrap-iife': [ 'error', 'outside' ],
    'eol-last': ['error', 'always'],
    yoda: 'off',

    'object-curly-spacing': 'off', // overridden with babel/object-curly-spacing

@ -150,7 +151,7 @@ module.exports = {
    'jsx-a11y/role-supports-aria-props': 'error',
    'jsx-a11y/scope': 'error',
    'jsx-a11y/tabindex-no-positive': 'error',
    'jsx-a11y/label-has-associated-control': 'error',
    'jsx-a11y/label-has-associated-control': 'error',
    'react/jsx-equals-spacing': ['error', 'never'],
    'react/jsx-indent': ['error', 2],
    'react/no-will-update-set-state': 'error',

@ -19,8 +19,8 @@
  },
  "dependencies": {
    "@kbn/babel-preset": "1.0.0",
    "@babel/parser": "^7.1.3",
    "@babel/traverse": "^7.1.4",
    "@babel/parser": "^7.3.4",
    "@babel/traverse": "^7.3.4",
    "lodash": "^4.17.11"
  }
}

@ -17,10 +17,23 @@
 * under the License.
 */

// TODO: replace this when we use the method exposed by security https://github.com/elastic/kibana/pull/24616
export const isSecurityEnabled = server => {
  const kibanaSecurity = server.plugins.security;
  const esSecurity = server.plugins.xpack_main.info.feature('security');
module.exports = {
  presets: [
    require.resolve('@babel/preset-react'),
    require.resolve('@babel/preset-typescript'),
  ],
  plugins: [
    require.resolve('babel7-plugin-add-module-exports'),
    // stage 3
    require.resolve('@babel/plugin-proposal-async-generator-functions'),
    require.resolve('@babel/plugin-proposal-object-rest-spread'),

  return kibanaSecurity && esSecurity.isAvailable() && esSecurity.isEnabled();
    // the class properties proposal was merged with the private fields proposal
    // into the "class fields" proposal. Babel doesn't support this combined
    // proposal yet, which includes private field, so this transform is
    // TECHNICALLY stage 2, but for all intents and purposes it's stage 3
    //
    // See https://github.com/babel/proposals/issues/12 for progress
    require.resolve('@babel/plugin-proposal-class-properties'),
  ],
};
50
packages/kbn-babel-preset/node_preset_7.js
Normal file
@ -0,0 +1,50 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

module.exports = () => ({
  presets: [
    [
      require.resolve('@babel/preset-env'),
      {
        targets: {
          // only applies the necessary transformations based on the
          // current node.js processes version. For example: running
          // `nvm install 8 && node ./src/cli` will run kibana in node
          // version 8 and babel will stop transpiling async/await
          // because they are supported in the "current" version of node
          node: 'current',
        },

        // replaces `import "babel-polyfill"` with a list of require statements
        // for just the polyfills that the target versions don't already supply
        // on their own
        useBuiltIns: 'entry',
      },
    ],
    require('./common_preset_7'),
  ],
  plugins: [
    [
      require.resolve('babel-plugin-transform-define'),
      {
        'global.__BUILT_WITH_BABEL__': 'true'
      }
    ]
  ]
});

@ -4,12 +4,20 @@
  "version": "1.0.0",
  "license": "Apache-2.0",
  "dependencies": {
    "@babel/core": "^7.3.4",
    "@babel/plugin-proposal-async-generator-functions": "^7.2.0",
    "@babel/plugin-proposal-class-properties": "^7.3.4",
    "@babel/plugin-proposal-object-rest-spread": "^7.3.4",
    "@babel/preset-env": "^7.3.4",
    "@babel/preset-react": "^7.0.0",
    "@babel/preset-typescript": "^7.3.3",
    "babel-plugin-add-module-exports": "^0.2.1",
    "babel-plugin-transform-async-generator-functions": "^6.24.1",
    "babel-plugin-transform-class-properties": "^6.24.1",
    "babel-plugin-transform-define": "^1.3.0",
    "babel-plugin-transform-define": "^1.3.1",
    "babel-plugin-transform-object-rest-spread": "^6.26.0",
    "babel-preset-env": "^1.7.0",
    "babel-preset-react": "^6.24.1"
    "babel-preset-react": "^6.24.1",
    "babel7-plugin-add-module-exports": "npm:babel-plugin-add-module-exports@^1.0.0"
  }
}

@ -10,7 +10,7 @@
    "kbn:bootstrap": "yarn build"
  },
  "devDependencies": {
    "typescript": "^3.0.3"
    "typescript": "^3.3.3333"
  },
  "peerDependencies": {
    "joi": "^13.5.2",

@ -48,7 +48,7 @@ function numberToDuration(numberMs: number) {
  return momentDuration(numberMs);
}

export function ensureDuration(value?: Duration | string | number) {
export function ensureDuration(value: Duration | string | number) {
  if (typeof value === 'string') {
    return stringToDuration(value);
  }

@ -48,6 +48,8 @@ import {
  TypeOf,
  TypeOptions,
  UnionType,
  URIOptions,
  URIType,
} from './types';

export { ObjectType, TypeOf, Type };

@ -65,6 +67,10 @@ function string(options?: StringOptions): Type<string> {
  return new StringType(options);
}

function uri(options?: URIOptions): Type<string> {
  return new URIType(options);
}

function literal<T extends string | number | boolean>(value: T): Type<T> {
  return new LiteralType(value);
}

@ -188,6 +194,7 @@ export const schema = {
  recordOf,
  siblingRef,
  string,
  uri,
};

export type Schema = typeof schema;

@ -0,0 +1,25 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`#scheme returns error when shorter string 1`] = `"expected URI with scheme [http|https] but got [ftp://elastic.co]."`;

exports[`#scheme returns error when shorter string 2`] = `"expected URI with scheme [http|https] but got [file:///kibana.log]."`;

exports[`#validate throws when returns string 1`] = `"validator failure"`;

exports[`is required by default 1`] = `"expected value of type [string] but got [undefined]."`;

exports[`returns error when not string 1`] = `"expected value of type [string] but got [number]."`;

exports[`returns error when not string 2`] = `"expected value of type [string] but got [Array]."`;

exports[`returns error when not string 3`] = `"expected value of type [string] but got [RegExp]."`;

exports[`returns error when value is not a URI 1`] = `"value is [3domain.local] but it must be a valid URI (see RFC 3986)."`;

exports[`returns error when value is not a URI 2`] = `"value is [http://8010:0:0:0:9:500:300C:200A] but it must be a valid URI (see RFC 3986)."`;

exports[`returns error when value is not a URI 3`] = `"value is [-] but it must be a valid URI (see RFC 3986)."`;

exports[`returns error when value is not a URI 4`] = `"value is [https://example.com?baz[]=foo&baz[]=bar] but it must be a valid URI (see RFC 3986)."`;

exports[`returns error when value is not a URI 5`] = `"value is [http://aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa] but it must be a valid URI (see RFC 3986)."`;

@ -20,7 +20,7 @@
import { duration as momentDuration } from 'moment';
import { schema } from '..';

const { duration } = schema;
const { duration, object, contextRef, siblingRef } = schema;

test('returns value by default', () => {
  expect(duration().validate('123s')).toMatchSnapshot();

@ -58,6 +58,70 @@ describe('#defaultValue', () => {
    }).validate(undefined)
  ).toMatchSnapshot();
});

test('can be a function that returns compatible type', () => {
  expect(
    duration({
      defaultValue: () => 600,
    }).validate(undefined)
  ).toMatchInlineSnapshot(`"PT0.6S"`);

  expect(
    duration({
      defaultValue: () => '1h',
    }).validate(undefined)
  ).toMatchInlineSnapshot(`"PT1H"`);

  expect(
    duration({
      defaultValue: () => momentDuration(1, 'hour'),
    }).validate(undefined)
  ).toMatchInlineSnapshot(`"PT1H"`);
});

test('can be a reference to a moment.Duration', () => {
  expect(
    object({
      source: duration({ defaultValue: 600 }),
      target: duration({ defaultValue: siblingRef('source') }),
      fromContext: duration({ defaultValue: contextRef('val') }),
    }).validate(undefined, { val: momentDuration(700, 'ms') })
  ).toMatchInlineSnapshot(`
Object {
  "fromContext": "PT0.7S",
  "source": "PT0.6S",
  "target": "PT0.6S",
}
`);

  expect(
    object({
      source: duration({ defaultValue: '1h' }),
      target: duration({ defaultValue: siblingRef('source') }),
      fromContext: duration({ defaultValue: contextRef('val') }),
    }).validate(undefined, { val: momentDuration(2, 'hour') })
  ).toMatchInlineSnapshot(`
Object {
  "fromContext": "PT2H",
  "source": "PT1H",
  "target": "PT1H",
}
`);

  expect(
    object({
      source: duration({ defaultValue: momentDuration(1, 'hour') }),
      target: duration({ defaultValue: siblingRef('source') }),
      fromContext: duration({ defaultValue: contextRef('val') }),
    }).validate(undefined, { val: momentDuration(2, 'hour') })
  ).toMatchInlineSnapshot(`
Object {
  "fromContext": "PT2H",
  "source": "PT1H",
  "target": "PT1H",
}
`);
});
});

test('returns error when not string or non-safe positive integer', () => {

@ -21,20 +21,33 @@ import typeDetect from 'type-detect';
import { Duration, ensureDuration } from '../duration';
import { SchemaTypeError } from '../errors';
import { internals } from '../internals';
import { Reference } from '../references';
import { Type } from './type';

type DurationValueType = Duration | string | number;

export interface DurationOptions {
  // we need to special-case defaultValue as we want to handle string inputs too
  defaultValue?: DurationValueType | Reference<DurationValueType> | (() => DurationValueType);
  validate?: (value: Duration) => string | void;
  defaultValue?: Duration | string | number;
}

export class DurationType extends Type<Duration> {
  constructor(options: DurationOptions = {}) {
    super(internals.duration(), {
      ...options,
      defaultValue: ensureDuration(options.defaultValue),
    });
    let defaultValue;
    if (typeof options.defaultValue === 'function') {
      const originalDefaultValue = options.defaultValue;
      defaultValue = () => ensureDuration(originalDefaultValue());
    } else if (
      typeof options.defaultValue === 'string' ||
      typeof options.defaultValue === 'number'
    ) {
      defaultValue = ensureDuration(options.defaultValue);
    } else {
      defaultValue = options.defaultValue;
    }

    super(internals.duration(), { ...options, defaultValue });
  }

  protected handleError(type: string, { message, value }: Record<string, any>, path: string[]) {

@ -32,3 +32,4 @@ export { ObjectType, Props, TypeOf } from './object_type';
export { RecordOfOptions, RecordOfType } from './record_type';
export { StringOptions, StringType } from './string_type';
export { UnionType } from './union_type';
export { URIOptions, URIType } from './uri_type';

144
packages/kbn-config-schema/src/types/uri_type.test.ts
Normal file
@ -0,0 +1,144 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import { schema } from '..';

test('is required by default', () => {
  expect(() => schema.uri().validate(undefined)).toThrowErrorMatchingSnapshot();
});

test('returns value for valid URI as per RFC3986', () => {
  const uriSchema = schema.uri();

  expect(uriSchema.validate('http://tools.ietf.org/html/rfc3986')).toBe(
    'http://tools.ietf.org/html/rfc3986'
  );
  expect(uriSchema.validate('udp://3domain.local')).toBe('udp://3domain.local');
  expect(uriSchema.validate('urn:elastic:kibana')).toBe('urn:elastic:kibana');
  expect(uriSchema.validate('ftp://ftp.ietf.org/rfc/rfc3986.txt')).toBe(
    'ftp://ftp.ietf.org/rfc/rfc3986.txt'
  );
  expect(uriSchema.validate('mailto:Platform.Kibana@elastic.co')).toBe(
    'mailto:Platform.Kibana@elastic.co'
  );
  expect(uriSchema.validate('tel:+500-111-222-333')).toBe('tel:+500-111-222-333');
  expect(uriSchema.validate('file:///kibana.log')).toBe('file:///kibana.log');
  expect(uriSchema.validate('http://elastic@localhost:9200')).toBe('http://elastic@localhost:9200');
  expect(uriSchema.validate('http://elastic:changeme@localhost:9200')).toBe(
    'http://elastic:changeme@localhost:9200'
  );
  expect(uriSchema.validate('ldap://[2001:db8::7]/c=GB?objectClass?one')).toBe(
    'ldap://[2001:db8::7]/c=GB?objectClass?one'
  );

  const uriWithMaxAllowedLength = `http://${'a'.repeat(255)}`;
  expect(uriSchema.validate(uriWithMaxAllowedLength)).toBe(uriWithMaxAllowedLength);
});

test('returns error when value is not a URI', () => {
  const uriSchema = schema.uri();

  expect(() => uriSchema.validate('3domain.local')).toThrowErrorMatchingSnapshot();
  expect(() =>
    uriSchema.validate('http://8010:0:0:0:9:500:300C:200A')
  ).toThrowErrorMatchingSnapshot();
  expect(() => uriSchema.validate('-')).toThrowErrorMatchingSnapshot();
  expect(() =>
    uriSchema.validate('https://example.com?baz[]=foo&baz[]=bar')
  ).toThrowErrorMatchingSnapshot();

  const tooLongUri = `http://${'a'.repeat(256)}`;
  expect(() => uriSchema.validate(tooLongUri)).toThrowErrorMatchingSnapshot();
});

describe('#scheme', () => {
  test('returns value when URI has required scheme', () => {
    const uriSchema = schema.uri({ scheme: ['http', 'https'] });

    expect(uriSchema.validate('http://elastic.co')).toBe('http://elastic.co');
    expect(uriSchema.validate('https://elastic.co')).toBe('https://elastic.co');
  });

  test('returns error when shorter string', () => {
    const uriSchema = schema.uri({ scheme: ['http', 'https'] });

    expect(() => uriSchema.validate('ftp://elastic.co')).toThrowErrorMatchingSnapshot();
    expect(() => uriSchema.validate('file:///kibana.log')).toThrowErrorMatchingSnapshot();
  });
});

describe('#defaultValue', () => {
  test('returns default when URI is undefined', () => {
    expect(schema.uri({ defaultValue: 'http://localhost:9200' }).validate(undefined)).toBe(
      'http://localhost:9200'
    );
  });

  test('returns value when specified', () => {
    expect(
      schema.uri({ defaultValue: 'http://localhost:9200' }).validate('http://kibana.local')
    ).toBe('http://kibana.local');
  });

  test('returns value from context when context reference is specified', () => {
    expect(
      schema.uri({ defaultValue: schema.contextRef('some_uri') }).validate(undefined, {
        some_uri: 'http://kibana.local',
      })
    ).toBe('http://kibana.local');
  });
});

describe('#validate', () => {
  test('is called with input value', () => {
    let calledWith;

    const validator = (val: any) => {
      calledWith = val;
    };

    schema.uri({ validate: validator }).validate('http://kibana.local');

    expect(calledWith).toBe('http://kibana.local');
  });

  test('is not called with default value if no input', () => {
    const validate = jest.fn();

    schema.uri({ validate, defaultValue: 'http://kibana.local' }).validate(undefined);

    expect(validate).not.toHaveBeenCalled();
  });

  test('throws when returns string', () => {
    const validate = () => 'validator failure';

    expect(() =>
      schema.uri({ validate }).validate('http://kibana.local')
    ).toThrowErrorMatchingSnapshot();
  });
});

test('returns error when not string', () => {
  expect(() => schema.uri().validate(123)).toThrowErrorMatchingSnapshot();

  expect(() => schema.uri().validate([1, 2, 3])).toThrowErrorMatchingSnapshot();

  expect(() => schema.uri().validate(/abc/)).toThrowErrorMatchingSnapshot();
});
44
packages/kbn-config-schema/src/types/uri_type.ts
Normal file
@ -0,0 +1,44 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import typeDetect from 'type-detect';
import { internals } from '../internals';
import { Type, TypeOptions } from './type';

export type URIOptions = TypeOptions<string> & {
  scheme?: string | string[];
};

export class URIType extends Type<string> {
  constructor(options: URIOptions = {}) {
    super(internals.string().uri({ scheme: options.scheme }), options);
  }

  protected handleError(type: string, { value, scheme }: Record<string, unknown>) {
    switch (type) {
      case 'any.required':
      case 'string.base':
        return `expected value of type [string] but got [${typeDetect(value)}].`;
      case 'string.uriCustomScheme':
        return `expected URI with scheme [${scheme}] but got [${value}].`;
      case 'string.uri':
        return `value is [${value}] but it must be a valid URI (see RFC 3986).`;
    }
  }
}

@ -5,7 +5,11 @@
    "declarationDir": "./target/types",
    "outDir": "./target/out",
    "stripInternal": true,
    "declarationMap": true
    "declarationMap": true,
    "types": [
      "jest",
      "node"
    ]
  },
  "include": [
    "./types/joi.d.ts",

@ -15,17 +15,17 @@
  },
  "devDependencies": {
    "@babel/cli": "^7.2.3",
    "@babel/core": "^7.2.2",
    "@babel/plugin-proposal-class-properties": "^7.3.0",
    "@babel/plugin-proposal-object-rest-spread": "^7.3.1",
    "@babel/preset-env": "^7.3.1",
    "@babel/preset-typescript": "^7.1.0",
    "@babel/core": "^7.3.4",
    "@babel/plugin-proposal-class-properties": "^7.3.4",
    "@babel/plugin-proposal-object-rest-spread": "^7.3.4",
    "@babel/preset-env": "^7.3.4",
    "@babel/preset-typescript": "^7.3.3",
    "@kbn/babel-preset": "1.0.0",
    "@kbn/dev-utils": "1.0.0",
    "expect.js": "0.3.1",
    "del": "^3.0.0",
    "expect.js": "0.3.1",
    "getopts": "^2.2.3",
    "supports-color": "^6.1.0",
    "typescript": "^3.0.3"
    "typescript": "^3.3.3333"
  }
}

@ -53,7 +53,7 @@ describe('Filter Manager', function () {
    const field = getField(indexPattern, 'script number');
    expected.meta.field = 'script number';
    _.set(expected, 'script.script', {
      inline: '(' + field.script + ') == value',
      source: '(' + field.script + ') == value',
      lang: 'expression',
      params: {
        value: 5,

@ -51,7 +51,7 @@ describe('Filter Manager', function () {
    expected.meta.field = 'script number';
    _.set(expected, 'script.script', {
      lang: 'expression',
      inline: '(' + field.script + ')>=gte && (' + field.script + ')<=lte',
      source: '(' + field.script + ')>=gte && (' + field.script + ')<=lte',
      params: {
        value: '>=1 <=3',
        gte: 1,

@ -68,7 +68,7 @@ describe('Filter Manager', function () {
      `gte(() -> { ${field.script} }, params.gte) && ` +
      `lte(() -> { ${field.script} }, params.lte)`;

    const inlineScript = buildRangeFilter(field, { gte: 1, lte: 3 }, indexPattern).script.script.inline;
    const inlineScript = buildRangeFilter(field, { gte: 1, lte: 3 }, indexPattern).script.script.source;
    expect(inlineScript).to.be(expected);
  });

@ -89,7 +89,7 @@ describe('Filter Manager', function () {
      params[key] = 5;
      const filter = buildRangeFilter(field, params, indexPattern);

      expect(filter.script.script.inline).to.be(
      expect(filter.script.script.source).to.be(
        '(' + field.script + ')' + operator + key);
      expect(filter.script.script.params[key]).to.be(5);
      expect(filter.script.script.params.value).to.be(operator + 5);

@ -120,7 +120,7 @@ describe('Filter Manager', function () {
    it('does not contain a script condition for the infinite side', function () {
      const field = getField(indexPattern, 'script number');
      const script = field.script;
      expect(filter.script.script.inline).to.equal(`(${script})>=gte`);
      expect(filter.script.script.source).to.equal(`(${script})>=gte`);
    });
  });
});

@ -41,7 +41,7 @@ export function getPhraseScript(field, value) {

  return {
    script: {
      inline: script,
      source: script,
      lang: field.lang,
      params: {
        value: convertedValue

@ -98,7 +98,7 @@ export function getRangeScript(field, params) {

  return {
    script: {
      inline: script,
      source: script,
      params: knownParams,
      lang: field.lang
    }

@ -418,4 +418,81 @@ describe('kuery AST API', function () {

  });

  describe('doesKueryExpressionHaveLuceneSyntaxError', function () {
    it('should return true for Lucene ranges', function () {
      const result = ast.doesKueryExpressionHaveLuceneSyntaxError('bar: [1 TO 10]');
      expect(result).to.eql(true);
    });

    it('should return false for KQL ranges', function () {
      const result = ast.doesKueryExpressionHaveLuceneSyntaxError('bar < 1');
      expect(result).to.eql(false);
    });

    it('should return true for Lucene exists', function () {
      const result = ast.doesKueryExpressionHaveLuceneSyntaxError('_exists_: bar');
      expect(result).to.eql(true);
    });

    it('should return false for KQL exists', function () {
      const result = ast.doesKueryExpressionHaveLuceneSyntaxError('bar:*');
      expect(result).to.eql(false);
    });

    it('should return true for Lucene wildcards', function () {
      const result = ast.doesKueryExpressionHaveLuceneSyntaxError('bar: ba?');
      expect(result).to.eql(true);
    });

    it('should return false for KQL wildcards', function () {
      const result = ast.doesKueryExpressionHaveLuceneSyntaxError('bar: ba*');
      expect(result).to.eql(false);
    });

    it('should return true for Lucene regex', function () {
      const result = ast.doesKueryExpressionHaveLuceneSyntaxError('bar: /ba.*/');
      expect(result).to.eql(true);
    });

    it('should return true for Lucene fuzziness', function () {
      const result = ast.doesKueryExpressionHaveLuceneSyntaxError('bar: ba~');
      expect(result).to.eql(true);
    });

    it('should return true for Lucene proximity', function () {
      const result = ast.doesKueryExpressionHaveLuceneSyntaxError('bar: "ba"~2');
      expect(result).to.eql(true);
    });

    it('should return true for Lucene boosting', function () {
      const result = ast.doesKueryExpressionHaveLuceneSyntaxError('bar: ba^2');
      expect(result).to.eql(true);
    });

    it('should return true for Lucene + operator', function () {
      const result = ast.doesKueryExpressionHaveLuceneSyntaxError('+foo: bar');
      expect(result).to.eql(true);
    });

    it('should return true for Lucene - operators', function () {
      const result = ast.doesKueryExpressionHaveLuceneSyntaxError('-foo: bar');
      expect(result).to.eql(true);
    });

    it('should return true for Lucene && operators', function () {
      const result = ast.doesKueryExpressionHaveLuceneSyntaxError('foo: bar && baz: qux');
      expect(result).to.eql(true);
    });

    it('should return true for Lucene || operators', function () {
      const result = ast.doesKueryExpressionHaveLuceneSyntaxError('foo: bar || baz: qux');
      expect(result).to.eql(true);
    });

    it('should return true for mixed KQL/Lucene queries', function () {
      const result = ast.doesKueryExpressionHaveLuceneSyntaxError('foo: bar and (baz: qux || bag)');
      expect(result).to.eql(true);
    });
  });

});
2
packages/kbn-es-query/src/kuery/ast/ast.d.ts
vendored
|
@ -46,3 +46,5 @@ export function fromKueryExpression(
|
|||
): KueryNode;
|
||||
|
||||
export function toElasticsearchQuery(node: KueryNode, indexPattern: StaticIndexPattern): JsonObject;
|
||||
|
||||
export function doesKueryExpressionHaveLuceneSyntaxError(expression: string): boolean;
|
||||
|
|
|
@ -61,3 +61,12 @@ export function toElasticsearchQuery(node, indexPattern) {
|
|||
|
||||
return nodeTypes[node.type].toElasticsearchQuery(node, indexPattern);
|
||||
}
|
||||
|
||||
export function doesKueryExpressionHaveLuceneSyntaxError(expression) {
|
||||
try {
|
||||
fromExpression(expression, { errorOnLuceneSyntax: true }, parseKuery);
|
||||
return false;
|
||||
} catch (e) {
|
||||
return (e.message.startsWith('Lucene'));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -17,4 +17,4 @@
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
export { fromLegacyKueryExpression, fromKueryExpression, fromLiteralExpression, toElasticsearchQuery } from './ast';
|
||||
export * from './ast';
|
||||
|
|
|
@@ -5,7 +5,7 @@

// Initialization block
{
  const { parseCursor, cursorSymbol, allowLeadingWildcards = true, helpers: { nodeTypes } } = options;
  const { errorOnLuceneSyntax, parseCursor, cursorSymbol, allowLeadingWildcards = true, helpers: { nodeTypes } } = options;
  const buildFunctionNode = nodeTypes.function.buildNodeWithArgumentNodes;
  const buildLiteralNode = nodeTypes.literal.buildNode;
  const buildWildcardNode = nodeTypes.wildcard.buildNode;

@@ -26,7 +26,8 @@ start
  }

OrQuery
  = left:AndQuery Or right:OrQuery {
  = &{ return errorOnLuceneSyntax; } LuceneQuery
  / left:AndQuery Or right:OrQuery {
    const cursor = [left, right].find(node => node.type === 'cursor');
    if (cursor) return cursor;
    return buildFunctionNode('or', [left, right]);

@@ -66,7 +67,7 @@ Expression
  / ValueExpression

FieldRangeExpression
  = field:Literal Space* operator:RangeOperator Space* value:(QuotedString / UnquotedLiteral) {
  = field:Literal Space* operator:RangeOperator Space* value:Literal {
    if (value.type === 'cursor') {
      return {
        ...value,

@@ -172,12 +173,15 @@ Value

Or
  = Space+ 'or'i Space+
  / &{ return errorOnLuceneSyntax; } LuceneOr

And
  = Space+ 'and'i Space+
  / &{ return errorOnLuceneSyntax; } LuceneAnd

Not
  = 'not'i Space+
  / &{ return errorOnLuceneSyntax; } LuceneNot

Literal
  = QuotedString / UnquotedLiteral

@@ -199,7 +203,8 @@ QuotedString
  }

QuotedCharacter
  = '\\' char:[\\"] { return char; }
  = EscapedWhitespace
  / '\\' char:[\\"] { return char; }
  / !Cursor char:[^"] { return char; }

UnquotedLiteral

@@ -226,7 +231,8 @@ UnquotedLiteral
  }

UnquotedCharacter
  = EscapedSpecialCharacter
  = EscapedWhitespace
  / EscapedSpecialCharacter
  / EscapedKeyword
  / Wildcard
  / !SpecialCharacter !Keyword !Cursor char:. { return char; }

@@ -248,6 +254,11 @@ OptionalSpace
  }
  / Space*

EscapedWhitespace
  = '\\t' { return '\t'; }
  / '\\r' { return '\r'; }
  / '\\n' { return '\n'; }

EscapedSpecialCharacter
  = '\\' char:SpecialCharacter { return char; }
@@ -271,3 +282,109 @@ Space

Cursor
  = &{ return parseCursor; } '@kuery-cursor@' { return cursorSymbol; }

// Temporary error rules (to help users transition from Lucene... should be removed at some point)

LuceneOr
  = Space* '||' Space* {
    error('LuceneOr');
  }

LuceneAnd
  = Space* '&&' Space* {
    error('LuceneAnd');
  }
  / '+' {
    error('LuceneAnd');
  }

LuceneNot
  = '-' {
    error('LuceneNot');
  }
  / '!' {
    error('LuceneNot');
  }

LuceneQuery
  = LuceneFieldQuery
  / LuceneValue
  / LuceneExists

LuceneFieldQuery
  = LuceneLiteral Space* ':' Space* LuceneValue

LuceneValue
  = LuceneRange
  / LuceneWildcard
  / LuceneRegex
  / LuceneFuzzy
  / LuceneProximity
  / LuceneBoost

LuceneExists
  = '_exists_' Space* ':' Space* LuceneLiteral {
    error('LuceneExists');
  }

LuceneRange
  = RangeOperator Space* LuceneLiteral {
    error('LuceneRange');
  }
  / LuceneRangeStart Space* LuceneLiteral LuceneTo LuceneLiteral LuceneRangeEnd {
    error('LuceneRange');
  }

LuceneWildcard
  = (LuceneUnquotedCharacter / '*')* '?' LuceneWildcard* {
    error('LuceneWildcard');
  }

LuceneRegex
  = '/' [^/]* '/' {
    error('LuceneRegex');
  }

LuceneFuzzy
  = LuceneUnquotedLiteral '~' [0-9]* {
    error('LuceneFuzzy');
  }

LuceneProximity
  = QuotedString '~' [0-9]* {
    error('LuceneProximity');
  }

LuceneBoost
  = LuceneLiteral '^' [0-9]* {
    error('LuceneBoost');
  }

LuceneLiteral
  = QuotedString / LuceneUnquotedLiteral

LuceneUnquotedLiteral
  = LuceneUnquotedCharacter+

LuceneUnquotedCharacter
  = EscapedWhitespace
  / EscapedLuceneSpecialCharacter
  / !LuceneSpecialCharacter !LuceneKeyword .

LuceneKeyword
  = Or / And / LuceneOr / LuceneAnd / LuceneNot / LuceneTo

EscapedLuceneSpecialCharacter
  = '\\' char:LuceneSpecialCharacter { return char; }

LuceneSpecialCharacter
  = '+' / '-' / '=' / '>' / '<' / '!' / '(' / ')' / '{' / '}' / '[' / ']' / '^' / '"' / '~' / '*' / '?' / ':' / '\\' / '/'

LuceneTo
  = Space+ 'TO' Space+

LuceneRangeStart
  = '[' / '{'

LuceneRangeEnd
  = ']' / '}'
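The `&{ ... }` semantic predicates above are what gate the Lucene error rules: a predicate branch can only match when the embedded JavaScript returns true, so the whole error grammar is switched on by the `errorOnLuceneSyntax` parse option. A minimal, self-contained sketch of that gating technique, assuming the `pegjs` package; the grammar and the `strict` option name are made up for illustration:

```js
const peg = require('pegjs');

// A &{ ... } predicate succeeds only when its body returns true, so a
// parse-time option can switch whole branches of the grammar on or off.
// The initializer block at the top has access to the parse options.
const parser = peg.generate(`
  { const { strict } = options; }
  start
    = &{ return strict; } 'a' { return 'strict branch'; }
    / 'a' { return 'default branch'; }
`);

console.log(parser.parse('a', { strict: false })); // 'default branch'
console.log(parser.parse('a', { strict: true }));  // 'strict branch'
```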
@@ -79,6 +79,11 @@ describe('kuery node types', function () {
      expect(wildcard.test(node, 'bazbar')).to.be(false);
    });

    it('should return true even when the string has newlines or tabs', function () {
      const node = wildcard.buildNode('foo*bar');
      expect(wildcard.test(node, 'foo\nbar')).to.be(true);
      expect(wildcard.test(node, 'foo\tbar')).to.be(true);
    });
  });

  describe('hasLeadingWildcard', function () {
@@ -47,7 +47,7 @@ export function test(node, string) {
  const regex = value
    .split(wildcardSymbol)
    .map(escapeRegExp)
    .join('.*');
    .join('[\\s\\S]*');
  const regexp = new RegExp(`^${regex}$`);
  return regexp.test(string);
}
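The switch from `.*` to `[\s\S]*` is what makes the new test above pass: in a JavaScript RegExp, `.` does not match line terminators, while the `[\s\S]` character class matches any character. A quick sketch:

```js
// '.' stops at newlines, so the old pattern rejected multi-line strings.
console.log(/^foo.*bar$/.test('foo\nbar'));       // false
console.log(/^foo[\s\S]*bar$/.test('foo\nbar'));  // true

// Tabs were already fine ('.' matches '\t'); newlines were the real gap.
console.log(/^foo.*bar$/.test('foo\tbar'));       // true
```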
@@ -33,7 +33,8 @@ exports.getKibanaPath = function(config, projectRoot) {
  if (inConfig && config.kibanaPath !== '.') {
    throw new Error(
      'The `kibanaPath` option has been removed from `eslint-import-resolver-kibana`. ' +
        'During development your plugin must live in `../kibana-extra/{pluginName}` ' +
        'During development your plugin must live in `./plugins/{pluginName}` ' +
        'inside the Kibana folder or `../kibana-extra/{pluginName}` ' +
        'relative to the Kibana folder to work with this package.'
    );
  }
@@ -62,7 +62,7 @@ Messages can contain placeholders for embedding a value of a variable. For examp
}
```

Mostly such placeholders have meaningful name according to the сontent.
Mostly such placeholders have meaningful name according to the content.

### Pluralization
@@ -175,7 +175,7 @@ export function getGreetingMessage(userName) {
  return i18n.translate('hello.wonderful.world', {
    defaultMessage: 'Greetings, {name}!',
    values: { name: userName },
    context: 'This is greeting message for main screen.'
    description: 'This is greeting message for main screen.'
  });
}
```
@@ -13,19 +13,19 @@
  },
  "devDependencies": {
    "@babel/cli": "^7.2.3",
    "@babel/core": "^7.2.2",
    "@babel/plugin-proposal-class-properties": "^7.3.0",
    "@babel/plugin-proposal-object-rest-spread": "^7.3.1",
    "@babel/preset-env": "^7.3.1",
    "@babel/core": "^7.3.4",
    "@babel/plugin-proposal-class-properties": "^7.3.4",
    "@babel/plugin-proposal-object-rest-spread": "^7.3.4",
    "@babel/preset-env": "^7.3.4",
    "@babel/preset-react": "^7.0.0",
    "@babel/preset-typescript": "^7.1.0",
    "@babel/preset-typescript": "^7.3.3",
    "@kbn/dev-utils": "1.0.0",
    "@types/intl-relativeformat": "^2.1.0",
    "@types/react-intl": "^2.3.15",
    "del": "^3.0.0",
    "getopts": "^2.2.3",
    "supports-color": "^6.1.0",
    "typescript": "^3.0.3"
    "typescript": "^3.3.3333"
  },
  "dependencies": {
    "intl-format-cache": "^2.1.0",
@@ -885,7 +885,7 @@ describe('I18n engine', () => {
  });

  describe('load', () => {
    let mockFetch: jest.Mock<unknown>;
    let mockFetch: jest.Mock;
    beforeEach(() => {
      mockFetch = jest.spyOn(global as any, 'fetch').mockImplementation();
    });
@@ -52,7 +52,7 @@ function translateFormattedMessageUsingPseudoLocale(message: string) {
 * with the pseudo localization function.
 * @param child I18nProvider child component.
 */
function wrapIntlFormatMessage(child: React.ReactNode) {
function wrapIntlFormatMessage(child: React.ReactElement) {
  return React.createElement(
    injectI18n(({ intl }) => {
      const formatMessage = intl.formatMessage;

@@ -81,7 +81,7 @@ export class I18nProvider extends React.PureComponent {
        formats={i18n.getFormats()}
        textComponent={React.Fragment}
      >
        {isPseudoLocale(i18n.getLocale())
        {isPseudoLocale(i18n.getLocale()) && React.isValidElement(this.props.children)
          ? wrapIntlFormatMessage(this.props.children)
          : this.props.children}
      </IntlProvider>
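The added `React.isValidElement` guard narrows `children` to a single real element before it reaches `wrapIntlFormatMessage`, whose parameter type tightened from `ReactNode` to `ReactElement`; strings, numbers, and arrays now fall through to the plain branch. A quick illustration of what the check accepts (a sketch, not the Kibana code):

```js
import React from 'react';

// isValidElement is true only for a single created element, which is the
// only shape wrapIntlFormatMessage can safely wrap.
console.log(React.isValidElement(React.createElement('div'))); // true
console.log(React.isValidElement('plain text'));               // false
console.log(React.isValidElement([1, 2, 3]));                  // false
```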
@@ -1,16 +1,20 @@
{
  "extends": "../../tsconfig.json",
  "include": [
    "src/**/*.ts",
    "src/**/*.tsx",
    "types/intl_format_cache.d.ts",
    "types/intl_relativeformat.d.ts"
  ],
  "exclude": [
    "target"
  ],
  "compilerOptions": {
    "declaration": true,
    "declarationDir": "./target/types",
  }
}
{
  "extends": "../../tsconfig.json",
  "include": [
    "src/**/*.ts",
    "src/**/*.tsx",
    "types/intl_format_cache.d.ts",
    "types/intl_relativeformat.d.ts"
  ],
  "exclude": [
    "target"
  ],
  "compilerOptions": {
    "declaration": true,
    "declarationDir": "./target/types",
    "types": [
      "jest",
      "node"
    ]
  }
}
@@ -18,12 +18,13 @@
 */

import { FUNCTIONS_URL } from './consts';
import _ from 'lodash';

/**
 * Create a function which executes an Expression function on the
 * server as part of a larger batch of executions.
 */
export function batchedFetch({ kfetch, serialize, ms = 10 }) {
export function batchedFetch({ ajaxStream, serialize, ms = 10 }) {
  // Uniquely identifies each function call in a batch operation
  // so that the appropriate promise can be resolved / rejected later.
  let id = 0;

@@ -42,7 +43,7 @@ export function batchedFetch({ kfetch, serialize, ms = 10 }) {
  };

  const runBatch = () => {
    processBatch(kfetch, batch);
    processBatch(ajaxStream, batch);
    reset();
  };
@@ -51,12 +52,30 @@ export function batchedFetch({ kfetch, serialize, ms = 10 }) {
      timeout = setTimeout(runBatch, ms);
    }

    const id = nextId();
    const request = {
      functionName,
      args,
      context: serialize(context),
    };

    // Check to see if this is a duplicate server function.
    const duplicate = Object.values(batch).find(batchedRequest =>
      _.isMatch(batchedRequest.request, request)
    );

    // If it is, just return the promise of the duplicated request.
    if (duplicate) {
      return duplicate.future.promise;
    }

    // If not, create a new promise, id, and add it to the batched collection.
    const future = createFuture();
    const id = nextId();
    request.id = id;

    batch[id] = {
      future,
      request: { id, functionName, args, context: serialize(context) },
      request,
    };

    return future.promise;
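The dedupe check leans on `_.isMatch` doing a partial deep comparison: the incoming `request` has no `id` when it is compared, so an already-queued request that carries an extra `id` property still matches. A small sketch of that semantic (values are illustrative):

```js
const _ = require('lodash');

// Partial deep comparison: every property of the second argument must be
// present and equal in the first; extra properties (like id) are ignored.
const queued = { id: 7, functionName: 'a', args: 'aaa', context: '1' };
const incoming = { functionName: 'a', args: 'aaa', context: '1' };

console.log(_.isMatch(queued, incoming)); // true -> reuse queued.future.promise
```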
@@ -70,14 +89,15 @@ export function batchedFetch({ kfetch, serialize, ms = 10 }) {
function createFuture() {
  let resolve;
  let reject;
  const promise = new Promise((res, rej) => {
    resolve = res;
    reject = rej;
  });

  return {
    resolve(val) { return resolve(val); },
    reject(val) { return reject(val); },
    promise: new Promise((res, rej) => {
      resolve = res;
      reject = rej;
    }),
    resolve,
    reject,
    promise,
  };
}
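`createFuture` is the classic deferred pattern: hand the promise out now, keep its `resolve`/`reject` so a later batch response can settle it. A minimal usage sketch, assuming `createFuture` from the diff above is in scope:

```js
// The promise is returned to the caller immediately; the batch response
// handler settles it later via the captured resolve/reject.
const future = createFuture();

setTimeout(() => future.resolve('batched result'), 10);

future.promise.then(result => {
  console.log(result); // 'batched result'
});
```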
@@ -85,23 +105,22 @@ function createFuture() {
 * Runs the specified batch of functions on the server, then resolves
 * the related promises.
 */
async function processBatch(kfetch, batch) {
async function processBatch(ajaxStream, batch) {
  try {
    const { results } = await kfetch({
      pathname: FUNCTIONS_URL,
      method: 'POST',
    await ajaxStream({
      url: FUNCTIONS_URL,
      body: JSON.stringify({
        functions: Object.values(batch).map(({ request }) => request),
      }),
    });
      onResponse({ id, statusCode, result }) {
        const { future } = batch[id];

    results.forEach(({ id, result }) => {
      const { future } = batch[id];
      if (result.statusCode && result.err) {
        future.reject(result);
      } else {
        future.resolve(result);
      }
        if (statusCode >= 400) {
          future.reject(result);
        } else {
          future.resolve(result);
        }
      },
    });
  } catch (err) {
    Object.values(batch).forEach(({ future }) => {
72
packages/kbn-interpreter/src/public/batched_fetch.test.js

@@ -0,0 +1,72 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import { batchedFetch } from './batched_fetch';

const serialize = o => JSON.stringify(o);

const ajaxStream = jest.fn(async ({ body, onResponse }) => {
  const { functions } = JSON.parse(body);
  functions.map(({ id, functionName, context, args }) =>
    onResponse({
      id,
      statusCode: context,
      result: context >= 400 ? { err: {} } : `${functionName}${context}${args}`,
    })
  );
});

describe('batchedFetch', () => {
  it('resolves the correct promise', async () => {
    const ajax = batchedFetch({ ajaxStream, serialize, ms: 1 });

    const result = await Promise.all([
      ajax({ functionName: 'a', context: 1, args: 'aaa' }),
      ajax({ functionName: 'b', context: 2, args: 'bbb' }),
    ]);

    expect(result).toEqual(['a1aaa', 'b2bbb']);
  });

  it('dedupes duplicate calls', async () => {
    const ajax = batchedFetch({ ajaxStream, serialize, ms: 1 });

    const result = await Promise.all([
      ajax({ functionName: 'a', context: 1, args: 'aaa' }),
      ajax({ functionName: 'b', context: 2, args: 'bbb' }),
      ajax({ functionName: 'a', context: 1, args: 'aaa' }),
      ajax({ functionName: 'a', context: 1, args: 'aaa' }),
    ]);

    expect(result).toEqual(['a1aaa', 'b2bbb', 'a1aaa', 'a1aaa']);
    expect(ajaxStream).toHaveBeenCalledTimes(2);
  });

  it('rejects responses whose statusCode is >= 400', async () => {
    const ajax = batchedFetch({ ajaxStream, serialize, ms: 1 });

    const result = await Promise.all([
      ajax({ functionName: 'a', context: 500, args: 'aaa' }).catch(() => 'fail'),
      ajax({ functionName: 'b', context: 400, args: 'bbb' }).catch(() => 'fail'),
      ajax({ functionName: 'c', context: 200, args: 'ccc' }),
    ]);

    expect(result).toEqual(['fail', 'fail', 'c200ccc']);
  });
});
@@ -23,11 +23,11 @@ import { createHandlers } from './create_handlers';
import { batchedFetch } from './batched_fetch';
import { FUNCTIONS_URL } from './consts';

export async function initializeInterpreter(kfetch, typesRegistry, functionsRegistry) {
export async function initializeInterpreter({ kfetch, ajaxStream, typesRegistry, functionsRegistry }) {
  const serverFunctionList = await kfetch({ pathname: FUNCTIONS_URL });
  const types = typesRegistry.toJS();
  const { serialize } = serializeProvider(types);
  const batch = batchedFetch({ kfetch, serialize });
  const batch = batchedFetch({ ajaxStream, serialize });

  // For every server-side function, register a client-side
  // function that matches its definition, but which simply
@@ -35,26 +35,21 @@ jest.mock('./create_handlers', () => ({
describe('kbn-interpreter/interpreter', () => {
  it('loads server-side functions', async () => {
    const kfetch = jest.fn(async () => ({}));
    const ajaxStream = jest.fn(async () => ({}));

    await initializeInterpreter(kfetch, { toJS: () => ({}) }, ({ register: () => {} }));
    await initializeInterpreter({
      kfetch,
      ajaxStream,
      typesRegistry: { toJS: () => ({}) },
      functionsRegistry: { register: () => {} },
    });

    expect(kfetch).toHaveBeenCalledTimes(1);
    expect(kfetch).toHaveBeenCalledWith({ pathname: FUNCTIONS_URL });
  });

  it('registers client-side functions that pass through to the server', async () => {
    const kfetch = jest.fn(async ({ method }) => {
      if (method === 'POST') {
        return {
          results: [{
            id: 1,
            result: {
              hello: 'world',
            },
          }],
        };
      }

    const kfetch = jest.fn(async () => {
      return {
        hello: { name: 'hello' },
        world: { name: 'world' },

@@ -62,12 +57,20 @@ describe('kbn-interpreter/interpreter', () => {
    });

    const register = jest.fn();
    const ajaxStream = jest.fn(async ({ onResponse }) => {
      onResponse({ id: 1, result: { hello: 'world' } });
    });

    await initializeInterpreter(kfetch, { toJS: () => ({}) }, ({ register }));
    await initializeInterpreter({
      kfetch,
      ajaxStream,
      typesRegistry: { toJS: () => ({}) },
      functionsRegistry: { register },
    });

    expect(register).toHaveBeenCalledTimes(2);

    const [ hello, world ] = register.mock.calls.map(([fn]) => fn());
    const [hello, world] = register.mock.calls.map(([fn]) => fn());

    expect(hello.name).toEqual('hello');
    expect(typeof hello.fn).toEqual('function');

@@ -81,18 +84,19 @@ describe('kbn-interpreter/interpreter', () => {

    expect(result).toEqual({ hello: 'world' });

    expect(kfetch).toHaveBeenCalledWith({
      pathname: FUNCTIONS_URL,
      method: 'POST',
    expect(ajaxStream).toHaveBeenCalledWith({
      url: FUNCTIONS_URL,
      onResponse: expect.any(Function),
      body: JSON.stringify({
        functions: [{
          id: 1,
          functionName: 'hello',
          args,
          context,
        }]
        functions: [
          {
            functionName: 'hello',
            args,
            context,
            id: 1,
          },
        ],
      }),
    });
  });

});
@@ -18,7 +18,7 @@ To target the current development version of Kibana just use the default `maste

```sh
node scripts/generate_plugin my_plugin_name
# generates a plugin in `../kibana-extra/my_plugin_name`
# generates a plugin in `plugins/my_plugin_name`
```

To target 6.3, use the `6.x` branch (until the `6.3` branch is created).
@@ -42,7 +42,7 @@ exports.run = function run(argv) {
      dedent(chalk`
        {dim usage:} node scripts/generate-plugin {bold [name]}

        generate a fresh Kibana plugin in the ../kibana-extra/ directory
        generate a fresh Kibana plugin in the plugins/ directory
      `) + '\n'
    );
    process.exit(1);

@@ -50,8 +50,8 @@ exports.run = function run(argv) {

  const name = options._[0];
  const template = resolve(__dirname, './sao_template');
  const kibanaExtra = resolve(__dirname, '../../../kibana-extra');
  const targetPath = resolve(kibanaExtra, snakeCase(name));
  const kibanaPlugins = resolve(__dirname, '../../plugins');
  const targetPath = resolve(kibanaPlugins, snakeCase(name));

  sao({
    template: template,
@@ -102,7 +102,7 @@ module.exports = function({ name }) {
    cwd: KBN_DIR,
    stdio: 'inherit',
  }).then(() => {
    const dir = relative(process.cwd(), resolve(KBN_DIR, `../kibana-extra`, snakeCase(name)));
    const dir = relative(process.cwd(), resolve(KBN_DIR, 'plugins', snakeCase(name)));

    log.success(chalk`🎉
@@ -30,7 +30,7 @@
    "eslint": "^5.6.0",
    "eslint-plugin-babel": "^5.2.0",
    "eslint-plugin-import": "^2.14.0",
    "eslint-plugin-jest": "^21.22.1",
    "eslint-plugin-jest": "^21.26.2",
    "eslint-plugin-jsx-a11y": "^6.1.2",
    "eslint-plugin-mocha": "^5.2.0",
    "eslint-plugin-no-unsanitized": "^3.0.2",
@@ -15,6 +15,7 @@
    "del": "^2.2.2",
    "execa": "^1.0.0",
    "gulp-rename": "1.2.2",
    "globby": "^8.0.1",
    "gulp-zip": "^4.1.0",
    "inquirer": "^1.2.2",
    "minimatch": "^3.0.4",
@@ -17,32 +17,30 @@
 * under the License.
 */

const resolve = require('path').resolve;
const delimiter = require('path').delimiter;
const execFileSync = require('child_process').execFileSync;
const winCmd = require('../../../lib/win_cmd');
const globby = require('globby');

module.exports = function(plugin, run, options) {
  options = options || {};
  const kibanaBins = resolve(plugin.kibanaRoot, 'node_modules/.bin');
  const mochaSetupJs = resolve(plugin.kibanaRoot, 'test/mocha_setup.js');
  let testPaths = plugin.serverTestPatterns;
  let testPatterns = plugin.serverTestPatterns;

  // allow server test files to be overridden
  if (options.files && options.files.length) {
    testPaths = options.files;
    testPatterns = options.files;
  }

  const fullCmd = resolve(plugin.kibanaRoot, 'node_modules', '.bin', 'mocha');
  const cmd = winCmd(fullCmd);
  const args = ['--require', mochaSetupJs].concat(testPaths);
  const path = `${kibanaBins}${delimiter}${process.env.PATH}`;

  execFileSync(cmd, args, {
    cwd: plugin.root,
    stdio: ['ignore', 1, 2],
    env: Object.assign({}, process.env, {
      PATH: path,
    }),
  });
  execFileSync(
    process.execPath,
    [
      'scripts/mocha',
      ...globby.sync(testPatterns, {
        cwd: plugin.root,
        absolute: true,
      }),
    ],
    {
      cwd: plugin.kibanaRoot,
      stdio: ['ignore', 1, 2],
    }
  );
};
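The new runner expands the plugin's server test glob patterns up front with `globby.sync` and hands the resulting absolute file paths to Kibana's `scripts/mocha` as plain arguments, instead of invoking the mocha binary directly. A small sketch of the expansion step; the pattern and directory are illustrative:

```js
const globby = require('globby');

// Resolve glob patterns relative to the plugin root into absolute paths,
// ready to be passed to a child process as plain file arguments.
const files = globby.sync(['server/**/__tests__/**/*.js'], {
  cwd: '/path/to/my_plugin', // illustrative stand-in for plugin.root
  absolute: true,
});

console.log(files); // e.g. ['/path/to/my_plugin/server/__tests__/foo.js', ...]
```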