Merge branch 'master' of github.com:elastic/kibana into feature-secops

This commit is contained in:
Xavier Mouligneau 2019-04-02 16:10:16 -04:00
commit f5a545cdbe
1143 changed files with 21880 additions and 15647 deletions

View file

@ -14,18 +14,27 @@ node scripts/es snapshot --license=oss --download-only;
# download reporting browsers
(cd "x-pack" && yarn gulp prepare);
# cache the chromedriver bin
# cache the chromedriver archive
chromedriverDistVersion="$(node -e "console.log(require('chromedriver').version)")"
chromedriverPkgVersion="$(node -e "console.log(require('./package.json').devDependencies.chromedriver)")"
if [ -z "$chromedriverDistVersion" ] || [ -z "$chromedriverPkgVersion" ]; then
echo "UNABLE TO DETERMINE CHROMEDRIVER VERSIONS"
exit 1
fi
mkdir ".chromedriver"
mkdir -p .chromedriver
curl "https://chromedriver.storage.googleapis.com/$chromedriverDistVersion/chromedriver_linux64.zip" > .chromedriver/chromedriver.zip
echo "$chromedriverPkgVersion" > .chromedriver/pkgVersion
# cache the geckodriver archive
geckodriverPkgVersion="$(node -e "console.log(require('./package.json').devDependencies.geckodriver)")"
if [ -z "$geckodriverPkgVersion" ]; then
echo "UNABLE TO DETERMINE geckodriver VERSIONS"
exit 1
fi
mkdir -p ".geckodriver"
cp "node_modules/geckodriver/geckodriver.tar.gz" .geckodriver/geckodriver.tar.gz
echo "$geckodriverPkgVersion" > .geckodriver/pkgVersion
# archive cacheable directories
mkdir -p "$HOME/.kibana/bootstrap_cache"
tar -cf "$HOME/.kibana/bootstrap_cache/$branch.tar" \
@ -36,7 +45,8 @@ tar -cf "$HOME/.kibana/bootstrap_cache/$branch.tar" \
x-pack/plugins/reporting/.chromium \
test/plugin_functional/plugins/*/node_modules \
.es \
.chromedriver;
.chromedriver \
.geckodriver;
echo "created $HOME/.kibana/bootstrap_cache/$branch.tar"

View file

@ -47,6 +47,38 @@ module.exports = {
rules: {
'no-restricted-imports': [2, restrictedModules],
'no-restricted-modules': [2, restrictedModules],
'@kbn/eslint/no-restricted-paths': [
'error',
{
basePath: __dirname,
zones: [
{
target: [
'src/legacy/**/*',
'x-pack/**/*',
'!x-pack/**/*.test.*',
'src/plugins/**/(public|server)/**/*',
'src/core/(public|server)/**/*',
],
from: [
'src/core/public/**/*',
'!src/core/public/index*',
'!src/core/public/utils/**/*',
'src/core/server/**/*',
'!src/core/server/index*',
'src/plugins/**/public/**/*',
'!src/plugins/**/public/index*',
'src/plugins/**/server/**/*',
'!src/plugins/**/server/index*',
],
allowSameFolder: true,
},
],
},
],
'@kbn/eslint/module_migration': [
'error',
[
@ -97,7 +129,7 @@ module.exports = {
{
files: ['x-pack/test/functional/apps/**/*', 'x-pack/plugins/apm/**/*'],
rules: {
'@kbn/eslint/no-default-export': 'off',
'import/no-default-export': 'off',
'import/no-named-as-default': 'off',
},
},

1
.gitattributes vendored Normal file
View file

@ -0,0 +1 @@
**/__snapshots__/** linguist-generated=true

View file

@ -1,8 +1,5 @@
[[release-notes]]
= {kib} Release Notes
++++
<titleabbrev>Release Notes</titleabbrev>
++++
= Release Notes
[partintro]
--

View file

@ -43,7 +43,7 @@ PUT api/logstash/pipeline/hello-world
{
"pipeline": "input { stdin {} } output { stdout {} }",
"settings": {
"queue.type": "persistent"
"queue.type": "persisted"
}
}
--------------------------------------------------

View file

@ -1,3 +1,5 @@
[role="xpack"]
[[canvas]]
= Canvas

View file

@ -1,3 +1,4 @@
[role="xpack"]
[[canvas-client-functions]]
=== Canvas client functions

View file

@ -1,3 +1,4 @@
[role="xpack"]
[[canvas-common-functions]]
=== Canvas common functions

View file

@ -1,3 +1,4 @@
[role="xpack"]
[[canvas-function-reference]]
== Canvas function reference

View file

@ -1,3 +1,4 @@
[role="xpack"]
[[canvas-getting-started]]
== Getting started with Canvas

View file

@ -1,3 +1,4 @@
[role="xpack"]
[[canvas-server-functions]]
=== Canvas server functions

View file

@ -1,4 +1,4 @@
[role="xpack"]
[[canvas-tinymath-functions]]
=== TinyMath functions

View file

@ -1,3 +1,4 @@
[role="xpack"]
[[canvas-workpad]]
=== Using the workpad

View file

@ -5,6 +5,7 @@
* <<development-dependencies>>
* <<development-modules>>
* <<development-elasticsearch>>
* <<development-unit-tests>>
* <<development-functional-tests>>
include::core/development-basepath.asciidoc[]
@ -15,4 +16,6 @@ include::core/development-modules.asciidoc[]
include::core/development-elasticsearch.asciidoc[]
include::core/development-unit-tests.asciidoc[]
include::core/development-functional-tests.asciidoc[]

View file

@ -0,0 +1,83 @@
[[development-unit-tests]]
=== Unit Testing
We use unit tests to make sure that individual software units of {kib} perform as they were designed to.
[float]
=== Current Frameworks
{kib} is migrating unit testing from `Mocha` to `Jest`. Legacy unit tests still exist in `Mocha` but all new unit tests should be written in `Jest`.
[float]
==== Mocha (legacy)
Mocha tests are contained in `__tests__` directories.
*Running Mocha Unit Tests*
["source","shell"]
-----------
yarn test:mocha
-----------
[float]
==== Jest
Jest tests are stored in the same directory as source code files with the `.test.{js,ts,tsx}` suffix.
*Running Jest Unit Tests*
["source","shell"]
-----------
yarn test:jest
-----------
[float]
===== Writing Jest Unit Tests
In order to write those tests there are two main things you need to be aware of.
The first one is the difference between `jest.mock` and `jest.doMock`,
and the second one is our `jest mocks file pattern`. As we are running `js` and `ts`
test files with `babel-jest`, both techniques are needed,
especially for the tests implemented in TypeScript, in order to benefit from the
type auto-inference feature.
[float]
===== Jest.mock vs Jest.doMock
Both methods are essentially the same at their roots; however, the `jest.mock`
calls will get hoisted to the top of the file and can only reference variables
prefixed with `mock`. On the other hand, `jest.doMock` won't be hoisted and can
reference pretty much any variable we want; however, we have to ensure those referenced
variables are instantiated at the time we need them, which leads us to the next
section where we'll talk about our jest mock files pattern.
[float]
===== Jest Mock Files Pattern
Especially in TypeScript it is pretty common to have `jest.doMock` calls in
unit tests which reference, for example, imported types. An error will be
thrown from doing that and the test will fail. The reason behind that
is that even though the `jest.doMock` call isn't hoisted by `babel-jest`, the
import with the types we are referencing will be hoisted to the top, and at the
time we call the function that variable will not be defined.
In order to prevent that, we developed a protocol that should be followed:
- Each module could provide a standard mock in `mymodule.mock.ts` in case
there are other tests that could benefit from using definitions here.
This file would not have any `jest.mock` calls, just dummy objects.
- Each test defines its mocks in `mymodule.test.mocks.ts`. This file
could import relevant mocks from the generalised module's mocks
file `(*.mock.ts)` and call `jest.mock` for each of them. If there is
any relevant dummy mock objects to generalise (and to be used by
other tests), the dummy objects could be defined directly on this file.
- Each test would import its mocks from the test mocks
file `mymodule.test.mocks.ts`. `mymodule.test.ts` has an import
like: `import * as Mocks from './mymodule.test.mocks'`,
`import { mockX } from './mymodule.test.mocks'`,
or just `import './mymodule.test.mocks'` if there isn't anything
exported to be used.

View file

@ -3,12 +3,16 @@
This tutorial requires three data sets:
* The complete works of William Shakespeare, suitably parsed into fields. Download
https://download.elastic.co/demos/kibana/gettingstarted/shakespeare_6.0.json[`shakespeare.json`].
* A set of fictitious accounts with randomly generated data. Download
https://download.elastic.co/demos/kibana/gettingstarted/accounts.zip[`accounts.zip`].
* A set of randomly generated log files. Download
https://download.elastic.co/demos/kibana/gettingstarted/logs.jsonl.gz[`logs.jsonl.gz`].
* The complete works of William Shakespeare, suitably parsed into fields
* A set of fictitious accounts with randomly generated data
* A set of randomly generated log files
Create a new working directory where you want to download the files. From that directory, run the following commands:
[source,shell]
curl -O https://download.elastic.co/demos/kibana/gettingstarted/8.x/shakespeare.json
curl -O https://download.elastic.co/demos/kibana/gettingstarted/8.x/accounts.zip
curl -O https://download.elastic.co/demos/kibana/gettingstarted/8.x/logs.jsonl.gz
Two of the data sets are compressed. To extract the files, use these commands:
@ -73,16 +77,14 @@ In Kibana *Dev Tools > Console*, set up a mapping for the Shakespeare data set:
[source,js]
PUT /shakespeare
{
"mappings": {
"doc": {
"properties": {
"mappings": {
"properties": {
"speaker": {"type": "keyword"},
"play_name": {"type": "keyword"},
"line_id": {"type": "integer"},
"speech_number": {"type": "integer"}
}
}
}
}
}
//CONSOLE
@ -100,13 +102,11 @@ as geographic locations by applying the `geo_point` type.
PUT /logstash-2015.05.18
{
"mappings": {
"log": {
"properties": {
"geo": {
"properties": {
"coordinates": {
"type": "geo_point"
}
"properties": {
"geo": {
"properties": {
"coordinates": {
"type": "geo_point"
}
}
}
@ -120,13 +120,11 @@ PUT /logstash-2015.05.18
PUT /logstash-2015.05.19
{
"mappings": {
"log": {
"properties": {
"geo": {
"properties": {
"coordinates": {
"type": "geo_point"
}
"properties": {
"geo": {
"properties": {
"coordinates": {
"type": "geo_point"
}
}
}
@ -140,13 +138,11 @@ PUT /logstash-2015.05.19
PUT /logstash-2015.05.20
{
"mappings": {
"log": {
"properties": {
"geo": {
"properties": {
"coordinates": {
"type": "geo_point"
}
"properties": {
"geo": {
"properties": {
"coordinates": {
"type": "geo_point"
}
}
}
@ -165,13 +161,13 @@ API to load the data sets:
[source,shell]
curl -H 'Content-Type: application/x-ndjson' -XPOST 'localhost:9200/bank/account/_bulk?pretty' --data-binary @accounts.json
curl -H 'Content-Type: application/x-ndjson' -XPOST 'localhost:9200/shakespeare/doc/_bulk?pretty' --data-binary @shakespeare_6.0.json
curl -H 'Content-Type: application/x-ndjson' -XPOST 'localhost:9200/shakespeare/_bulk?pretty' --data-binary @shakespeare.json
curl -H 'Content-Type: application/x-ndjson' -XPOST 'localhost:9200/_bulk?pretty' --data-binary @logs.jsonl
Or for Windows users, in Powershell:
[source,shell]
Invoke-RestMethod "http://localhost:9200/bank/account/_bulk?pretty" -Method Post -ContentType 'application/x-ndjson' -InFile "accounts.json"
Invoke-RestMethod "http://localhost:9200/shakespeare/doc/_bulk?pretty" -Method Post -ContentType 'application/x-ndjson' -InFile "shakespeare_6.0.json"
Invoke-RestMethod "http://localhost:9200/shakespeare/_bulk?pretty" -Method Post -ContentType 'application/x-ndjson' -InFile "shakespeare.json"
Invoke-RestMethod "http://localhost:9200/_bulk?pretty" -Method Post -ContentType 'application/x-ndjson' -InFile "logs.jsonl"
These commands might take some time to execute, depending on the available computing resources.
@ -187,8 +183,8 @@ Your output should look similar to this:
[source,shell]
health status index pri rep docs.count docs.deleted store.size pri.store.size
yellow open bank 5 1 1000 0 418.2kb 418.2kb
yellow open shakespeare 5 1 111396 0 17.6mb 17.6mb
yellow open logstash-2015.05.18 5 1 4631 0 15.6mb 15.6mb
yellow open logstash-2015.05.19 5 1 4624 0 15.7mb 15.7mb
yellow open logstash-2015.05.20 5 1 4750 0 16.4mb 16.4mb
yellow open bank 1 1 1000 0 418.2kb 418.2kb
yellow open shakespeare 1 1 111396 0 17.6mb 17.6mb
yellow open logstash-2015.05.18 1 1 4631 0 15.6mb 15.6mb
yellow open logstash-2015.05.19 1 1 4624 0 15.7mb 15.7mb
yellow open logstash-2015.05.20 1 1 4750 0 16.4mb 16.4mb

View file

@ -1,9 +1,6 @@
[role="xpack"]
[[xpack-graph]]
= Graphing Connections in Your Data
++++
<titleabbrev>Graph</titleabbrev>
++++
[partintro]
--
@ -66,6 +63,7 @@ multi-node clusters and scales with your Elasticsearch deployment.
Advanced options let you control how your data is sampled and summarized.
You can also set timeouts to prevent graph queries from adversely
affecting the cluster.
--
include::getting-started.asciidoc[]

View file

@ -41,9 +41,6 @@ working on big documents. Set this property to `false` to disable highlighting.
`doc_table:hideTimeColumn`:: Hide the 'Time' column in Discover and in all Saved Searches on Dashboards.
`search:includeFrozen`:: Will include {ref}/frozen-indices.html[frozen indices] in results if enabled. Searching through frozen indices
might increase the search time.
`courier:maxSegmentCount`:: Kibana splits requests in the Discover app into segments to limit the size of requests sent to
the Elasticsearch cluster. This setting constrains the length of the segment list. Long segment lists can significantly
increase request processing time.
`courier:ignoreFilterIfFieldNotInIndex`:: Set this property to `true` to skip filters that apply to fields that don't exist in a visualization's index. Useful when dashboards consist of visualizations from multiple index patterns.
`courier:maxConcurrentShardRequests`:: Controls the {ref}/search-multi-search.html[max_concurrent_shard_requests] setting used for _msearch requests sent by Kibana. Set to 0 to disable this config and use the Elasticsearch default.
`fields:popularLimit`:: This setting governs how many of the top most popular fields are shown.

View file

@ -1,3 +1,4 @@
[role="xpack"]
[[adding-policy-to-index]]
=== Adding a policy to an index

View file

@ -1,3 +1,4 @@
[role="xpack"]
[[creating-index-lifecycle-policies]]
=== Creating an index lifecycle policy

View file

@ -1,3 +1,4 @@
[role="xpack"]
[[example-using-index-lifecycle-policy]]
=== Example of using an index lifecycle policy

View file

@ -1,3 +1,4 @@
[role="xpack"]
[[index-lifecycle-policies]]
== Index lifecycle policies

View file

@ -1,3 +1,4 @@
[role="xpack"]
[[managing-index-lifecycle-policies]]
=== Managing index lifecycle policies

View file

@ -1,3 +1,4 @@
[role="xpack"]
[[managing-indices]]
== Managing Indices

View file

@ -1,3 +1,4 @@
[role="xpack"]
[[data-rollups]]
== Working with rollup indices
@ -14,7 +15,7 @@ historical data for use in visualizations and reports.
Visualize and view it in a dashboard
[role="xpack"]
[[create-and-manage-rollup-job]]
=== Create and manage rollup jobs

View file

@ -1,3 +1,4 @@
[role="xpack"]
[[visualize-rollup-data]]
=== Create a visualization using rolled up data

View file

@ -1,3 +1,4 @@
[role="xpack"]
[[heatmap-layer]]
== Heat map layer

View file

@ -1,3 +1,4 @@
[role="xpack"]
[[maps]]
= Maps

View file

@ -1,3 +1,4 @@
[role="xpack"]
[[maps-getting-started]]
== Getting started with Maps
@ -28,7 +29,8 @@ In this tutorial, you'll learn to:
* Use symbols, colors, and labels to style a layer
* Create layers for {es} data
[role="xpack"]
[[maps-create]]
=== Creating a new map
The first thing to do is to create a new map.
@ -41,7 +43,8 @@ A new map is created using a base tile layer.
[role="screenshot"]
image::maps/images/gs_create_new_map.png[]
[role="xpack"]
[[maps-add-choropleth-layer]]
=== Adding a choropleth layer
Now that you have a map, you'll want to add layers to it.
@ -86,6 +89,8 @@ Your map now looks like this:
[role="screenshot"]
image::maps/images/gs_add_cloropeth_layer.png[]
[role="xpack"]
[[maps-add-elasticsearch-layer]]
=== Adding layers for {es} data
You'll add two layers for {es} data. The first layer displays documents, and the
@ -153,6 +158,8 @@ Your map now looks like this:
[role="screenshot"]
image::maps/images/gs_add_es_layer.png[]
[role="xpack"]
[[maps-save]]
=== Saving the map
Now that your map is complete, you'll want to save it so others can use it.

View file

@ -1,3 +1,4 @@
[role="xpack"]
[[terms-join]]
=== Terms join

View file

@ -1,3 +1,4 @@
[role="xpack"]
[[tile-layer]]
== Tile layer

View file

@ -1,3 +1,4 @@
[role="xpack"]
[[vector-layer]]
== Vector layer

View file

@ -1,3 +1,4 @@
[role="xpack"]
[[vector-style]]
=== Vector style

View file

@ -11,11 +11,20 @@ coming[8.0.0]
See also <<release-highlights>> and <<release-notes>>.
////
The following section is re-used in the Installation and Upgrade Guide
[[breaking_80_notable]]
=== Notable breaking changes
////
* <<breaking_80_setting_changes>>
//NOTE: The notable-breaking-changes tagged regions are re-used in the
//Installation and Upgrade Guide
[float]
[[breaking_80_setting_changes]]
=== Settings changes
// tag::notable-breaking-changes[]
[float]
==== Default logging timezone is now the system's timezone
*Details:* In prior releases the timezone used in logs defaulted to UTC. We now use the host machine's timezone by default.
*Impact:* To restore the previous behavior, in kibana.yml set `logging.timezone: UTC`.
// end::notable-breaking-changes[]

View file

@ -1,6 +1,7 @@
[role="xpack"]
[[elasticsearch-metrics]]
== {es} Monitoring Metrics
[subs="attributes"]
++++
<titleabbrev>{es} Metrics</titleabbrev>
++++

View file

@ -1,6 +1,7 @@
[role="xpack"]
[[kibana-page]]
== {kib} Monitoring Metrics
[subs="attributes"]
++++
<titleabbrev>{kib} Metrics</titleabbrev>
++++

View file

@ -1,6 +1,7 @@
[role="xpack"]
[[monitoring-metricbeat]]
=== Collecting {kib} monitoring data with {metricbeat}
[subs="attributes"]
++++
<titleabbrev>Collecting monitoring data with {metricbeat}</titleabbrev>
++++

View file

@ -1,8 +1,5 @@
[[release-highlights]]
= {kib} Release Highlights
++++
<titleabbrev>Release Highlights</titleabbrev>
++++
= Release Highlights
[partintro]
--

View file

@ -1,11 +1,11 @@
[role="xpack"]
[[automating-report-generation]]
== Automating Report Generation
You can automatically generate reports with a watch, or by submitting
You can automatically generate reports with {watcher}, or by submitting
HTTP POST requests from a script.
To automatically generate reports with a watch, you need to configure
{watcher} to trust the Kibana servers certificate. For more information,
{watcher} to trust the {kib} server's certificate. For more information,
see <<securing-reporting, Securing Reporting>>.
include::report-intervals.asciidoc[]
@ -13,27 +13,28 @@ include::report-intervals.asciidoc[]
To get the URL for triggering a report generation during a given time period:
. Load the saved object.
. Use the time-picker to specify a relative or absolute time period.
. Click *Reporting* in the Kibana toolbar.
. Copy the displayed **Generation URL**.
. Use the timepicker to specify a relative or absolute time period.
. Click *Share* in the Kibana toolbar.
. Select *PDF Reports*.
. Click **Copy POST URL**.
NOTE: The response from this request will be JSON, and will contain a `path` property
with a URL to use to download the generated report. When requesting that path,
you will get a 503 response if it's not completed yet. In this case, retry after the
number of seconds in the `Retry-After` header in the response until you get the PDF.
number of seconds in the `Retry-After` header in the response until the PDF is returned.
To configure a watch to email reports, you use the `reporting` attachment type
in an `email` action. For more information, see
{xpack-ref}/actions-email.html#configuring-email[Configuring Email Accounts].
{stack-ov}/actions-email.html#configuring-email[Configuring Email Accounts].
include::watch-example.asciidoc[]
For more information about configuring watches, see
{xpack-ref}/how-watcher-works.html[How Watcher Works].
{stack-ov}/how-watcher-works.html[How Watcher Works].
== Deprecated Report URLs
The following is deprecated in 6.0, and you should now use Kibana to get the URL for a
The following is deprecated in 6.0, and you should now use {kib} to get the URL for a
particular report.
You may request PDF reports optimized for printing through three {reporting} endpoints:

View file

@ -49,5 +49,4 @@ image:reporting/images/share-button.png["Reporting Button",link="share-button.pn
=== Generating a Report Automatically
If you want to automatically generate reports from a script or with
{watcher}, use the displayed Generation URL. For more information, see
<<automating-report-generation, Automating Report Generation>>
{watcher}, see <<automating-report-generation, Automating Report Generation>>

View file

@ -73,8 +73,7 @@ xpack.security.sessionTimeout: 600000
. Restart {kib}.
[[kibana-roles]]
. Choose an authentication mechanism and grant users the privileges they need to
. [[kibana-roles]]Choose an authentication mechanism and grant users the privileges they need to
use {kib}.
+
--

View file

@ -96,7 +96,7 @@ Some example translations are shown here:
`KIBANA_DEFAULTAPPID`:: `kibana.defaultAppId`
`XPACK_MONITORING_ENABLED`:: `xpack.monitoring.enabled`
In general, any setting listed in <<settings>> or <<settings-xpack-kb>> can be
In general, any setting listed in <<settings>> can be
configured with this technique.
These variables can be set with +docker-compose+ like this:

View file

@ -80,7 +80,7 @@ requests to check Elasticsearch for an updated list of nodes.
`elasticsearch.sniffOnStart:`:: *Default: false* Attempt to find other
Elasticsearch nodes on startup.
`elasticsearch.sniffOnConectionFault:`:: *Default: false* Update the list of
`elasticsearch.sniffOnConnectionFault:`:: *Default: false* Update the list of
Elasticsearch nodes immediately following a connection fault.
`elasticsearch.ssl.alwaysPresentCertificate:`:: *Default: false* Controls
@ -118,6 +118,9 @@ password that the Kibana server uses to perform maintenance on the Kibana index
at startup. Your Kibana users still need to authenticate with Elasticsearch,
which is proxied through the Kibana server.
`interpreter.enableInVisualize`:: *Default: true* Enables use of interpreter in
Visualize.
`kibana.defaultAppId:`:: *Default: "discover"* The default application to load.
`kibana.index:`:: *Default: ".kibana"* Kibana uses an index in Elasticsearch to
@ -300,4 +303,4 @@ include::{docdir}/settings/monitoring-settings.asciidoc[]
include::{docdir}/settings/reporting-settings.asciidoc[]
include::secure-settings.asciidoc[]
include::{docdir}/settings/security-settings.asciidoc[]
include::{docdir}/settings/spaces-settings.asciidoc[]
include::{docdir}/settings/spaces-settings.asciidoc[]

View file

@ -90,13 +90,15 @@
"**/@types/*/**",
"**/grunt-*",
"**/grunt-*/**",
"x-pack/typescript",
"kbn_tp_*/**"
"x-pack/typescript"
]
},
"dependencies": {
"@babel/core": "^7.3.4",
"@babel/polyfill": "^7.2.5",
"@babel/register": "^7.0.0",
"@elastic/datemath": "5.0.2",
"@elastic/eui": "9.5.0",
"@elastic/eui": "9.7.1",
"@elastic/filesaver": "1.1.2",
"@elastic/good": "8.1.1-kibana2",
"@elastic/numeral": "2.3.2",
@ -124,10 +126,7 @@
"angular-sanitize": "1.6.5",
"angular-sortable-view": "0.0.15",
"autoprefixer": "^9.1.0",
"babel-core": "6.26.3",
"babel-loader": "7.1.5",
"babel-polyfill": "6.26.0",
"babel-register": "6.26.0",
"babel-loader": "8.0.5",
"bluebird": "3.5.3",
"boom": "^7.2.0",
"brace": "0.11.1",
@ -174,7 +173,7 @@
"leaflet-responsive-popup": "0.2.0",
"leaflet-vega": "^0.8.6",
"leaflet.heat": "0.2.0",
"less": "2.7.1",
"less": "^2.7.3",
"less-loader": "4.1.0",
"lodash": "npm:@elastic/lodash@3.10.1-kibana1",
"lodash.clonedeep": "^4.5.0",
@ -264,7 +263,7 @@
"@octokit/rest": "^15.10.0",
"@types/angular": "1.6.50",
"@types/angular-mocks": "^1.7.0",
"@types/babel-core": "^6.25.5",
"@types/babel__core": "^7.1.0",
"@types/bluebird": "^3.1.1",
"@types/boom": "^7.2.0",
"@types/chance": "^1.0.0",
@ -277,7 +276,7 @@
"@types/delete-empty": "^2.0.0",
"@types/elasticsearch": "^5.0.30",
"@types/enzyme": "^3.1.12",
"@types/eslint": "^4.16.2",
"@types/eslint": "^4.16.6",
"@types/execa": "^0.9.0",
"@types/fetch-mock": "7.2.1",
"@types/getopts": "^2.0.0",
@ -295,6 +294,7 @@
"@types/json5": "^0.0.30",
"@types/listr": "^0.13.0",
"@types/lodash": "^3.10.1",
"@types/lru-cache": "^5.1.0",
"@types/minimatch": "^2.0.29",
"@types/mocha": "^5.2.6",
"@types/moment-timezone": "^0.5.8",
@ -322,8 +322,8 @@
"@types/zen-observable": "^0.8.0",
"angular-mocks": "1.4.7",
"archiver": "^3.0.0",
"babel-eslint": "^9.0.0",
"babel-jest": "^23.6.0",
"babel-eslint": "^10.0.1",
"babel-jest": "^24.1.0",
"backport": "4.4.1",
"chai": "3.5.0",
"chance": "1.0.10",
@ -337,20 +337,21 @@
"enzyme-adapter-react-16": "^1.9.0",
"enzyme-adapter-utils": "^1.10.0",
"enzyme-to-json": "^3.3.4",
"eslint": "^5.6.0",
"eslint-config-prettier": "^3.1.0",
"eslint-plugin-babel": "^5.2.0",
"eslint-plugin-import": "^2.14.0",
"eslint-plugin-jest": "^21.26.2",
"eslint-plugin-jsx-a11y": "^6.1.2",
"eslint-plugin-mocha": "^5.2.0",
"eslint": "^5.15.1",
"eslint-config-prettier": "^4.1.0",
"eslint-plugin-babel": "^5.3.0",
"eslint-plugin-import": "^2.16.0",
"eslint-plugin-jest": "^22.3.0",
"eslint-plugin-jsx-a11y": "^6.2.1",
"eslint-plugin-mocha": "^5.3.0",
"eslint-plugin-no-unsanitized": "^3.0.2",
"eslint-plugin-prefer-object-spread": "^1.2.1",
"eslint-plugin-prettier": "^2.6.2",
"eslint-plugin-react": "^7.11.1",
"eslint-plugin-prettier": "^3.0.1",
"eslint-plugin-react": "^7.12.4",
"eslint-plugin-react-hooks": "^1.6.0",
"faker": "1.1.0",
"fetch-mock": "7.3.0",
"geckodriver": "1.12.2",
"geckodriver": "^1.16.1",
"getopts": "2.0.0",
"grunt": "1.0.3",
"grunt-cli": "^1.2.0",
@ -358,7 +359,7 @@
"grunt-karma": "2.0.0",
"grunt-peg": "^2.0.1",
"grunt-run": "0.7.0",
"gulp-babel": "^7.0.1",
"gulp-babel": "^8.0.0",
"gulp-sourcemaps": "2.6.4",
"has-ansi": "^3.0.0",
"image-diff": "1.6.0",
@ -408,9 +409,6 @@
"supertest": "^3.1.0",
"supertest-as-promised": "^4.0.2",
"tree-kill": "^1.1.0",
"ts-jest": "^23.1.4",
"ts-loader": "^5.2.2",
"ts-node": "^7.0.1",
"tslint": "^5.11.0",
"tslint-config-prettier": "^1.15.0",
"tslint-microsoft-contrib": "^6.0.0",

View file

@ -1,13 +1,16 @@
{
"presets": [["env", {
"targets": {
"node": "current",
"browsers": [
"last 2 versions",
"> 5%",
"Safari 7",
]
"presets": [
["@babel/preset-env", {
"targets": {
"node": "current",
"browsers": [
"last 2 versions",
"> 5%",
"Safari 7"
]
}
}
}]],
]
],
"plugins": ["add-module-exports"]
}
}

View file

@ -11,9 +11,9 @@
"kbn:watch": "yarn build --watch"
},
"devDependencies": {
"babel-cli": "^6.26.0",
"babel-plugin-add-module-exports": "^0.2.1",
"babel-preset-env": "^1.7.0",
"@babel/cli": "^7.2.3",
"@babel/preset-env": "^7.3.4",
"babel-plugin-add-module-exports": "^1.0.0",
"moment": "^2.13.0"
},
"dependencies": {

View file

@ -10,6 +10,7 @@ module.exports = {
'mocha',
'babel',
'react',
'react-hooks',
'import',
'no-unsanitized',
'prefer-object-spread',
@ -127,6 +128,8 @@ module.exports = {
arrow: true,
}],
'react/jsx-first-prop-new-line': ['error', 'multiline-multiprop'],
'react-hooks/rules-of-hooks': 'error', // Checks rules of Hooks
'react-hooks/exhaustive-deps': 'warn', // Checks effect dependencies
'jsx-a11y/accessible-emoji': 'error',
'jsx-a11y/alt-text': 'error',
'jsx-a11y/anchor-has-content': 'error',

View file

@ -15,15 +15,16 @@
},
"homepage": "https://github.com/elastic/eslint-config-kibana#readme",
"peerDependencies": {
"babel-eslint": "^9.0.0",
"eslint": "^5.6.0",
"eslint-plugin-babel": "^5.2.0",
"eslint-plugin-jsx-a11y": "^6.1.2",
"eslint-plugin-import": "^2.14.0",
"eslint-plugin-jest": "^21.22.1",
"eslint-plugin-mocha": "^5.2.0",
"babel-eslint": "^10.0.1",
"eslint": "^5.14.1",
"eslint-plugin-babel": "^5.3.0",
"eslint-plugin-jsx-a11y": "^6.2.1",
"eslint-plugin-import": "^2.16.0",
"eslint-plugin-jest": "^22.3.0",
"eslint-plugin-mocha": "^5.3.0",
"eslint-plugin-no-unsanitized": "^3.0.2",
"eslint-plugin-prefer-object-spread": "^1.2.1",
"eslint-plugin-react": "^7.11.1"
"eslint-plugin-react": "^7.12.4",
"eslint-plugin-react-hooks": "^1.6.0"
}
}

View file

@ -15,7 +15,7 @@
"kbn:watch": "yarn build --watch"
},
"devDependencies": {
"babel-cli": "^6.26.0"
"@babel/cli": "^7.2.3"
},
"dependencies": {
"@kbn/babel-preset": "1.0.0",

View file

@ -19,20 +19,33 @@
module.exports = {
presets: [
require.resolve('babel-preset-react'),
require.resolve('@babel/preset-typescript'),
require.resolve('@babel/preset-react')
],
plugins: [
require.resolve('babel-plugin-add-module-exports'),
// stage 3
require.resolve('babel-plugin-transform-async-generator-functions'),
require.resolve('babel-plugin-transform-object-rest-spread'),
// the class properties proposal was merged with the private fields proposal
// The class properties proposal was merged with the private fields proposal
// into the "class fields" proposal. Babel doesn't support this combined
// proposal yet, which includes private field, so this transform is
// TECHNICALLY stage 2, but for all intents and purposes it's stage 3
//
// See https://github.com/babel/proposals/issues/12 for progress
require.resolve('babel-plugin-transform-class-properties'),
require.resolve('@babel/plugin-proposal-class-properties'),
],
overrides: [
{
// Babel 7 doesn't support the namespace feature on typescript code.
// With namespaces only used for type declarations, we can securely
// strip them off for babel on x-pack infra plugin
//
// See https://github.com/babel/babel/issues/8244#issuecomment-466548733
test: /x-pack[\/\\]plugins[\/\\]infra[\/\\].*[\/\\]graphql/,
plugins: [
[
require.resolve('babel-plugin-typescript-strip-namespaces'),
],
]
}
]
};

View file

@ -1,39 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
module.exports = {
presets: [
require.resolve('@babel/preset-react'),
require.resolve('@babel/preset-typescript'),
],
plugins: [
require.resolve('babel7-plugin-add-module-exports'),
// stage 3
require.resolve('@babel/plugin-proposal-async-generator-functions'),
require.resolve('@babel/plugin-proposal-object-rest-spread'),
// the class properties proposal was merged with the private fields proposal
// into the "class fields" proposal. Babel doesn't support this combined
// proposal yet, which includes private field, so this transform is
// TECHNICALLY stage 2, but for all intents and purposes it's stage 3
//
// See https://github.com/babel/proposals/issues/12 for progress
require.resolve('@babel/plugin-proposal-class-properties'),
],
};

View file

@ -17,34 +17,37 @@
* under the License.
*/
module.exports = {
presets: [
[
require.resolve('babel-preset-env'),
{
targets: {
// only applies the necessary transformations based on the
// current node.js processes version. For example: running
// `nvm install 8 && node ./src/cli` will run kibana in node
// version 8 and babel will stop transpiling async/await
// because they are supported in the "current" version of node
node: 'current',
},
module.exports = () => {
return {
presets: [
[
require.resolve('@babel/preset-env'),
{
targets: {
// only applies the necessary transformations based on the
// current node.js processes version. For example: running
// `nvm install 8 && node ./src/cli` will run kibana in node
// version 8 and babel will stop transpiling async/await
// because they are supported in the "current" version of node
node: 'current',
},
// replaces `import "babel-polyfill"` with a list of require statements
// for just the polyfills that the target versions don't already supply
// on their own
useBuiltIns: true,
},
// replaces `import "@babel/polyfill"` with a list of require statements
// for just the polyfills that the target versions don't already supply
// on their own
useBuiltIns: 'entry',
modules: 'cjs'
},
],
require('./common_preset'),
],
require('./common_preset'),
],
plugins: [
[
require.resolve('babel-plugin-transform-define'),
{
'global.__BUILT_WITH_BABEL__': 'true'
}
plugins: [
[
require.resolve('babel-plugin-transform-define'),
{
'global.__BUILT_WITH_BABEL__': 'true'
}
]
]
]
};
};

View file

@ -1,50 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
module.exports = () => ({
presets: [
[
require.resolve('@babel/preset-env'),
{
targets: {
// only applies the necessary transformations based on the
// current node.js processes version. For example: running
// `nvm install 8 && node ./src/cli` will run kibana in node
// version 8 and babel will stop transpiling async/await
// because they are supported in the "current" version of node
node: 'current',
},
// replaces `import "babel-polyfill"` with a list of require statements
// for just the polyfills that the target versions don't already supply
// on their own
useBuiltIns: 'entry',
},
],
require('./common_preset_7'),
],
plugins: [
[
require.resolve('babel-plugin-transform-define'),
{
'global.__BUILT_WITH_BABEL__': 'true'
}
]
]
});

View file

@ -4,20 +4,12 @@
"version": "1.0.0",
"license": "Apache-2.0",
"dependencies": {
"@babel/core": "^7.3.4",
"@babel/plugin-proposal-async-generator-functions": "^7.2.0",
"@babel/plugin-proposal-class-properties": "^7.3.4",
"@babel/plugin-proposal-object-rest-spread": "^7.3.4",
"@babel/preset-react":"^7.0.0",
"@babel/preset-env": "^7.3.4",
"@babel/preset-react": "^7.0.0",
"@babel/preset-typescript": "^7.3.3",
"babel-plugin-add-module-exports": "^0.2.1",
"babel-plugin-transform-async-generator-functions": "^6.24.1",
"babel-plugin-transform-class-properties": "^6.24.1",
"babel-plugin-add-module-exports": "^1.0.0",
"babel-plugin-transform-define": "^1.3.1",
"babel-plugin-transform-object-rest-spread": "^6.26.0",
"babel-preset-env": "^1.7.0",
"babel-preset-react": "^6.24.1",
"babel7-plugin-add-module-exports": "npm:babel-plugin-add-module-exports@^1.0.0"
"babel-plugin-typescript-strip-namespaces": "^1.1.1"
}
}

View file

@ -17,21 +17,24 @@
* under the License.
*/
module.exports = {
presets: [
[
require.resolve('babel-preset-env'),
{
targets: {
browsers: [
'last 2 versions',
'> 5%',
'Safari 7', // for PhantomJS support: https://github.com/elastic/kibana/issues/27136
],
module.exports = () => {
return {
presets: [
[
require.resolve('@babel/preset-env'),
{
targets: {
browsers: [
'last 2 versions',
'> 5%',
'Safari 7', // for PhantomJS support: https://github.com/elastic/kibana/issues/27136
],
},
useBuiltIns: 'entry',
modules: 'cjs'
},
useBuiltIns: true,
},
],
require('./common_preset'),
]
],
require('./common_preset'),
]
};
};

View file

@ -19,9 +19,9 @@
"tslib": "^1.9.3"
},
"devDependencies": {
"@babel/cli": "^7.2.3",
"@kbn/babel-preset": "1.0.0",
"@kbn/expect": "1.0.0",
"babel-cli": "^6.26.0",
"chance": "1.0.6"
}
}

View file

@ -144,7 +144,9 @@
"scripted": false,
"searchable": true,
"aggregatable": true,
"readFromDocValues": true
"readFromDocValues": true,
"parent": "machine.os",
"subType": "multi"
},
{
"name": "geo.src",

View file

@ -6,6 +6,7 @@
"private": true,
"dependencies": {
"@kbn/dev-utils": "1.0.0",
"abort-controller": "^2.0.3",
"chalk": "^2.4.1",
"dedent": "^0.7.0",
"del": "^3.0.0",

View file

@ -0,0 +1,307 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
const fetch = require('node-fetch');
const AbortController = require('abort-controller');
const fs = require('fs');
const { promisify } = require('util');
const { pipeline, Transform } = require('stream');
const mkdirp = require('mkdirp');
const chalk = require('chalk');
const { createHash } = require('crypto');
const path = require('path');
// promisified stream.pipeline so downloads can be awaited
const asyncPipeline = promisify(pipeline);
const V1_VERSIONS_API = 'https://artifacts-api.elastic.co/v1/versions';
const { cache } = require('./utils');
const { createCliError } = require('./errors');
// snapshot build id to request from the artifact API; `latest` is used when
// a specific build is not pinned via the TEST_ES_SNAPSHOT_VERSION env var
const TEST_ES_SNAPSHOT_VERSION = process.env.TEST_ES_SNAPSHOT_VERSION
  ? process.env.TEST_ES_SNAPSHOT_VERSION
  : 'latest';
/**
 * Determine the hash algorithm implied by a checksum URL's file extension.
 * Only sha512 checksums are recognized.
 * @param {string} checksumUrl
 * @returns {'sha512'}
 * @throws {Error} when the extension is not a recognized checksum type
 */
function getChecksumType(checksumUrl) {
  const isSha512 = checksumUrl.endsWith('.sha512');
  if (!isSha512) {
    throw new Error(`unable to determine checksum type: ${checksumUrl}`);
  }
  return 'sha512';
}
/**
 * Map an artifact filename/key to the matching Node `process.platform` value.
 * Returns `undefined` when no platform marker is present in the key.
 * @param {string} key artifact filename from the artifacts API
 * @returns {('linux'|'win32'|'darwin'|undefined)}
 */
function getPlatform(key) {
  const markers = [
    ['-linux-', 'linux'],
    ['-windows-', 'win32'],
    ['-darwin-', 'darwin'],
  ];

  for (const [marker, platform] of markers) {
    if (key.includes(marker)) {
      return platform;
    }
  }
}
/**
 * Render response headers as an indented `key: value` listing, one pair per
 * line, each line preceded by a newline so the result can be appended
 * directly to an error message.
 * @param {Headers|Map} headers anything exposing `.entries()`
 * @param {string} [indent] prefix inserted before every `key: value` pair
 * @returns {string}
 */
function headersToString(headers, indent = '') {
  let rendered = '';
  for (const [key, value] of headers.entries()) {
    rendered += `\n${indent}${key}: ${value}`;
  }
  return rendered;
}
/**
 * A single downloadable Elasticsearch snapshot artifact (tar/zip) as described
 * by the artifacts API. Knows how to pick the right artifact for the current
 * platform/license and download it with etag caching + checksum verification.
 */
exports.Artifact = class Artifact {
  /**
   * Fetch an Artifact from the Artifact API for a license level and version
   * @param {('oss'|'basic'|'trial')} license
   * @param {string} version
   * @param {ToolingLog} log
   */
  static async get(license, version, log) {
    const urlVersion = `${encodeURIComponent(version)}-SNAPSHOT`;
    const urlBuild = encodeURIComponent(TEST_ES_SNAPSHOT_VERSION);
    const url = `${V1_VERSIONS_API}/${urlVersion}/builds/${urlBuild}/projects/elasticsearch`;

    log.info('downloading artifact info from %s', chalk.bold(url));

    const abc = new AbortController();
    const resp = await fetch(url, { signal: abc.signal });
    const json = await resp.text();

    if (resp.status === 404) {
      // abort so the connection is released even though we never read the body stream
      abc.abort();
      throw createCliError(
        `Snapshots for ${version}/${TEST_ES_SNAPSHOT_VERSION} are not available`
      );
    }

    if (!resp.ok) {
      abc.abort();
      throw new Error(`Unable to read artifact info from ${url}: ${resp.statusText}\n ${json}`);
    }

    // parse the api response into an array of Artifact objects
    const {
      project: { packages: artifactInfoMap },
    } = JSON.parse(json);
    const filenames = Object.keys(artifactInfoMap);

    // presence of any `-no-jdk-` filename means this build publishes both
    // bundled-JDK and no-JDK variants
    const hasNoJdkVersions = filenames.some(filename => filename.includes('-no-jdk-'));

    const artifactSpecs = filenames.map(filename => ({
      filename,
      url: artifactInfoMap[filename].url,
      checksumUrl: artifactInfoMap[filename].sha_url,
      checksumType: getChecksumType(artifactInfoMap[filename].sha_url),
      type: artifactInfoMap[filename].type,
      isOss: filename.includes('-oss-'),
      platform: getPlatform(filename),
      jdkRequired: hasNoJdkVersions ? filename.includes('-no-jdk-') : true,
    }));

    // pick the artifact we are going to use for this license/version combo
    const reqOss = license === 'oss';
    // older builds publish platform-independent artifacts only, in which case
    // no spec has a platform and we must not filter by process.platform
    const reqPlatform = artifactSpecs.some(a => a.platform !== undefined)
      ? process.platform
      : undefined;
    const reqJdkRequired = hasNoJdkVersions ? false : true;
    const reqType = process.platform === 'win32' ? 'zip' : 'tar';

    const artifactSpec = artifactSpecs.find(
      spec =>
        spec.isOss === reqOss &&
        spec.type === reqType &&
        spec.platform === reqPlatform &&
        spec.jdkRequired === reqJdkRequired
    );

    if (!artifactSpec) {
      throw new Error(
        `Unable to determine artifact for license [${license}] and version [${version}]\n` +
          ` options: ${filenames.join(',')}`
      );
    }

    return new Artifact(artifactSpec, log);
  }

  constructor(spec, log) {
    // spec fields: filename, url, checksumUrl, checksumType, type, isOss,
    // platform, jdkRequired (see Artifact.get above)
    this._spec = spec;
    this._log = log;
  }

  // URL of the artifact archive itself
  getUrl() {
    return this._spec.url;
  }

  // URL of the published checksum file for the archive
  getChecksumUrl() {
    return this._spec.checksumUrl;
  }

  // hash algorithm name usable with crypto.createHash()
  getChecksumType() {
    return this._spec.checksumType;
  }

  getFilename() {
    return this._spec.filename;
  }

  /**
   * Download the artifact to disk, skips the download if the cache is
   * up-to-date, verifies checksum when downloaded
   * @param {string} dest
   * @return {Promise<void>}
   */
  async download(dest) {
    const cacheMeta = cache.readMeta(dest);
    const tmpPath = `${dest}.tmp`;

    const artifactResp = await this._download(tmpPath, cacheMeta.etag, cacheMeta.ts);
    if (artifactResp.cached) {
      return;
    }

    await this._verifyChecksum(artifactResp);

    // cache the etag for future downloads
    cache.writeMeta(dest, { etag: artifactResp.etag });

    // rename temp download to the destination location
    fs.renameSync(tmpPath, dest);
  }

  /**
   * Fetch the artifact with an etag
   * @param {string} tmpPath
   * @param {string} etag
   * @param {string} ts
   * @return {{ cached: true }|{ checksum: string, etag: string, first500Bytes: Buffer }}
   */
  async _download(tmpPath, etag, ts) {
    const url = this.getUrl();

    if (etag) {
      this._log.info('verifying cache of %s', chalk.bold(url));
    } else {
      this._log.info('downloading artifact from %s', chalk.bold(url));
    }

    const abc = new AbortController();
    const resp = await fetch(url, {
      signal: abc.signal,
      headers: {
        'If-None-Match': etag,
      },
    });

    if (resp.status === 304) {
      // server says our cached copy is still valid; skip the download entirely
      this._log.info('etags match, reusing cache from %s', chalk.bold(ts));

      abc.abort();
      return {
        cached: true,
      };
    }

    if (!resp.ok) {
      abc.abort();
      throw new Error(
        `Unable to download elasticsearch snapshot: ${resp.statusText}${headersToString(
          resp.headers,
          ' '
        )}`
      );
    }

    if (etag) {
      this._log.info('cache invalid, redownloading');
    }

    // stream the body to disk while hashing it; the first 500 bytes are
    // retained so checksum-mismatch errors can include a sample of the payload
    const hash = createHash(this.getChecksumType());
    let first500Bytes = Buffer.alloc(0);
    let contentLength = 0;

    mkdirp.sync(path.dirname(tmpPath));
    await asyncPipeline(
      resp.body,
      new Transform({
        transform(chunk, encoding, cb) {
          contentLength += Buffer.byteLength(chunk);

          if (first500Bytes.length < 500) {
            first500Bytes = Buffer.concat(
              [first500Bytes, chunk],
              first500Bytes.length + chunk.length
            ).slice(0, 500);
          }

          hash.update(chunk, encoding);
          cb(null, chunk);
        },
      }),
      fs.createWriteStream(tmpPath)
    );

    return {
      checksum: hash.digest('hex'),
      etag: resp.headers.get('etag'),
      contentLength,
      first500Bytes,
      headers: resp.headers,
    };
  }

  /**
   * Verify the checksum of the downloaded artifact with the checksum at checksumUrl
   * @param {{ checksum: string, contentLength: number, first500Bytes: Buffer }} artifactResp
   * @return {Promise<void>}
   */
  async _verifyChecksum(artifactResp) {
    this._log.info('downloading artifact checksum from %s', chalk.bold(this.getChecksumUrl()));

    const abc = new AbortController();
    const resp = await fetch(this.getChecksumUrl(), {
      signal: abc.signal,
    });

    if (!resp.ok) {
      abc.abort();
      throw new Error(
        `Unable to download elasticsearch checksum: ${resp.statusText}${headersToString(
          resp.headers,
          ' '
        )}`
      );
    }

    // in format of stdout from `shasum` cmd, which is `<checksum> <filename>`
    const [expectedChecksum] = (await resp.text()).split(' ');
    if (artifactResp.checksum !== expectedChecksum) {
      const len = Buffer.byteLength(artifactResp.first500Bytes);
      const lenString = `${len} / ${artifactResp.contentLength}`;

      throw createCliError(
        `artifact downloaded from ${this.getUrl()} does not match expected checksum\n` +
          ` expected: ${expectedChecksum}\n` +
          ` received: ${artifactResp.checksum}\n` +
          ` headers: ${headersToString(artifactResp.headers, ' ')}\n` +
          ` content[${lenString} base64]: ${artifactResp.first500Bytes.toString('base64')}`
      );
    }

    this._log.info('checksum verified');
  }
};

View file

@ -17,15 +17,12 @@
* under the License.
*/
const fetch = require('node-fetch');
const fs = require('fs');
const os = require('os');
const mkdirp = require('mkdirp');
const chalk = require('chalk');
const path = require('path');
const { BASE_PATH } = require('../paths');
const { installArchive } = require('./archive');
const { log: defaultLog, cache } = require('../utils');
const { log: defaultLog } = require('../utils');
const { Artifact } = require('../artifact');
/**
* Download an ES snapshot
@ -44,15 +41,13 @@ exports.downloadSnapshot = async function installSnapshot({
installPath = path.resolve(basePath, version),
log = defaultLog,
}) {
const fileName = getFilename(license, version);
const url = getUrl(fileName);
const dest = path.resolve(basePath, 'cache', fileName);
log.info('version: %s', chalk.bold(version));
log.info('install path: %s', chalk.bold(installPath));
log.info('license: %s', chalk.bold(license));
await downloadFile(url, dest, log);
const artifact = await Artifact.get(license, version, log);
const dest = path.resolve(basePath, 'cache', artifact.getFilename());
await artifact.download(dest);
return {
downloadPath: dest,
@ -94,83 +89,3 @@ exports.installSnapshot = async function installSnapshot({
log,
});
};
/**
* Downloads to tmp and moves once complete
*
* @param {String} url
* @param {String} dest
* @param {ToolingLog} log
* @returns {Promise}
*/
function downloadFile(url, dest, log) {
const downloadPath = `${dest}.tmp`;
const cacheMeta = cache.readMeta(dest);
mkdirp.sync(path.dirname(dest));
log.info('downloading from %s', chalk.bold(url));
return fetch(url, { headers: { 'If-None-Match': cacheMeta.etag } }).then(
res =>
new Promise((resolve, reject) => {
if (res.status === 304) {
log.info('etags match, using cache from %s', chalk.bold(cacheMeta.ts));
return resolve();
}
if (!res.ok) {
return reject(new Error(`Unable to download elasticsearch snapshot: ${res.statusText}`));
}
const stream = fs.createWriteStream(downloadPath);
res.body
.pipe(stream)
.on('error', error => {
reject(error);
})
.on('finish', () => {
if (res.ok) {
const etag = res.headers.get('etag');
cache.writeMeta(dest, { etag });
fs.renameSync(downloadPath, dest);
resolve();
} else {
reject(new Error(res.statusText));
}
});
})
);
}
function getFilename(license, version) {
const platform = os.platform();
let suffix = null;
switch (platform) {
case 'darwin':
suffix = 'darwin-x86_64.tar.gz';
break;
case 'linux':
suffix = 'linux-x86_64.tar.gz';
break;
case 'win32':
suffix = 'windows-x86_64.zip';
break;
default:
throw new Error(`Unsupported platform ${platform}`);
}
const basename = `elasticsearch${license === 'oss' ? '-oss-' : '-'}${version}`;
return `${basename}-SNAPSHOT-${suffix}`;
}
function getUrl(fileName) {
if (process.env.TEST_ES_SNAPSHOT_VERSION) {
return `https://snapshots.elastic.co/${
process.env.TEST_ES_SNAPSHOT_VERSION
}/downloads/elasticsearch/${fileName}`;
} else {
return `https://snapshots.elastic.co/downloads/elasticsearch/${fileName}`;
}
}

View file

@ -21,7 +21,7 @@ module.exports = {
rules: {
'require-license-header': require('./rules/require_license_header'),
'disallow-license-headers': require('./rules/disallow_license_headers'),
'no-default-export': require('./rules/no_default_export'),
'no-restricted-paths': require('./rules/no_restricted_paths'),
module_migration: require('./rules/module_migration'),
},
};

View file

@ -4,10 +4,12 @@
"private": true,
"license": "Apache-2.0",
"peerDependencies": {
"eslint": "^5.6.0",
"babel-eslint": "^9.0.0"
"eslint": "^5.14.1",
"babel-eslint": "^10.0.1"
},
"dependencies": {
"dedent": "^0.7.0"
"micromatch": "3.1.10",
"dedent": "^0.7.0",
"eslint-module-utils": "^2.3.0"
}
}

View file

@ -0,0 +1 @@
/* eslint-disable */

View file

@ -0,0 +1 @@
/* eslint-disable */

View file

@ -0,0 +1 @@
/* eslint-disable */

View file

@ -0,0 +1,283 @@
/* eslint-disable-line @kbn/eslint/require-license-header */
/*
* This product uses import/no-restricted-paths which is available under a
* "MIT" license.
*
* The MIT License (MIT)
*
* Copyright (c) 2015-present, Ben Mosher
* https://github.com/benmosher/eslint-plugin-import
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
// Tests for the @kbn/eslint/no-restricted-paths rule. Fixture files live under
// ./files/no_restricted_paths/{client,server,other}; zone patterns are
// micromatch globs resolved relative to the `basePath` option.
const path = require('path');
const { RuleTester } = require('eslint');
const rule = require('../no_restricted_paths');

// use the same parser the repo lints with so `import` syntax parses
const ruleTester = new RuleTester({
  parser: 'babel-eslint',
  parserOptions: {
    sourceType: 'module',
    ecmaVersion: 2015,
  },
});

ruleTester.run('@kbn/eslint/no-restricted-paths', rule, {
  valid: [
    // importing from a folder not covered by any `from` pattern is allowed
    {
      code: 'import a from "../client/a.js"',
      filename: path.join(__dirname, './files/no_restricted_paths/server/b.js'),
      options: [
        {
          basePath: __dirname,
          zones: [
            {
              target: 'files/no_restricted_paths/server/**/*',
              from: 'files/no_restricted_paths/other/**/*',
            },
          ],
        },
      ],
    },
    {
      code: 'const a = require("../client/a.js")',
      filename: path.join(__dirname, './files/no_restricted_paths/server/b.js'),
      options: [
        {
          basePath: __dirname,
          zones: [
            {
              target: 'files/no_restricted_paths/server/**/*',
              from: 'files/no_restricted_paths/other/**/*',
            },
          ],
        },
      ],
    },
    {
      code: 'import b from "../server/b.js"',
      filename: path.join(__dirname, './files/no_restricted_paths/client/a.js'),
      options: [
        {
          basePath: __dirname,
          zones: [
            {
              target: '**/no_restricted_paths/client/**/*',
              from: '**/no_restricted_paths/other/**/*',
            },
          ],
        },
      ],
    },
    // irrelevant function calls
    {
      code: 'notrequire("../server/b.js")',
      options: [
        {
          basePath: __dirname,
        },
      ],
    },
    {
      code: 'notrequire("../server/b.js")',
      filename: path.join(__dirname, './files/no_restricted_paths/client/a.js'),
      options: [
        {
          basePath: __dirname,
          zones: [
            {
              target: 'files/no_restricted_paths/client/**/*',
              from: 'files/no_restricted_paths/server/**/*',
            },
          ],
        },
      ],
    },
    // no config
    {
      code: 'require("../server/b.js")',
      options: [
        {
          basePath: __dirname,
        },
      ],
    },
    {
      code: 'import b from "../server/b.js"',
      options: [
        {
          basePath: __dirname,
        },
      ],
    },
    // builtin (ignore)
    {
      code: 'require("os")',
      options: [
        {
          basePath: __dirname,
        },
      ],
    },
    // allowSameFolder permits imports that stay inside one matched folder tree
    {
      code: 'const d = require("./deep/d.js")',
      filename: path.join(__dirname, './files/no_restricted_paths/server/b.js'),
      options: [
        {
          basePath: __dirname,
          zones: [
            {
              allowSameFolder: true,
              target: 'files/no_restricted_paths/**/*',
              from: 'files/no_restricted_paths/**/*',
            },
          ],
        },
      ],
    },
  ],

  invalid: [
    // client code importing from the server zone is reported
    {
      code: 'import b from "../server/b.js"',
      filename: path.join(__dirname, './files/no_restricted_paths/client/a.js'),
      options: [
        {
          basePath: __dirname,
          zones: [
            {
              target: 'files/no_restricted_paths/client/**/*',
              from: 'files/no_restricted_paths/server/**/*',
            },
          ],
        },
      ],
      errors: [
        {
          message: 'Unexpected path "../server/b.js" imported in restricted zone.',
          line: 1,
          column: 15,
        },
      ],
    },
    // multiple zones produce one error per offending import
    {
      code: 'import a from "../client/a"\nimport c from "./c"',
      filename: path.join(__dirname, './files/no_restricted_paths/server/b.js'),
      options: [
        {
          basePath: __dirname,
          zones: [
            {
              target: 'files/no_restricted_paths/server/**/*',
              from: 'files/no_restricted_paths/client/**/*',
            },
            {
              target: 'files/no_restricted_paths/server/**/*',
              from: 'files/no_restricted_paths/server/c.js',
            },
          ],
        },
      ],
      errors: [
        {
          message: 'Unexpected path "../client/a" imported in restricted zone.',
          line: 1,
          column: 15,
        },
        {
          message: 'Unexpected path "./c" imported in restricted zone.',
          line: 2,
          column: 15,
        },
      ],
    },
    {
      code: 'const b = require("../server/b.js")',
      filename: path.join(__dirname, './files/no_restricted_paths/client/a.js'),
      options: [
        {
          basePath: __dirname,
          zones: [
            {
              target: '**/no_restricted_paths/client/**/*',
              from: '**/no_restricted_paths/server/**/*',
            },
          ],
        },
      ],
      errors: [
        {
          message: 'Unexpected path "../server/b.js" imported in restricted zone.',
          line: 1,
          column: 19,
        },
      ],
    },
    // zone patterns are relative to basePath, so a deeper basePath shortens them
    {
      code: 'const b = require("../server/b.js")',
      filename: path.join(__dirname, './files/no_restricted_paths/client/a.js'),
      options: [
        {
          basePath: path.join(__dirname, 'files', 'no_restricted_paths'),
          zones: [
            {
              target: 'client/**/*',
              from: 'server/**/*',
            },
          ],
        },
      ],
      errors: [
        {
          message: 'Unexpected path "../server/b.js" imported in restricted zone.',
          line: 1,
          column: 19,
        },
      ],
    },
    // without allowSameFolder even same-tree imports are reported
    {
      code: 'const d = require("./deep/d.js")',
      filename: path.join(__dirname, './files/no_restricted_paths/server/b.js'),
      options: [
        {
          basePath: __dirname,
          zones: [
            {
              target: 'files/no_restricted_paths/**/*',
              from: 'files/no_restricted_paths/**/*',
            },
          ],
        },
      ],
      errors: [
        {
          message: 'Unexpected path "./deep/d.js" imported in restricted zone.',
          line: 1,
          column: 19,
        },
      ],
    },
  ],
});

View file

@ -0,0 +1,135 @@
/* eslint-disable-line @kbn/eslint/require-license-header */
/*
* This product uses import/no-restricted-paths which is available under a
* "MIT" license.
*
* The MIT License (MIT)
*
* Copyright (c) 2015-present, Ben Mosher
* https://github.com/benmosher/eslint-plugin-import
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
const path = require('path');
const resolve = require('eslint-module-utils/resolve').default;
const mm = require('micromatch');
/**
 * Check whether an AST node is a `require('<literal>')` call with exactly one
 * static string argument — the only form whose target can be resolved
 * without executing code.
 * @param {object} node CallExpression candidate
 * @returns {boolean} truthy when the node is a static require call
 */
function isStaticRequire(node) {
  if (!node || !node.callee) {
    return false;
  }

  const isRequireIdentifier =
    node.callee.type === 'Identifier' && node.callee.name === 'require';
  if (!isRequireIdentifier || node.arguments.length !== 1) {
    return false;
  }

  const [arg] = node.arguments;
  return arg.type === 'Literal' && typeof arg.value === 'string';
}
/**
 * Strip trailing path segments from `src` until it no longer matches
 * `pattern`, yielding the topmost folder prefix outside the matched zone.
 * @param {string} src path relative to the rule's basePath
 * @param {string|string[]} pattern micromatch pattern(s)
 * @returns {string} the non-matching prefix of `src`
 */
function traverseToTopFolder(src, pattern) {
  let current = src;
  while (mm([current], pattern).length > 0) {
    current = current.slice(0, current.lastIndexOf(path.sep));
  }
  return current;
}
/**
 * True when the source file and the imported file live under the same
 * top-level folder of the zone matched by `pattern` (used to implement the
 * `allowSameFolder` zone option).
 * @param {string} src importing file, relative to basePath
 * @param {string} imported imported file, relative to basePath
 * @param {string|string[]} pattern micromatch pattern(s) for the zone
 * @returns {boolean}
 */
function isSameFolderOrDescendent(src, imported, pattern) {
  return traverseToTopFolder(src, pattern) === traverseToTopFolder(imported, pattern);
}
/**
 * ESLint rule: disallow imports between configured "zones" of the repository.
 * Options: { basePath: <absolute path>, zones: [{ target, from, allowSameFolder }] }
 * where `target` matches the importing file and `from` matches the imported
 * file — both micromatch patterns relative to `basePath`.
 */
module.exports = {
  meta: {
    schema: [
      {
        type: 'object',
        properties: {
          zones: {
            type: 'array',
            minItems: 1,
            items: {
              type: 'object',
              properties: {
                target: {
                  anyOf: [{ type: 'string' }, { type: 'array', items: { type: 'string' } }],
                },
                from: {
                  anyOf: [{ type: 'string' }, { type: 'array', items: { type: 'string' } }],
                },
                allowSameFolder: { type: 'boolean' },
              },
              additionalProperties: false,
            },
          },
          basePath: { type: 'string' },
        },
        additionalProperties: false,
      },
    ],
  },

  create(context) {
    const options = context.options[0] || {};
    const zones = options.zones || [];

    const basePath = options.basePath;
    // zone patterns are resolved against basePath, so it must be absolute
    if (!basePath || !path.isAbsolute(basePath)) {
      throw new Error('basePath option must be specified and must be absolute');
    }

    // report when the current file matches a zone's `target` and the resolved
    // import matches that zone's `from` (unless allowSameFolder exempts it)
    function checkForRestrictedImportPath(importPath, node) {
      const absoluteImportPath = resolve(importPath, context);
      // unresolvable paths (e.g. node builtins) are ignored
      if (!absoluteImportPath) return;

      const currentFilename = context.getFilename();
      for (const { target, from, allowSameFolder } of zones) {
        const srcFilePath = resolve(currentFilename, context);

        const relativeSrcFile = path.relative(basePath, srcFilePath);
        const relativeImportFile = path.relative(basePath, absoluteImportPath);

        if (
          !mm([relativeSrcFile], target).length ||
          !mm([relativeImportFile], from).length ||
          (allowSameFolder && isSameFolderOrDescendent(relativeSrcFile, relativeImportFile, from))
        )
          continue;

        context.report({
          node,
          message: `Unexpected path "${importPath}" imported in restricted zone.`,
        });
      }
    }

    return {
      ImportDeclaration(node) {
        checkForRestrictedImportPath(node.source.value, node.source);
      },
      CallExpression(node) {
        if (isStaticRequire(node)) {
          const [firstArgument] = node.arguments;

          checkForRestrictedImportPath(firstArgument.value, firstArgument);
        }
      },
    };
  },
};

View file

@ -921,7 +921,7 @@ describe('I18n engine', () => {
await expect(i18n.load('some-url')).resolves.toBeUndefined();
expect(mockFetch).toHaveBeenCalledTimes(1);
expect(mockFetch).toHaveBeenCalledWith('some-url');
expect(mockFetch).toHaveBeenCalledWith('some-url', { credentials: 'same-origin' });
expect(i18n.getTranslation()).toEqual(translations);
});

View file

@ -240,7 +240,11 @@ export function init(newTranslation?: Translation) {
* @param translationsUrl URL pointing to the JSON bundle with translations.
*/
export async function load(translationsUrl: string) {
const response = await fetch(translationsUrl);
// Once this package is integrated into core Kibana we should switch to an abstraction
// around `fetch` provided by the platform, e.g. `kfetch`.
const response = await fetch(translationsUrl, {
credentials: 'same-origin',
});
if (response.status >= 300) {
throw new Error(`Translations request failed with status code: ${response.status}`);

View file

@ -1,20 +1,20 @@
{
"extends": "../../tsconfig.json",
"include": [
"src/**/*.ts",
"src/**/*.tsx",
"types/intl_format_cache.d.ts",
"types/intl_relativeformat.d.ts"
],
"exclude": [
"target"
],
"compilerOptions": {
"declaration": true,
"declarationDir": "./target/types",
"types": [
"jest",
"node"
]
}
}
{
"extends": "../../tsconfig.json",
"include": [
"src/**/*.ts",
"src/**/*.tsx",
"types/intl_format_cache.d.ts",
"types/intl_relativeformat.d.ts"
],
"exclude": [
"target"
],
"compilerOptions": {
"declaration": true,
"declarationDir": "./target/types",
"types": [
"jest",
"node"
]
}
}

View file

@ -1,8 +1,7 @@
{
"presets": ["@kbn/babel-preset/webpack_preset"],
"plugins": [
["babel-plugin-transform-runtime", {
"polyfill": false,
["@babel/plugin-transform-runtime", {
"regenerator": true
}]
]

View file

@ -9,19 +9,20 @@
"kbn:watch": "node scripts/build --dev --watch"
},
"dependencies": {
"@babel/runtime": "^7.3.4",
"@kbn/i18n": "1.0.0",
"lodash": "npm:@elastic/lodash@3.10.1-kibana1",
"lodash.clone": "^4.5.0",
"uuid": "3.0.1"
},
"devDependencies": {
"@babel/cli": "^7.2.3",
"@babel/core": "7.3.4",
"@babel/plugin-transform-runtime": "^7.3.4",
"@babel/polyfill": "7.2.5",
"@kbn/babel-preset": "1.0.0",
"@kbn/dev-utils": "1.0.0",
"babel-cli": "^6.26.0",
"babel-core": "6.26.3",
"babel-loader": "7.1.5",
"babel-plugin-transform-runtime": "^6.23.0",
"babel-polyfill": "6.20.0",
"babel-loader": "8.0.5",
"copy-webpack-plugin": "^4.6.0",
"css-loader": "1.0.0",
"del": "^3.0.0",

View file

@ -19,7 +19,7 @@
const { extname } = require('path');
const { transform } = require('babel-core');
const { transform } = require('@babel/core');
exports.createServerCodeTransformer = (sourceMaps) => {
return (content, path) => {

View file

@ -28,15 +28,14 @@ describe('js support', () => {
it('transpiles js file', () => {
const transformer = createServerCodeTransformer();
expect(transformer(JS_FIXTURE, JS_FIXTURE_PATH)).toMatchInlineSnapshot(`
"'use strict';
"\\"use strict\\";
var _util = require('util');
var _util2 = _interopRequireDefault(_util);
var _util = _interopRequireDefault(require(\\"util\\"));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
console.log(_util2.default.format('hello world')); /* eslint-disable */"
/* eslint-disable */
console.log(_util.default.format('hello world'));"
`);
});

View file

@ -1,3 +1,8 @@
<% if (generateScss) { -%>
import { resolve } from 'path';
import { existsSync } from 'fs';
<% } -%>
<% if (generateApi) { -%>
import exampleRoute from './server/routes/example';
@ -20,7 +25,7 @@ export default function (kibana) {
],
<%_ } -%>
<%_ if (generateScss) { -%>
styleSheetPaths: require('path').resolve(__dirname, 'public/app.scss'),
styleSheetPaths: [resolve(__dirname, 'public/app.scss'), resolve(__dirname, 'public/app.css')].find(p => existsSync(p)),
<%_ } -%>
},

View file

@ -8,9 +8,9 @@
"templateVersion": "<%= templateVersion %>"
},
"scripts": {
"preinstall": "node ../../kibana/preinstall_check",
"kbn": "node ../../kibana/scripts/kbn",
"es": "node ../../kibana/scripts/es",
"preinstall": "node ../../preinstall_check",
"kbn": "node ../../scripts/kbn",
"es": "node ../../scripts/es",
"lint": "eslint .",
"start": "plugin-helpers start",
"test:server": "plugin-helpers test:server",
@ -19,23 +19,23 @@
},
<%_ if (generateTranslations) { _%>
"dependencies": {
"@kbn/i18n": "link:../../kibana/packages/kbn-i18n"
"@kbn/i18n": "link:../../packages/kbn-i18n"
},
<%_ } _%>
"devDependencies": {
"@elastic/eslint-config-kibana": "link:../../kibana/packages/eslint-config-kibana",
"@elastic/eslint-import-resolver-kibana": "link:../../kibana/packages/kbn-eslint-import-resolver-kibana",
"@kbn/expect": "1.0.0",
"@kbn/plugin-helpers": "link:../../kibana/packages/kbn-plugin-helpers",
"babel-eslint": "^9.0.0",
"eslint": "^5.6.0",
"eslint-plugin-babel": "^5.2.0",
"eslint-plugin-import": "^2.14.0",
"eslint-plugin-jest": "^21.26.2",
"eslint-plugin-jsx-a11y": "^6.1.2",
"eslint-plugin-mocha": "^5.2.0",
"@elastic/eslint-config-kibana": "link:../../packages/eslint-config-kibana",
"@elastic/eslint-import-resolver-kibana": "link:../../packages/kbn-eslint-import-resolver-kibana",
"@kbn/expect": "link:../../packages/kbn-expect",
"@kbn/plugin-helpers": "link:../../packages/kbn-plugin-helpers",
"babel-eslint": "^10.0.1",
"eslint": "^5.14.1",
"eslint-plugin-babel": "^5.3.0",
"eslint-plugin-import": "^2.16.0",
"eslint-plugin-jest": "^22.3.0",
"eslint-plugin-jsx-a11y": "^6.2.1",
"eslint-plugin-mocha": "^5.3.0",
"eslint-plugin-no-unsanitized": "^3.0.2",
"eslint-plugin-prefer-object-spread": "^1.2.1",
"eslint-plugin-react": "^7.11.1"
"eslint-plugin-react": "^7.12.4"
}
}

View file

@ -25,7 +25,7 @@ function babelRegister() {
const plugin = pluginConfig();
try {
// add support for moved babel-register source: https://github.com/elastic/kibana/pull/13973
// add support for moved @babel/register source: https://github.com/elastic/kibana/pull/13973
require(resolve(plugin.kibanaRoot, 'src/setup_node_env/babel_register')); // eslint-disable-line import/no-dynamic-require
} catch (error) {
if (error.code === 'MODULE_NOT_FOUND') {

View file

@ -1,10 +1,14 @@
{
"presets": [
"stage-3",
["env", {
"@babel/typescript",
["@babel/preset-env", {
"targets": {
"node": "current"
}
}]
],
"plugins": [
"@babel/proposal-class-properties",
"@babel/proposal-object-rest-spread"
]
}
}

File diff suppressed because it is too large Load diff

View file

@ -10,6 +10,11 @@
"prettier": "prettier --write './src/**/*.ts'"
},
"devDependencies": {
"@babel/core": "^7.3.4",
"@babel/plugin-proposal-class-properties": "^7.3.4",
"@babel/plugin-proposal-object-rest-spread": "^7.3.4",
"@babel/preset-env": "^7.3.4",
"@babel/preset-typescript": "^7.3.3",
"@types/cmd-shim": "^2.0.0",
"@types/cpy": "^5.1.0",
"@types/dedent": "^0.7.0",
@ -32,10 +37,7 @@
"@types/tempy": "^0.1.0",
"@types/wrap-ansi": "^2.0.14",
"@types/write-pkg": "^3.1.0",
"babel-core": "^6.26.3",
"babel-loader": "^7.1.5",
"babel-preset-env": "^1.7.0",
"babel-preset-stage-3": "^6.24.1",
"babel-loader": "^8.0.5",
"chalk": "^2.4.1",
"cmd-shim": "^2.0.2",
"cpy": "^7.0.1",
@ -60,7 +62,6 @@
"strip-ansi": "^4.0.0",
"strong-log-transformer": "^2.1.0",
"tempy": "^0.2.1",
"ts-loader": "^5.2.2",
"typescript": "^3.3.3333",
"unlazy-loader": "^0.1.3",
"webpack": "^4.23.1",

View file

@ -4,10 +4,11 @@
"private": true,
"main": "./target/index.js",
"devDependencies": {
"babel-cli": "^6.26.0",
"babel-preset-env": "^1.6.1"
"@babel/cli": "^7.2.3",
"@babel/core": "^7.3.4",
"@babel/preset-env": "^7.3.4"
},
"scripts": {
"build": "babel --presets env --out-dir target src"
"build": "babel --presets=@babel/preset-env --out-dir target src"
}
}

View file

@ -7,11 +7,12 @@
"@elastic/bar": "link:../bar"
},
"devDependencies": {
"babel-cli": "^6.26.0",
"babel-preset-env": "^1.6.1",
"@babel/core": "^7.3.4",
"@babel/cli": "^7.2.3",
"@babel/preset-env": "^7.3.4",
"moment": "2.20.1"
},
"scripts": {
"build": "babel --presets env --out-dir target src"
"build": "babel --presets=@babel/preset-env --out-dir target src"
}
}

View file

@ -20,14 +20,15 @@ Array [
exports[`kbn-pm production builds and copies projects for production: packages/bar/package.json 1`] = `
Object {
"devDependencies": Object {
"babel-cli": "^6.26.0",
"babel-preset-env": "^1.6.1",
"@babel/cli": "^7.2.3",
"@babel/core": "^7.3.4",
"@babel/preset-env": "^7.3.4",
},
"main": "./target/index.js",
"name": "@elastic/bar",
"private": true,
"scripts": Object {
"build": "babel --presets env --out-dir target src",
"build": "babel --presets=@babel/preset-env --out-dir target src",
},
"version": "1.0.0",
}
@ -47,15 +48,16 @@ Object {
"@elastic/bar": "link:../bar",
},
"devDependencies": Object {
"babel-cli": "^6.26.0",
"babel-preset-env": "^1.6.1",
"@babel/cli": "^7.2.3",
"@babel/core": "^7.3.4",
"@babel/preset-env": "^7.3.4",
"moment": "2.20.1",
},
"main": "./target/index.js",
"name": "@elastic/foo",
"private": true,
"scripts": Object {
"build": "babel --presets env --out-dir target src",
"build": "babel --presets=@babel/preset-env --out-dir target src",
},
"version": "1.0.0",
}

View file

@ -4,7 +4,7 @@
"dist"
],
"include": [
"./src/**/*.ts"
"./src/**/*.ts",
],
"compilerOptions": {
"types": [

View file

@ -44,15 +44,6 @@ module.exports = {
{
loader: 'babel-loader',
},
{
loader: 'ts-loader',
options: {
compilerOptions: {
// enable esnext modules so webpack can do its thing better
module: 'esnext',
},
},
},
],
exclude: /node_modules/,
},

View file

@ -18,6 +18,6 @@
*/
module.exports = {
presets: ['@kbn/babel-preset/node_preset_7'],
presets: ['@kbn/babel-preset/node_preset'],
ignore: ['**/*.test.js'],
};

View file

@ -10,9 +10,9 @@
"kbn:watch": "yarn build --watch"
},
"devDependencies": {
"@babel/cli": "^7.2.3",
"@kbn/babel-preset": "1.0.0",
"@kbn/dev-utils": "1.0.0",
"@babel/cli": "^7.2.3"
"@kbn/dev-utils": "1.0.0"
},
"dependencies": {
"chalk": "^2.4.1",

View file

@ -56,10 +56,6 @@
* 4. Fix an IE bug which causes the last child to overflow the container.
* 5. Fixing this bug means we now need to align the children to the right.
*/
:focus:not([class^="eui"]) {
-webkit-animation: 350ms cubic-bezier(0.694, 0.0482, 0.335, 1) 1 normal forwards focusRingAnimate;
animation: 350ms cubic-bezier(0.694, 0.0482, 0.335, 1) 1 normal forwards focusRingAnimate; }
/**
* 1. Required for IE11.
*/

View file

@ -19,7 +19,7 @@
require('./main.scss');
import 'babel-polyfill';
import '@babel/polyfill';
import React from 'react';
import ReactDOM from 'react-dom';
import { Provider } from 'react-redux';

View file

@ -26,16 +26,16 @@
"uuid": "3.0.1"
},
"peerDependencies": {
"enzyme": "3.2.0",
"enzyme-adapter-react-16": "^1.1.1"
"enzyme": "^3.8.0",
"enzyme-adapter-react-16": "^1.9.1"
},
"devDependencies": {
"@babel/core": "^7.3.4",
"@babel/polyfill": "^7.2.5",
"@elastic/eui": "0.0.23",
"@kbn/babel-preset": "1.0.0",
"autoprefixer": "6.5.4",
"babel-core": "^6.26.3",
"babel-loader": "^7.1.5",
"babel-polyfill": "^6.26.0",
"babel-loader": "^8.0.5",
"brace": "0.10.0",
"chalk": "^2.4.1",
"chokidar": "1.6.0",
@ -43,7 +43,7 @@
"expose-loader": "^0.7.5",
"file-loader": "^2.0.0",
"grunt": "1.0.3",
"grunt-babel": "^7.0.0",
"grunt-babel": "^8.0.0",
"grunt-contrib-clean": "^1.1.0",
"grunt-contrib-copy": "^1.0.0",
"highlight.js": "9.0.0",

View file

@ -17,5 +17,5 @@
* under the License.
*/
const context = require.context('../filters', false, /[\/\\](?!\.|_)[^\/\\]+\.js/);
context.keys().forEach(key => context(key));
require('../src/setup_node_env');
require('../src/dev/sass/run_build_sass_cli');

View file

@ -17,7 +17,7 @@
* under the License.
*/
import { BehaviorSubject } from 'rxjs';
import { Brand, Breadcrumb, ChromeService, ChromeSetup } from './chrome_service';
import { ChromeBrand, ChromeBreadcrumb, ChromeService, ChromeSetup } from './chrome_service';
const createSetupContractMock = () => {
const setupContract: jest.Mocked<ChromeSetup> = {
@ -35,11 +35,11 @@ const createSetupContractMock = () => {
getHelpExtension$: jest.fn(),
setHelpExtension: jest.fn(),
};
setupContract.getBrand$.mockReturnValue(new BehaviorSubject({} as Brand));
setupContract.getBrand$.mockReturnValue(new BehaviorSubject({} as ChromeBrand));
setupContract.getIsVisible$.mockReturnValue(new BehaviorSubject(false));
setupContract.getIsCollapsed$.mockReturnValue(new BehaviorSubject(false));
setupContract.getApplicationClasses$.mockReturnValue(new BehaviorSubject(['class-name']));
setupContract.getBreadcrumbs$.mockReturnValue(new BehaviorSubject([{} as Breadcrumb]));
setupContract.getBreadcrumbs$.mockReturnValue(new BehaviorSubject([{} as ChromeBreadcrumb]));
setupContract.getHelpExtension$.mockReturnValue(new BehaviorSubject(undefined));
return setupContract;
};

View file

@ -32,18 +32,18 @@ function isEmbedParamInHash() {
return Boolean(query.embed);
}
export interface Brand {
export interface ChromeBrand {
logo?: string;
smallLogo?: string;
}
export interface Breadcrumb {
export interface ChromeBreadcrumb {
text: string;
href?: string;
'data-test-subj'?: string;
}
export type HelpExtension = (element: HTMLDivElement) => (() => void);
export type ChromeHelpExtension = (element: HTMLDivElement) => (() => void);
interface ConstructorParams {
browserSupportsCsp: boolean;
@ -65,12 +65,12 @@ export class ChromeService {
public setup({ injectedMetadata, notifications }: SetupDeps) {
const FORCE_HIDDEN = isEmbedParamInHash();
const brand$ = new Rx.BehaviorSubject<Brand>({});
const brand$ = new Rx.BehaviorSubject<ChromeBrand>({});
const isVisible$ = new Rx.BehaviorSubject(true);
const isCollapsed$ = new Rx.BehaviorSubject(!!localStorage.getItem(IS_COLLAPSED_KEY));
const applicationClasses$ = new Rx.BehaviorSubject<Set<string>>(new Set());
const helpExtension$ = new Rx.BehaviorSubject<HelpExtension | undefined>(undefined);
const breadcrumbs$ = new Rx.BehaviorSubject<Breadcrumb[]>([]);
const helpExtension$ = new Rx.BehaviorSubject<ChromeHelpExtension | undefined>(undefined);
const breadcrumbs$ = new Rx.BehaviorSubject<ChromeBreadcrumb[]>([]);
if (!this.browserSupportsCsp && injectedMetadata.getCspConfig().warnLegacyBrowsers) {
notifications.toasts.addWarning(
@ -95,7 +95,7 @@ export class ChromeService {
* })
*
*/
setBrand: (brand: Brand) => {
setBrand: (brand: ChromeBrand) => {
brand$.next(
Object.freeze({
logo: brand.logo,
@ -179,7 +179,7 @@ export class ChromeService {
/**
* Override the current set of breadcrumbs
*/
setBreadcrumbs: (newBreadcrumbs: Breadcrumb[]) => {
setBreadcrumbs: (newBreadcrumbs: ChromeBreadcrumb[]) => {
breadcrumbs$.next(newBreadcrumbs);
},
@ -191,7 +191,7 @@ export class ChromeService {
/**
* Override the current set of breadcrumbs
*/
setHelpExtension: (helpExtension?: HelpExtension) => {
setHelpExtension: (helpExtension?: ChromeHelpExtension) => {
helpExtension$.next(helpExtension);
},
};

View file

@ -17,4 +17,10 @@
* under the License.
*/
export { Breadcrumb, ChromeService, ChromeSetup, Brand, HelpExtension } from './chrome_service';
export {
ChromeBreadcrumb,
ChromeService,
ChromeSetup,
ChromeBrand,
ChromeHelpExtension,
} from './chrome_service';

View file

@ -0,0 +1,92 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { basePathServiceMock } from './base_path/base_path_service.mock';
import { chromeServiceMock } from './chrome/chrome_service.mock';
import { fatalErrorsServiceMock } from './fatal_errors/fatal_errors_service.mock';
import { httpServiceMock } from './http/http_service.mock';
import { i18nServiceMock } from './i18n/i18n_service.mock';
import { injectedMetadataServiceMock } from './injected_metadata/injected_metadata_service.mock';
import { legacyPlatformServiceMock } from './legacy/legacy_service.mock';
import { notificationServiceMock } from './notifications/notifications_service.mock';
import { uiSettingsServiceMock } from './ui_settings/ui_settings_service.mock';
export const MockLegacyPlatformService = legacyPlatformServiceMock.create();
export const LegacyPlatformServiceConstructor = jest
.fn()
.mockImplementation(() => MockLegacyPlatformService);
jest.doMock('./legacy', () => ({
LegacyPlatformService: LegacyPlatformServiceConstructor,
}));
export const MockInjectedMetadataService = injectedMetadataServiceMock.create();
export const InjectedMetadataServiceConstructor = jest
.fn()
.mockImplementation(() => MockInjectedMetadataService);
jest.doMock('./injected_metadata', () => ({
InjectedMetadataService: InjectedMetadataServiceConstructor,
}));
export const MockFatalErrorsService = fatalErrorsServiceMock.create();
export const FatalErrorsServiceConstructor = jest
.fn()
.mockImplementation(() => MockFatalErrorsService);
jest.doMock('./fatal_errors', () => ({
FatalErrorsService: FatalErrorsServiceConstructor,
}));
export const MockI18nService = i18nServiceMock.create();
export const I18nServiceConstructor = jest.fn().mockImplementation(() => MockI18nService);
jest.doMock('./i18n', () => ({
I18nService: I18nServiceConstructor,
}));
export const MockNotificationsService = notificationServiceMock.create();
export const NotificationServiceConstructor = jest
.fn()
.mockImplementation(() => MockNotificationsService);
jest.doMock('./notifications', () => ({
NotificationsService: NotificationServiceConstructor,
}));
export const MockHttpService = httpServiceMock.create();
export const HttpServiceConstructor = jest.fn().mockImplementation(() => MockHttpService);
jest.doMock('./http', () => ({
HttpService: HttpServiceConstructor,
}));
export const MockBasePathService = basePathServiceMock.create();
export const BasePathServiceConstructor = jest.fn().mockImplementation(() => MockBasePathService);
jest.doMock('./base_path', () => ({
BasePathService: BasePathServiceConstructor,
}));
export const MockUiSettingsService = uiSettingsServiceMock.create();
export const UiSettingsServiceConstructor = jest
.fn()
.mockImplementation(() => MockUiSettingsService);
jest.doMock('./ui_settings', () => ({
UiSettingsService: UiSettingsServiceConstructor,
}));
export const MockChromeService = chromeServiceMock.create();
export const ChromeServiceConstructor = jest.fn().mockImplementation(() => MockChromeService);
jest.doMock('./chrome', () => ({
ChromeService: ChromeServiceConstructor,
}));

View file

@ -17,73 +17,26 @@
* under the License.
*/
import { basePathServiceMock } from './base_path/base_path_service.mock';
import { chromeServiceMock } from './chrome/chrome_service.mock';
import { fatalErrorsServiceMock } from './fatal_errors/fatal_errors_service.mock';
import { httpServiceMock } from './http/http_service.mock';
import { i18nServiceMock } from './i18n/i18n_service.mock';
import { injectedMetadataServiceMock } from './injected_metadata/injected_metadata_service.mock';
import { legacyPlatformServiceMock } from './legacy/legacy_service.mock';
import { notificationServiceMock } from './notifications/notifications_service.mock';
import { uiSettingsServiceMock } from './ui_settings/ui_settings_service.mock';
const MockLegacyPlatformService = legacyPlatformServiceMock.create();
const LegacyPlatformServiceConstructor = jest
.fn()
.mockImplementation(() => MockLegacyPlatformService);
jest.mock('./legacy', () => ({
LegacyPlatformService: LegacyPlatformServiceConstructor,
}));
const MockInjectedMetadataService = injectedMetadataServiceMock.create();
const InjectedMetadataServiceConstructor = jest
.fn()
.mockImplementation(() => MockInjectedMetadataService);
jest.mock('./injected_metadata', () => ({
InjectedMetadataService: InjectedMetadataServiceConstructor,
}));
const MockFatalErrorsService = fatalErrorsServiceMock.create();
const FatalErrorsServiceConstructor = jest.fn().mockImplementation(() => MockFatalErrorsService);
jest.mock('./fatal_errors', () => ({
FatalErrorsService: FatalErrorsServiceConstructor,
}));
const MockI18nService = i18nServiceMock.create();
const I18nServiceConstructor = jest.fn().mockImplementation(() => MockI18nService);
jest.mock('./i18n', () => ({
I18nService: I18nServiceConstructor,
}));
const MockNotificationsService = notificationServiceMock.create();
const NotificationServiceConstructor = jest.fn().mockImplementation(() => MockNotificationsService);
jest.mock('./notifications', () => ({
NotificationsService: NotificationServiceConstructor,
}));
const MockHttpService = httpServiceMock.create();
const HttpServiceConstructor = jest.fn().mockImplementation(() => MockHttpService);
jest.mock('./http', () => ({
HttpService: HttpServiceConstructor,
}));
const MockBasePathService = basePathServiceMock.create();
const BasePathServiceConstructor = jest.fn().mockImplementation(() => MockBasePathService);
jest.mock('./base_path', () => ({
BasePathService: BasePathServiceConstructor,
}));
const MockUiSettingsService = uiSettingsServiceMock.create();
const UiSettingsServiceConstructor = jest.fn().mockImplementation(() => MockUiSettingsService);
jest.mock('./ui_settings', () => ({
UiSettingsService: UiSettingsServiceConstructor,
}));
const MockChromeService = chromeServiceMock.create();
const ChromeServiceConstructor = jest.fn().mockImplementation(() => MockChromeService);
jest.mock('./chrome', () => ({
ChromeService: ChromeServiceConstructor,
}));
import {
BasePathServiceConstructor,
ChromeServiceConstructor,
FatalErrorsServiceConstructor,
HttpServiceConstructor,
I18nServiceConstructor,
InjectedMetadataServiceConstructor,
LegacyPlatformServiceConstructor,
MockBasePathService,
MockChromeService,
MockFatalErrorsService,
MockHttpService,
MockI18nService,
MockInjectedMetadataService,
MockLegacyPlatformService,
MockNotificationsService,
MockUiSettingsService,
NotificationServiceConstructor,
UiSettingsServiceConstructor,
} from './core_system.test.mocks';
import { CoreSystem } from './core_system';
jest.spyOn(CoreSystem.prototype, 'stop');

View file

@ -53,8 +53,7 @@ exports[`rendering render matches snapshot 1`] = `
body={
<p>
<FormattedMessage
defaultMessage="Try refreshing the page. If that doesn't work, go back to the previous page or
clear your session data."
defaultMessage="Try refreshing the page. If that doesn't work, go back to the previous page or clear your session data."
id="core.fatalErrors.tryRefreshingPageDescription"
values={Object {}}
/>

View file

@ -17,5 +17,9 @@
* under the License.
*/
const context = require.context('../directives', false, /[\/\\](?!\.|_)[^\/\\]+\.js/);
context.keys().forEach(key => context(key));
export const mockRender = jest.fn();
jest.mock('react-dom', () => {
return {
render: mockRender,
};
});

View file

@ -24,12 +24,7 @@ expect.addSnapshotSerializer({
print: () => `Rx.Observable`,
});
const mockRender = jest.fn();
jest.mock('react-dom', () => {
return {
render: mockRender,
};
});
import { mockRender } from './fatal_errors_service.test.mocks';
import { FatalErrorsService } from './fatal_errors_service';

Some files were not shown because too many files have changed in this diff Show more