Migrate x-pack-kibana source to kibana

Jenkins CI 2018-04-20 19:13:41 +00:00 committed by spalger
parent e8ac7d8d32
commit bc5b51554d
3256 changed files with 277621 additions and 2324 deletions

View file

@ -1,3 +1,9 @@
node_modules
bower_components
/data
/build
/.es
/plugins
/optimize
/src/fixtures/vislib/mock_data
/src/ui/public/angular-bootstrap
@ -9,8 +15,6 @@
/src/core_plugins/console/public/tests/webpackShims
/src/ui/public/utils/decode_geo_hash.js
/src/core_plugins/timelion/public/webpackShims/jquery.flot.*
/tasks/vendor
/packages/*/node_modules
/packages/*/target
/packages/eslint-config-kibana
/packages/eslint-plugin-kibana-custom
@ -19,5 +23,8 @@
/packages/kbn-ui-framework/dist
/packages/kbn-ui-framework/doc_site/build
/packages/kbn-ui-framework/generator-kui/*/templates/
!/.eslintrc.js
/x-pack/coverage
/x-pack/build
/x-pack/plugins/**/__tests__/fixtures/**
**/*.js.snap
!/.eslintrc.js

View file

@ -1,3 +1,7 @@
const { resolve } = require('path');
const { readdirSync } = require('fs');
const dedent = require('dedent');
module.exports = {
extends: [
'@elastic/eslint-config-kibana',
@ -5,16 +9,13 @@ module.exports = {
],
settings: {
'import/resolver': {
'@kbn/eslint-import-resolver-kibana': {
rootPackageName: 'kibana',
kibanaPath: '.',
},
},
'import/resolver': 'eslint-import-resolver-node',
},
overrides: [
// Enable Prettier
/**
* Prettier
*/
{
files: [
'.eslintrc.js',
@ -23,36 +24,68 @@ module.exports = {
'packages/kbn-datemath/**/*.js',
'packages/kbn-plugin-generator/**/*',
'packages/kbn-eslint-import-resolver-kibana/**/*',
'x-pack/plugins/apm/**/*',
],
plugins: ['prettier'],
rules: Object.assign(
{ 'prettier/prettier': 'error' },
{
'prettier/prettier': ['error'],
},
require('eslint-config-prettier').rules,
require('eslint-config-prettier/react').rules
),
},
// files not transpiled by babel can't use things like object-spread
/**
* Allow default exports
*/
{
files: [
'.eslintrc.js',
'packages/kbn-plugin-helpers/**/*',
'packages/kbn-plugin-generator/**/*',
],
files: ['x-pack/test/functional/apps/**/*', 'x-pack/plugins/apm/**/*'],
rules: {
'prefer-object-spread/prefer-object-spread': 'off',
'kibana-custom/no-default-export': 'off',
'import/no-named-as-default': 'off',
},
},
// files that are not allowed to use devDependencies
/**
* Files that are allowed to import webpack-specific stuff
*/
{
files: ['packages/kbn-ui-framework/**/*'],
excludedFiles: [
'packages/kbn-ui-framework/**/*.test.js',
'packages/kbn-ui-framework/doc_site/**/*',
'packages/kbn-ui-framework/generator-kui/**/*',
'packages/kbn-ui-framework/Gruntfile.js',
files: [
'**/public/**',
'**/webpackShims/**',
'packages/kbn-ui-framework/doc_site/src/**',
'src/fixtures/**', // TODO: this directory needs to be more obviously "public" (or go away)
],
settings: {
// instructs import/no-extraneous-dependencies to treat modules
// in plugins/ or ui/ namespace as "core modules" so they don't
// trigger failures for not being listed in package.json
'import/core-modules': ['plugins', 'ui'],
'import/resolver': {
'@kbn/eslint-import-resolver-kibana': {
rootPackageName: 'kibana',
kibanaPath: '.',
pluginMap: readdirSync(resolve(__dirname, 'x-pack/plugins')).reduce(
(acc, name) => {
if (!name.startsWith('_')) {
acc[name] = `x-pack/plugins/${name}`;
}
return acc;
},
{}
),
},
},
},
},
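For illustration (an editor's sketch, with hypothetical folder names): the `pluginMap` reducer above maps each directory under `x-pack/plugins` to its path, skipping any directory whose name starts with an underscore:

```js
// Same reduction as above, run on a hypothetical directory listing
const pluginMap = ['apm', 'graph', '_scratch'].reduce((acc, name) => {
  if (!name.startsWith('_')) {
    acc[name] = `x-pack/plugins/${name}`;
  }
  return acc;
}, {});

console.log(pluginMap);
// => { apm: 'x-pack/plugins/apm', graph: 'x-pack/plugins/graph' }
```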
/**
* files that ARE NOT allowed to use devDependencies
*/
{
files: ['packages/kbn-ui-framework/**/*', 'x-pack/**/*'],
rules: {
'import/no-extraneous-dependencies': [
'error',
@ -64,13 +97,20 @@ module.exports = {
},
},
// files that are allowed to use devDependencies
/**
* files that ARE allowed to use devDependencies
*/
{
files: [
'packages/kbn-ui-framework/**/*.test.js',
'packages/kbn-ui-framework/doc_site/**/*',
'packages/kbn-ui-framework/generator-kui/**/*',
'packages/kbn-ui-framework/Gruntfile.js',
'x-pack/{dev-tools,gulp_helpers,scripts,test,build_chromium}/**/*',
'x-pack/**/{__tests__,__test__,__jest__,__fixtures__,__mocks__}/**/*',
'x-pack/**/*.test.js',
'x-pack/gulpfile.js',
'x-pack/plugins/apm/public/utils/testHelpers.js',
],
rules: {
'import/no-extraneous-dependencies': [
@ -82,5 +122,127 @@ module.exports = {
],
},
},
/**
* Files that are not transpiled with babel
*/
{
files: [
'.eslintrc.js',
'**/webpackShims/**/*',
'packages/kbn-plugin-generator/**/*',
'packages/kbn-plugin-helpers/**/*',
'packages/kbn-eslint-import-resolver-kibana/**/*',
'packages/kbn-eslint-plugin-license-header/**/*',
'x-pack/gulpfile.js',
'x-pack/dev-tools/mocha/setup_mocha.js',
'x-pack/scripts/*',
],
rules: {
'import/no-commonjs': 'off',
'prefer-object-spread/prefer-object-spread': 'off',
'no-restricted-syntax': [
'error',
'ImportDeclaration',
'ExportNamedDeclaration',
'ExportDefaultDeclaration',
'ExportAllDeclaration',
],
},
},
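In effect, that `no-restricted-syntax` list bans all ES module syntax in the non-transpiled files, which must stick to CommonJS (hence `import/no-commonjs: off`). A sketch of what passes and what fails under this override:

```js
// Allowed in the non-transpiled files listed above (CommonJS):
const { resolve } = require('path');
module.exports = { resolve };

// Disallowed (each construct matches a node type in the restricted list):
// import { resolve } from 'path';   // ImportDeclaration
// export const x = 1;               // ExportNamedDeclaration
// export default x;                 // ExportDefaultDeclaration
// export * from './other';          // ExportAllDeclaration
```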
/**
* X-Pack global overrides
*/
{
files: ['x-pack/**/*'],
rules: {
quotes: 'off',
},
},
/**
* Files that require the license headers
*/
{
files: ['x-pack/**/*.js'],
plugins: ['@kbn/eslint-plugin-license-header'],
rules: {
'@kbn/license-header/require-license-header': [
'error',
{
license: dedent`
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
`,
},
],
},
},
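Concretely, every `x-pack/**/*.js` file must now start with the exact header given in the config (a compliant file, sketched by the editor and presumably checked at the top of the file, would look like this):

```js
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

// ...rest of the x-pack source file
```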
/**
* APM overrides
*/
{
files: ['x-pack/plugins/apm/**/*'],
rules: {
'no-unused-vars': ['error', { ignoreRestSiblings: true }],
'no-console': ['warn', { allow: ['error'] }],
},
},
/**
* Graph overrides
*/
{
files: ['x-pack/plugins/graph/**/*'],
globals: {
angular: true,
$: true,
},
rules: {
'block-scoped-var': 'off',
camelcase: 'off',
eqeqeq: 'off',
'guard-for-in': 'off',
'new-cap': 'off',
'no-loop-func': 'off',
'no-redeclare': 'off',
'no-shadow': 'off',
'no-unused-vars': 'off',
'one-var': 'off',
},
},
/**
* ML overrides
*/
{
files: ['x-pack/plugins/ml/**/*'],
rules: {
quotes: 'error',
'no-shadow': 'error',
},
},
/**
* Monitoring overrides
*/
{
files: ['x-pack/plugins/monitoring/**/*'],
rules: {
'block-spacing': ['error', 'always'],
curly: ['error', 'all'],
'no-unused-vars': ['error', { args: 'all', argsIgnorePattern: '^_' }],
'no-else-return': 'error',
},
},
{
files: ['x-pack/plugins/monitoring/public/**/*'],
env: { browser: true },
},
],
};

.gitignore vendored (5 changes)
View file

@ -5,7 +5,8 @@
.DS_Store
.node_binaries
node_modules
!/tasks/lib/packages/__tests__/fixtures/fixture1/node_modules
!/src/dev/npm/__tests__/fixtures/fixture1/node_modules
!/src/dev/notice/__fixtures__/node_modules
trash
/optimize
target
@ -38,6 +39,6 @@ selenium
!/packages/kbn-ui-framework/doc_site/build/index.html
package-lock.json
.yo-rc.json
/.vscode
.vscode
npm-debug.log*
.tern-project

View file

@ -174,7 +174,7 @@ Start elasticsearch from a nightly snapshot.
yarn es snapshot
```
Additional options are available, pass `--help` for more information.
This will run Elasticsearch with a `trial` license, bootstrapped with the user `elastic` and password `changeme`. Additional options are available; pass `--help` for more information.
> You'll need to have a `java` binary in `PATH` or set `JAVA_HOME`.
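As a quick sanity check (an editor's sketch, assuming the defaults above and Elasticsearch's standard port 9200), you can hit the cluster with the bootstrapped credentials:

```js
// Request the root endpoint using the elastic/changeme credentials
const http = require('http');

http
  .get({ host: 'localhost', port: 9200, auth: 'elastic:changeme' }, res => {
    console.log('Elasticsearch responded with status', res.statusCode); // expect 200
  })
  .on('error', err => console.error('cluster not reachable:', err.message));
```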
@ -195,6 +195,20 @@ Start the development server.
Now you can point your web browser to https://localhost:5601 and start using Kibana! When running `yarn start`, Kibana will also log that it is listening on port 5603 due to the base path proxy, but you should still access Kibana on port 5601.
#### Running Kibana in Open-Source mode
If you're looking to only work with the open-source software, supply the license type to `yarn es`:
```bash
yarn es snapshot --license oss
```
And start Kibana with only open-source code:
```bash
yarn start --oss
```
#### Unsupported URL Type
If you're installing dependencies and seeing an error that looks something like
@ -326,12 +340,11 @@ yarn test:browser --dev # remove the --dev flag to run them once and close
### Building OS packages
Packages are built using fpm, pleaserun, dpkg, and rpm. fpm and pleaserun can be installed using gem. Package building has only been tested on Linux and is not supported on any other platform.
Packages are built using fpm, dpkg, and rpm. Package building has only been tested on Linux and is not supported on any other platform.
```bash
apt-get install ruby-dev rpm
gem install fpm -v 1.5.0
gem install pleaserun -v 0.0.24
yarn build --skip-archives
```

View file

@ -5,51 +5,11 @@ module.exports = function (grunt) {
// and once during so that we have access to it via
// grunt.config.get() within the config files
const config = {
pkg: grunt.file.readJSON('package.json'),
root: __dirname,
src: __dirname + '/src',
buildDir: __dirname + '/build', // temporary build directory
plugins: __dirname + '/src/core_plugins',
server: __dirname + '/src/server',
target: __dirname + '/target', // location of the compressed build targets
configFile: __dirname + '/src/config/kibana.yml',
karmaBrowser: (function () {
if (grunt.option('browser')) {
return grunt.option('browser');
}
switch (require('os').platform()) {
case 'win32':
return 'IE';
default:
return 'Chrome';
}
}()),
nodeVersion: grunt.file.read('.node-version').trim(),
meta: {
banner: '/*! <%= package.name %> - v<%= package.version %> - ' +
'<%= grunt.template.today("yyyy-mm-dd") %>\n' +
'<%= package.homepage ? " * " + package.homepage + "\\n" : "" %>' +
' * Copyright (c) <%= grunt.template.today("yyyy") %> <%= package.author.company %>;' +
' Licensed <%= package.license %> */\n'
},
};
grunt.config.merge(config);
// must run before even services/platforms
grunt.config.set('build', require('./tasks/config/build')(grunt));
config.packageScriptsDir = __dirname + '/tasks/build/package_scripts';
// ensure that these run first, other configs need them
config.services = require('./tasks/config/services')(grunt);
config.platforms = require('./tasks/config/platforms')(grunt);
grunt.config.merge(config);
// load plugins
require('load-grunt-config')(grunt, {
configPath: __dirname + '/tasks/config',
@ -62,5 +22,4 @@ module.exports = function (grunt) {
// load task definitions
grunt.task.loadTasks('tasks');
grunt.task.loadTasks('tasks/build');
};

View file

@ -1,7 +0,0 @@
Copyright 2012–2017 Elasticsearch BV
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.

LICENSE.txt Normal file (13 additions)
View file

@ -0,0 +1,13 @@
Source code in this repository is variously licensed under the Apache License
Version 2.0, an Apache compatible license, or the Elastic License. Outside of
the "x-pack" folder, source code in a given file is licensed under the Apache
License Version 2.0, unless otherwise noted at the beginning of the file or a
LICENSE file present in the directory subtree declares a separate license.
Within the "x-pack" folder, source code in a given file is licensed under the
Elastic License, unless otherwise noted at the beginning of the file or a
LICENSE file present in the directory subtree declares a separate license.
The build produces two sets of binaries - one set that falls under the Elastic
License and another set that falls under Apache License Version 2.0. The
binaries that contain `-oss` in the artifact name are licensed under the Apache
License Version 2.0.

View file

@ -1,6 +1,36 @@
Kibana
Kibana source code with Kibana X-Pack source code
Copyright 2012-2018 Elasticsearch B.V.
---
This product includes code that was extracted from angular-ui-bootstrap@0.13.1
which is available under an "MIT" license
The MIT License
Copyright (c) 2012-2016 the AngularUI Team, http://angular-ui.github.io/bootstrap/
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
---
This product uses Noto fonts that are licensed under the SIL Open
Font License, Version 1.1.
---
This product bundles angular-ui-bootstrap@0.12.1 which is available under a
"MIT" license.
@ -83,3 +113,50 @@ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
---
This product includes code that was extracted from angular@1.3.
Original license:
The MIT License
Copyright (c) 2010-2014 Google, Inc. http://angularjs.org
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
---
This product uses "radioactive button" styles that were published on
https://zurb.com/playground/radioactive-buttons under an "MIT" License.
Copyright (c) ZURB
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in the
Software without restriction, including without limitation the rights to use, copy,
modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the
following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View file

@ -1,8 +1,6 @@
# Kibana 6.3.0
Kibana is your window into the [Elastic Stack](https://www.elastic.co/products). Specifically, it's
an open source ([Apache Licensed](LICENSE.md)),
browser-based analytics and search dashboard for Elasticsearch.
Kibana is your window into the [Elastic Stack](https://www.elastic.co/products). Specifically, it's a browser-based analytics and search dashboard for Elasticsearch.
- [Getting Started](#getting-started)
- [Using a Kibana Release](#using-a-kibana-release)
@ -43,11 +41,11 @@ out an open PR:
For the daring, snapshot builds are available. These builds are created nightly and have undergone no formal QA, so they should never be run in production. All builds are 64 bit.
| platform | |
| --- | --- |
| OSX | [tar](https://snapshots.elastic.co/downloads/kibana/kibana-6.3.0-SNAPSHOT-darwin-x86_64.tar.gz) |
| Linux | [tar](https://snapshots.elastic.co/downloads/kibana/kibana-6.3.0-SNAPSHOT-linux-x86_64.tar.gz) [deb](https://snapshots.elastic.co/downloads/kibana/kibana-6.3.0-SNAPSHOT-amd64.deb) [rpm](https://snapshots.elastic.co/downloads/kibana/kibana-6.3.0-SNAPSHOT-x86_64.rpm) |
| Windows | [zip](https://snapshots.elastic.co/downloads/kibana/kibana-6.3.0-SNAPSHOT-windows-x86_64.zip) |
| platform | default | OSS |
| --- | --- | --- |
| OSX | [tar](https://snapshots.elastic.co/downloads/kibana/kibana-6.3.0-SNAPSHOT-darwin-x86_64.tar.gz) | [tar](https://snapshots.elastic.co/downloads/kibana/kibana-oss-7.0.0-alpha1-SNAPSHOT-darwin-x86_64.tar.gz) |
| Linux | [tar](https://snapshots.elastic.co/downloads/kibana/kibana-6.3.0-SNAPSHOT-linux-x86_64.tar.gz) [deb](https://snapshots.elastic.co/downloads/kibana/kibana-6.3.0-SNAPSHOT-amd64.deb) [rpm](https://snapshots.elastic.co/downloads/kibana/kibana-6.3.0-SNAPSHOT-x86_64.rpm) | [tar](https://snapshots.elastic.co/downloads/kibana/kibana-oss-7.0.0-alpha1-SNAPSHOT-linux-x86_64.tar.gz) [deb](https://snapshots.elastic.co/downloads/kibana/kibana-oss-7.0.0-alpha1-SNAPSHOT-amd64.deb) [rpm](https://snapshots.elastic.co/downloads/kibana/kibana-oss-7.0.0-alpha1-SNAPSHOT-x86_64.rpm) |
| Windows | [zip](https://snapshots.elastic.co/downloads/kibana/kibana-6.3.0-SNAPSHOT-windows-x86_64.zip) | [zip](https://snapshots.elastic.co/downloads/kibana/kibana-oss-7.0.0-alpha1-SNAPSHOT-windows-x86_64.zip) |
## Documentation

View file

@ -4,7 +4,6 @@
:include-xpack: true
:lang: en
:kib-repo-dir: {docdir}
:xes-repo-dir: {docdir}/../../elasticsearch-extra/x-pack-elasticsearch/docs/{lang}
:xkb-repo-dir: {docdir}/../../kibana-extra/x-pack-kibana/docs/{lang}
:xkb-repo-dir: {docdir}/../x-pack/docs/{lang}
include::{xkb-repo-dir}/index.asciidoc[]

View file

@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View file

@ -0,0 +1,223 @@
ELASTIC LICENSE AGREEMENT
PLEASE READ CAREFULLY THIS ELASTIC LICENSE AGREEMENT (THIS "AGREEMENT"), WHICH
CONSTITUTES A LEGALLY BINDING AGREEMENT AND GOVERNS ALL OF YOUR USE OF ALL OF
THE ELASTIC SOFTWARE WITH WHICH THIS AGREEMENT IS INCLUDED ("ELASTIC SOFTWARE")
THAT IS PROVIDED IN OBJECT CODE FORMAT, AND, IN ACCORDANCE WITH SECTION 2 BELOW,
CERTAIN OF THE ELASTIC SOFTWARE THAT IS PROVIDED IN SOURCE CODE FORMAT. BY
INSTALLING OR USING ANY OF THE ELASTIC SOFTWARE GOVERNED BY THIS AGREEMENT, YOU
ARE ASSENTING TO THE TERMS AND CONDITIONS OF THIS AGREEMENT. IF YOU DO NOT AGREE
WITH SUCH TERMS AND CONDITIONS, YOU MAY NOT INSTALL OR USE THE ELASTIC SOFTWARE
GOVERNED BY THIS AGREEMENT. IF YOU ARE INSTALLING OR USING THE SOFTWARE ON
BEHALF OF A LEGAL ENTITY, YOU REPRESENT AND WARRANT THAT YOU HAVE THE ACTUAL
AUTHORITY TO AGREE TO THE TERMS AND CONDITIONS OF THIS AGREEMENT ON BEHALF OF
SUCH ENTITY.
Posted Date: April 20, 2018
This Agreement is entered into by and between Elasticsearch BV ("Elastic") and
You, or the legal entity on behalf of whom You are acting (as applicable,
"You").
1. OBJECT CODE END USER LICENSES, RESTRICTIONS AND THIRD PARTY OPEN SOURCE
SOFTWARE
1.1 Object Code End User License. Subject to the terms and conditions of
Section 1.2 of this Agreement, Elastic hereby grants to You, AT NO CHARGE and
for so long as you are not in breach of any provision of this Agreement, a
License to the Basic Features and Functions of the Elastic Software.
1.2 Reservation of Rights; Restrictions. As between Elastic and You, Elastic
and its licensors own all right, title and interest in and to the Elastic
Software, and except as expressly set forth in Sections 1.1, and 2.1 of this
Agreement, no other license to the Elastic Software is granted to You under
this Agreement, by implication, estoppel or otherwise. You agree not to: (i)
reverse engineer or decompile, decrypt, disassemble or otherwise reduce any
Elastic Software provided to You in Object Code, or any portion thereof, to
Source Code, except and only to the extent any such restriction is prohibited
by applicable law, (ii) except as expressly permitted in this Agreement,
prepare derivative works from, modify, copy or use the Elastic Software Object
Code or the Commercial Software Source Code in any manner; (iii) except as
expressly permitted in Section 1.1 above, transfer, sell, rent, lease,
distribute, sublicense, loan or otherwise transfer, Elastic Software Object
Code, in whole or in part, to any third party; (iv) use Elastic Software
Object Code for providing time-sharing services, any software-as-a-service,
service bureau services or as part of an application services provider or
other service offering (collectively, "SaaS Offering") where obtaining access
to the Elastic Software or the features and functions of the Elastic Software
is a primary reason or substantial motivation for users of the SaaS Offering
to access and/or use the SaaS Offering ("Prohibited SaaS Offering"); (v)
circumvent the limitations on use of Elastic Software provided to You in
Object Code format that are imposed or preserved by any License Key, or (vi)
alter or remove any Marks and Notices in the Elastic Software. If You have any
question as to whether a specific SaaS Offering constitutes a Prohibited SaaS
Offering, or are interested in obtaining Elastic's permission to engage in
commercial or non-commercial distribution of the Elastic Software, please
contact elastic_license@elastic.co.
1.3 Third Party Open Source Software. The Commercial Software may contain or
be provided with third party open source libraries, components, utilities and
other open source software (collectively, "Open Source Software"), which Open
Source Software may have applicable license terms as identified on a website
designated by Elastic. Notwithstanding anything to the contrary herein, use of
the Open Source Software shall be subject to the license terms and conditions
applicable to such Open Source Software, to the extent required by the
applicable licensor (which terms shall not restrict the license rights granted
to You hereunder, but may contain additional rights). To the extent any
condition of this Agreement conflicts with any license to the Open Source
Software, the Open Source Software license will govern with respect to such
Open Source Software only. Elastic may also separately provide you with
certain open source software that is licensed by Elastic. Your use of such
Elastic open source software will not be governed by this Agreement, but by
the applicable open source license terms.
2. COMMERCIAL SOFTWARE SOURCE CODE
2.1 Limited License. Subject to the terms and conditions of Section 2.2 of
this Agreement, Elastic hereby grants to You, AT NO CHARGE and for so long as
you are not in breach of any provision of this Agreement, a limited,
non-exclusive, non-transferable, fully paid up royalty free right and license
to the Commercial Software in Source Code format, without the right to grant
or authorize sublicenses, to prepare Derivative Works of the Commercial
Software, provided You (i) do not hack the licensing mechanism, or otherwise
circumvent the intended limitations on the use of Elastic Software to enable
features other than Basic Features and Functions or those features You are
entitled to as part of a Subscription, and (ii) use the resulting object code
only for reasonable testing purposes.
2.2 Restrictions. Nothing in Section 2.1 grants You the right to (i) use the
Commercial Software Source Code other than in accordance with Section 2.1
above, (ii) use a Derivative Work of the Commercial Software outside of a
Non-production Environment, in any production capacity, on a temporary or
permanent basis, or (iii) transfer, sell, rent, lease, distribute, sublicense,
loan or otherwise make available the Commercial Software Source Code, in whole
or in part, to any third party. Notwithstanding the foregoing, You may
maintain a copy of the repository in which the Source Code of the Commercial
Software resides and that copy may be publicly accessible, provided that you
include this Agreement with Your copy of the repository.
3. TERMINATION
3.1 Termination. This Agreement will automatically terminate, whether or not
You receive notice of such Termination from Elastic, if You breach any of its
provisions.
3.2 Post Termination. Upon any termination of this Agreement, for any reason,
You shall promptly cease the use of the Elastic Software in Object Code format
and cease use of the Commercial Software in Source Code format. For the
avoidance of doubt, termination of this Agreement will not affect Your right
to use Elastic Software, in either Object Code or Source Code formats, made
available under the Apache License Version 2.0.
3.3 Survival. Sections 1.2, 2.2, 3.3, 4 and 5 shall survive any termination or
expiration of this Agreement.
4. DISCLAIMER OF WARRANTIES AND LIMITATION OF LIABILITY
4.1 Disclaimer of Warranties. TO THE MAXIMUM EXTENT PERMITTED UNDER APPLICABLE
LAW, THE ELASTIC SOFTWARE IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND,
AND ELASTIC AND ITS LICENSORS MAKE NO WARRANTIES WHETHER EXPRESSED, IMPLIED OR
STATUTORY REGARDING OR RELATING TO THE ELASTIC SOFTWARE. TO THE MAXIMUM EXTENT
PERMITTED UNDER APPLICABLE LAW, ELASTIC AND ITS LICENSORS SPECIFICALLY
DISCLAIM ALL IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
PURPOSE AND NON-INFRINGEMENT WITH RESPECT TO THE ELASTIC SOFTWARE, AND WITH
RESPECT TO THE USE OF THE FOREGOING. FURTHER, ELASTIC DOES NOT WARRANT RESULTS
OF USE OR THAT THE ELASTIC SOFTWARE WILL BE ERROR FREE OR THAT THE USE OF THE
ELASTIC SOFTWARE WILL BE UNINTERRUPTED.
4.2 Limitation of Liability. IN NO EVENT SHALL ELASTIC OR ITS LICENSORS BE
LIABLE TO YOU OR ANY THIRD PARTY FOR ANY DIRECT OR INDIRECT DAMAGES,
INCLUDING, WITHOUT LIMITATION, FOR ANY LOSS OF PROFITS, LOSS OF USE, BUSINESS
INTERRUPTION, LOSS OF DATA, COST OF SUBSTITUTE GOODS OR SERVICES, OR FOR ANY
SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, IN CONNECTION WITH
OR ARISING OUT OF THE USE OR INABILITY TO USE THE ELASTIC SOFTWARE, OR THE
PERFORMANCE OF OR FAILURE TO PERFORM THIS AGREEMENT, WHETHER ALLEGED AS A
BREACH OF CONTRACT OR TORTIOUS CONDUCT, INCLUDING NEGLIGENCE, EVEN IF ELASTIC
HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
5. MISCELLANEOUS
This Agreement completely and exclusively states the entire agreement of the
parties regarding the subject matter herein, and it supersedes, and its terms
govern, all prior proposals, agreements, or other communications between the
parties, oral or written, regarding such subject matter. This Agreement may be
modified by Elastic from time to time, and any such modifications will be
effective upon the "Posted Date" set forth at the top of the modified
Agreement. If any provision hereof is held unenforceable, this Agreement will
continue without said provision and be interpreted to reflect the original
intent of the parties. This Agreement and any non-contractual obligation
arising out of or in connection with it, is governed exclusively by Dutch law.
This Agreement shall not be governed by the 1980 UN Convention on Contracts
for the International Sale of Goods. All disputes arising out of or in
connection with this Agreement, including its existence and validity, shall be
resolved by the courts with jurisdiction in Amsterdam, The Netherlands, except
where mandatory law provides for the courts at another location in The
Netherlands to have jurisdiction. The parties hereby irrevocably waive any and
all claims and defenses either might otherwise have in any such action or
proceeding in any of such courts based upon any alleged lack of personal
jurisdiction, improper venue, forum non conveniens or any similar claim or
defense. A breach or threatened breach, by You of Section 2 may cause
irreparable harm for which damages at law may not provide adequate relief, and
therefore Elastic shall be entitled to seek injunctive relief without being
required to post a bond. You may not assign this Agreement (including by
operation of law in connection with a merger or acquisition), in whole or in
part to any third party without the prior written consent of Elastic, which
may be withheld or granted by Elastic in its sole and absolute discretion.
Any assignment in violation of the preceding sentence is void. Notices to
Elastic may also be sent to legal@elastic.co.
6. DEFINITIONS
The following terms have the meanings ascribed:
6.1 "Affiliate" means, with respect to a party, any entity that controls, is
controlled by, or which is under common control with, such party, where
"control" means ownership of at least fifty percent (50%) of the outstanding
voting shares of the entity, or the contractual right to establish policy for,
and manage the operations of, the entity.
6.2 "Basic Features and Functions" means those features and functions of the
Elastic Software that are eligible for use under a Basic license, as set forth
at https://www.elastic.co/subscriptions, as may be modified by Elastic from
time to time.
6.3 "Commercial Software" means the Elastic Software Source Code in any file
containing a header stating the contents are subject to the Elastic License or
which is contained in the repository folder labeled "x-pack", unless a LICENSE
file present in the directory subtree declares a different license.
6.4 "Derivative Work of the Commercial Software" means, for purposes of this
Agreement, any modification(s) or enhancement(s) to the Commercial Software,
which represent, as a whole, an original work of authorship.
6.5 "License" means a limited, non-exclusive, non-transferable, fully paid up,
royalty free, right and license, without the right to grant or authorize
sublicenses, solely for Your internal business operations to (i) install and
use the applicable Features and Functions of the Elastic Software in Object
Code, and (ii) permit Contractors and Your Affiliates to use the Elastic
software as set forth in (i) above, provided that such use by Contractors must
be solely for Your benefit and/or the benefit of Your Affiliates, and You
shall be responsible for all acts and omissions of such Contractors and
Affiliates in connection with their use of the Elastic software that are
contrary to the terms and conditions of this Agreement.
6.6 "License Key" means a sequence of bytes, including but not limited to a
JSON blob, that is used to enable certain features and functions of the
Elastic Software.
6.7 "Marks and Notices" means all Elastic trademarks, trade names, logos and
notices present on the Documentation as originally provided by Elastic.
6.8 "Non-production Environment" means an environment for development, testing
or quality assurance, where software is not used for production purposes.
6.9 "Object Code" means any form resulting from mechanical transformation or
translation of Source Code form, including but not limited to compiled object
code, generated documentation, and conversions to other media types.
6.10 "Source Code" means the preferred form of computer software for making
modifications, including but not limited to software source code,
documentation source, and configuration files.
6.11 "Subscription" means the right to receive Support Services and a License
to the Commercial Software.

View file

@ -54,10 +54,8 @@
"test:ui:runner": "echo 'use `node scripts/functional_test_runner`' && false",
"test:server": "grunt test:server",
"test:coverage": "grunt test:coverage",
"test:visualRegression": "grunt test:visualRegression:buildGallery",
"checkLicenses": "grunt licenses",
"build": "grunt build",
"release": "grunt release",
"build": "node scripts/build",
"start": "sh ./bin/kibana --dev",
"precommit": "node scripts/precommit_hook",
"karma": "karma start",
@ -213,6 +211,7 @@
"webpack-merge": "4.1.0",
"whatwg-fetch": "^2.0.3",
"wreck": "12.4.0",
"x-pack": "link:x-pack",
"yauzl": "2.7.0"
},
"devDependencies": {
@ -220,17 +219,20 @@
"@elastic/eslint-plugin-kibana-custom": "link:packages/eslint-plugin-kibana-custom",
"@kbn/es": "link:packages/kbn-es",
"@kbn/eslint-import-resolver-kibana": "link:packages/kbn-eslint-import-resolver-kibana",
"@kbn/eslint-plugin-license-header": "link:packages/kbn-eslint-plugin-license-header",
"@kbn/plugin-generator": "link:packages/kbn-plugin-generator",
"angular-mocks": "1.4.7",
"babel-eslint": "8.1.2",
"babel-jest": "^22.4.3",
"backport": "2.2.0",
"chai": "3.5.0",
"chalk": "2.3.0",
"chance": "1.0.6",
"cheerio": "0.22.0",
"chokidar": "1.6.0",
"chromedriver": "2.36",
"classnames": "2.2.5",
"dedent": "^0.7.0",
"enzyme": "3.2.0",
"enzyme-adapter-react-16": "^1.1.1",
"enzyme-to-json": "3.3.0",
@ -248,17 +250,15 @@
"expect.js": "0.3.1",
"faker": "1.1.0",
"getopts": "2.0.0",
"globby": "^8.0.0",
"grunt": "1.0.1",
"grunt-aws-s3": "0.14.5",
"grunt-babel": "6.0.0",
"grunt-cli": "0.1.13",
"grunt-contrib-clean": "1.0.0",
"grunt-contrib-copy": "0.8.1",
"grunt-contrib-watch": "^1.0.0",
"grunt-karma": "2.0.0",
"grunt-peg": "^2.0.1",
"grunt-run": "0.7.0",
"grunt-simple-mocha": "0.4.0",
"gulp-babel": "^7.0.1",
"gulp-sourcemaps": "1.7.3",
"husky": "0.8.1",
"image-diff": "1.6.0",

View file

@ -8,13 +8,14 @@ exports.description = 'Install and run from an Elasticsearch tar';
exports.usage = 'es archive <path> [<args>]';
exports.help = (defaults = {}) => {
const { password = 'changeme', 'base-path': basePath } = defaults;
return dedent`
Options:
--base-path Path containing cache/installations [default: ${
defaults['base-path']
}]
--base-path Path containing cache/installations [default: ${basePath}]
--install-path Installation path, defaults to 'source' within base-path
--password Sets password for elastic user [default: ${password}]
-E Additional key=value settings to pass to Elasticsearch
Example:

View file

@ -5,14 +5,20 @@ const { Cluster } = require('../cluster');
exports.description = 'Download and run from a nightly snapshot';
exports.help = (defaults = {}) => {
const {
license = 'basic',
password = 'changeme',
'base-path': basePath,
} = defaults;
return dedent`
Options:
--license Run with a 'oss', 'basic', or 'trial' license [default: ${license}]
--version Version of ES to download [default: ${defaults.version}]
--base-path Path containing cache/installations [default: ${
defaults['base-path']
}]
--base-path Path containing cache/installations [default: ${basePath}]
--install-path Installation path, defaults to 'source' within base-path
--password Sets password for elastic user [default: ${password}]
-E Additional key=value settings to pass to Elasticsearch
Example:

View file

@ -5,14 +5,20 @@ const { Cluster } = require('../cluster');
exports.description = 'Build and run from source';
exports.help = (defaults = {}) => {
const {
license = 'basic',
password = 'changeme',
'base-path': basePath,
} = defaults;
return dedent`
Options:
--license Run with a 'oss', 'basic', or 'trial' license [default: ${license}]
--source-path Path to ES source [default: ${defaults['source-path']}]
--base-path Path containing cache/installations [default: ${
defaults['base-path']
}]
--base-path Path containing cache/installations [default: ${basePath}]
--install-path Installation path, defaults to 'source' within base-path
--password Sets password for elastic user [default: ${password}]
-E Additional key=value settings to pass to Elasticsearch
Example:

View file

@ -151,10 +151,9 @@ exports.Cluster = class Cluster {
this._log.info(chalk.bold('Starting'));
this._log.indent(4);
const args = extractConfigFiles(esArgs, this._installPath).reduce(
(acc, cur) => acc.concat(['-E', cur]),
[]
);
const args = extractConfigFiles(esArgs, installPath, {
log: this._log,
}).reduce((acc, cur) => acc.concat(['-E', cur]), []);
this._log.debug('%s %s', ES_BIN, args.join(' '));

View file

@ -1,26 +1,29 @@
const fs = require('fs');
const path = require('path');
const chalk = require('chalk');
const execa = require('execa');
const { log: defaultLog, extractTarball } = require('../utils');
const { BASE_PATH } = require('../paths');
const { BASE_PATH, ES_CONFIG, ES_KEYSTORE_BIN } = require('../paths');
/**
* Extracts an ES archive and optionally configures license, security, and keystore
*
* @param {String} archive - path to tar
* @param {Object} options
* @property {('oss'|'basic'|'trial')} options.license
* @property {String} options.basePath
* @property {String} options.installPath
* @property {ToolingLog} options.log
*/
exports.installArchive = async function installArchive(
archive,
{
exports.installArchive = async function installArchive(archive, options = {}) {
const {
license = 'basic',
password = 'changeme',
basePath = BASE_PATH,
installPath = path.resolve(basePath, path.basename(archive, '.tar.gz')),
log = defaultLog,
}
) {
} = options;
if (fs.existsSync(installPath)) {
log.info('install directory already exists, removing');
rmrfSync(installPath);
@ -30,6 +33,17 @@ exports.installArchive = async function installArchive(
await extractTarball(archive, installPath);
log.info('extracted to %s', chalk.bold(installPath));
if (license !== 'oss') {
await appendToConfig(
installPath,
'xpack.license.self_generated.type',
license
);
await appendToConfig(installPath, 'xpack.security.enabled', 'true');
await configureKeystore(installPath, password, log);
}
return { installPath };
};
@ -52,3 +66,36 @@ function rmrfSync(path) {
fs.rmdirSync(path);
}
}
/**
* Appends single line to elasticsearch.yml config file
*
* @param {String} installPath
* @param {String} key
* @param {String} value
*/
async function appendToConfig(installPath, key, value) {
fs.appendFileSync(
path.resolve(installPath, ES_CONFIG),
`${key}: ${value}\n`,
'utf8'
);
}
/**
* Creates and configures Keystore
*
* @param {String} installPath
* @param {String} password
* @param {ToolingLog} log
*/
async function configureKeystore(installPath, password, log = defaultLog) {
log.info('setting bootstrap password to %s', chalk.bold(password));
await execa(ES_KEYSTORE_BIN, ['create'], { cwd: installPath });
await execa(ES_KEYSTORE_BIN, ['add', 'bootstrap.password', '-x'], {
input: password,
cwd: installPath,
});
}
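Taken together, a non-`oss` install now appends the license and security settings to `elasticsearch.yml` and seeds the keystore with the bootstrap password. A usage sketch (editor's illustration; the require path is hypothetical):

```js
const { installArchive } = require('./packages/kbn-es/src/install/archive');

installArchive('/tmp/elasticsearch-6.3.0-SNAPSHOT.tar.gz', {
  license: 'trial',
  password: 'changeme',
}).then(({ installPath }) => {
  // config/elasticsearch.yml now ends with:
  //   xpack.license.self_generated.type: trial
  //   xpack.security.enabled: true
  // and bootstrap.password has been added to the keystore
  console.log('installed to', installPath);
});
```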

View file

@ -8,27 +8,40 @@ const { installArchive } = require('./archive');
const { log: defaultLog, cache } = require('../utils');
/**
* Installs ES from snapshot
*
* @param {Object} options
* @property {('oss'|'basic'|'trial')} options.license
* @property {String} options.password
* @property {String} options.version
* @property {String} options.basePath
* @property {String} options.installPath
* @property {ToolingLog} options.log
*/
exports.installSnapshot = async function installSnapshot({
license = 'basic',
password = 'password',
version,
basePath = BASE_PATH,
installPath = path.resolve(basePath, version),
log = defaultLog,
}) {
const fileName = `elasticsearch-${version}-SNAPSHOT.tar.gz`;
const fileName = getFilename(license, version);
const url = `https://snapshots.elastic.co/downloads/elasticsearch/${fileName}`;
const dest = path.resolve(basePath, 'cache', fileName);
log.info('version: %s', chalk.bold(version));
log.info('install path: %s', chalk.bold(installPath));
log.info('license: %s', chalk.bold(license));
await downloadFile(url, dest, log);
return await installArchive(dest, { installPath, basePath, log });
return await installArchive(dest, {
license,
password,
basePath,
installPath,
log,
});
};
/**
@ -82,3 +95,11 @@ function downloadFile(url, dest, log) {
})
);
}
function getFilename(license, version) {
const basename = `elasticsearch${
license === 'oss' ? '-oss-' : '-'
}${version}`;
return `${basename}-SNAPSHOT.tar.gz`;
}
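For example (editor's illustration, using the `7.0.0-alpha1` version from the snapshot table above), the helper yields:

```js
getFilename('oss', '7.0.0-alpha1');
// => 'elasticsearch-oss-7.0.0-alpha1-SNAPSHOT.tar.gz'

getFilename('basic', '7.0.0-alpha1');
// => 'elasticsearch-7.0.0-alpha1-SNAPSHOT.tar.gz'
```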

View file

@ -8,7 +8,12 @@ const simpleGit = require('simple-git/promise');
const { installArchive } = require('./archive');
const { createCliError } = require('../errors');
const { findMostRecentlyChanged, log: defaultLog, cache } = require('../utils');
const { GRADLE_BIN, ES_ARCHIVE_PATTERN, BASE_PATH } = require('../paths');
const {
GRADLE_BIN,
ES_ARCHIVE_PATTERN,
ES_OSS_ARCHIVE_PATTERN,
BASE_PATH,
} = require('../paths');
const onceEvent = (emitter, event) =>
new Promise(resolve => emitter.once(event, resolve));
@ -17,12 +22,16 @@ const onceEvent = (emitter, event) =>
* Installs ES from source
*
* @param {Object} options
* @property {('oss'|'basic'|'trial')} options.license
* @property {String} options.password
* @property {String} options.sourcePath
* @property {String} options.basePath
* @property {String} options.installPath
* @property {ToolingLog} options.log
*/
exports.installSource = async function installSource({
license = 'basic',
password = 'changeme',
sourcePath,
basePath = BASE_PATH,
installPath = path.resolve(basePath, 'source'),
@ -30,15 +39,16 @@ exports.installSource = async function installSource({
}) {
log.info('source path: %s', chalk.bold(sourcePath));
log.info('install path: %s', chalk.bold(installPath));
log.info('license: %s', chalk.bold(license));
const { filename, etag } = await sourceInfo(sourcePath, log);
const cacheDest = path.resolve(basePath, 'cache', filename);
const metadata = await sourceInfo(sourcePath, license, log);
const dest = path.resolve(basePath, 'cache', metadata.filename);
const cacheMeta = cache.readMeta(cacheDest);
const isCached = cacheMeta.exists && cacheMeta.etag === etag;
const cacheMeta = cache.readMeta(dest);
const isCached = cacheMeta.exists && cacheMeta.etag === metadata.etag;
const archive = isCached
? cacheDest
: await createSnapshot({ sourcePath, log });
? dest
: await createSnapshot({ sourcePath, log, license });
if (isCached) {
log.info(
@ -46,11 +56,17 @@ exports.installSource = async function installSource({
chalk.bold(cacheMeta.ts)
);
} else {
cache.writeMeta(cacheDest, { etag });
fs.copyFileSync(archive, cacheDest);
cache.writeMeta(dest, metadata);
fs.copyFileSync(archive, dest);
}
return await installArchive(cacheDest, { basePath, installPath, log });
return await installArchive(dest, {
license,
password,
basePath,
installPath,
log,
});
};
/**
@ -58,7 +74,7 @@ exports.installSource = async function installSource({
* @param {String} cwd
* @param {ToolingLog} log
*/
async function sourceInfo(cwd, log = defaultLog) {
async function sourceInfo(cwd, license, log = defaultLog) {
if (!fs.existsSync(cwd)) {
throw new Error(`${cwd} does not exist`);
}
@ -83,11 +99,16 @@ async function sourceInfo(cwd, log = defaultLog) {
const cwdHash = crypto
.createHash('md5')
.update(cwd)
.digest('hex');
.digest('hex')
.substr(0, 8);
const basename = `${branch}${license === 'oss' ? '-oss-' : '-'}${cwdHash}`;
const filename = `${basename}.tar.gz`;
return {
etag: etag.digest('hex'),
filename: `${branch}-${cwdHash.substr(0, 8)}.tar.gz`,
filename,
cwd,
branch,
};
}
@ -96,12 +117,14 @@ async function sourceInfo(cwd, log = defaultLog) {
* Creates archive from source
*
* @param {Object} options
* @property {('oss'|'basic'|'trial')} options.license
* @property {String} options.sourcePath
* @property {ToolingLog} options.log
* @returns {Object} containing archive and optional plugins
*/
async function createSnapshot({ sourcePath, log = defaultLog }) {
const buildArgs = [':distribution:archives:tar:assemble'];
async function createSnapshot({ license, sourcePath, log = defaultLog }) {
const tarTask = license === 'oss' ? 'oss-tar' : 'tar';
const buildArgs = [`:distribution:archives:${tarTask}:assemble`];
log.info('%s %s', GRADLE_BIN, buildArgs.join(' '));
@ -126,9 +149,15 @@ async function createSnapshot({ sourcePath, log = defaultLog }) {
throw createCliError('unable to build ES');
}
const archivePattern =
license === 'oss' ? ES_OSS_ARCHIVE_PATTERN : ES_ARCHIVE_PATTERN;
const esTarballPath = findMostRecentlyChanged(
path.resolve(sourcePath, ES_ARCHIVE_PATTERN)
path.resolve(sourcePath, archivePattern)
);
if (!esTarballPath) {
throw createCliError('could not locate ES distribution');
}
return esTarballPath;
}

View file

@ -11,5 +11,11 @@ exports.BASE_PATH = path.resolve(tempDir, 'kbn-es');
exports.GRADLE_BIN = useBat('./gradlew');
exports.ES_BIN = useBat('bin/elasticsearch');
exports.ES_CONFIG = 'config/elasticsearch.yml';
exports.ES_KEYSTORE_BIN = useBat('./bin/elasticsearch-keystore');
exports.ES_ARCHIVE_PATTERN =
'distribution/archives/tar/build/distributions/elasticsearch-*.tar.gz';
'distribution/archives/tar/build/distributions/elasticsearch-*-SNAPSHOT.tar.gz';
exports.ES_OSS_ARCHIVE_PATTERN =
'distribution/archives/oss-tar/build/distributions/elasticsearch-*-SNAPSHOT.tar.gz';

View file

@ -0,0 +1,30 @@
const mockFs = require('mock-fs');
const { findMostRecentlyChanged } = require('./find_most_recently_changed');
beforeEach(() => {
mockFs({
'/data': {
'oldest.yml': mockFs.file({
content: 'foo',
ctime: new Date(2018, 2, 1),
}),
'newest.yml': mockFs.file({
content: 'bar',
ctime: new Date(2018, 2, 3),
}),
'middle.yml': mockFs.file({
content: 'baz',
ctime: new Date(2018, 2, 2),
}),
},
});
});
afterEach(() => {
mockFs.restore();
});
test('returns newest file', () => {
const file = findMostRecentlyChanged('/data/*.yml');
expect(file).toEqual('/data/newest.yml');
});

View file

@ -0,0 +1,91 @@
const { join, dirname, extname } = require('path');
const webpackResolver = require('eslint-import-resolver-webpack');
const nodeResolver = require('eslint-import-resolver-node');
const {
getKibanaPath,
getProjectRoot,
getWebpackConfig,
isFile,
isProbablyWebpackShim,
getIsPathRequest,
resolveWebpackAlias,
} = require('./lib');
// cache context, it shouldn't change
let context;
function initContext(file, config) {
if (context) {
return context;
}
const projectRoot = getProjectRoot(file, config);
const kibanaPath = getKibanaPath(config, projectRoot);
const webpackConfig = getWebpackConfig(kibanaPath, projectRoot, config);
const aliasEntries = Object.entries(webpackConfig.resolve.alias || {});
context = {
webpackConfig,
aliasEntries,
};
return context;
}
exports.resolve = function resolveKibanaPath(importRequest, file, config) {
config = config || {};
const { webpackConfig, aliasEntries } = initContext(file, config);
let isPathRequest = getIsPathRequest(importRequest);
// if the importRequest is not a path we might be able to map it to a path
// by comparing it to the webpack aliases. If we can convert it to a path
// without actually invoking the webpack resolver we can save a lot of time
if (!isPathRequest) {
const resolvedAlias = resolveWebpackAlias(importRequest, aliasEntries);
if (resolvedAlias) {
importRequest = resolvedAlias;
isPathRequest = true;
}
}
// if the importRequest is a path, and it has a file extension, then
// we just resolve it. This is most helpful with relative imports for
// .css and .html files because those don't work with the node resolver
// and we can resolve them much quicker than webpack
if (isPathRequest && extname(importRequest)) {
const abs = join(dirname(file), importRequest);
if (isFile(abs)) {
return {
found: true,
path: abs,
};
}
}
// only use the node resolver if the importRequest is a path, or is
// a module request but not one that's probably a webpackShim. This
// prevents false positives as webpackShims are likely to be resolved
// to the node_modules directory by the node resolver, but we want
// them to resolve to the actual shim
if (isPathRequest || !isProbablyWebpackShim(importRequest, file)) {
const nodeResult = nodeResolver.resolve(
importRequest,
file,
Object.assign({}, config, {
isFile,
})
);
if (nodeResult && nodeResult.found) {
return nodeResult;
}
}
return webpackResolver.resolve(importRequest, file, {
config: webpackConfig,
});
};
// use version 2 of the resolver interface, https://github.com/benmosher/eslint-plugin-import/blob/master/resolvers/README.md#interfaceversion--number
exports.interfaceVersion = 2;
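To illustrate how eslint-plugin-import drives this module, a hypothetical call sequence (the resolved path is illustrative, not a guaranteed result):

```js
// eslint-plugin-import calls the resolver roughly like this:
const resolver = require('@kbn/eslint-import-resolver-kibana');

resolver.interfaceVersion; // 2
const result = resolver.resolve(
  'ui/chrome',                  // importRequest
  '/repo/src/ui/public/foo.js', // file containing the import
  { rootPackageName: 'kibana', kibanaPath: '.' }
);
// => { found: true, path: '/repo/src/ui/public/chrome.js' } (illustrative)
```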

View file

@@ -1,29 +0,0 @@
const webpackResolver = require('eslint-import-resolver-webpack');
const nodeResolver = require('eslint-import-resolver-node');
const getProjectRoot = require('./lib/get_project_root');
const getWebpackConfig = require('./lib/get_webpack_config');
// cache expensive resolution results
let projectRoot;
let webpackConfig;
exports.resolve = function resolveKibanaPath(source, file, config) {
const settings = config || {};
// try to resolve with the node resolver first
const resolvedWithNode = nodeResolver.resolve(source, file, config);
if (resolvedWithNode && resolvedWithNode.found) {
return resolvedWithNode;
}
// fall back to the webpack resolver
projectRoot = projectRoot || getProjectRoot(file, settings);
webpackConfig =
webpackConfig || getWebpackConfig(source, projectRoot, settings);
return webpackResolver.resolve(source, file, {
config: webpackConfig,
});
};
// use version 2 of the resolver interface, https://github.com/benmosher/eslint-plugin-import/blob/master/resolvers/README.md#interfaceversion--number
exports.interfaceVersion = 2;

View file

@@ -1,3 +1 @@
const debug = require('debug')('eslint-plugin-import:resolver:kibana');
module.exports = debug;
exports.debug = require('debug')('eslint-plugin-import:resolver:kibana');

View file

@@ -0,0 +1,17 @@
// "path" imports point to a specific location and don't require
// module directory resolution. This RegExp should capture import
// statements that:
//
// - start with `./`
// - start with `../`
// - equal `..`
// - equal `.`
// - start with `C:\`
// - start with `C:/`
// - start with `/`
//
const PATH_IMPORT_RE = /^(?:\.\.?(?:\/|$)|\/|([A-Za-z]:)?[/\\])/;
exports.getIsPathRequest = function(source) {
return PATH_IMPORT_RE.test(source);
};
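A few illustrative classifications (the require path matches the index.js shown later in this commit):

```js
const { getIsPathRequest } = require('./get_is_path_request');

getIsPathRequest('./foo');     // true  – relative path
getIsPathRequest('..');        // true  – parent directory
getIsPathRequest('/abs/path'); // true  – absolute posix path
getIsPathRequest('C:\\win');   // true  – absolute windows path
getIsPathRequest('lodash');    // false – bare module request
```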

View file

@@ -1,12 +1,13 @@
const { resolve } = require('path');
const debug = require('./debug');
const { debug } = require('./debug');
const DEFAULT_PLUGIN_PATH = '../../kibana';
/*
* Resolves the path to Kibana, either from default setting or config
*/
module.exports = function getKibanaPath(config, projectRoot) {
exports.getKibanaPath = function(config, projectRoot) {
const inConfig = config != null && config.kibanaPath;
// We only allow `.` in the config as we need it for Kibana itself

View file

@@ -0,0 +1,41 @@
const { statSync } = require('fs');
const LRU = require('lru-cache');
const DIR = Symbol('dir');
const FILE = Symbol('file');
const cache = process.env.KIBANA_RESOLVER_HARD_CACHE
? new Map()
: new LRU({ maxAge: 1000 });
function getPathType(path) {
const cached = cache.get(path);
if (cached !== undefined) {
return cached;
}
let type = null;
try {
const stats = statSync(path);
if (stats.isDirectory()) {
type = DIR;
} else if (stats.isFile() || stats.isFIFO()) {
type = FILE;
}
} catch (error) {
if (!error || (error.code !== 'ENOENT' && error.code !== 'ENOTDIR')) {
throw error;
}
}
cache.set(path, type);
return type;
}
exports.isDirectory = function(path) {
return getPathType(path) === DIR;
};
exports.isFile = function(path) {
return getPathType(path) === FILE;
};
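Usage is straightforward; note that results (including "not found") are cached for up to a second, or indefinitely when `KIBANA_RESOLVER_HARD_CACHE` is set:

```js
const { isFile, isDirectory } = require('./get_path_type');

isDirectory(__dirname);  // true
isFile(__filename);      // true
isFile('/no/such/path'); // false – ENOENT is swallowed and cached as null
```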

View file

@@ -1,7 +1,8 @@
const { dirname, resolve } = require('path');
const glob = require('glob-all');
module.exports = function getPlugins(config, kibanaPath, projectRoot) {
exports.getPlugins = function(config, kibanaPath, projectRoot) {
const resolveToRoot = path => resolve(projectRoot, path);
const pluginDirs = [

View file

@@ -1,6 +1,7 @@
const { dirname, resolve, parse } = require('path');
const { accessSync, readFileSync } = require('fs');
const debug = require('./debug');
const { debug } = require('./debug');
function getConfig(config) {
const defaults = {
@@ -40,7 +41,7 @@ function getRootPackageDir(dirRoot, dir, rootPackageName) {
}
}
module.exports = function getProjectRoot(file, config) {
exports.getProjectRoot = function(file, config) {
const { root, dir } = parse(resolve(file));
const { rootPackageName } = config;

View file

@@ -1,11 +1,9 @@
const { resolve } = require('path');
const debug = require('./debug');
const getKibanaPath = require('./get_kibana_path');
const getPlugins = require('./get_plugins');
const { debug } = require('./debug');
const { getPlugins } = require('./get_plugins');
module.exports = function getWebpackConfig(source, projectRoot, config) {
const kibanaPath = getKibanaPath(config, projectRoot);
exports.getWebpackConfig = function(kibanaPath, projectRoot, config) {
const fromKibana = (...path) => resolve(kibanaPath, ...path);
const alias = {

View file

@@ -0,0 +1,10 @@
module.exports = Object.assign(
{},
require('./get_kibana_path'),
require('./get_project_root'),
require('./get_webpack_config'),
require('./get_path_type'),
require('./is_probably_webpack_shim'),
require('./get_is_path_request'),
require('./resolve_webpack_alias')
);

View file

@@ -0,0 +1,45 @@
const { readdirSync } = require('fs');
const { join, dirname } = require('path');
const LRU = require('lru-cache');
const { isDirectory } = require('./get_path_type');
const cache = process.env.KIBANA_RESOLVER_HARD_CACHE
? new Map()
: new LRU({ max: 1000 });
function readShimNames(shimDirectory) {
if (!isDirectory(shimDirectory)) {
return [];
}
return readdirSync(shimDirectory)
.filter(name => !name.startsWith('.') && !name.startsWith('_'))
.map(name => (name.endsWith('.js') ? name.slice(0, -3) : name));
}
function findRelativeWebpackShims(directory) {
const cached = cache.get(directory);
if (cached) {
return cached;
}
const ownShims = readShimNames(join(directory, 'webpackShims'));
const parent = dirname(directory);
const parentShims =
parent !== directory ? findRelativeWebpackShims(parent) : [];
const allShims = !ownShims.length
? parentShims
: ownShims.concat(parentShims);
cache.set(directory, allShims);
return allShims;
}
exports.isProbablyWebpackShim = function(source, file) {
const shims = findRelativeWebpackShims(dirname(file));
return shims.some(shim => source === shim || source.startsWith(shim + '/'));
};
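For example, assuming a repo with `/repo/webpackShims/moment.js` (paths illustrative):

```js
const { isProbablyWebpackShim } = require('./is_probably_webpack_shim');

isProbablyWebpackShim('moment', '/repo/src/ui/public/foo.js');           // true
isProbablyWebpackShim('moment/locale/de', '/repo/src/ui/public/foo.js'); // true – prefix match
isProbablyWebpackShim('lodash', '/repo/src/ui/public/foo.js');           // false
```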

View file

@@ -0,0 +1,23 @@
/**
* Attempt to apply basic webpack alias transformations so we can
* avoid triggering the webpack resolver for many imports
*
* @param {string} source
* @param {Array<[alias,path]>} aliasEntries
* @return {string|undefined}
*/
exports.resolveWebpackAlias = function(source, aliasEntries) {
for (const [alias, path] of aliasEntries) {
if (source === alias) {
return path;
}
if (alias.endsWith('$')) {
if (source === alias.slice(0, -1)) {
return path;
}
} else if (source.startsWith(alias + '/')) {
return path + '/' + source.slice(alias.length + 1);
}
}
};
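A hedged usage sketch (alias paths are illustrative):

```js
const { resolveWebpackAlias } = require('./resolve_webpack_alias');

const aliasEntries = Object.entries({
  ui: '/repo/src/ui/public',
  moment$: '/repo/webpackShims/moment',
});

resolveWebpackAlias('ui/chrome', aliasEntries); // '/repo/src/ui/public/chrome'
resolveWebpackAlias('moment', aliasEntries);    // '/repo/webpackShims/moment'
resolveWebpackAlias('moment/fp', aliasEntries); // undefined – `$` aliases match exactly
resolveWebpackAlias('lodash', aliasEntries);    // undefined – no alias applies
```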

View file

@@ -3,7 +3,7 @@
"description": "eslint-plugin-import resolver for Kibana",
"private": true,
"version": "2.0.0",
"main": "index.js",
"main": "import_resolver_kibana.js",
"license": "Apache-2.0",
"repository": {
"type": "git",
@@ -14,6 +14,8 @@
"eslint-import-resolver-node": "^0.3.0",
"eslint-import-resolver-webpack": "^0.8.1",
"glob-all": "^3.1.0",
"lru-cache": "^4.1.2",
"resolve": "^1.7.1",
"webpack": "3.6.0"
}
}

View file

@@ -1397,6 +1397,13 @@ lru-cache@^4.0.1:
pseudomap "^1.0.2"
yallist "^2.1.2"
lru-cache@^4.1.2:
version "4.1.2"
resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.2.tgz#45234b2e6e2f2b33da125624c4664929a0224c3f"
dependencies:
pseudomap "^1.0.2"
yallist "^2.1.2"
map-cache@^0.2.2:
version "0.2.2"
resolved "https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf"
@@ -1946,9 +1953,9 @@ resolve-url@^0.2.1:
version "0.2.1"
resolved "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a"
resolve@^1.2.0, resolve@^1.5.0:
version "1.7.0"
resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.7.0.tgz#2bdf5374811207285df0df652b78f118ab8f3c5e"
resolve@^1.2.0, resolve@^1.5.0, resolve@^1.7.1:
version "1.7.1"
resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.7.1.tgz#aadd656374fd298aee895bc026b8297418677fd3"
dependencies:
path-parse "^1.0.5"
@@ -2250,8 +2257,8 @@ tar@^2.2.1:
inherits "2"
timers-browserify@^2.0.4:
version "2.0.6"
resolved "https://registry.yarnpkg.com/timers-browserify/-/timers-browserify-2.0.6.tgz#241e76927d9ca05f4d959819022f5b3664b64bae"
version "2.0.7"
resolved "https://registry.yarnpkg.com/timers-browserify/-/timers-browserify-2.0.7.tgz#e74093629cb62c20332af587ddc0c86b4ba97a05"
dependencies:
setimmediate "^1.0.4"

View file

@@ -0,0 +1,6 @@
module.exports = {
rules: {
'require-license-header': require('./rules/require_license_header'),
'remove-outdated-license-header': require('./rules/remove_outdated_license_header'),
},
};

View file

@@ -0,0 +1,28 @@
exports.assert = function assert(truth, message) {
if (truth) {
return;
}
const error = new Error(message);
error.failedAssertion = true;
throw error;
};
exports.normalizeWhitespace = function normalizeWhitespace(string) {
return string.replace(/\s+/g, ' ');
};
exports.init = function (context, program, initStep) {
try {
return initStep();
} catch (error) {
if (error.failedAssertion) {
context.report({
node: program,
message: error.message
});
} else {
throw error;
}
}
};

View file

@@ -0,0 +1,13 @@
{
"name": "@kbn/eslint-plugin-license-header",
"version": "1.0.0",
"private": true,
"license": "Apache-2.0",
"peerDependencies": {
"eslint": ">=4.0.0",
"babel-eslint": "^8.2.1"
},
"dependencies": {
"dedent": "^0.7.0"
}
}

View file

@@ -0,0 +1,136 @@
const { RuleTester } = require('eslint');
const rule = require('../remove_outdated_license_header');
const dedent = require('dedent');
const RULE_NAME = '@kbn/license-header/remove-outdated-license-header';
const ruleTester = new RuleTester({
parser: 'babel-eslint',
parserOptions: {
ecmaVersion: 2015
}
});
ruleTester.run(RULE_NAME, rule, {
valid: [
{
code: dedent`
/* license */
console.log('foo')
`,
options: [{
licenses: [
'// license'
]
}],
},
{
code: dedent`
// license
console.log('foo')
`,
options: [{
licenses: [
'/* license */',
]
}],
}
],
invalid: [
// missing license option
{
code: dedent`
console.log('foo')
`,
options: [],
errors: [
{
message: '"licenses" option is required',
}
]
},
// license cannot contain multiple block comments
{
code: dedent`
console.log('foo')
`,
options: [{
licenses: [
'/* one *//* two */'
]
}],
errors: [
{
message: '"licenses[0]" option must only include a single comment',
}
]
},
// license cannot contain multiple line comments
{
code: dedent`
console.log('foo')
`,
options: [{
licenses: [
`// one\n// two`
]
}],
errors: [
{
message: '"licenses[0]" option must only include a single comment',
}
]
},
// license cannot contain expressions
{
code: dedent`
console.log('foo')
`,
options: [{
licenses: [
'// old license',
dedent`
/* license */
console.log('hello world');
`
]
}],
errors: [
{
message: '"licenses[1]" option must only include a single comment',
}
]
},
// license is not a single comment
{
code: dedent`
console.log('foo')
`,
options: [{
licenses: [
'// old license',
'// older license',
`console.log('hello world');`
]
}],
errors: [
{
message: '"licenses[2]" option must only include a single comment',
}
]
},
]
});

View file

@@ -0,0 +1,185 @@
const { RuleTester } = require('eslint');
const rule = require('../require_license_header');
const dedent = require('dedent');
const RULE_NAME = '@kbn/license-header/require-license-header';
const ruleTester = new RuleTester({
parser: 'babel-eslint',
parserOptions: {
ecmaVersion: 2015
}
});
ruleTester.run(RULE_NAME, rule, {
valid: [
{
code: dedent`
/* license */
console.log('foo')
`,
options: [{ license: '/* license */' }],
},
{
code: dedent`
// license
console.log('foo')
`,
options: [{ license: '// license' }],
}
],
invalid: [
// missing license option
{
code: dedent`
console.log('foo')
`,
options: [],
errors: [
{
message: '"license" option is required',
}
]
},
// content cannot contain multiple block comments
{
code: dedent`
console.log('foo')
`,
options: [
{ license: '/* one *//* two */' }
],
errors: [
{
message: '"license" option must only include a single comment',
}
]
},
// content cannot contain multiple line comments
{
code: dedent`
console.log('foo')
`,
options: [
{ license: `// one\n// two` }
],
errors: [
{
message: '"license" option must only include a single comment',
}
]
},
// content cannot contain expressions
{
code: dedent`
console.log('foo')
`,
options: [
{
license: dedent`
/* license */
console.log('hello world');
`
}
],
errors: [
{
message: '"license" option must only include a single comment',
}
]
},
// content is not a single comment
{
code: dedent`
console.log('foo')
`,
options: [
{ license: `console.log('hello world');` }
],
errors: [
{
message: '"license" option must only include a single comment',
}
]
},
// missing license header
{
code: dedent`
console.log('foo')
`,
options: [{ license: '/* license */' }],
errors: [
{
message: 'File must start with a license header',
}
],
output: dedent`
/* license */
console.log('foo')
`
},
// strips newlines before the license comment
{
code: '\n\n' + dedent`
/* license */
console.log('foo')
`,
options: [{ license: '/* license */' }],
errors: [
{
message: 'License header must be at the very beginning of the file',
}
],
output: dedent`
/* license */
console.log('foo')
`
},
// moves license header before other nodes if necessary
{
code: dedent`
/* not license */
/* license */
console.log('foo')
`,
options: [{ license: '/* license */' }],
errors: [
{
message: 'License header must be at the very beginning of the file',
}
],
output: dedent`
/* license */
/* not license */
console.log('foo')
`
},
]
});

View file

@@ -0,0 +1,63 @@
const babelEslint = require('babel-eslint');
const { assert, normalizeWhitespace, init } = require('../lib');
module.exports = {
meta: {
fixable: 'code',
schema: [{
type: 'object',
properties: {
licenses: {
type: 'array',
items: {
type: 'string'
}
},
},
additionalProperties: false,
}]
},
create: context => {
return {
Program(program) {
const nodeValues = init(context, program, () => {
const options = context.options[0] || {};
const licenses = options.licenses;
assert(!!licenses, '"licenses" option is required');
return licenses.map((license, i) => {
const parsed = babelEslint.parse(license);
assert(!parsed.body.length, `"licenses[${i}]" option must only include a single comment`);
assert(parsed.comments.length === 1, `"licenses[${i}]" option must only include a single comment`);
return normalizeWhitespace(parsed.comments[0].value);
});
});
if (!nodeValues) return;
const sourceCode = context.getSourceCode();
sourceCode
.getAllComments()
.filter(node => (
nodeValues.find(nodeValue => (
normalizeWhitespace(node.value) === nodeValue
))
))
.forEach(node => {
context.report({
node,
message: 'Remove outdated license header.',
fix(fixer) {
return fixer.remove(node);
}
});
});
},
};
}
};

View file

@@ -0,0 +1,85 @@
const babelEslint = require('babel-eslint');
const { assert, normalizeWhitespace, init } = require('../lib');
module.exports = {
meta: {
fixable: 'code',
schema: [{
type: 'object',
properties: {
license: {
type: 'string',
},
},
additionalProperties: false,
}]
},
create: context => {
return {
Program(program) {
const license = init(context, program, function () {
const options = context.options[0] || {};
const license = options.license;
assert(!!license, '"license" option is required');
const parsed = babelEslint.parse(license);
assert(!parsed.body.length, '"license" option must only include a single comment');
assert(parsed.comments.length === 1, '"license" option must only include a single comment');
return {
source: license,
nodeValue: normalizeWhitespace(parsed.comments[0].value)
};
});
if (!license) {
return;
}
const sourceCode = context.getSourceCode();
const comment = sourceCode.getAllComments().find(node => (
normalizeWhitespace(node.value) === license.nodeValue
));
// no license comment
if (!comment) {
context.report({
message: 'File must start with a license header',
loc: {
start: { line: 1, column: 0 },
end: { line: 1, column: sourceCode.lines[0].length - 1 }
},
fix(fixer) {
return fixer.replaceTextRange([0, 0], license.source + '\n\n');
}
});
return;
}
// ensure there is nothing before the comment
const sourceBeforeNode = sourceCode.getText().slice(0, sourceCode.getIndexFromLoc(comment.loc.start));
if (sourceBeforeNode.length) {
context.report({
node: comment,
message: 'License header must be at the very beginning of the file',
fix(fixer) {
// replace leading whitespace if possible
if (sourceBeforeNode.trim() === '') {
return fixer.replaceTextRange([0, sourceBeforeNode.length], '');
}
// inject content at top and remove node from current location
// if removing whitespace is not possible
return [
fixer.remove(comment),
fixer.replaceTextRange([0, 0], license.source + '\n\n'),
];
}
});
}
},
};
}
};
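Wiring these rules up in an ESLint config might look like the following (a hypothetical sketch; the rule IDs match the tests above, the license strings are placeholders):

```js
// Hypothetical .eslintrc.js fragment
module.exports = {
  plugins: ['@kbn/eslint-plugin-license-header'],
  rules: {
    '@kbn/license-header/require-license-header': ['error', {
      license: '/* Licensed under the Apache License, Version 2.0 */',
    }],
    '@kbn/license-header/remove-outdated-license-header': ['error', {
      licenses: ['/* the old license header */'],
    }],
  },
};
```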

View file

@@ -0,0 +1,7 @@
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1
dedent@^0.7.0:
version "0.7.0"
resolved "https://registry.yarnpkg.com/dedent/-/dedent-0.7.0.tgz#2495ddbaf6eb874abb0e1be9df22d2e5a544326c"

View file

@@ -17,7 +17,9 @@ module.exports = function (root) {
return Object.assign({
root: root,
kibanaRoot: resolve(root, '../kibana'),
kibanaRoot: pkg.name === 'x-pack'
? resolve(root, '..')
: resolve(root, '../kibana'),
serverTestPatterns: ['server/**/__tests__/**/*.js'],
buildSourcePatterns: buildSourcePatterns,
skipInstallDependencies: false,

View file

@@ -36109,7 +36109,12 @@ var _path = __webpack_require__(3);
*/
function getProjectPaths(rootPath, options) {
const skipKibanaExtra = Boolean(options['skip-kibana-extra']);
const ossOnly = Boolean(options.oss);
const projectPaths = [rootPath, (0, _path.resolve)(rootPath, 'packages/*')];
if (!ossOnly) {
projectPaths.push((0, _path.resolve)(rootPath, 'x-pack'));
projectPaths.push((0, _path.resolve)(rootPath, 'x-pack/plugins/*'));
}
if (!skipKibanaExtra) {
projectPaths.push((0, _path.resolve)(rootPath, '../kibana-extra/*'));
projectPaths.push((0, _path.resolve)(rootPath, '../kibana-extra/*/packages/*'));
@@ -36325,6 +36330,7 @@ function help() {
-e, --exclude Exclude specified project. Can be specified multiple times to exclude multiple projects, e.g. '-e kibana -e @kbn/pm'.
-i, --include Include only specified projects. If left unspecified, it defaults to including all projects.
--oss Do not include the x-pack when running command.
--skip-kibana-extra Filter all plugins in ../kibana-extra when running command.
`);
}

View file

@@ -25,6 +25,7 @@ function help() {
-e, --exclude Exclude specified project. Can be specified multiple times to exclude multiple projects, e.g. '-e kibana -e @kbn/pm'.
-i, --include Include only specified projects. If left unspecified, it defaults to including all projects.
--oss Do not include the x-pack when running command.
--skip-kibana-extra Filter all plugins in ../kibana-extra when running command.
`);
}

View file

@@ -2,6 +2,7 @@ import { resolve } from 'path';
export type ProjectPathOptions = {
'skip-kibana-extra'?: boolean;
'oss'?: boolean;
};
/**
@@ -9,9 +10,15 @@ export type ProjectPathOptions = {
*/
export function getProjectPaths(rootPath: string, options: ProjectPathOptions) {
const skipKibanaExtra = Boolean(options['skip-kibana-extra']);
const ossOnly = Boolean(options.oss);
const projectPaths = [rootPath, resolve(rootPath, 'packages/*')];
if (!ossOnly) {
projectPaths.push(resolve(rootPath, 'x-pack'));
projectPaths.push(resolve(rootPath, 'x-pack/plugins/*'));
}
if (!skipKibanaExtra) {
projectPaths.push(resolve(rootPath, '../kibana-extra/*'));
projectPaths.push(resolve(rootPath, '../kibana-extra/*/packages/*'));
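Illustrative results, assuming `rootPath = '/dev/kibana'` (abbreviated; the hunk above is truncated):

```js
getProjectPaths('/dev/kibana', {});
// => ['/dev/kibana', '/dev/kibana/packages/*',
//     '/dev/kibana/x-pack', '/dev/kibana/x-pack/plugins/*',
//     '/dev/kibana-extra/*', '/dev/kibana-extra/*/packages/*']

getProjectPaths('/dev/kibana', { oss: true, 'skip-kibana-extra': true });
// => ['/dev/kibana', '/dev/kibana/packages/*']
```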

View file

@@ -1,7 +1,7 @@
const isUsingNpm = process.env.npm_config_git !== undefined;
if (isUsingNpm) {
throw "Use Yarn instead of npm, see Kibana's contributing guidelines";
throw `Use Yarn instead of npm, see Kibana's contributing guidelines`;
}
// The value of the `npm_config_argv` env for each command:
@@ -29,4 +29,3 @@ try {
} catch (e) {
// if it fails we do nothing, as this is just intended to be a helpful message
}

scripts/build.js Normal file
View file

@@ -0,0 +1,2 @@
require('../src/babel-register');
require('../src/dev/build/cli');

View file

@@ -4,6 +4,8 @@ const kbnEs = require('@kbn/es');
kbnEs
.run({
license: 'trial',
password: 'changeme',
version: pkg.version,
'source-path': path.resolve(__dirname, '../../elasticsearch'),
'base-path': path.resolve(__dirname, '../.es'),

View file

@@ -8,8 +8,19 @@ if (!process.env.BABEL_CACHE_PATH) {
// paths that babel-register should ignore
const ignore = [
/[\\\/](node_modules|bower_components)[\\\/]/,
/[\\\/](kbn-pm\/dist)[\\\/]/
/\/bower_components\//,
/\/kbn-pm\/dist\//,
// TODO: remove this and just transpile plugins at build time, but
// has tricky edge cases that will probably require better eslint
// restrictions to make sure that code destined for the server/browser
// respects the limitations of each environment.
//
// https://github.com/elastic/kibana/issues/14800#issuecomment-366130268
// ignore paths matching `/node_modules/{a}/{b}`, unless `a`
// is `x-pack` and `b` is not `node_modules`
/\/node_modules\/(?!x-pack\/(?!node_modules)([^\/]+))([^\/]+\/[^\/]+)/
];
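The final pattern above is subtle; a few illustrative matches (`true` means babel-register ignores the path rather than transpiling it):

```js
const RE = /\/node_modules\/(?!x-pack\/(?!node_modules)([^\/]+))([^\/]+\/[^\/]+)/;

RE.test('/repo/node_modules/lodash/index.js');            // true  – ignored
RE.test('/repo/node_modules/x-pack/plugins/foo.js');      // false – transpiled
RE.test('/repo/node_modules/x-pack/node_modules/a/b.js'); // true  – ignored
```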
if (global.__BUILT_WITH_BABEL__) {

View file

@@ -102,6 +102,10 @@ export default class ClusterManager {
fromRoot('src/server'),
fromRoot('src/ui'),
fromRoot('src/utils'),
fromRoot('x-pack/common'),
fromRoot('x-pack/plugins'),
fromRoot('x-pack/server'),
fromRoot('x-pack/webpackShims'),
fromRoot('config'),
...extraPaths
].map(path => resolve(path));
@@ -109,7 +113,8 @@
this.watcher = chokidar.watch(uniq(watchPaths), {
cwd: fromRoot('.'),
ignored: [
/[\\\/](\..*|node_modules|bower_components|public|__tests__|coverage)[\\\/]/,
/[\\\/](\..*|node_modules|bower_components|public|__[a-z0-9_]+__|coverage)[\\\/]/,
/\.test\.js$/,
...extraIgnores
]
});

View file

@@ -1,4 +1,4 @@
import { join, relative, resolve } from 'path';
import { relative, resolve } from 'path';
import { readYamlConfig } from './read_yaml_config';
function fixture(name) {
@@ -49,15 +49,14 @@ describe('cli/serve/read_yaml_config', function () {
});
describe('different cwd()', function () {
const oldCwd = process.cwd();
const newCwd = join(oldCwd, '..');
const originalCwd = process.cwd();
const tempCwd = resolve(__dirname);
beforeAll(function () {
process.chdir(newCwd);
});
beforeAll(() => process.chdir(tempCwd));
afterAll(() => process.chdir(originalCwd));
it('resolves relative files based on the cwd', function () {
const relativePath = relative(newCwd, fixture('one.yml'));
const relativePath = relative(tempCwd, fixture('one.yml'));
const config = readYamlConfig(relativePath);
expect(config).toEqual({
foo: 1,
@@ -67,12 +66,13 @@ describe('cli/serve/read_yaml_config', function () {
it('fails to load relative paths, not found because of the cwd', function () {
expect(function () {
readYamlConfig(relative(oldCwd, fixture('one.yml')));
}).toThrowError(/ENOENT/);
});
const relativePath = relative(
resolve(__dirname, '../../'),
fixture('one.yml')
);
afterAll(function () {
process.chdir(oldCwd);
readYamlConfig(relativePath);
}).toThrowError(/ENOENT/);
});
});

View file

@@ -1,5 +1,5 @@
import _ from 'lodash';
import { statSync } from 'fs';
import { statSync, lstatSync, realpathSync } from 'fs';
import { isWorker } from 'cluster';
import { resolve } from 'path';
@@ -10,14 +10,40 @@ import { readKeystore } from './read_keystore';
import { DEV_SSL_CERT_PATH, DEV_SSL_KEY_PATH } from '../dev_ssl';
let canCluster;
try {
require.resolve('../cluster/cluster_manager');
canCluster = true;
} catch (e) {
canCluster = false;
function canRequire(path) {
try {
require.resolve(path);
return true;
} catch (error) {
if (error.code === 'MODULE_NOT_FOUND') {
return false;
} else {
throw error;
}
}
}
function isSymlinkTo(link, dest) {
try {
const stat = lstatSync(link);
return stat.isSymbolicLink() && realpathSync(link) === dest;
} catch (error) {
if (error.code !== 'ENOENT') {
throw error;
}
}
}
const CLUSTER_MANAGER_PATH = resolve(__dirname, '../cluster/cluster_manager');
const CAN_CLUSTER = canRequire(CLUSTER_MANAGER_PATH);
// x-pack is installed in both dev and the distributable; it's optional when the
// install is a symlink to the source rather than an actual install
const XPACK_INSTALLED_DIR = resolve(__dirname, '../../../node_modules/x-pack');
const XPACK_SOURCE_DIR = resolve(__dirname, '../../../x-pack');
const XPACK_INSTALLED = canRequire(XPACK_INSTALLED_DIR);
const XPACK_OPTIONAL = isSymlinkTo(XPACK_INSTALLED_DIR, XPACK_SOURCE_DIR);
const pathCollector = function () {
const paths = [];
return function (path) {
@@ -41,6 +67,14 @@ function readServerSettings(opts, extraCliOptions) {
set('env', 'development');
set('optimize.watch', true);
if (!has('elasticsearch.username')) {
set('elasticsearch.username', 'elastic');
}
if (!has('elasticsearch.password')) {
set('elasticsearch.password', 'changeme');
}
if (opts.ssl) {
set('server.ssl.enabled', true);
}
@@ -66,7 +100,11 @@ function readServerSettings(opts, extraCliOptions) {
set('plugins.paths', _.compact([].concat(
get('plugins.paths'),
opts.pluginPath
opts.pluginPath,
XPACK_INSTALLED && (!XPACK_OPTIONAL || !opts.oss)
? [XPACK_INSTALLED_DIR]
: [],
)));
merge(readKeystore());
@@ -98,7 +136,7 @@ export default function (program) {
.option(
'--plugin-dir <path>',
'A path to scan for plugins, this can be specified multiple ' +
'times to specify multiple directories',
'times to specify multiple directories',
pluginDirCollector,
[
fromRoot('plugins'),
@@ -114,7 +152,12 @@
)
.option('--plugins <path>', 'an alias for --plugin-dir', pluginDirCollector);
if (canCluster) {
if (XPACK_OPTIONAL) {
command
.option('--oss', 'Start Kibana without X-Pack');
}
if (CAN_CLUSTER) {
command
.option('--dev', 'Run the server with development mode defaults')
.option('--ssl', 'Run the dev server using HTTPS')
@@ -138,9 +181,9 @@
const getCurrentSettings = () => readServerSettings(opts, this.getUnknownOptions());
const settings = getCurrentSettings();
if (canCluster && opts.dev && !isWorker) {
// stop processing the action and handoff to cluster manager
const ClusterManager = require('../cluster/cluster_manager');
if (CAN_CLUSTER && opts.dev && !isWorker) {
// stop processing the action and handoff to cluster manager
const ClusterManager = require(CLUSTER_MANAGER_PATH);
new ClusterManager(opts, settings);
return;
}

View file

@@ -5,6 +5,7 @@ import { cleanPrevious, cleanArtifacts } from './cleanup';
import { extract, getPackData } from './pack';
import { renamePlugin } from './rename';
import { sync as rimrafSync } from 'rimraf';
import { errorIfXPackInstall } from '../lib/error_if_x_pack';
import { existingInstall, rebuildCache, assertVersion } from './kibana';
import { prepareExternalProjectDependencies } from '@kbn/pm';
import mkdirp from 'mkdirp';
@@ -13,6 +14,8 @@ const mkdir = Promise.promisify(mkdirp);
export default async function install(settings, logger) {
try {
errorIfXPackInstall(settings, logger);
await cleanPrevious(settings, logger);
await mkdir(settings.workingPath);

View file

@@ -0,0 +1,53 @@
import expect from 'expect.js';
import sinon from 'sinon';
import { errorIfXPackInstall, errorIfXPackRemove } from '../error_if_x_pack';
describe('error_if_xpack', () => {
const logger = {
error: sinon.stub()
};
beforeEach(() => {
sinon.stub(process, 'exit');
});
it('should exit on install if x-pack by name', () => {
errorIfXPackInstall({
plugin: 'x-pack'
}, logger);
expect(process.exit.called).to.be(true);
});
it('should exit on install if x-pack by url', () => {
errorIfXPackInstall({
plugin: ' http://localhost/x-pack/x-pack-7.0.0-alpha1-SNAPSHOT.zip'
}, logger);
expect(process.exit.called).to.be(true);
});
it('should not exit on install if not x-pack', () => {
errorIfXPackInstall({
plugin: 'foo'
}, logger);
expect(process.exit.called).to.be(false);
});
it('should exit on remove if x-pack', () => {
errorIfXPackRemove({
plugin: 'x-pack'
}, logger);
expect(process.exit.called).to.be(true);
});
it('should not exit on remove if not x-pack', () => {
errorIfXPackRemove({
plugin: 'bar'
}, logger);
expect(process.exit.called).to.be(false);
});
afterEach(() => {
process.exit.restore();
});
});

View file

@@ -0,0 +1,11 @@
import expect from 'expect.js';
import { isOSS } from '../is_oss';
describe('is_oss', () => {
describe('x-pack installed', () => {
it('should return false', () => {
expect(isOSS()).to.be(false);
});
});
});

View file

@@ -0,0 +1,31 @@
import { isOSS } from './is_oss';
function isXPack(plugin) {
return /x-pack/.test(plugin);
}
export function errorIfXPackInstall(settings, logger) {
if (isXPack(settings.plugin)) {
if (isOSS()) {
logger.error(
'You are using the OSS-only distribution of Kibana. ' +
'As of version 6.3+ X-Pack is bundled in the standard distribution of this software by default; ' +
'consequently it is no longer available as a plugin. Please use the standard distribution of Kibana to use X-Pack features.'
);
} else {
logger.error(
'Kibana now contains X-Pack by default; there is no longer any need to install it, as it is already present.'
);
}
process.exit(1);
}
}
export function errorIfXPackRemove(settings, logger) {
if (isXPack(settings.plugin) && !isOSS()) {
logger.error(
'You are using the standard distribution of Kibana. Please install the OSS-only distribution to remove X-Pack features.'
);
process.exit(1);
}
}

View file

@@ -0,0 +1,12 @@
export function isOSS() {
try {
require.resolve('x-pack');
return false;
} catch (error) {
if (error.code !== 'MODULE_NOT_FOUND') {
throw error;
}
return true;
}
}

View file

@@ -1,8 +1,12 @@
import { statSync } from 'fs';
import { errorIfXPackRemove } from '../lib/error_if_x_pack';
import rimraf from 'rimraf';
export default function remove(settings, logger) {
try {
errorIfXPackRemove(settings, logger);
let stat;
try {
stat = statSync(settings.pluginPath);

View file

@@ -1,8 +1,5 @@
---
root: true
extends: '../../../../.eslintrc.js'
rules:
block-scoped-var: off
camelcase: off

src/dev/build/README.md Normal file
View file

@@ -0,0 +1,41 @@
# dev/build
Build the default and OSS distributables of Kibana.
# Quick Start
```sh
# checkout the help for this script
node scripts/build --help
# build a release version
node scripts/build --release
# reuse already downloaded node executables, turn on debug logging, and only build the default distributable
node scripts/build --skip-node-download --debug --no-oss
```
# Structure
The majority of this logic is extracted from the grunt build that has existed forever, and is designed to maintain the general structure grunt provides, including tasks and config. The [build_distributables.js] file defines which tasks are run.
**Task**: [tasks/\*] define individual parts of the build. Each task is an object with a `run()` method, a `description` property, and optionally a `global` property, and is executed by the runner either once (if it is global) or once for each build. Non-global/local tasks therefore run twice by default, once for the OSS build and once for the default build, and receive a build object as the third argument to `run()` which can be used to determine paths and properties for that build (see the sketch after these definitions).
**Config**: [lib/config.js] defines the config used to execute tasks. It is mostly used to determine absolute paths to specific locations, and to get access to the Platforms.
**Platform**: [lib/platform.js] defines the Platform objects, which define the different platforms we build for. Use `config.getPlatforms()` to get the list of platforms, or `config.getLinux/Windows/MacPlatform()` to get a specific platform.
**Log**: We use the `ToolingLog` defined in [../tooling_log/tooling_log.js].
**Runner**: [lib/runner.js] defines the runner used to execute tasks. It calls tasks with specific arguments based on whether they are global or not.
**Build**: [lib/build.js], created by the runner and passed to tasks so they can resolve paths and get information about the build they are operating on.
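For orientation, a hedged sketch of what a local (non-global) task module can look like; the name and body are illustrative, not taken from this commit:

```js
export const CopyChangelogTask = {
  description: 'Copying CHANGELOG.txt into the build',

  async run(config, log, build) {
    // local tasks receive the build as a third argument and can
    // resolve paths inside it, e.g. build.resolvePath('CHANGELOG.txt')
    log.info('copying changelog into', build.getName());
  },
};
```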
[tasks/\*]: ./tasks
[lib/config.js]: ./lib/config.js
[lib/platform.js]: ./lib/platform.js
[lib/runner.js]: ./lib/runner.js
[lib/build.js]: ./lib/build.js
[build_distributables.js]: ./build_distributables.js
[../tooling_log/tooling_log.js]: ../tooling_log/tooling_log.js

View file

@@ -0,0 +1,115 @@
import { getConfig, createRunner } from './lib';
import {
BootstrapTask,
BuildPackagesTask,
CleanExtraBinScriptsTask,
CleanExtraFilesFromModulesTask,
CleanPackagesTask,
CleanTask,
CopySourceTask,
CreateArchivesSourcesTask,
CreateArchivesTask,
CreateDebPackageTask,
CreateEmptyDirsAndFilesTask,
CreateNoticeFileTask,
CreatePackageJsonTask,
CreateReadmeTask,
CreateRpmPackageTask,
DownloadNodeBuildsTask,
ExtractNodeBuildsTask,
InstallDependenciesTask,
OptimizeBuildTask,
RemovePackageJsonDepsTask,
TranspileSourceTask,
UpdateLicenseFileTask,
VerifyEnvTask,
VerifyExistingNodeBuildsTask,
WriteShaSumsTask,
} from './tasks';
export async function buildDistributables(options) {
const {
log,
isRelease,
buildOssDist,
buildDefaultDist,
downloadFreshNode,
createArchives,
createRpmPackage,
createDebPackage,
} = options;
log.verbose('building distributables with options:', {
isRelease,
buildOssDist,
buildDefaultDist,
downloadFreshNode,
createArchives,
createRpmPackage,
createDebPackage,
});
const config = await getConfig({
isRelease,
});
const run = createRunner({
config,
log,
buildOssDist,
buildDefaultDist,
});
/**
* verify, reset, and initialize the build environment
*/
await run(VerifyEnvTask);
await run(CleanTask);
await run(BootstrapTask);
await run(downloadFreshNode ? DownloadNodeBuildsTask : VerifyExistingNodeBuildsTask);
await run(ExtractNodeBuildsTask);
/**
* run platform-generic build tasks
*/
await run(CopySourceTask);
await run(CreateEmptyDirsAndFilesTask);
await run(CreateReadmeTask);
await run(TranspileSourceTask);
await run(BuildPackagesTask);
await run(CreatePackageJsonTask);
await run(InstallDependenciesTask);
await run(CleanPackagesTask);
await run(CreateNoticeFileTask);
await run(UpdateLicenseFileTask);
await run(RemovePackageJsonDepsTask);
await run(CleanExtraFilesFromModulesTask);
await run(OptimizeBuildTask);
/**
* copy generic build outputs into platform-specific build
* directories and perform platform-specific steps
*/
await run(CreateArchivesSourcesTask);
await run(CleanExtraBinScriptsTask);
/**
* package platform-specific builds into archives
* or os-specific packages in the target directory
*/
if (createArchives) { // control w/ --skip-archives
await run(CreateArchivesTask);
}
if (createDebPackage) { // control w/ --deb or --skip-os-packages
await run(CreateDebPackageTask);
}
if (createRpmPackage) { // control w/ --rpm or --skip-os-packages
await run(CreateRpmPackageTask);
}
/**
* finalize artifacts by writing sha1sums of each into the target directory
*/
await run(WriteShaSumsTask);
}

src/dev/build/cli.js Normal file
View file

@@ -0,0 +1,98 @@
import { resolve } from 'path';
import getopts from 'getopts';
import dedent from 'dedent';
import chalk from 'chalk';
import { createToolingLog, pickLevelFromFlags } from '../tooling_log';
import { buildDistributables } from './build_distributables';
import { isErrorLogged } from './lib';
// ensure the cwd() is always the repo root
process.chdir(resolve(__dirname, '../../../'));
const unknownFlags = [];
const flags = getopts(process.argv.slice(0), {
boolean: [
'oss',
'no-oss',
'skip-archives',
'skip-os-packages',
'rpm',
'deb',
'release',
'skip-node-download',
'verbose',
'debug',
],
alias: {
v: 'verbose',
d: 'debug',
},
unknown: (flag) => {
unknownFlags.push(flag);
}
});
if (unknownFlags.length && !flags.help) {
const pluralized = unknownFlags.length > 1 ? 'flags' : 'flag';
console.log(chalk`\n{red Unknown ${pluralized}: ${unknownFlags.join(', ')}}\n`);
flags.help = true;
}
if (flags.help) {
console.log(
dedent(chalk`
{dim usage:} node scripts/build
build the Kibana distributable
options:
--oss {dim Only produce the OSS distributable of Kibana}
--no-oss {dim Only produce the default distributable of Kibana}
--skip-archives {dim Don't produce tar/zip archives}
--skip-os-packages {dim Don't produce rpm/deb packages}
--rpm {dim Only build the rpm package}
--deb {dim Only build the deb package}
--release {dim Produce a release-ready distributable}
--skip-node-download {dim Reuse existing downloads of node.js}
--verbose,-v {dim Turn on verbose logging}
--debug,-d {dim Turn on debug logging}
`) + '\n'
);
process.exit(1);
}
const log = createToolingLog(pickLevelFromFlags(flags));
log.pipe(process.stdout);
function isOsPackageDesired(name) {
if (flags['skip-os-packages']) {
return false;
}
// build all if no flags specified
if (flags.rpm === undefined && flags.deb === undefined) {
return true;
}
return Boolean(flags[name]);
}
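For reference, the flag combinations resolve as follows (illustrative):

```js
// node scripts/build                     → rpm: true,  deb: true   (build all)
// node scripts/build --rpm               → rpm: true,  deb: false
// node scripts/build --deb               → rpm: false, deb: true
// node scripts/build --skip-os-packages  → rpm: false, deb: false
```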
buildDistributables({
log,
isRelease: Boolean(flags.release),
buildOssDist: flags.oss !== false,
buildDefaultDist: !flags.oss,
downloadFreshNode: !Boolean(flags['skip-node-download']),
createArchives: !Boolean(flags['skip-archives']),
createRpmPackage: isOsPackageDesired('rpm'),
createDebPackage: isOsPackageDesired('deb'),
}).catch(error => {
if (!isErrorLogged(error)) {
log.error('Uncaught error');
log.error(error);
}
process.exit(1);
});

View file

@@ -0,0 +1,134 @@
import expect from 'expect.js';
import sinon from 'sinon';
import { createBuild } from '../build';
describe('dev/build/lib/build', () => {
describe('Build instance', () => {
describe('#isOss()', () => {
it('returns true if passed oss: true', () => {
const build = createBuild({
oss: true
});
expect(build.isOss()).to.be(true);
});
it('returns false if passed oss: false', () => {
const build = createBuild({
oss: false
});
expect(build.isOss()).to.be(false);
});
});
describe('#getName()', () => {
it('returns kibana when oss: false', () => {
const build = createBuild({
oss: false
});
expect(build.getName()).to.be('kibana');
});
it('returns kibana-oss when oss: true', () => {
const build = createBuild({
oss: true
});
expect(build.getName()).to.be('kibana-oss');
});
});
describe('#getLogTag()', () => {
it('returns string with build name in it', () => {
const build = createBuild({});
expect(build.getLogTag()).to.contain(build.getName());
});
});
describe('#resolvePath()', () => {
it('uses passed config to resolve a path relative to the build', () => {
const resolveFromRepo = sinon.stub();
const build = createBuild({
config: { resolveFromRepo }
});
build.resolvePath('bar');
sinon.assert.calledWithExactly(resolveFromRepo, 'build', 'kibana', 'bar');
});
it('passes all arguments to config.resolveFromRepo()', () => {
const resolveFromRepo = sinon.stub();
const build = createBuild({
config: { resolveFromRepo }
});
build.resolvePath('bar', 'baz', 'box');
sinon.assert.calledWithExactly(resolveFromRepo, 'build', 'kibana', 'bar', 'baz', 'box');
});
});
describe('#resolvePathForPlatform()', () => {
it('uses config.resolveFromRepo(), config.getBuildVersion(), and platform.getBuildName() to create path', () => {
const resolveFromRepo = sinon.stub();
const getBuildVersion = sinon.stub().returns('buildVersion');
const build = createBuild({
oss: true,
config: { resolveFromRepo, getBuildVersion }
});
const getBuildName = sinon.stub().returns('platformName');
const platform = {
getBuildName,
};
build.resolvePathForPlatform(platform, 'foo', 'bar');
sinon.assert.calledWithExactly(getBuildName);
sinon.assert.calledWithExactly(getBuildVersion);
sinon.assert.calledWithExactly(resolveFromRepo, 'build', 'oss', `kibana-buildVersion-platformName`, 'foo', 'bar');
});
});
describe('#getPlatformArchivePath()', () => {
const sandbox = sinon.sandbox.create();
const config = {
resolveFromRepo: sandbox.stub(),
getBuildVersion: sandbox.stub().returns('buildVersion')
};
const build = createBuild({
oss: false,
config
});
const platform = {
getBuildName: sandbox.stub().returns('platformName'),
isWindows: sandbox.stub().returns(false),
};
beforeEach(() => {
sandbox.reset();
});
it('uses config.resolveFromRepo(), config.getBuildVersion, and platform.getBuildName() to create path', () => {
build.getPlatformArchivePath(platform);
sinon.assert.calledWithExactly(platform.getBuildName);
sinon.assert.calledWithExactly(platform.isWindows);
sinon.assert.calledWithExactly(config.getBuildVersion);
sinon.assert.calledWithExactly(config.resolveFromRepo, 'target', `kibana-buildVersion-platformName.tar.gz`);
});
it('creates .zip path if platform is windows', () => {
platform.isWindows.returns(true);
build.getPlatformArchivePath(platform);
sinon.assert.calledWithExactly(platform.getBuildName);
sinon.assert.calledWithExactly(platform.isWindows);
sinon.assert.calledWithExactly(config.getBuildVersion);
sinon.assert.calledWithExactly(config.resolveFromRepo, 'target', `kibana-buildVersion-platformName.zip`);
});
});
});
});

View file

@@ -0,0 +1,113 @@
import { resolve } from 'path';
import expect from 'expect.js';
import pkg from '../../../../../package.json';
import { getConfig } from '../config';
import { getVersionInfo } from '../version_info';
describe('dev/build/lib/config', () => {
let config;
let buildInfo;
before(async () => {
const isRelease = Boolean(Math.round(Math.random()));
config = await getConfig({
isRelease,
});
buildInfo = await getVersionInfo({
isRelease,
pkg
});
});
after(() => {
config = null;
});
describe('#getKibanaPkg()', () => {
it('returns the parsed package.json from the Kibana repo', () => {
expect(config.getKibanaPkg()).to.eql(pkg);
});
});
describe('#getNodeVersion()', () => {
it('returns the node version from the kibana package.json', () => {
expect(config.getNodeVersion()).to.eql(pkg.engines.node);
});
});
describe('#getRepoRelativePath()', () => {
it('converts an absolute path to relative path, from the root of the repo', () => {
expect(config.getRepoRelativePath(__dirname)).to.match(/^src[\/\\]dev[\/\\]build/);
});
});
describe('#resolveFromRepo()', () => {
it('resolves a relative path', () => {
expect(config.resolveFromRepo('src/dev/build/lib/__tests__'))
.to.be(__dirname);
});
it('resolves a series of relative paths', () => {
expect(config.resolveFromRepo('src', 'dev', 'build', 'lib', '__tests__'))
.to.be(__dirname);
});
});
describe('#getPlatforms()', () => {
it('returns an array of platform objects', () => {
const platforms = config.getPlatforms();
expect(platforms).to.be.an('array');
for (const platform of platforms) {
expect(['windows', 'linux', 'darwin']).to.contain(platform.getName());
}
});
});
describe('#getLinuxPlatform()', () => {
it('returns the linux platform', () => {
expect(config.getLinuxPlatform().getName()).to.be('linux');
});
});
describe('#getWindowsPlatform()', () => {
it('returns the windows platform', () => {
expect(config.getWindowsPlatform().getName()).to.be('windows');
});
});
describe('#getMacPlatform()', () => {
it('returns the mac platform', () => {
expect(config.getMacPlatform().getName()).to.be('darwin');
});
});
describe('#getPlatformForThisOs()', () => {
it('returns the platform that matches the OS of this machine', () => {
expect(config.getPlatformForThisOs().getName()).to.be(process.platform);
});
});
describe('#getBuildVersion()', () => {
it('returns the version from the build info', () => {
expect(config.getBuildVersion()).to.be(buildInfo.buildVersion);
});
});
describe('#getBuildNumber()', () => {
it('returns the number from the build info', () => {
expect(config.getBuildNumber()).to.be(buildInfo.buildNumber);
});
});
describe('#getBuildSha()', () => {
it('returns the sha from the build info', () => {
expect(config.getBuildSha()).to.be(buildInfo.buildSha);
});
});
describe('#resolveFromTarget()', () => {
it('resolves a relative path, from the target directory', () => {
expect(config.resolveFromTarget()).to.be(resolve(__dirname, '../../../../../target'));
});
});
});

View file

@@ -0,0 +1,26 @@
import expect from 'expect.js';
import { isErrorLogged, markErrorLogged } from '../errors';
describe('dev/build/lib/errors', () => {
describe('isErrorLogged()/markErrorLogged()', () => {
it('returns true if error has been passed to markErrorLogged()', () => {
const error = new Error();
expect(isErrorLogged(error)).to.be(false);
markErrorLogged(error);
expect(isErrorLogged(error)).to.be(true);
});
describe('isErrorLogged()', () => {
it('handles any value type', () => {
expect(isErrorLogged(null)).to.be(false);
expect(isErrorLogged(undefined)).to.be(false);
expect(isErrorLogged(1)).to.be(false);
expect(isErrorLogged([])).to.be(false);
expect(isErrorLogged({})).to.be(false);
expect(isErrorLogged(/foo/)).to.be(false);
expect(isErrorLogged(new Date())).to.be(false);
});
});
});
});

View file

@@ -0,0 +1,67 @@
import { resolve } from 'path';
import sinon from 'sinon';
import chalk from 'chalk';
import { createToolingLog } from '../../../tooling_log';
import { exec } from '../exec';
describe('dev/build/lib/exec', () => {
// disable colors so logging is easier to test
const chalkWasEnabled = chalk.enabled;
before(() => chalk.enabled = false);
after(() => chalk.enabled = chalkWasEnabled);
const sandbox = sinon.sandbox.create();
afterEach(() => sandbox.reset());
const log = createToolingLog('verbose');
const onLogLine = sandbox.stub();
log.on('data', onLogLine);
it('executes a command, logs the command, and logs the output', async () => {
await exec(log, process.execPath, ['-e', 'console.log("hi")']);
// logs the command before execution
sinon.assert.calledWithExactly(onLogLine, sinon.match(`$ ${process.execPath}`));
// log output of the process
sinon.assert.calledWithExactly(onLogLine, sinon.match(/debg\s+hi/));
});
it('logs using level: option', async () => {
await exec(log, process.execPath, ['-e', 'console.log("hi")'], {
level: 'info'
});
// log output of the process
sinon.assert.calledWithExactly(onLogLine, sinon.match(/info\s+hi/));
});
it('sends the proc SIGKILL if it logs a line matching exitAfter regexp', async function () {
// fixture proc will exit after 10 seconds if sigint not received, but the test won't fail
// unless we see the log line `SIGINT not received`, so we let the test take up to 30 seconds
// for potentially huge delays here and there
this.timeout(30000);
await exec(log, process.execPath, [require.resolve('./fixtures/log_on_sigint')], {
exitAfter: /listening for SIGINT/
});
sinon.assert.calledWithExactly(onLogLine, sinon.match(/listening for SIGINT/));
sinon.assert.neverCalledWith(onLogLine, sinon.match(/SIGINT not received/));
});
it('supports the cwd: option', async () => {
const parentDir = resolve(process.cwd(), '..');
await exec(log, process.execPath, ['-e', 'console.log(process.cwd())'], {
level: 'info',
cwd: parentDir,
});
// log output of the process, checking for \n to ensure cwd() doesn't log
// the subdir that this process is executing in
sinon.assert.calledWithExactly(onLogLine, sinon.match(parentDir + '\n'));
});
});

Binary file not shown.

View file

@@ -0,0 +1 @@
dotfile

View file

@@ -0,0 +1 @@
bar

View file

@@ -0,0 +1 @@
foo

View file

@@ -0,0 +1,4 @@
console.log('listening for SIGINT');
setTimeout(() => {
console.log('SIGINT not received');
}, 10000);

View file

@@ -0,0 +1,326 @@
import { resolve } from 'path';
import { chmodSync, statSync } from 'fs';
import del from 'del';
import expect from 'expect.js';
import { mkdirp, write, read, getChildPaths, copy, copyAll, getFileHash, untar } from '../fs';
const TMP = resolve(__dirname, '__tmp__');
const FIXTURES = resolve(__dirname, 'fixtures');
const FOO_TAR_PATH = resolve(FIXTURES, 'foo_dir.tar.gz');
const BAR_TXT_PATH = resolve(FIXTURES, 'foo_dir/bar.txt');
const WORLD_EXECUTABLE = resolve(FIXTURES, 'bin/world_executable');
// get the mode of a file as a string, like 777 or 644
function getCommonMode(path) {
return statSync(path).mode.toString(8).slice(-3);
}
function assertNonAbsoluteError(error) {
expect(error).to.be.an(Error);
expect(error.message).to.contain('Please use absolute paths');
}
describe('dev/build/lib/fs', () => {
// ensure WORLD_EXECUTABLE is actually executable by all
before(async () => {
chmodSync(WORLD_EXECUTABLE, 0o777);
});
// clean and recreate TMP directory
beforeEach(async () => {
await del(TMP);
await mkdirp(TMP);
});
// cleanup TMP directory
after(async () => {
await del(TMP);
});
describe('mkdirp()', () => {
it('rejects if path is not absolute', async () => {
try {
await mkdirp('foo/bar');
throw new Error('Expected mkdirp() to reject');
} catch (error) {
assertNonAbsoluteError(error);
}
});
it('makes directory and necessary parent directories', async () => {
const destination = resolve(TMP, 'a/b/c/d/e/f/g');
expect(await mkdirp(destination)).to.be(undefined);
expect(statSync(destination).isDirectory()).to.be(true);
});
});
describe('write()', () => {
it('rejects if path is not absolute', async () => {
try {
await write('foo/bar');
throw new Error('Expected write() to reject');
} catch (error) {
assertNonAbsoluteError(error);
}
});
it('writes content to a file with existing parent directory', async () => {
const destination = resolve(TMP, 'a');
expect(await write(destination, 'bar')).to.be(undefined);
expect(await read(destination)).to.be('bar');
});
it('writes content to a file with missing parents', async () => {
const destination = resolve(TMP, 'a/b/c/d/e');
expect(await write(destination, 'bar')).to.be(undefined);
expect(await read(destination)).to.be('bar');
});
});
describe('read()', () => {
it('rejects if path is not absolute', async () => {
try {
await read('foo/bar');
throw new Error('Expected read() to reject');
} catch (error) {
assertNonAbsoluteError(error);
}
});
it('reads file, resolves with result', async () => {
expect(await read(BAR_TXT_PATH)).to.be('bar\n');
});
});
describe('getChildPaths()', () => {
it('rejects if path is not absolute', async () => {
try {
await getChildPaths('foo/bar');
throw new Error('Expected getChildPaths() to reject');
} catch (error) {
assertNonAbsoluteError(error);
}
});
it('resolves with absolute paths to the children of directory', async () => {
const path = resolve(FIXTURES, 'foo_dir');
expect((await getChildPaths(path)).sort()).to.eql([
resolve(FIXTURES, 'foo_dir/.bar'),
BAR_TXT_PATH,
resolve(FIXTURES, 'foo_dir/foo'),
]);
});
it('rejects with ENOENT if path does not exist', async () => {
try {
await getChildPaths(resolve(FIXTURES, 'notrealpath'));
throw new Error('Expected getChildPaths() to reject');
} catch (error) {
expect(error).to.have.property('code', 'ENOENT');
}
});
});
describe('copy()', () => {
it('rejects if source path is not absolute', async () => {
try {
await copy('foo/bar', __dirname);
throw new Error('Expected copy() to reject');
} catch (error) {
assertNonAbsoluteError(error);
}
});
it('rejects if destination path is not absolute', async () => {
try {
await copy(__dirname, 'foo/bar');
throw new Error('Expected copy() to reject');
} catch (error) {
assertNonAbsoluteError(error);
}
});
it('rejects if neither path is absolute', async () => {
try {
await copy('foo/bar', 'foo/bar');
throw new Error('Expected copy() to reject');
} catch (error) {
assertNonAbsoluteError(error);
}
});
it('copies the contents of one file to another', async () => {
const destination = resolve(TMP, 'bar.txt');
await copy(BAR_TXT_PATH, destination);
expect(await read(destination)).to.be('bar\n');
});
it('copies the mode of the source file', async () => {
const destination = resolve(TMP, 'dest.txt');
await copy(WORLD_EXECUTABLE, destination);
expect(getCommonMode(destination)).to.be('777');
});
});
describe('copyAll()', () => {
it('rejects if source path is not absolute', async () => {
try {
await copyAll('foo/bar', __dirname);
throw new Error('Expected copyAll() to reject');
} catch (error) {
assertNonAbsoluteError(error);
}
});
it('rejects if destination path is not absolute', async () => {
try {
await copyAll(__dirname, 'foo/bar');
throw new Error('Expected copyAll() to reject');
} catch (error) {
assertNonAbsoluteError(error);
}
});
it('rejects if neither path is absolute', async () => {
try {
await copyAll('foo/bar', 'foo/bar');
throw new Error('Expected copyAll() to reject');
} catch (error) {
assertNonAbsoluteError(error);
}
});
it('copies files and directories from source to dest, creating dest if necessary, respecting mode', async () => {
const destination = resolve(TMP, 'a/b/c');
await copyAll(FIXTURES, destination);
expect((await getChildPaths(resolve(destination, 'foo_dir'))).sort()).to.eql([
resolve(destination, 'foo_dir/bar.txt'),
resolve(destination, 'foo_dir/foo'),
]);
expect(getCommonMode(resolve(destination, 'bin/world_executable'))).to.be('777');
expect(getCommonMode(resolve(destination, 'foo_dir/bar.txt'))).to.be('644');
});
it('applies select globs if specified, ignores dot files', async () => {
const destination = resolve(TMP, 'a/b/c/d');
await copyAll(FIXTURES, destination, {
select: ['**/*bar*']
});
try {
statSync(resolve(destination, 'bin/world_executable'));
throw new Error('expected bin/world_executable to not be copied');
} catch (error) {
expect(error).to.have.property('code', 'ENOENT');
}
try {
statSync(resolve(destination, 'foo_dir/.bar'));
throw new Error('expected foo_dir/.bar to not be copied');
} catch (error) {
expect(error).to.have.property('code', 'ENOENT');
}
expect(await read(resolve(destination, 'foo_dir/bar.txt'))).to.be('bar\n');
});
it('supports select globs and dot option together', async () => {
const destination = resolve(TMP, 'a/b/c/d');
await copyAll(FIXTURES, destination, {
select: ['**/*bar*'],
dot: true,
});
try {
statSync(resolve(destination, 'bin/world_executable'));
throw new Error('expected bin/world_executable to not be copied');
} catch (error) {
expect(error).to.have.property('code', 'ENOENT');
}
expect(await read(resolve(destination, 'foo_dir/bar.txt'))).to.be('bar\n');
expect(await read(resolve(destination, 'foo_dir/.bar'))).to.be('dotfile\n');
});
});
describe('getFileHash()', () => {
it('rejects if path is not absolute', async () => {
try {
await getFileHash('foo/bar');
throw new Error('Expected getFileHash() to reject');
} catch (error) {
assertNonAbsoluteError(error);
}
});
it('resolves with the sha1 hash of a file', async () => {
expect(await getFileHash(BAR_TXT_PATH, 'sha1'))
.to.be('e242ed3bffccdf271b7fbaf34ed72d089537b42f');
});
it('resolves with the sha256 hash of a file', async () => {
expect(await getFileHash(BAR_TXT_PATH, 'sha256'))
.to.be('7d865e959b2466918c9863afca942d0fb89d7c9ac0c99bafc3749504ded97730');
});
it('resolves with the md5 hash of a file', async () => {
expect(await getFileHash(BAR_TXT_PATH, 'md5'))
.to.be('c157a79031e1c40f85931829bc5fc552');
});
});
describe('untar()', () => {
it('rejects if source path is not absolute', async () => {
try {
await untar('foo/bar', '**/*', __dirname);
throw new Error('Expected untar() to reject');
} catch (error) {
assertNonAbsoluteError(error);
}
});
it('rejects if destination path is not absolute', async () => {
try {
await untar(__dirname, '**/*', 'foo/bar');
throw new Error('Expected untar() to reject');
} catch (error) {
assertNonAbsoluteError(error);
}
});
it('rejects if neither path is absolute', async () => {
try {
await untar('foo/bar', '**/*', 'foo/bar');
throw new Error('Expected untar() to reject');
} catch (error) {
assertNonAbsoluteError(error);
}
});
it('extracts tarball from source into destination, creating destination if necessary', async () => {
const destination = resolve(TMP, 'a/b/c/d/e/f');
await untar(FOO_TAR_PATH, destination);
expect(await read(resolve(destination, 'foo_dir/bar.txt'))).to.be('bar\n');
expect(await read(resolve(destination, 'foo_dir/foo/foo.txt'))).to.be('foo\n');
});
it('passes third argument to Extract class, overriding path with destination', async () => {
const destination = resolve(TMP, 'a/b/c');
await untar(FOO_TAR_PATH, destination, {
path: '/dev/null',
strip: 1
});
expect(await read(resolve(destination, 'bar.txt'))).to.be('bar\n');
expect(await read(resolve(destination, 'foo/foo.txt'))).to.be('foo\n');
});
});
});
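The digests asserted in the getFileHash() tests above can be reproduced directly with Node's crypto module; a minimal standalone sketch (not part of the test suite), assuming the bar.txt fixture contains exactly 'bar\n':

import { createHash } from 'crypto';

for (const algo of ['sha1', 'sha256', 'md5']) {
  // hashing the fixture contents directly yields the expected digests,
  // e.g. sha1 → e242ed3bffccdf271b7fbaf34ed72d089537b42f
  console.log(algo, createHash(algo).update('bar\n').digest('hex'));
}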

View file

@ -0,0 +1,47 @@
import expect from 'expect.js';
import { createPlatform } from '../platform';
describe('src/dev/build/lib/platform', () => {
describe('getName()', () => {
it('returns the name argument', () => {
expect(createPlatform('foo').getName()).to.be('foo');
});
});
describe('getNodeArch()', () => {
it('returns the node arch for the passed name', () => {
expect(createPlatform('windows').getNodeArch()).to.be('windows-x64');
});
});
describe('getBuildName()', () => {
it('returns the build name for the passed name', () => {
expect(createPlatform('windows').getBuildName()).to.be('windows-x86_64');
});
});
describe('isWindows()', () => {
it('returns true if name is windows', () => {
expect(createPlatform('windows').isWindows()).to.be(true);
expect(createPlatform('linux').isWindows()).to.be(false);
expect(createPlatform('darwin').isWindows()).to.be(false);
});
});
describe('isLinux()', () => {
it('returns true if name is linux', () => {
expect(createPlatform('windows').isLinux()).to.be(false);
expect(createPlatform('linux').isLinux()).to.be(true);
expect(createPlatform('darwin').isLinux()).to.be(false);
});
});
describe('isMac()', () => {
it('returns true if name is darwin', () => {
expect(createPlatform('windows').isMac()).to.be(false);
expect(createPlatform('linux').isMac()).to.be(false);
expect(createPlatform('darwin').isMac()).to.be(true);
});
});
});

View file

@ -0,0 +1,161 @@
import sinon from 'sinon';
import expect from 'expect.js';
import { createToolingLog } from '../../../tooling_log';
import { createRunner } from '../runner';
import { isErrorLogged, markErrorLogged } from '../errors';
describe('dev/build/lib/runner', () => {
const sandbox = sinon.sandbox.create();
const config = {};
const log = createToolingLog('verbose');
const onLogLine = sandbox.stub();
log.on('data', onLogLine);
const buildMatcher = sinon.match({
isOss: sinon.match.func,
resolvePath: sinon.match.func,
resolvePathForPlatform: sinon.match.func,
getPlatformArchivePath: sinon.match.func,
getName: sinon.match.func,
getLogTag: sinon.match.func,
});
const ossBuildMatcher = buildMatcher.and(
sinon.match(b => b.isOss(), 'is oss build')
);
const defaultBuildMatcher = buildMatcher.and(
sinon.match(b => !b.isOss(), 'is not oss build')
);
afterEach(() => sandbox.reset());
describe('defaults', () => {
const run = createRunner({
config,
log,
});
it('returns a promise', () => {
expect(run({ run: sinon.stub() })).to.be.a(Promise);
});
it('runs global task once, passing config and log', async () => {
const runTask = sinon.stub();
await run({ global: true, run: runTask });
sinon.assert.calledOnce(runTask);
sinon.assert.calledWithExactly(runTask, config, log);
});
it('does not call local tasks', async () => {
const runTask = sinon.stub();
await run({ run: runTask });
sinon.assert.notCalled(runTask);
});
});
describe('buildOssDist = true, buildDefaultDist = true', () => {
const run = createRunner({
config,
log,
buildOssDist: true,
buildDefaultDist: true
});
it('runs global task once, passing config and log', async () => {
const runTask = sinon.stub();
await run({ global: true, run: runTask });
sinon.assert.calledOnce(runTask);
sinon.assert.calledWithExactly(runTask, config, log);
});
it('runs local tasks twice, passing config log and both builds', async () => {
const runTask = sinon.stub();
await run({ run: runTask });
sinon.assert.calledTwice(runTask);
sinon.assert.calledWithExactly(runTask, config, log, ossBuildMatcher);
sinon.assert.calledWithExactly(runTask, config, log, defaultBuildMatcher);
});
});
describe('just default dist', () => {
const run = createRunner({
config,
log,
buildDefaultDist: true
});
it('runs global task once, passing config and log', async () => {
const runTask = sinon.stub();
await run({ global: true, run: runTask });
sinon.assert.calledOnce(runTask);
sinon.assert.calledWithExactly(runTask, config, log);
});
it('runs local tasks once, passing config log and default build', async () => {
const runTask = sinon.stub();
await run({ run: runTask });
sinon.assert.calledOnce(runTask);
sinon.assert.calledWithExactly(runTask, config, log, defaultBuildMatcher);
});
});
describe('just oss dist', () => {
const run = createRunner({
config,
log,
buildOssDist: true,
});
it('runs global task once, passing config and log', async () => {
const runTask = sinon.stub();
await run({ global: true, run: runTask });
sinon.assert.calledOnce(runTask);
sinon.assert.calledWithExactly(runTask, config, log);
});
it('runs local tasks once, passing config log and oss build', async () => {
const runTask = sinon.stub();
await run({ run: runTask });
sinon.assert.calledOnce(runTask);
sinon.assert.calledWithExactly(runTask, config, log, ossBuildMatcher);
});
});
describe('task rejects', () => {
const run = createRunner({
config,
log,
buildOssDist: true,
});
it('rejects, logs error, and marks error logged', async () => {
try {
await run({ async run() { throw new Error('FOO'); } });
throw new Error('expected run() to reject');
} catch (error) {
expect(error).to.have.property('message').be('FOO');
sinon.assert.calledWith(onLogLine, sinon.match(/FOO/));
expect(isErrorLogged(error)).to.be(true);
}
});
it('just rethrows errors that have already been logged', async () => {
try {
await run({
async run() {
throw markErrorLogged(new Error('FOO'));
}
});
throw new Error('expected run() to reject');
} catch (error) {
expect(error).to.have.property('message').be('FOO');
sinon.assert.neverCalledWith(onLogLine, sinon.match(/FOO/));
expect(isErrorLogged(error)).to.be(true);
}
});
});
});

View file

@ -0,0 +1,31 @@
import expect from 'expect.js';
import pkg from '../../../../../package.json';
import { getVersionInfo } from '../version_info';
describe('dev/build/lib/version_info', () => {
describe('isRelease = true', () => {
it('returns unchanged package.version, build sha, and build number', async () => {
const versionInfo = await getVersionInfo({
isRelease: true,
pkg
});
expect(versionInfo).to.have.property('buildVersion', pkg.version);
expect(versionInfo).to.have.property('buildSha').match(/^[0-9a-f]{40}$/);
expect(versionInfo).to.have.property('buildNumber').a('number').greaterThan(1000);
});
});
describe('isRelease = false', () => {
it('returns snapshot version, build sha, and build number', async () => {
const versionInfo = await getVersionInfo({
isRelease: false,
pkg
});
expect(versionInfo).to.have.property('buildVersion').contain(pkg.version).match(/-SNAPSHOT$/);
expect(versionInfo).to.have.property('buildSha').match(/^[0-9a-f]{40}$/);
expect(versionInfo).to.have.property('buildNumber').a('number').greaterThan(1000);
});
});
});

View file

@ -0,0 +1,41 @@
import chalk from 'chalk';
export function createBuild({ config, oss }) {
const name = oss ? 'kibana-oss' : 'kibana';
const logTag = oss ? chalk`{magenta [kibana-oss]}` : chalk`{cyan [ kibana ]}`;
return new class Build {
isOss() {
return !!oss;
}
resolvePath(...args) {
return config.resolveFromRepo('build', name, ...args);
}
resolvePathForPlatform(platform, ...args) {
return config.resolveFromRepo(
'build',
oss ? 'oss' : 'default',
`kibana-${config.getBuildVersion()}-${platform.getBuildName()}`,
...args
);
}
getPlatformArchivePath(platform) {
const ext = platform.isWindows() ? 'zip' : 'tar.gz';
return config.resolveFromRepo(
'target',
`${name}-${config.getBuildVersion()}-${platform.getBuildName()}.${ext}`
);
}
getName() {
return name;
}
getLogTag() {
return logTag;
}
}();
}
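A usage sketch for createBuild(); the build version shown is illustrative, and `config` is the object produced by getConfig() in the next file:

const build = createBuild({ config, oss: true });

build.getName(); // 'kibana-oss'
build.resolvePath('bin'); // <repo>/build/kibana-oss/bin
build.resolvePathForPlatform(config.getLinuxPlatform(), 'bin');
// <repo>/build/oss/kibana-6.3.0-linux-x86_64/bin (assuming build version 6.3.0)
build.getPlatformArchivePath(config.getLinuxPlatform());
// <repo>/target/kibana-oss-6.3.0-linux-x86_64.tar.gz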

136
src/dev/build/lib/config.js Normal file
View file

@ -0,0 +1,136 @@
import { dirname, resolve, relative } from 'path';
import { platform as getOsPlatform } from 'os';
import { getVersionInfo } from './version_info';
import { createPlatform } from './platform';
export async function getConfig({ isRelease }) {
const pkgPath = resolve(__dirname, '../../../../package.json');
const pkg = require(pkgPath);
const repoRoot = dirname(pkgPath);
const nodeVersion = pkg.engines.node;
const platforms = ['darwin', 'linux', 'windows'].map(createPlatform);
const versionInfo = await getVersionInfo({
isRelease,
pkg,
});
return new class Config {
/**
* Get Kibana's parsed package.json file
* @return {Object}
*/
getKibanaPkg() {
return pkg;
}
/**
* Get the node version required by Kibana
* @return {String}
*/
getNodeVersion() {
return nodeVersion;
}
/**
* Convert an absolute path to a relative path, based from the repo
* @param {String} absolutePath
* @return {String}
*/
getRepoRelativePath(absolutePath) {
return relative(repoRoot, absolutePath);
}
/**
* Resolve a set of relative paths based from the directory of the Kibana repo
* @param {...String} ...subPaths
* @return {String}
*/
resolveFromRepo(...subPaths) {
return resolve(repoRoot, ...subPaths);
}
/**
* Return the list of Platforms we are targeting
* @return {Array<Platform>}
*/
getPlatforms() {
return platforms;
}
/**
* Get the linux platform object
* @return {Platform}
*/
getLinuxPlatform() {
return platforms.find(p => p.isLinux());
}
/**
* Get the windows platform object
* @return {Platform}
*/
getWindowsPlatform() {
return platforms.find(p => p.isWindows());
}
/**
* Get the mac platform object
* @return {Platform}
*/
getMacPlatform() {
return platforms.find(p => p.isMac());
}
/**
* Get the platform object representing the OS on this machine
* @return {Platform}
*/
getPlatformForThisOs() {
switch (getOsPlatform()) {
case 'darwin':
return this.getMacPlatform();
case 'win32':
return this.getWindowsPlatform();
case 'linux':
return this.getLinuxPlatform();
default:
throw new Error(`Unable to find platform for this os`);
}
}
/**
* Get the version to use for this build
* @return {String}
*/
getBuildVersion() {
return versionInfo.buildVersion;
}
/**
* Get the build number of this build
* @return {Number}
*/
getBuildNumber() {
return versionInfo.buildNumber;
}
/**
* Get the git sha for this build
* @return {String}
*/
getBuildSha() {
return versionInfo.buildSha;
}
/**
* Resolve a set of paths based from the target directory for this build.
* @param {...String} ...subPaths
* @return {String}
*/
resolveFromTarget(...subPaths) {
return resolve(repoRoot, 'target', ...subPaths);
}
}();
}
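A minimal usage sketch, assuming it runs inside an async function; the version value is illustrative:

import { getConfig } from './config';

const config = await getConfig({ isRelease: false });
config.getBuildVersion(); // pkg.version plus the -SNAPSHOT suffix, e.g. '7.0.0-alpha1-SNAPSHOT'
config.getNodeVersion(); // whatever package.json lists under engines.node
config.resolveFromRepo('build', 'kibana'); // <repo>/build/kibana
config.getPlatformForThisOs().getName(); // 'darwin', 'linux', or 'windows'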

View file

@ -0,0 +1,10 @@
const loggedErrors = new WeakSet();
export function markErrorLogged(error) {
loggedErrors.add(error);
return error;
}
export function isErrorLogged(error) {
return loggedErrors.has(error);
}
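Because the WeakSet holds errors weakly, marking an error never prevents it from being garbage-collected. A sketch of the intended log-once-then-rethrow pattern (someTask and log are hypothetical):

try {
  await someTask();
} catch (error) {
  if (!isErrorLogged(error)) {
    log.error(error);
    markErrorLogged(error);
  }
  // callers further up the stack see the mark and skip re-logging
  throw error;
}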

75
src/dev/build/lib/exec.js Normal file
View file

@ -0,0 +1,75 @@
import execa from 'execa';
import chalk from 'chalk';
import { Transform } from 'stream';
import {
createPromiseFromStreams,
createSplitStream,
createMapStream,
} from '../../../utils';
// creates a stream that skips empty lines unless they are followed by
// another line, preventing the empty lines produced by splitStream
function skipLastEmptyLineStream() {
let skippedEmptyLine = false;
return new Transform({
objectMode: true,
transform(line, enc, cb) {
if (skippedEmptyLine) {
this.push('');
skippedEmptyLine = false;
}
if (line === '') {
skippedEmptyLine = true;
return cb();
} else {
return cb(null, line);
}
}
});
}
export async function exec(log, cmd, args, options = {}) {
const {
level = 'debug',
cwd,
exitAfter,
} = options;
log[level](chalk.dim('$'), cmd, ...args);
const proc = execa(cmd, args, {
stdio: ['ignore', 'pipe', 'pipe'],
cwd,
});
function onLogLine(line) {
log[level](line);
if (exitAfter && exitAfter.test(line)) {
proc.kill('SIGINT');
}
}
await Promise.all([
proc.catch(error => {
// ignore the error thrown by execa if it's because we killed with SIGINT
if (error.signal !== 'SIGINT') {
throw error;
}
}),
createPromiseFromStreams([
proc.stdout,
createSplitStream('\n'),
skipLastEmptyLineStream(),
createMapStream(onLogLine),
]),
createPromiseFromStreams([
proc.stderr,
createSplitStream('\n'),
skipLastEmptyLineStream(),
createMapStream(onLogLine),
]),
]);
}
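A usage sketch for exec(); the script name and ready-line pattern are illustrative:

await exec(log, 'node', ['scripts/some_server.js'], {
  level: 'info', // log each output line at info instead of the default debug
  cwd: config.resolveFromRepo(),
  exitAfter: /server is ready/, // send SIGINT to the child once a matching line is logged
});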

117
src/dev/build/lib/fs.js Normal file
View file

@ -0,0 +1,117 @@
import fs from 'fs';
import { createHash } from 'crypto';
import { resolve, dirname, isAbsolute } from 'path';
import { createGunzip } from 'zlib';
import vfs from 'vinyl-fs';
import { promisify } from 'bluebird';
import mkdirpCb from 'mkdirp';
import { createPromiseFromStreams } from '../../../utils';
import { Extract } from 'tar';
const mkdirpAsync = promisify(mkdirpCb);
const statAsync = promisify(fs.stat);
const chmodAsync = promisify(fs.chmod);
const writeFileAsync = promisify(fs.writeFile);
const readFileAsync = promisify(fs.readFile);
const readdirAsync = promisify(fs.readdir);
function assertAbsolute(path) {
if (!isAbsolute(path)) {
throw new TypeError(
'Please use absolute paths to keep things explicit. You probably want to use `build.resolvePath()` or `config.resolveFromRepo()`.'
);
}
}
export async function mkdirp(path) {
assertAbsolute(path);
await mkdirpAsync(path);
}
export async function write(path, contents) {
assertAbsolute(path);
await mkdirp(dirname(path));
await writeFileAsync(path, contents);
}
export async function read(path) {
assertAbsolute(path);
return await readFileAsync(path, 'utf8');
}
export async function getChildPaths(path) {
assertAbsolute(path);
const childNames = await readdirAsync(path);
return childNames.map(name => resolve(path, name));
}
export async function copy(source, destination) {
assertAbsolute(source);
assertAbsolute(destination);
const stat = await statAsync(source);
// mkdirp after the stat(), stat will throw if source
// doesn't exist and ideally we won't create the parent directory
// unless the source exists
await mkdirp(dirname(destination));
await createPromiseFromStreams([
fs.createReadStream(source),
fs.createWriteStream(destination),
]);
await chmodAsync(destination, stat.mode);
}
export async function copyAll(sourceDir, destination, options = {}) {
const {
select = ['**/*'],
dot = false,
} = options;
assertAbsolute(sourceDir);
assertAbsolute(destination);
await createPromiseFromStreams([
vfs.src(select, {
buffer: false,
cwd: sourceDir,
base: sourceDir,
dot,
}),
vfs.dest(destination)
]);
}
export async function getFileHash(path, algo) {
assertAbsolute(path);
const hash = createHash(algo);
const readStream = fs.createReadStream(path);
await new Promise((resolve, reject) => {
readStream
.on('data', chunk => hash.update(chunk))
.on('error', reject)
.on('end', resolve);
});
return hash.digest('hex');
}
export async function untar(source, destination, extractOptions = {}) {
assertAbsolute(source);
assertAbsolute(destination);
await createPromiseFromStreams([
fs.createReadStream(source),
createGunzip(),
new Extract({
...extractOptions,
path: destination
}),
]);
}
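A usage sketch of the helpers above; every path must be absolute or assertAbsolute() throws:

import { mkdirp, write, read, copyAll, getFileHash, untar } from './fs';

await mkdirp('/tmp/example'); // recursive, like mkdir -p
await write('/tmp/example/a/b.txt', 'hello\n'); // creates parent directories first
await read('/tmp/example/a/b.txt'); // 'hello\n'
await copyAll('/tmp/example', '/tmp/copy', { select: ['**/*.txt'] }); // dot files skipped by default
await getFileHash('/tmp/copy/a/b.txt', 'sha256'); // hex digest string
await untar('/tmp/archive.tar.gz', '/tmp/out', { strip: 1 }); // options forwarded to tar's Extract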

View file

@ -0,0 +1,13 @@
export { getConfig } from './config';
export { createRunner } from './runner';
export { isErrorLogged } from './errors';
export { exec } from './exec';
export {
read,
write,
mkdirp,
copy,
copyAll,
getFileHash,
untar,
} from './fs';

View file

@ -0,0 +1,27 @@
export function createPlatform(name) {
return new class Platform {
getName() {
return name;
}
getNodeArch() {
return `${name}-x64`;
}
getBuildName() {
return `${name}-x86_64`;
}
isWindows() {
return name === 'windows';
}
isMac() {
return name === 'darwin';
}
isLinux() {
return name === 'linux';
}
};
}

View file

@ -0,0 +1,73 @@
import chalk from 'chalk';
import { isErrorLogged, markErrorLogged } from './errors';
import { createBuild } from './build';
export function createRunner({ config, log, buildOssDist, buildDefaultDist }) {
async function execTask(desc, fn, ...args) {
log.info(desc);
log.indent(4);
const start = Date.now();
const time = () => {
const sec = (Date.now() - start) / 1000;
const minStr = sec > 60 ? `${Math.floor(sec / 60)} min ` : '';
const secStr = `${Math.round(sec % 60)} sec`;
return chalk.dim(`${minStr}${secStr}`);
};
try {
await fn(config, log, ...args);
log.success(chalk.green('✓'), time());
} catch (error) {
if (!isErrorLogged(error)) {
log.error('failure', time());
log.error(error);
markErrorLogged(error);
}
throw error;
} finally {
log.indent(-4);
log.write('');
}
}
const builds = [];
if (buildDefaultDist) {
builds.push(
createBuild({
config,
oss: false,
})
);
}
if (buildOssDist) {
builds.push(
createBuild({
config,
oss: true,
})
);
}
/**
* Run a task by calling its `run()` method with three arguments:
* `config`: an object with methods for determining top-level config values, see `./config.js`
* `log`: an instance of the `ToolingLog`, see `../../tooling_log/tooling_log.js`
* `build?`: if the task is not defined as `global: true` then it is called once for each build, and that build is passed here.
*
* @param {Task} task
* @return {Promise<undefined>}
*/
return async function run(task) {
if (task.global) {
await execTask(chalk`{dim [ global ]} ${task.description}`, task.run);
} else {
for (const build of builds) {
await execTask(`${build.getLogTag()} ${task.description}`, task.run, build);
}
}
};
}
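A sketch of how tasks plug into the runner (task names and bodies are illustrative):

const GlobalOnlyTask = {
  global: true,
  description: 'Runs once, without a build argument',
  async run(config, log) { /* ... */ },
};

const PerBuildTask = {
  description: 'Runs once per enabled build',
  async run(config, log, build) {
    log.info('building into', build.resolvePath());
  },
};

const run = createRunner({ config, log, buildOssDist: true, buildDefaultDist: true });
await run(GlobalOnlyTask); // called once
await run(PerBuildTask); // called twice, once with each build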

View file

@ -0,0 +1,22 @@
import os from 'os';
import execa from 'execa';
async function getBuildNumber() {
if (/^win/.test(os.platform())) {
// Windows does not have the `wc` command, and `find /C /V ""` does not consistently work
const log = await execa('git', ['log', '--format="%h"']);
return log.stdout.split('\n').length;
}
const wc = await execa.shell('git log --format="%h" | wc -l');
return parseFloat(wc.stdout.trim());
}
export async function getVersionInfo({ isRelease, pkg }) {
return {
buildSha: await execa.stdout('git', ['rev-parse', 'HEAD']),
buildVersion: isRelease ? pkg.version : `${pkg.version}-SNAPSHOT`,
buildNumber: await getBuildNumber(),
};
}
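A usage sketch; the version value is illustrative:

const { buildVersion, buildSha, buildNumber } = await getVersionInfo({
  isRelease: false,
  pkg: require('../../../../package.json'),
});
// buildVersion: pkg.version with a -SNAPSHOT suffix, e.g. '7.0.0-alpha1-SNAPSHOT'
// buildSha: full 40-character sha of HEAD
// buildNumber: the number of commits in the repo's history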

10
src/dev/build/tasks/bootstrap_task.js vendored Normal file
View file

@ -0,0 +1,10 @@
import { exec } from '../lib';
export const BootstrapTask = {
global: true,
description: 'Running `yarn kbn bootstrap` to make sure all dependencies are up-to-date',
async run(config, log) {
await exec(log, 'yarn', ['kbn', 'bootstrap', '--skip-kibana-extra']);
},
};

View file

@ -46,19 +46,14 @@ import { buildProductionProjects } from '@kbn/pm';
* in some way by Kibana itself in production, as it won't otherwise be
* included in the production build.
*/
module.exports = function (grunt) {
grunt.registerTask('_build:packages', async function () {
const done = this.async();
const kibanaRoot = grunt.config.get('root');
const buildRoot = `${kibanaRoot}/build/kibana`;
try {
await buildProductionProjects({ kibanaRoot, buildRoot });
done();
} catch (err) {
grunt.fail.fatal(err);
done(err);
}
});
export const BuildPackagesTask = {
description: 'Building distributable versions of packages',
async run(config, log, build) {
await buildProductionProjects({
kibanaRoot: config.resolveFromRepo(),
buildRoot: build.resolvePath(),
oss: build.isOss(),
});
},
};

View file

@ -0,0 +1,47 @@
import del from 'del';
export const CleanTask = {
global: true,
description: 'Cleaning artifacts from previous builds',
async run(config) {
await del([
config.resolveFromRepo('build'),
config.resolveFromRepo('target'),
]);
},
};
export const CleanPackagesTask = {
description: 'Cleaning source for packages that are now installed in node_modules',
async run(config, log, build) {
await del([build.resolvePath('packages'), build.resolvePath('x-pack')]);
},
};
export const CleanExtraFilesFromModulesTask = {
description: 'Cleaning tests, examples, docs, etc. from node_modules',
async run(config, log, build) {
await del([
build.resolvePath('node_modules/**/test/**/*'),
build.resolvePath('node_modules/**/tests/**/*'),
build.resolvePath('node_modules/**/example/**/*'),
build.resolvePath('node_modules/**/examples/**/*'),
]);
},
};
export const CleanExtraBinScriptsTask = {
description: 'Cleaning extra bin/* scripts from platform-specific builds',
async run(config, log, build) {
for (const platform of config.getPlatforms()) {
const patterns = platform.isWindows() ? ['*', '!*.bat'] : ['*.bat'];
await del(patterns, {
cwd: build.resolvePathForPlatform(platform, 'bin')
});
}
}
};

View file

@ -1,8 +1,12 @@
module.exports = function () {
return {
devSource: {
options: { mode: true },
src: [
import { copyAll } from '../lib';
export const CopySourceTask = {
description: 'Copying source into platform-generic build directory',
async run(config, log, build) {
await copyAll(config.resolveFromRepo(), build.resolvePath(), {
dot: false,
select: [
'yarn.lock',
'src/**',
'!src/**/__tests__/**',
@ -20,8 +24,6 @@ module.exports = function () {
'webpackShims/**',
'config/kibana.yml',
],
dest: 'build/kibana',
expand: true
},
};
});
},
};

View file

@ -0,0 +1,24 @@
import { copyAll } from '../lib';
import { getNodeDownloadInfo } from './nodejs';
export const CreateArchivesSourcesTask = {
description: 'Creating platform-specific archive source directories',
async run(config, log, build) {
await Promise.all(config.getPlatforms().map(async platform => {
// copy all files from generic build source directory into platform-specific build directory
await copyAll(
build.resolvePath('.'),
build.resolvePathForPlatform(platform, '.'),
{ dot: true },
);
log.debug('Generic build source copied into', platform.getName(), 'specific build directory');
// copy node.js install
await copyAll(
getNodeDownloadInfo(config, platform).extractDir,
build.resolvePathForPlatform(platform, 'node')
);
log.debug('Node.js copied into', platform.getName(), 'specific build directory');
}));
}
};

View file

@ -0,0 +1,41 @@
import { dirname, extname, basename } from 'path';
import { mkdirp, exec } from '../lib';
export const CreateArchivesTask = {
description: 'Creating the archives for each platform',
async run(config, log, build) {
await Promise.all(config.getPlatforms().map(async platform => {
const source = build.resolvePathForPlatform(platform, '.');
const destination = build.getPlatformArchivePath(platform);
log.info('archiving', source, 'to', destination);
await mkdirp(dirname(destination));
const cwd = dirname(source);
const sourceName = basename(source);
switch (extname(destination)) {
case '.zip':
await exec(log, 'zip', ['-rq', '-ll', destination, sourceName], { cwd });
break;
case '.gz': {
const args = ['-zchf', destination, sourceName];
// Add a flag to handle filepaths with colons (i.e. C://...) on windows
if (config.getPlatformForThisOs().isWindows()) {
args.push('--force-local');
}
await exec(log, 'tar', args, { cwd });
break;
}
default:
throw new Error(`Unexpected extension for archive destination: ${destination}`);
}
}));
}
};

View file

@ -0,0 +1,13 @@
import { mkdirp, write } from '../lib';
export const CreateEmptyDirsAndFilesTask = {
description: 'Creating some empty directories and files to prevent file-permission issues',
async run(config, log, build) {
await Promise.all([
mkdirp(build.resolvePath('plugins')),
mkdirp(build.resolvePath('data')),
write(build.resolvePath('optimize/.babelcache.json'), '{}'),
]);
},
};

View file

@ -0,0 +1,53 @@
import { transformDependencies } from '@kbn/pm';
import { read, write } from '../lib';
export const CreatePackageJsonTask = {
description: 'Creating build-ready version of package.json',
async run(config, log, build) {
const pkg = config.getKibanaPkg();
const newPkg = {
name: pkg.name,
description: pkg.description,
keywords: pkg.keywords,
version: config.getBuildVersion(),
branch: pkg.branch,
build: {
number: config.getBuildNumber(),
sha: config.getBuildSha(),
},
repository: pkg.repository,
engines: {
node: pkg.engines.node,
},
dependencies: transformDependencies(pkg.dependencies),
};
if (build.isOss()) {
delete newPkg.dependencies['x-pack'];
}
await write(
build.resolvePath('package.json'),
JSON.stringify(newPkg, null, ' ')
);
},
};
export const RemovePackageJsonDepsTask = {
description: 'Removing dependencies from package.json',
async run(config, log, build) {
const path = build.resolvePath('package.json');
const pkg = JSON.parse(await read(path));
delete pkg.dependencies;
await write(
build.resolvePath('package.json'),
JSON.stringify(pkg, null, ' ')
);
},
};
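transformDependencies() is implemented in @kbn/pm; judging from the comment in install_dependencies_task.js below, it rewrites local `link:` specifiers to `file:` ones so yarn installs packages from the copied sources. An illustrative input/output sketch, not the actual implementation:

transformDependencies({
  '@kbn/pm': 'link:packages/kbn-pm',
  lodash: '4.17.4',
});
// → { '@kbn/pm': 'file:packages/kbn-pm', lodash: '4.17.4' }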

View file

@ -0,0 +1,14 @@
import { write, read } from '../lib';
export const CreateReadmeTask = {
description: 'Creating README.md file',
async run(config, log, build) {
const readme = await read(config.resolveFromRepo('README.md'));
await write(
build.resolvePath('README.txt'),
readme.replace(/\s##\sSnapshot\sBuilds[\s\S]*/, '')
);
},
};

View file

@ -0,0 +1,18 @@
export * from './bootstrap_task';
export * from './build_packages_task';
export * from './clean_tasks';
export * from './copy_source_task';
export * from './create_archives_sources_task';
export * from './create_archives_task';
export * from './create_empty_dirs_and_files_task';
export * from './create_package_json_task';
export * from './create_readme_task';
export * from './install_dependencies_task';
export * from './license_file_task';
export * from './nodejs';
export * from './notice_file_task';
export * from './optimize_task';
export * from './os_packages';
export * from './transpile_source_task';
export * from './verify_env_task';
export * from './write_sha_sums_task';

View file

@ -1,13 +1,18 @@
import { execFile } from 'child_process';
module.exports = function (grunt) {
grunt.registerTask('_build:installDependencies', function () {
import { exec } from '../lib';
export const InstallDependenciesTask = {
description: 'Installing node_modules, including production builds of packages',
async run(config, log, build) {
// We're using `pure-lockfile` instead of `frozen-lockfile` because we
// rewrite `link:` dependencies to `file:` dependencies earlier in the
// build. This means the lockfile won't be consistent, so instead of
// verifying it, we just skip writing a new lockfile. However, this does
// still use the existing lockfile for dependency resolution.
execFile('yarn', ['--production', '--ignore-optional', '--pure-lockfile'], {
cwd: grunt.config.process('<%= root %>/build/kibana')
}, this.async());
});
const args = ['--production', '--ignore-optional', '--pure-lockfile'];
await exec(log, 'yarn', args, {
cwd: build.resolvePath(),
});
},
};

View file

@ -0,0 +1,21 @@
import { write, read } from '../lib';
export const UpdateLicenseFileTask = {
description: 'Updating LICENSE.txt file',
async run(config, log, build) {
if (build.isOss()) {
log.info('Copying Apache 2.0 license to LICENSE.txt');
await write(
build.resolvePath('LICENSE.txt'),
await read(config.resolveFromRepo('licenses/APACHE-LICENSE-2.0.txt'))
);
} else {
log.info('Copying Elastic license to LICENSE.txt');
await write(
build.resolvePath('LICENSE.txt'),
await read(config.resolveFromRepo('licenses/ELASTIC-LICENSE.txt'))
);
}
},
};

Some files were not shown because too many files have changed in this diff.