Merge branch 'master' into feature/ingest

Matthew Bargar 2016-03-04 14:50:18 -05:00
commit 2bb23ab1f1
1004 changed files with 33925 additions and 33257 deletions


@ -1,75 +1,2 @@
---
parser: babel-eslint
plugins:
- mocha
env:
es6: true
amd: true
node: true
mocha: true
browser: true
rules:
block-scoped-var: 2
camelcase: [ 2, { properties: never } ]
comma-dangle: 0
comma-style: [ 2, last ]
consistent-return: 0
curly: [ 2, multi-line ]
dot-location: [ 2, property ]
dot-notation: [ 2, { allowKeywords: true } ]
eqeqeq: [ 2, allow-null ]
guard-for-in: 2
indent: [ 2, 2, { SwitchCase: 1 } ]
key-spacing: [ 0, { align: value } ]
max-len: [ 2, 140, 2, { ignoreComments: true, ignoreUrls: true } ]
new-cap: [ 2, { capIsNewExceptions: [ Private ] } ]
no-bitwise: 0
no-caller: 2
no-cond-assign: 0
no-debugger: 2
no-empty: 2
no-eval: 2
no-extend-native: 2
no-extra-parens: 0
no-irregular-whitespace: 2
no-iterator: 2
no-loop-func: 2
no-multi-spaces: 0
no-multi-str: 2
no-nested-ternary: 2
no-new: 0
no-path-concat: 0
no-proto: 2
no-return-assign: 0
no-script-url: 2
no-sequences: 2
no-shadow: 0
no-trailing-spaces: 2
no-undef: 2
no-underscore-dangle: 0
no-unused-expressions: 0
no-unused-vars: 0
no-use-before-define: [ 2, nofunc ]
no-with: 2
one-var: [ 2, never ]
quotes: [ 2, single ]
semi-spacing: [ 2, { before: false, after: true } ]
semi: [ 2, always ]
space-after-keywords: [ 2, always ]
space-before-blocks: [ 2, always ]
space-before-function-paren: [ 2, { anonymous: always, named: never } ]
space-in-parens: [ 2, never ]
space-infix-ops: [ 2, { int32Hint: false } ]
space-return-throw-case: [ 2 ]
space-unary-ops: [ 2 ]
strict: [ 2, never ]
valid-typeof: 2
wrap-iife: [ 2, outside ]
yoda: 0
mocha/no-exclusive-tests: 2
mocha/handle-done-callback: 2
extends: '@elastic/kibana'


@ -1 +1 @@
4.2.4
4.3.1


@ -13,7 +13,7 @@ At any given time the Kibana team at Elastic is working on dozens of features an
Let's just get this out there: **Feel free to +1 an issue**. That said, a +1 isn't a vote. We keep up on highly commented issues, but comments are but one of many reasons we might, or might not, work on an issue. A solid write up of your use case is more likely to make your case than a comment that says *+10000*.
#### My issue isn't getting enough attention
First of all, sorry about that, we want you to have a great time with Kibana! You should join us on IRC (#kibana on freenode) and chat about it. Github is terrible for conversations. With that out of the way, there are a number of variables that go into deciding what to work on. These include priority, impact, difficulty, applicability to use cases, and last, and importantly: What we feel like working on.
First of all, sorry about that, we want you to have a great time with Kibana! You should join us on IRC ([#kibana](https://kiwiirc.com/client/irc.freenode.net/?#kibana) on freenode) and chat about it. Github is terrible for conversations. With that out of the way, there are a number of variables that go into deciding what to work on. These include priority, impact, difficulty, applicability to use cases, and last, and importantly: What we feel like working on.
### I want to help!
**Now we're talking**. If you have a bugfix or new feature that you would like to contribute to Kibana, please **find or open an issue about it before you start working on it.** Talk about what you would like to do. It may be that somebody is already working on it, or that there are particular issues that you should know about before implementing the change.
@ -111,42 +111,80 @@ Before running the tests you will need to install the projects dependencies as d
Once that is complete just run:
```sh
npm run test && npm run build
```
#### Testing and debugging tests
#### Debugging unit tests
The standard `npm run test` task runs several sub tasks and can take several minutes to complete, making debugging failures pretty painful. In order to ease the pain specialized tasks provide alternate methods for running the tests.
<dl>
<dt><code>npm run test:quick</code></dt>
<dd>Runs both server and browser tests, but skips linting</dd>
<dt><code>npm run test:server</code> or <code>npm run test:browser</code></dt>
<dd>Runs the tests for just the server or browser</dd>
`npm run test:quick`
Runs both server and browser tests, but skips linting
<dt><code>npm run test:dev</code></dt>
<dd>
Initializes an environment for debugging the browser tests. Includes a dedicated instance of the kibana server for building the test bundle, and a karma server. When running this task the build is optimized for the first time and then a karma-owned instance of the browser is opened. Click the "debug" button to open a new tab that executes the unit tests.
<br>
<img src="http://i.imgur.com/DwHxgfq.png">
</dd>
`npm run test:server`
Run only the server tests
<dt><code>npm run mocha [test file or dir]</code> or <code>npm run mocha:debug [test file or dir]</code></dt>
<dd>
Run a one off test with the local project version of mocha, babel compilation, and optional debugging. Great
for development and fixing individual tests.
</dd>
</dl>
`npm run test:browser`
Run only the browser tests
Distributable packages can be found in `target/` after the build completes.
`npm run test:dev`
Initializes an environment for debugging the browser tests. Includes a dedicated instance of the kibana server for building the test bundle, and a karma server. When running this task the build is optimized for the first time and then a karma-owned instance of the browser is opened. Click the "debug" button to open a new tab that executes the unit tests.
![Browser test debugging](http://i.imgur.com/DwHxgfq.png)
`npm run mocha [test file or dir]` or `npm run mocha:debug [test file or dir]`
Run a one off test with the local project version of mocha, babel compilation, and optional debugging. Great
for development and fixing individual tests.
#### Unit testing plugins
This should work super if you're using the [Kibana plugin generator](https://github.com/elastic/generator-kibana-plugin). If you're not using the generator, well, you're on your own. We suggest you look at how the generator works.
`npm run test:dev -- --kbnServer.testsBundle.pluginId=some_special_plugin --kbnServer.plugin-path=../some_special_plugin`
Run the tests for just your particular plugin. Assuming your plugin lives outside of the `installedPlugins` directory, which it should.
#### Running browser automation tests:
*The Selenium server that is started currently only runs the tests in Firefox*
The following will start Kibana, Elasticsearch and Selenium for you. To run the functional UI tests use the following commands
`npm run test:ui`
Run the functional UI tests one time and exit. This is used by the CI systems and is great for quickly checking that things pass. It is essentially a combination of the next two tasks.
`npm run test:ui:server`
Start the server required for the `test:ui:runner` tasks. Once the server is started `test:ui:runner` can be run multiple times without waiting for the server to start.
`npm run test:ui:runner`
Execute the front-end selenium tests. This requires the server started by the `test:ui:server` task.
##### If you already have ElasticSearch, Kibana, and Selenium Server running:
Set your es and kibana ports in `test/intern.js` to 9220 and 5620, respectively. You can configure your Selenium server to run the tests on Chrome, IE, or other browsers here.
Once you've got the services running, execute the following:
```sh
npm run test:ui:runner
```
#### Browser automation notes:
- Using the Page Objects pattern (https://theintern.github.io/intern/#writing-functional-test); a sketch follows this list
- At least the initial tests for the Settings, Discover, and Visualize tabs all depend on a very specific set of logstash-type data (generated with makelogs). Since that is a static set of data, all the Discover and Visualize tests use a specific Absolute time range. This guarantees the same results each run.
- These tests have been developed and tested with the Chrome and Firefox browsers. In theory, they should work on all browsers (that's the benefit of Intern using Leadfoot).
- These tests should also work with an external testing service like https://saucelabs.com/ or https://www.browserstack.com/ but that has not been tested.
- https://theintern.github.io/
- https://theintern.github.io/leadfoot/Element.html
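A minimal page object sketch in that pattern might look like the following. The page name and CSS selector are hypothetical, not taken from the Kibana suite; `findByCssSelector` and `click` are standard Leadfoot Command methods.

```js
// Hypothetical page object for an Intern functional test.
function DiscoverPage(remote) {
  this.remote = remote; // Leadfoot Command object provided by Intern
}

DiscoverPage.prototype.clickNewSearch = function () {
  // The selector is illustrative; real tests would target an actual element.
  return this.remote
    .findByCssSelector('[aria-label="New Search"]')
    .click();
};

module.exports = DiscoverPage;
```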
#### Building OS packages
Packages are built using fpm, pleaserun, dpkg, and rpm. fpm and pleaserun can be installed using gem. Package building has only been tested on Linux and is not supported on any other platform.
```sh
gem install pleaserun
apt-get install ruby-dev
gem install fpm
npm run build:ospackages
```
@ -156,48 +194,7 @@ To specify a package to build you can add `rpm` or `deb` as an argument.
npm run build:ospackages -- --rpm
```
### Functional UI Testing
#### Handy references
- https://theintern.github.io/
- https://theintern.github.io/leadfoot/Element.html
#### Running tests using npm task:
*The Selenium server that is started currently only runs the tests in Firefox*
To run the functional UI tests use the following commands
<dl>
<dt><code>npm run test:ui</code></dt>
<dd>Run the functional UI tests one time and exit. This is used by the CI systems and is great for quickly checking that things pass. It is essentially a combination of the next two tasks.</dd>
<dt><code>npm run test:ui:server</code></dt>
<dd>Start the server required for the <code>test:ui:runner</code> tasks. Once the server is started <code>test:ui:runner</code> can be run multiple times without waiting for the server to start.</dd>
<dt><code>npm run test:ui:runner</code></dt>
<dd>Execute the front-end selenium tests. This requires the server started by the <code>test:ui:server</code> task.</dd>
</dl>
#### Running tests locally with your existing (and already running) ElasticSearch, Kibana, and Selenium Server:
Set your es and kibana ports in `test/intern.js` to 9220 and 5620, respectively. You can configure your Selenium server to run the tests on Chrome, IE, or other browsers here.
Once you've got the services running, execute the following:
```sh
npm run test:ui:runner
```
#### General notes:
- Using Page Objects pattern (https://theintern.github.io/intern/#writing-functional-test)
- At least the initial tests for the Settings, Discover, and Visualize tabs all depend on a very specific set of logstash-type data (generated with makelogs). Since that is a static set of data, all the Discover and Visualize tests use a specific Absolute time range. This guarantees the same results each run.
- These tests have been developed and tested with the Chrome and Firefox browsers. In theory, they should work on all browsers (that's the benefit of Intern using Leadfoot).
- These tests should also work with an external testing service like https://saucelabs.com/ or https://www.browserstack.com/ but that has not been tested.
Distributable packages can be found in `target/` after the build completes.
## Submitting a pull request

FAQ.md

@ -4,10 +4,10 @@
**A:** Kibana 4 packages are architecture specific. Ensure you are using the correct package for your architecture.
**Q:** Where do I go for support?
**A:** Please join us at [discuss.elastic.co](discuss.elastic.co) with questions. Your problem might be a bug, but it might just be a misunderstanding, or feature we could improve. We're also available on Freenode in #kibana
**A:** Please join us at [discuss.elastic.co](https://discuss.elastic.co) with questions. Your problem might be a bug, but it might just be a misunderstanding, or feature we could improve. We're also available on Freenode in #kibana
**Q:** Ok, we talked about it and it's definitely a bug
**A:** Doh, ok, let's get that fixed. File an issue on [github.com/elastic/kibana](github.com/elastic/kibana). I'd recommend reading the beginning of the CONTRIBUTING.md, just so you know how we'll handle the issue.
**A:** Doh, ok, let's get that fixed. File an issue on [github.com/elastic/kibana](https://github.com/elastic/kibana). I'd recommend reading the beginning of the CONTRIBUTING.md, just so you know how we'll handle the issue.
### Kibana 3 Migration
**Q:** Where is feature X that I loved from Kibana 3?


@ -4,7 +4,7 @@ Kibana is an open source ([Apache Licensed](https://github.com/elastic/kibana/bl
## Requirements
- Elasticsearch version 2.2.0 or later
- Elasticsearch master
- Kibana binary package
## Installation


@ -586,7 +586,7 @@ Use slashes for both single line and multi line comments. Try to write
comments that explain higher level mechanisms or clarify difficult
segments of your code. **Don't use comments to restate trivial things**.
***Exception:*** Comment blocks describing a function and it's arguments (docblock) should start with `/**`, contain a single `*` at the begining of each line, and end with `*/`.
***Exception:*** Comment blocks describing a function and its arguments (docblock) should start with `/**`, contain a single `*` at the beginning of each line, and end with `*/`.
*Right:*
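An illustrative docblock in this format (the function and its parameters are hypothetical):

```js
/**
 * Fetches the field mapping for the given index.
 *
 * @param {string} index - the name of the index
 * @returns {Promise} resolves with the mapping object
 */
function fetchMapping(index) {
  // ...
}
```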
@ -656,7 +656,7 @@ function ClassName() {
var ClassName = function () {};
```
### Inhertiance should be done with a utility
### Inheritance should be done with a utility
While you can do it with pure JS, a utility will remove a lot of boilerplate, and be more readable and functional.
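As a minimal sketch, Node's built-in `util.inherits` is one such utility (the project's own helper, if any, may differ):

```js
var util = require('util');

function Shape() {}

function Square() {
  Shape.call(this); // invoke the super constructor
}

// Wires up Square.prototype in one line instead of manual boilerplate.
util.inherits(Square, Shape);
```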
@ -685,7 +685,7 @@ Square.prototype = Object.create(Shape);
### Keep Constructors Small
It is often the case that there are properties that can't be defined on the prototype, or work that needs to be done to completely create an object (like call it's Super class). This is all that should be done within constructors.
It is often the case that there are properties that can't be defined on the prototype, or work that needs to be done to completely create an object (like call its Super class). This is all that should be done within constructors.
Try to follow the [Write small functions](#write-small-functions) rule here too.
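A sketch of a constructor limited to exactly that work (the class names are illustrative):

```js
function SuperClass() {}

function ClassName() {
  SuperClass.call(this); // work needed to completely create the object
  this.items = [];       // instance state that can't live on the prototype
}
```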
@ -775,7 +775,7 @@ Several already exist, and can be found in `src/kibana/utils/_mixins.js`
## Filenames
All filenames should use `snake_case` and *can* start with an underscore if the module is not intended to be used outside of it's containing module.
All filenames should use `snake_case` and *can* start with an underscore if the module is not intended to be used outside of its containing module.
*Right:*
- `src/kibana/index_patterns/index_pattern.js`
@ -858,7 +858,7 @@ app.service('CustomService', function(Promise, otherDeps) {
### Routes
Angular routes are defined using a custom require modules named `routes` that remove much of the required boilerplate.
Angular routes are defined using a custom require module named `routes` that removes much of the required boilerplate.
```js
require('ui/routes')
@ -871,7 +871,7 @@ require('ui/routes')
## Multiple attribute values
When a node has multiple attributes that would cause it to exceed the line character limit, each attribute including the first should be on its own line with a single indent. Also, when a node that is styled in this way has child nodes, there should be a blank line between the openening parent tag and the first child tag.
When a node has multiple attributes that would cause it to exceed the line character limit, each attribute including the first should be on its own line with a single indent. Also, when a node that is styled in this way has child nodes, there should be a blank line between the opening parent tag and the first child tag.
```
<ul


@ -56,7 +56,7 @@
# Time in milliseconds to wait for responses from the back end or Elasticsearch. This value
# must be a positive integer.
# elasticsearch.requestTimeout: 300000
# elasticsearch.requestTimeout: 30000
# Time in milliseconds for Elasticsearch to wait for responses from shards. Set to 0 to disable.
# elasticsearch.shardTimeout: 0


@ -22,11 +22,11 @@ has the following fingerprint:
wget -qO - https://packages.elastic.co/GPG-KEY-elasticsearch | sudo apt-key add -
--------------------------------------------------
+
. Add the repository definition to your `/etc/apt/sources.list` file:
. Add the repository definition to your `/etc/apt/sources.list.d/kibana.list` file:
+
[source, sh]
--------------------------------------------------
echo "deb http://packages.elastic.co/kibana/{branch}/debian stable main" | sudo tee -a /etc/apt/sources.list
echo "deb http://packages.elastic.co/kibana/{branch}/debian stable main" | sudo tee -a /etc/apt/sources.list.d/kibana.list
--------------------------------------------------
+
[WARNING]
@ -37,7 +37,7 @@ When the `deb-src` entry is present, the commands in this procedure generate an
Unable to find expected entry 'main/source/Sources' in Release file (Wrong sources.list entry or malformed file)
Delete the `deb-src` entry from the `/etc/apt/sources.list` file to clear the error.
Delete the `deb-src` entry from the `/etc/apt/sources.list.d/kibana.list` file to clear the error.
==================================================
+
. Run `apt-get update` and the repository is ready for use. Install Kibana with the following command:


@ -3,7 +3,7 @@
You can set up Kibana and start exploring your Elasticsearch indices in minutes.
All you need is:
* Elasticsearch 2.1 or later
* Elasticsearch master
* A modern web browser - http://www.elastic.co/subscriptions/matrix#matrix_browsers[Supported Browsers].
* Information about your Elasticsearch installation:
** URL of the Elasticsearch instance you want to connect to.
@ -82,8 +82,8 @@ simply be the name of a single index.
reads the index mapping to list all of the fields that contain a timestamp. If your index doesn't have time-based data,
disable the *Index contains time-based events* option.
+
WARNING: Using event times to create index names is *deprecated* in this release of Kibana. Support for this functionality
will be removed entirely in the next major Kibana release. Elasticsearch 2.1 includes sophisticated date parsing APIs that
Kibana uses to determine date information, removing the need to specify dates in the index pattern name.
+
. Click *Create* to add the index pattern. This first pattern is automatically configured as the default.


@ -56,8 +56,10 @@
"elasticsearchWithPlugins": "grunt esvm:withPlugins:keepalive",
"lint": "grunt eslint:source",
"lintroller": "grunt eslint:fixSource",
"makelogs": "makelogs",
"mocha": "mocha",
"mocha:debug": "mocha --debug-brk"
"mocha:debug": "mocha --debug-brk",
"sterilize": "grunt sterilize"
},
"repository": {
"type": "git",
@ -92,14 +94,15 @@
"commander": "2.8.1",
"css-loader": "0.17.0",
"d3": "3.5.6",
"elasticsearch": "8.0.1",
"elasticsearch-browser": "8.0.1",
"elasticsearch": "10.1.2",
"elasticsearch-browser": "10.1.2",
"expiry-js": "0.1.7",
"exports-loader": "0.6.2",
"expose-loader": "0.7.0",
"extract-text-webpack-plugin": "0.8.2",
"file-loader": "0.8.4",
"font-awesome": "4.4.0",
"glob-all": "3.0.1",
"good": "6.3.0",
"good-squeeze": "2.1.0",
"gridster": "0.5.6",
@ -125,11 +128,10 @@
"moment-timezone": "0.4.1",
"raw-loader": "0.5.1",
"request": "2.61.0",
"requirefrom": "0.2.0",
"rimraf": "2.4.3",
"rjs-repack-loader": "1.0.6",
"script-loader": "0.6.1",
"semver": "4.3.6",
"semver": "5.1.0",
"style-loader": "0.12.3",
"tar": "2.2.0",
"url-loader": "0.5.6",
@ -139,22 +141,22 @@
"wreck": "6.2.0"
},
"devDependencies": {
"@elastic/eslint-config-kibana": "0.0.2",
"Nonsense": "0.1.2",
"angular-mocks": "1.4.7",
"auto-release-sinon": "1.0.3",
"babel-eslint": "4.1.7",
"babel-eslint": "4.1.8",
"chokidar": "1.0.5",
"eslint": "1.5.1",
"eslint-plugin-mocha": "1.0.0",
"eslint": "1.10.3",
"eslint-plugin-mocha": "1.1.0",
"expect.js": "0.3.1",
"faker": "1.1.0",
"glob": "4.5.3",
"grunt": "0.4.5",
"grunt-babel": "5.0.1",
"grunt-cli": "0.1.13",
"grunt-contrib-clean": "0.6.0",
"grunt-contrib-copy": "0.8.1",
"grunt-esvm": "2.0.0",
"grunt-esvm": "2.1.1",
"grunt-karma": "0.12.0",
"grunt-run": "0.5.0",
"grunt-s3": "0.2.0-alpha.3",
@ -175,6 +177,7 @@
"libesvm": "3.3.0",
"license-checker": "3.1.0",
"load-grunt-config": "0.7.2",
"makelogs": "3.0.0-beta3",
"marked-text-renderer": "0.1.0",
"mocha": "2.3.0",
"nock": "2.10.0",
@ -186,7 +189,7 @@
"supertest-as-promised": "2.0.2"
},
"engines": {
"node": "4.2.4",
"npm": "2.14.15"
"node": "4.3.1",
"npm": "2.14.21"
}
}


@ -1,9 +1,9 @@
let _ = require('lodash');
let Command = require('commander').Command;
import _ from 'lodash';
let red = require('./color').red;
let yellow = require('./color').yellow;
let help = require('./help');
import help from './help';
import { Command } from 'commander';
import { red } from './color';
import { yellow } from './color';
Command.prototype.error = function (err) {
if (err && err.message) err = err.message;


@ -1,11 +1,11 @@
let _ = require('lodash');
let ansicolors = require('ansicolors');
import _ from 'lodash';
import ansicolors from 'ansicolors';
let log = _.restParam(function (color, label, rest1) {
console.log.apply(console, [color(` ${_.trim(label)} `)].concat(rest1));
});
let color = require('./color');
import color from './color';
module.exports = class Log {
constructor(quiet, silent) {


@ -1,8 +1,7 @@
let _ = require('lodash');
import _ from 'lodash';
let utils = require('requirefrom')('src/utils');
let pkg = utils('packageJson');
let Command = require('./Command');
import pkg from '../utils/packageJson';
import Command from './Command';
let argv = process.env.kbnWorkerArgv ? JSON.parse(process.env.kbnWorkerArgv) : process.argv.slice();
let program = new Command('bin/kibana');


@ -1,12 +1,12 @@
const cluster = require('cluster');
import cluster from 'cluster';
const { join } = require('path');
const { format: formatUrl } = require('url');
const Hapi = require('hapi');
import Hapi from 'hapi';
const { debounce, compact, get, invoke, bindAll, once, sample } = require('lodash');
const Log = require('../Log');
const Worker = require('./worker');
const BasePathProxy = require('./base_path_proxy');
import Log from '../Log';
import Worker from './worker';
import BasePathProxy from './base_path_proxy';
process.env.kbnWorkerType = 'managr';
@ -63,7 +63,13 @@ module.exports = class ClusterManager {
bindAll(this, 'onWatcherAdd', 'onWatcherError', 'onWatcherChange');
if (opts.watch) this.setupWatching();
if (opts.watch) {
this.setupWatching([
...settings.plugins.paths,
...settings.plugins.scanDirs
]);
}
else this.startCluster();
}
@ -75,10 +81,9 @@ module.exports = class ClusterManager {
}
}
setupWatching() {
setupWatching(extraPaths) {
const chokidar = require('chokidar');
const utils = require('requirefrom')('src/utils');
const fromRoot = utils('fromRoot');
const fromRoot = require('../../utils/fromRoot');
this.watcher = chokidar.watch([
'src/plugins',
@ -86,7 +91,7 @@ module.exports = class ClusterManager {
'src/ui',
'src/utils',
'config',
'installedPlugins'
...extraPaths
], {
cwd: fromRoot('.'),
ignored: /[\\\/](\..*|node_modules|bower_components|public|__tests__)[\\\/]/


@ -1,9 +1,9 @@
let _ = require('lodash');
let cluster = require('cluster');
import _ from 'lodash';
import cluster from 'cluster';
let { resolve } = require('path');
let { EventEmitter } = require('events');
let fromRoot = require('../../utils/fromRoot');
import fromRoot from '../../utils/fromRoot';
let cliPath = fromRoot('src/cli');
let baseArgs = _.difference(process.argv.slice(2), ['--no-watch']);


@ -1,6 +1,6 @@
var _ = require('lodash');
var ansicolors = require('ansicolors');
import _ from 'lodash';
import ansicolors from 'ansicolors';
exports.green = _.flow(ansicolors.black, ansicolors.bgGreen);
exports.red = _.flow(ansicolors.white, ansicolors.bgRed);


@ -1,4 +1,4 @@
var _ = require('lodash');
import _ from 'lodash';
module.exports = function (command, spaces) {
if (!_.size(command.commands)) {


@ -0,0 +1,31 @@
import expect from 'expect.js';
import fileType, { ZIP, TAR } from '../file_type';
describe('kibana cli', function () {
describe('file_type', function () {
it('returns ZIP for .zip filename', function () {
const type = fileType('wat.zip');
expect(type).to.equal(ZIP);
});
it('returns TAR for .tar.gz filename', function () {
const type = fileType('wat.tar.gz');
expect(type).to.equal(TAR);
});
it('returns TAR for .tgz filename', function () {
const type = fileType('wat.tgz');
expect(type).to.equal(TAR);
});
it('returns undefined for unknown file type', function () {
const type = fileType('wat.unknown');
expect(type).to.equal(undefined);
});
it('accepts paths', function () {
const type = fileType('/some/path/to/wat.zip');
expect(type).to.equal(ZIP);
});
it('accepts urls', function () {
const type = fileType('http://example.com/wat.zip');
expect(type).to.equal(ZIP);
});
});
});


@ -1,6 +1,6 @@
const expect = require('expect.js');
const sinon = require('sinon');
const plugin = require('../plugin');
import expect from 'expect.js';
import sinon from 'sinon';
import plugin from '../plugin';
describe('kibana cli', function () {


@ -1,10 +1,10 @@
const expect = require('expect.js');
const sinon = require('sinon');
const fs = require('fs');
const rimraf = require('rimraf');
import expect from 'expect.js';
import sinon from 'sinon';
import fs from 'fs';
import rimraf from 'rimraf';
const pluginCleaner = require('../plugin_cleaner');
const pluginLogger = require('../plugin_logger');
import pluginCleaner from '../plugin_cleaner';
import pluginLogger from '../plugin_logger';
describe('kibana cli', function () {


@ -1,12 +1,12 @@
const expect = require('expect.js');
const sinon = require('sinon');
const nock = require('nock');
const glob = require('glob');
const rimraf = require('rimraf');
const { join } = require('path');
const mkdirp = require('mkdirp');
const pluginLogger = require('../plugin_logger');
const pluginDownloader = require('../plugin_downloader');
import expect from 'expect.js';
import sinon from 'sinon';
import nock from 'nock';
import glob from 'glob-all';
import rimraf from 'rimraf';
import mkdirp from 'mkdirp';
import pluginLogger from '../plugin_logger';
import pluginDownloader from '../plugin_downloader';
import { join } from 'path';
describe('kibana cli', function () {
@ -124,6 +124,25 @@ describe('kibana cli', function () {
});
});
it('should consider .tgz files as archive type .tar.gz', function () {
const filePath = join(__dirname, 'replies/test_plugin_master.tar.gz');
const couchdb = nock('http://www.files.com')
.defaultReplyHeaders({
'content-length': '10'
})
.get('/plugin.tgz')
.replyWithFile(200, filePath);
const sourceUrl = 'http://www.files.com/plugin.tgz';
return downloader._downloadSingle(sourceUrl)
.then(function (data) {
expect(data.archiveType).to.be('.tar.gz');
expectWorkingPathNotEmpty();
});
});
it('should download a zip from a valid http url', function () {
const filePath = join(__dirname, 'replies/test_plugin_master.zip');


@ -1,13 +1,13 @@
const expect = require('expect.js');
const sinon = require('sinon');
const glob = require('glob');
const rimraf = require('rimraf');
const { join } = require('path');
const mkdirp = require('mkdirp');
import expect from 'expect.js';
import sinon from 'sinon';
import glob from 'glob-all';
import rimraf from 'rimraf';
import mkdirp from 'mkdirp';
const pluginLogger = require('../plugin_logger');
const extract = require('../plugin_extractor');
const pluginDownloader = require('../plugin_downloader');
import pluginLogger from '../plugin_logger';
import extract from '../plugin_extractor';
import pluginDownloader from '../plugin_downloader';
import { join } from 'path';
describe('kibana cli', function () {


@ -1,10 +1,10 @@
const expect = require('expect.js');
const sinon = require('sinon');
const rimraf = require('rimraf');
const { mkdirSync } = require('fs');
const { join } = require('path');
const pluginLogger = require('../plugin_logger');
const pluginInstaller = require('../plugin_installer');
import expect from 'expect.js';
import sinon from 'sinon';
import rimraf from 'rimraf';
import pluginLogger from '../plugin_logger';
import pluginInstaller from '../plugin_installer';
import { mkdirSync } from 'fs';
import { join } from 'path';
describe('kibana cli', function () {


@ -1,6 +1,6 @@
const expect = require('expect.js');
const sinon = require('sinon');
const pluginLogger = require('../plugin_logger');
import expect from 'expect.js';
import sinon from 'sinon';
import pluginLogger from '../plugin_logger';
describe('kibana cli', function () {


@ -1,7 +1,7 @@
const expect = require('expect.js');
const sinon = require('sinon');
const progressReporter = require('../progress_reporter');
const pluginLogger = require('../plugin_logger');
import expect from 'expect.js';
import sinon from 'sinon';
import progressReporter from '../progress_reporter';
import pluginLogger from '../plugin_logger';
describe('kibana cli', function () {


@ -1,9 +1,8 @@
var path = require('path');
var expect = require('expect.js');
import path from 'path';
import expect from 'expect.js';
var utils = require('requirefrom')('src/utils');
var fromRoot = utils('fromRoot');
var settingParser = require('../setting_parser');
import fromRoot from '../../../utils/fromRoot';
import settingParser from '../setting_parser';
describe('kibana cli', function () {


@ -1,5 +1,6 @@
const { createWriteStream, createReadStream, unlinkSync, statSync } = require('fs');
const getProgressReporter = require('../progress_reporter');
import getProgressReporter from '../progress_reporter';
import { createWriteStream, createReadStream, unlinkSync, statSync } from 'fs';
import fileType from '../file_type';
function openSourceFile({ sourcePath }) {
try {
@ -36,15 +37,6 @@ async function copyFile({ readStream, writeStream, progressReporter }) {
});
}
function getArchiveTypeFromFilename(path) {
if (/\.zip$/i.test(path)) {
return '.zip';
}
if (/\.tar\.gz$/i.test(path)) {
return '.tar.gz';
}
}
/*
// Responsible for managing local file transfers
*/
@ -67,7 +59,7 @@ export default async function copyLocalFile(logger, sourcePath, targetPath) {
}
// all is well, return our archive type
const archiveType = getArchiveTypeFromFilename(sourcePath);
const archiveType = fileType(sourcePath);
return { archiveType };
} catch (err) {
logger.error(err);


@ -1,7 +1,8 @@
const { fromNode: fn } = require('bluebird');
const { createWriteStream, unlinkSync } = require('fs');
const Wreck = require('wreck');
const getProgressReporter = require('../progress_reporter');
import Wreck from 'wreck';
import getProgressReporter from '../progress_reporter';
import { fromNode as fn } from 'bluebird';
import { createWriteStream, unlinkSync } from 'fs';
import fileType, { ZIP, TAR } from '../file_type';
function sendRequest({ sourceUrl, timeout }) {
const maxRedirects = 11; //Because this one goes to 11.
@ -49,18 +50,12 @@ function getArchiveTypeFromResponse(resp, sourceUrl) {
const contentType = (resp.headers['content-type'] || '');
switch (contentType.toLowerCase()) {
case 'application/zip': return '.zip';
case 'application/x-gzip': return '.tar.gz';
case 'application/zip': return ZIP;
case 'application/x-gzip': return TAR;
default:
//If we can't infer the archive type from the content-type header,
//fall back to checking the extension in the url
if (/\.zip$/i.test(sourceUrl)) {
return '.zip';
}
if (/\.tar\.gz$/i.test(sourceUrl)) {
return '.tar.gz';
}
break;
return fileType(sourceUrl);
}
}


@ -1,6 +1,6 @@
const zlib = require('zlib');
const fs = require('fs');
const tar = require('tar');
import zlib from 'zlib';
import fs from 'fs';
import tar from 'tar';
async function extractArchive(settings) {
await new Promise((resolve, reject) => {


@ -1,4 +1,4 @@
const DecompressZip = require('@bigfunger/decompress-zip');
import DecompressZip from '@bigfunger/decompress-zip';
async function extractArchive(settings) {
await new Promise((resolve, reject) => {


@ -0,0 +1,14 @@
export const TAR = '.tar.gz';
export const ZIP = '.zip';
export default function fileType(filename) {
if (/\.zip$/i.test(filename)) {
return ZIP;
}
if (/\.tar\.gz$/i.test(filename)) {
return TAR;
}
if (/\.tgz$/i.test(filename)) {
return TAR;
}
}
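Given the constants and default export above, usage reduces to comparing the returned constant:

```js
import fileType, { ZIP, TAR } from './file_type';

fileType('plugin.zip');                       // ZIP, i.e. '.zip'
fileType('plugin.tgz');                       // TAR, i.e. '.tar.gz'
fileType('plugin.unknown');                   // undefined
fileType('http://example.com/plugin.tar.gz'); // TAR; paths and urls both work
```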


@ -1,10 +1,9 @@
const utils = require('requirefrom')('src/utils');
const fromRoot = utils('fromRoot');
const settingParser = require('./setting_parser');
const installer = require('./plugin_installer');
const remover = require('./plugin_remover');
const lister = require('./plugin_lister');
const pluginLogger = require('./plugin_logger');
import fromRoot from '../../utils/fromRoot';
import settingParser from './setting_parser';
import installer from './plugin_installer';
import remover from './plugin_remover';
import lister from './plugin_lister';
import pluginLogger from './plugin_logger';
export default function pluginCli(program) {
function processCommand(command, options) {


@ -1,5 +1,5 @@
const rimraf = require('rimraf');
const fs = require('fs');
import rimraf from 'rimraf';
import fs from 'fs';
export default function createPluginCleaner(settings, logger) {
function cleanPrevious() {


@ -1,7 +1,7 @@
const _ = require('lodash');
const urlParse = require('url').parse;
const downloadHttpFile = require('./downloaders/http');
const downloadLocalFile = require('./downloaders/file');
import _ from 'lodash';
import downloadHttpFile from './downloaders/http';
import downloadLocalFile from './downloaders/file';
import { parse as urlParse } from 'url';
export default function createPluginDownloader(settings, logger) {
let archiveType;


@ -1,12 +1,13 @@
const zipExtract = require('./extractors/zip');
const tarGzExtract = require('./extractors/tar_gz');
import zipExtract from './extractors/zip';
import tarGzExtract from './extractors/tar_gz';
import { ZIP, TAR } from './file_type';
export default function extractArchive(settings, logger, archiveType) {
switch (archiveType) {
case '.zip':
case ZIP:
return zipExtract(settings, logger);
break;
case '.tar.gz':
case TAR:
return tarGzExtract(settings, logger);
break;
default:


@ -1,14 +1,13 @@
const _ = require('lodash');
const utils = require('requirefrom')('src/utils');
const fromRoot = utils('fromRoot');
const pluginDownloader = require('./plugin_downloader');
const pluginCleaner = require('./plugin_cleaner');
const pluginExtractor = require('./plugin_extractor');
const KbnServer = require('../../server/KbnServer');
const readYamlConfig = require('../serve/read_yaml_config');
const { statSync, renameSync } = require('fs');
const Promise = require('bluebird');
const rimrafSync = require('rimraf').sync;
import _ from 'lodash';
import fromRoot from '../../utils/fromRoot';
import pluginDownloader from './plugin_downloader';
import pluginCleaner from './plugin_cleaner';
import pluginExtractor from './plugin_extractor';
import KbnServer from '../../server/KbnServer';
import readYamlConfig from '../serve/read_yaml_config';
import Promise from 'bluebird';
import { sync as rimrafSync } from 'rimraf';
import { statSync, renameSync } from 'fs';
const mkdirp = Promise.promisify(require('mkdirp'));
export default {


@ -1,4 +1,4 @@
const fs = require('fs');
import fs from 'fs';
export function list(settings, logger) {
fs.readdirSync(settings.pluginDir)


@ -1,5 +1,5 @@
const fs = require('fs');
const rimraf = require('rimraf');
import fs from 'fs';
import rimraf from 'rimraf';
module.exports = {
remove: remove


@ -1,6 +1,6 @@
const { resolve } = require('path');
const expiry = require('expiry-js');
import expiry from 'expiry-js';
import { intersection } from 'lodash';
import { resolve } from 'path';
export default function createSettingParser(options) {
function parseMilliseconds(val) {


@ -1,9 +1,8 @@
let _ = require('lodash');
let fs = require('fs');
let yaml = require('js-yaml');
import _ from 'lodash';
import fs from 'fs';
import yaml from 'js-yaml';
let utils = require('requirefrom')('src/utils');
let fromRoot = utils('fromRoot');
import fromRoot from '../../utils/fromRoot';
let legacySettingMap = {
// server
@ -67,4 +66,3 @@ module.exports = function (path) {
apply(config, val, key);
}, {});
};


@ -1,10 +1,9 @@
const _ = require('lodash');
import _ from 'lodash';
const { isWorker } = require('cluster');
const { resolve } = require('path');
const cwd = process.cwd();
const src = require('requirefrom')('src');
const fromRoot = src('utils/fromRoot');
import fromRoot from '../../utils/fromRoot';
let canCluster;
try {
@ -61,7 +60,11 @@ function initServerSettings(opts, extraCliOptions) {
opts.pluginDir
)));
set('plugins.paths', [].concat(opts.pluginPath || []));
set('plugins.paths', _.compact([].concat(
get('plugins.paths'),
opts.pluginPath
)));
merge(extraCliOptions);
return settings;
@ -123,7 +126,7 @@ module.exports = function (program) {
}
let kbnServer = {};
const KbnServer = src('server/KbnServer');
const KbnServer = require('../../server/KbnServer');
try {
kbnServer = new KbnServer(settings);
await kbnServer.ready();


@ -1,83 +1,81 @@
define(function (require) {
return function GeoHashGridAggResponseFixture() {
import _ from 'lodash';
export default function GeoHashGridAggResponseFixture() {
var _ = require('lodash');
// for vis:
//
// vis = new Vis(indexPattern, {
// type: 'tile_map',
// aggs:[
// { schema: 'metric', type: 'avg', params: { field: 'bytes' } },
// { schema: 'split', type: 'terms', params: { field: '@tags', size: 10 } },
// { schema: 'segment', type: 'geohash_grid', params: { field: 'geo.coordinates', precision: 3 } }
// ],
// params: {
// isDesaturated: true,
// mapType: 'Scaled%20Circle%20Markers'
// },
// });
// for vis:
//
// vis = new Vis(indexPattern, {
// type: 'tile_map',
// aggs:[
// { schema: 'metric', type: 'avg', params: { field: 'bytes' } },
// { schema: 'split', type: 'terms', params: { field: '@tags', size: 10 } },
// { schema: 'segment', type: 'geohash_grid', params: { field: 'geo.coordinates', precision: 3 } }
// ],
// params: {
// isDesaturated: true,
// mapType: 'Scaled%20Circle%20Markers'
// },
// });
var geoHashCharts = _.union(
_.range(48, 57), // 0-9
_.range(65, 90), // A-Z
_.range(97, 122) // a-z
);
var geoHashCharts = _.union(
_.range(48, 57), // 0-9
_.range(65, 90), // A-Z
_.range(97, 122) // a-z
);
var totalDocCount = 0;
var totalDocCount = 0;
var tags = _.times(_.random(4, 20), function (i) {
// random number of tags
var docCount = 0;
var buckets = _.times(_.random(40, 200), function () {
return _.sample(geoHashCharts, 3).join('');
})
.sort()
.map(function (geoHash) {
var count = _.random(1, 5000);
var tags = _.times(_.random(4, 20), function (i) {
// random number of tags
var docCount = 0;
var buckets = _.times(_.random(40, 200), function () {
return _.sample(geoHashCharts, 3).join('');
})
.sort()
.map(function (geoHash) {
var count = _.random(1, 5000);
totalDocCount += count;
docCount += count;
return {
key: geoHash,
doc_count: count,
1: {
value: 2048 + i
}
};
});
totalDocCount += count;
docCount += count;
return {
key: 'tag ' + (i + 1),
doc_count: docCount,
3: {
buckets: buckets
},
key: geoHash,
doc_count: count,
1: {
value: 1000 + i
value: 2048 + i
}
};
});
return {
took: 3,
timed_out: false,
_shards: {
total: 4,
successful: 4,
failed: 0
key: 'tag ' + (i + 1),
doc_count: docCount,
3: {
buckets: buckets
},
hits: {
total: 298,
max_score: 0.0,
hits: []
},
aggregations: {
2: {
buckets: tags
}
1: {
value: 1000 + i
}
};
});
return {
took: 3,
timed_out: false,
_shards: {
total: 4,
successful: 4,
failed: 0
},
hits: {
total: 298,
max_score: 0.0,
hits: []
},
aggregations: {
2: {
buckets: tags
}
}
};
});
};


@ -1,22 +1,20 @@
define(function (require) {
var results = {};
var results = {};
results.timeSeries = {
data: {
ordered: {
date: true,
interval: 600000,
max: 1414437217559,
min: 1414394017559
}
},
label: 'apache',
value: 44,
point: {
label: 'apache',
x: 1414400400000,
y: 44,
y0: 0
results.timeSeries = {
data: {
ordered: {
date: true,
interval: 600000,
max: 1414437217559,
min: 1414394017559
}
};
});
},
label: 'apache',
value: 44,
point: {
label: 'apache',
x: 1414400400000,
y: 44,
y0: 0
}
};


@ -1,228 +1,226 @@
define(function (require) {
var data = { };
var data = { };
data.metricOnly = {
hits: { total: 1000, hits: [], max_score: 0 },
aggregations: {
agg_1: { value: 412032 },
}
};
data.metricOnly = {
hits: { total: 1000, hits: [], max_score: 0 },
aggregations: {
agg_1: { value: 412032 },
}
};
data.threeTermBuckets = {
hits: { total: 1000, hits: [], max_score: 0 },
aggregations: {
agg_2: {
buckets: [
{
key: 'png',
doc_count: 50,
agg_1: { value: 412032 },
agg_3: {
buckets: [
{
key: 'IT',
doc_count: 10,
agg_1: { value: 9299 },
agg_4: {
buckets: [
{ key: 'win', doc_count: 4, agg_1: { value: 0 } },
{ key: 'mac', doc_count: 6, agg_1: { value: 9299 } }
]
}
},
{
key: 'US',
doc_count: 20,
agg_1: { value: 8293 },
agg_4: {
buckets: [
{ key: 'linux', doc_count: 12, agg_1: { value: 3992 } },
{ key: 'mac', doc_count: 8, agg_1: { value: 3029 } }
]
}
data.threeTermBuckets = {
hits: { total: 1000, hits: [], max_score: 0 },
aggregations: {
agg_2: {
buckets: [
{
key: 'png',
doc_count: 50,
agg_1: { value: 412032 },
agg_3: {
buckets: [
{
key: 'IT',
doc_count: 10,
agg_1: { value: 9299 },
agg_4: {
buckets: [
{ key: 'win', doc_count: 4, agg_1: { value: 0 } },
{ key: 'mac', doc_count: 6, agg_1: { value: 9299 } }
]
}
]
}
},
{
key: 'css',
doc_count: 20,
agg_1: { value: 412032 },
agg_3: {
buckets: [
{
key: 'MX',
doc_count: 7,
agg_1: { value: 9299 },
agg_4: {
buckets: [
{ key: 'win', doc_count: 3, agg_1: { value: 4992 } },
{ key: 'mac', doc_count: 4, agg_1: { value: 5892 } }
]
}
},
{
key: 'US',
doc_count: 13,
agg_1: { value: 8293 },
agg_4: {
buckets: [
{ key: 'linux', doc_count: 12, agg_1: { value: 3992 } },
{ key: 'mac', doc_count: 1, agg_1: { value: 3029 } }
]
}
},
{
key: 'US',
doc_count: 20,
agg_1: { value: 8293 },
agg_4: {
buckets: [
{ key: 'linux', doc_count: 12, agg_1: { value: 3992 } },
{ key: 'mac', doc_count: 8, agg_1: { value: 3029 } }
]
}
]
}
},
{
key: 'html',
doc_count: 90,
agg_1: { value: 412032 },
agg_3: {
buckets: [
{
key: 'CN',
doc_count: 85,
agg_1: { value: 9299 },
agg_4: {
buckets: [
{ key: 'win', doc_count: 46, agg_1: { value: 4992 } },
{ key: 'mac', doc_count: 39, agg_1: { value: 5892 } }
]
}
},
{
key: 'FR',
doc_count: 15,
agg_1: { value: 8293 },
agg_4: {
buckets: [
{ key: 'win', doc_count: 3, agg_1: { value: 3992 } },
{ key: 'mac', doc_count: 12, agg_1: { value: 3029 } }
]
}
}
]
}
}
]
}
]
}
}
};
data.oneRangeBucket = {
'took': 35,
'timed_out': false,
'_shards': {
'total': 1,
'successful': 1,
'failed': 0
},
'hits': {
'total': 6039,
'max_score': 0,
'hits': []
},
'aggregations': {
'agg_2': {
'buckets': {
'0.0-1000.0': {
'from': 0,
'from_as_string': '0.0',
'to': 1000,
'to_as_string': '1000.0',
'doc_count': 606
},
'1000.0-2000.0': {
'from': 1000,
'from_as_string': '1000.0',
'to': 2000,
'to_as_string': '2000.0',
'doc_count': 298
},
{
key: 'css',
doc_count: 20,
agg_1: { value: 412032 },
agg_3: {
buckets: [
{
key: 'MX',
doc_count: 7,
agg_1: { value: 9299 },
agg_4: {
buckets: [
{ key: 'win', doc_count: 3, agg_1: { value: 4992 } },
{ key: 'mac', doc_count: 4, agg_1: { value: 5892 } }
]
}
},
{
key: 'US',
doc_count: 13,
agg_1: { value: 8293 },
agg_4: {
buckets: [
{ key: 'linux', doc_count: 12, agg_1: { value: 3992 } },
{ key: 'mac', doc_count: 1, agg_1: { value: 3029 } }
]
}
}
]
}
},
{
key: 'html',
doc_count: 90,
agg_1: { value: 412032 },
agg_3: {
buckets: [
{
key: 'CN',
doc_count: 85,
agg_1: { value: 9299 },
agg_4: {
buckets: [
{ key: 'win', doc_count: 46, agg_1: { value: 4992 } },
{ key: 'mac', doc_count: 39, agg_1: { value: 5892 } }
]
}
},
{
key: 'FR',
doc_count: 15,
agg_1: { value: 8293 },
agg_4: {
buckets: [
{ key: 'win', doc_count: 3, agg_1: { value: 3992 } },
{ key: 'mac', doc_count: 12, agg_1: { value: 3029 } }
]
}
}
]
}
}
]
}
}
};
data.oneRangeBucket = {
'took': 35,
'timed_out': false,
'_shards': {
'total': 1,
'successful': 1,
'failed': 0
},
'hits': {
'total': 6039,
'max_score': 0,
'hits': []
},
'aggregations': {
'agg_2': {
'buckets': {
'0.0-1000.0': {
'from': 0,
'from_as_string': '0.0',
'to': 1000,
'to_as_string': '1000.0',
'doc_count': 606
},
'1000.0-2000.0': {
'from': 1000,
'from_as_string': '1000.0',
'to': 2000,
'to_as_string': '2000.0',
'doc_count': 298
}
}
}
};
}
};
data.oneFilterBucket = {
'took': 11,
'timed_out': false,
'_shards': {
'total': 1,
'successful': 1,
'failed': 0
},
'hits': {
'total': 6005,
'max_score': 0,
'hits': []
},
'aggregations': {
'agg_2': {
'buckets': {
'_type:apache': {
'doc_count': 4844
},
'_type:nginx': {
'doc_count': 1161
}
data.oneFilterBucket = {
'took': 11,
'timed_out': false,
'_shards': {
'total': 1,
'successful': 1,
'failed': 0
},
'hits': {
'total': 6005,
'max_score': 0,
'hits': []
},
'aggregations': {
'agg_2': {
'buckets': {
'_type:apache': {
'doc_count': 4844
},
'_type:nginx': {
'doc_count': 1161
}
}
}
};
}
};
data.oneHistogramBucket = {
'took': 37,
'timed_out': false,
'_shards': {
'total': 6,
'successful': 6,
'failed': 0
},
'hits': {
'total': 49208,
'max_score': 0,
'hits': []
},
'aggregations': {
'agg_2': {
'buckets': [
{
'key_as_string': '2014-09-28T00:00:00.000Z',
'key': 1411862400000,
'doc_count': 8247
},
{
'key_as_string': '2014-09-29T00:00:00.000Z',
'key': 1411948800000,
'doc_count': 8184
},
{
'key_as_string': '2014-09-30T00:00:00.000Z',
'key': 1412035200000,
'doc_count': 8269
},
{
'key_as_string': '2014-10-01T00:00:00.000Z',
'key': 1412121600000,
'doc_count': 8141
},
{
'key_as_string': '2014-10-02T00:00:00.000Z',
'key': 1412208000000,
'doc_count': 8148
},
{
'key_as_string': '2014-10-03T00:00:00.000Z',
'key': 1412294400000,
'doc_count': 8219
}
]
}
data.oneHistogramBucket = {
'took': 37,
'timed_out': false,
'_shards': {
'total': 6,
'successful': 6,
'failed': 0
},
'hits': {
'total': 49208,
'max_score': 0,
'hits': []
},
'aggregations': {
'agg_2': {
'buckets': [
{
'key_as_string': '2014-09-28T00:00:00.000Z',
'key': 1411862400000,
'doc_count': 8247
},
{
'key_as_string': '2014-09-29T00:00:00.000Z',
'key': 1411948800000,
'doc_count': 8184
},
{
'key_as_string': '2014-09-30T00:00:00.000Z',
'key': 1412035200000,
'doc_count': 8269
},
{
'key_as_string': '2014-10-01T00:00:00.000Z',
'key': 1412121600000,
'doc_count': 8141
},
{
'key_as_string': '2014-10-02T00:00:00.000Z',
'key': 1412208000000,
'doc_count': 8148
},
{
'key_as_string': '2014-10-03T00:00:00.000Z',
'key': 1412294400000,
'doc_count': 8219
}
]
}
};
}
};
return data;
});
export default data;


@ -1,22 +1,20 @@
define(function (require) {
var _ = require('lodash');
var longString = Array(200).join('_');
import _ from 'lodash';
var longString = Array(200).join('_');
return function (id, mapping) {
function fakeVals(type) {
return _.mapValues(mapping, function (f, c) {
return c + '_' + type + '_' + id + longString;
});
}
export default function (id, mapping) {
function fakeVals(type) {
return _.mapValues(mapping, function (f, c) {
return c + '_' + type + '_' + id + longString;
});
}
return {
_id: id,
_index: 'test',
_source: fakeVals('original'),
sort: [id],
$$_formatted: fakeVals('formatted'),
$$_partialFormatted: fakeVals('formatted'),
$$_flattened: fakeVals('_flattened')
};
return {
_id: id,
_index: 'test',
_source: fakeVals('original'),
sort: [id],
$$_formatted: fakeVals('formatted'),
$$_partialFormatted: fakeVals('formatted'),
$$_flattened: fakeVals('_flattened')
};
});
};


@ -1,62 +1,60 @@
define(function (require) {
return {
test: {
mappings: {
testType: {
'baz': {
full_name: 'baz',
mapping: {
bar: {
type: 'long'
}
export default {
test: {
mappings: {
testType: {
'baz': {
full_name: 'baz',
mapping: {
bar: {
type: 'long'
}
},
'foo.bar': {
full_name: 'foo.bar',
mapping: {
bar: {
type: 'string',
}
}
},
'foo.bar': {
full_name: 'foo.bar',
mapping: {
bar: {
type: 'string',
}
},
'not_analyzed_field': {
full_name: 'not_analyzed_field',
mapping: {
bar: {
type: 'string',
index: 'not_analyzed'
}
}
},
'not_analyzed_field': {
full_name: 'not_analyzed_field',
mapping: {
bar: {
type: 'string',
index: 'not_analyzed'
}
},
'index_no_field': {
full_name: 'index_no_field',
mapping: {
bar: {
type: 'string',
index: 'no'
}
}
},
'index_no_field': {
full_name: 'index_no_field',
mapping: {
bar: {
type: 'string',
index: 'no'
}
},
_id: {
full_name: '_id',
mapping: {
_id: {
store: false,
index: 'no',
}
}
},
_id: {
full_name: '_id',
mapping: {
_id: {
store: false,
index: 'no',
}
},
_timestamp: {
full_name: '_timestamp',
mapping: {
_timestamp: {
store: true,
index: 'no',
}
}
},
_timestamp: {
full_name: '_timestamp',
mapping: {
_timestamp: {
store: true,
index: 'no',
}
}
}
}
}
};
});
}
};


@ -1,7 +1,5 @@
define(function (require) {
return {
meta: {
index: 'logstash-*'
}
};
});
export default {
meta: {
index: 'logstash-*'
}
};


@ -1,24 +1,22 @@
define(function (require) {
var _ = require('lodash');
return function fitsFixture() {
return _.map([
{_source: {'@timestamp': 0, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 10, request: 'foo'}},
{_source: {'@timestamp': 1, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 20, request: 'bar'}},
{_source: {'@timestamp': 2, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 30, request: 'bar'}},
{_source: {'@timestamp': 3, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 30, request: 'baz'}},
{_source: {'@timestamp': 4, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 30, request: 'baz'}},
{_source: {'@timestamp': 5, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 30, request: 'baz'}},
{_source: {'@timestamp': 6, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 40.141592, request: 'bat'}},
{_source: {'@timestamp': 7, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 40.141592, request: 'bat'}},
{_source: {'@timestamp': 8, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 40.141592, request: 'bat'}},
{_source: {'@timestamp': 9, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 40.141592, request: 'bat'}},
], function (p, i) {
return _.merge({}, p, {
_score: 1,
_id: 1000 + i,
_type: 'test',
_index: 'test-index'
});
import _ from 'lodash';
export default function fitsFixture() {
return _.map([
{_source: {'@timestamp': 0, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 10, request: 'foo'}},
{_source: {'@timestamp': 1, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 20, request: 'bar'}},
{_source: {'@timestamp': 2, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 30, request: 'bar'}},
{_source: {'@timestamp': 3, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 30, request: 'baz'}},
{_source: {'@timestamp': 4, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 30, request: 'baz'}},
{_source: {'@timestamp': 5, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 30, request: 'baz'}},
{_source: {'@timestamp': 6, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 40.141592, request: 'bat'}},
{_source: {'@timestamp': 7, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 40.141592, request: 'bat'}},
{_source: {'@timestamp': 8, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 40.141592, request: 'bat'}},
{_source: {'@timestamp': 9, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 40.141592, request: 'bat'}},
], function (p, i) {
return _.merge({}, p, {
_score: 1,
_id: 1000 + i,
_type: 'test',
_index: 'test-index'
});
};
});
});
};


@ -1,37 +1,35 @@
define(function (require) {
function stubbedLogstashFields() {
var sourceData = [
{ name: 'bytes', type: 'number', indexed: true, analyzed: true, sortable: true, filterable: true, count: 10 },
{ name: 'ssl', type: 'boolean', indexed: true, analyzed: true, sortable: true, filterable: true, count: 20 },
{ name: '@timestamp', type: 'date', indexed: true, analyzed: true, sortable: true, filterable: true, count: 30 },
{ name: 'time', type: 'date', indexed: true, analyzed: true, sortable: true, filterable: true, count: 30 },
{ name: '@tags', type: 'string', indexed: true, analyzed: true, sortable: true, filterable: true },
{ name: 'utc_time', type: 'date', indexed: true, analyzed: true, sortable: true, filterable: true },
{ name: 'phpmemory', type: 'number', indexed: true, analyzed: true, sortable: true, filterable: true },
{ name: 'ip', type: 'ip', indexed: true, analyzed: true, sortable: true, filterable: true },
{ name: 'request_body', type: 'attachment', indexed: true, analyzed: true, sortable: false, filterable: true },
{ name: 'point', type: 'geo_point', indexed: true, analyzed: true, sortable: false, filterable: false },
{ name: 'area', type: 'geo_shape', indexed: true, analyzed: true, sortable: true, filterable: false },
{ name: 'hashed', type: 'murmur3', indexed: true, analyzed: true, sortable: false, filterable: false },
{ name: 'geo.coordinates', type: 'geo_point', indexed: true, analyzed: true, sortable: false, filterable: true },
{ name: 'extension', type: 'string', indexed: true, analyzed: true, sortable: true, filterable: true },
{ name: 'machine.os', type: 'string', indexed: true, analyzed: true, sortable: true, filterable: true },
{ name: 'geo.src', type: 'string', indexed: true, analyzed: true, sortable: true, filterable: true },
{ name: '_type', type: 'string', indexed: true, analyzed: true, sortable: true, filterable: true },
{ name: '_id', type: 'string', indexed: false, analyzed: false, sortable: false, filterable: true},
{ name: '_source', type: 'string', indexed: false, analyzed: false, sortable: false, filterable: false},
{ name: 'custom_user_field', type: 'conflict', indexed: false, analyzed: false, sortable: false, filterable: true },
{ name: 'script string', type: 'string', scripted: true, script: '\'i am a string\'', lang: 'expression' },
{ name: 'script number', type: 'number', scripted: true, script: '1234', lang: 'expression' },
{ name: 'script murmur3', type: 'murmur3', scripted: true, script: '1234', lang: 'expression'},
].map(function (field) {
field.count = field.count || 0;
field.scripted = field.scripted || false;
return field;
});
function stubbedLogstashFields() {
var sourceData = [
{ name: 'bytes', type: 'number', indexed: true, analyzed: true, sortable: true, filterable: true, count: 10 },
{ name: 'ssl', type: 'boolean', indexed: true, analyzed: true, sortable: true, filterable: true, count: 20 },
{ name: '@timestamp', type: 'date', indexed: true, analyzed: true, sortable: true, filterable: true, count: 30 },
{ name: 'time', type: 'date', indexed: true, analyzed: true, sortable: true, filterable: true, count: 30 },
{ name: '@tags', type: 'string', indexed: true, analyzed: true, sortable: true, filterable: true },
{ name: 'utc_time', type: 'date', indexed: true, analyzed: true, sortable: true, filterable: true },
{ name: 'phpmemory', type: 'number', indexed: true, analyzed: true, sortable: true, filterable: true },
{ name: 'ip', type: 'ip', indexed: true, analyzed: true, sortable: true, filterable: true },
{ name: 'request_body', type: 'attachment', indexed: true, analyzed: true, sortable: false, filterable: true },
{ name: 'point', type: 'geo_point', indexed: true, analyzed: true, sortable: false, filterable: false },
{ name: 'area', type: 'geo_shape', indexed: true, analyzed: true, sortable: true, filterable: false },
{ name: 'hashed', type: 'murmur3', indexed: true, analyzed: true, sortable: false, filterable: false },
{ name: 'geo.coordinates', type: 'geo_point', indexed: true, analyzed: true, sortable: false, filterable: true },
{ name: 'extension', type: 'string', indexed: true, analyzed: true, sortable: true, filterable: true },
{ name: 'machine.os', type: 'string', indexed: true, analyzed: true, sortable: true, filterable: true },
{ name: 'geo.src', type: 'string', indexed: true, analyzed: true, sortable: true, filterable: true },
{ name: '_type', type: 'string', indexed: true, analyzed: true, sortable: true, filterable: true },
{ name: '_id', type: 'string', indexed: false, analyzed: false, sortable: false, filterable: true},
{ name: '_source', type: 'string', indexed: false, analyzed: false, sortable: false, filterable: false},
{ name: 'custom_user_field', type: 'conflict', indexed: false, analyzed: false, sortable: false, filterable: true },
{ name: 'script string', type: 'string', scripted: true, script: '\'i am a string\'', lang: 'expression' },
{ name: 'script number', type: 'number', scripted: true, script: '1234', lang: 'expression' },
{ name: 'script murmur3', type: 'murmur3', scripted: true, script: '1234', lang: 'expression'},
].map(function (field) {
field.count = field.count || 0;
field.scripted = field.scripted || false;
return field;
});
return sourceData;
}
return sourceData;
}
return stubbedLogstashFields;
});
export default stubbedLogstashFields;
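
Every file below this one repeats the same mechanical conversion: the AMD wrapper is dropped, lazy require calls become static imports, and the wrapper's return value becomes the default export. A minimal before/after sketch (module name illustrative):

// before: AMD wrapper with a lazy require
define(function (require) {
  var _ = require('lodash');
  function stubbedThings() { return _.range(3); }
  return stubbedThings;
});

// after: static import and a default export
import _ from 'lodash';
function stubbedThings() { return _.range(3); }
export default stubbedThings;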

View file

@ -1,40 +1,38 @@
define(function (require) {
return {
test: {
mappings: {
testType: {
'baz': {
full_name: 'baz',
mapping: {
bar: {
type: 'long'
}
}
},
'foo.bar': {
full_name: 'foo.bar',
mapping: {
bar: {
type: 'string'
}
export default {
test: {
mappings: {
testType: {
'baz': {
full_name: 'baz',
mapping: {
bar: {
type: 'long'
}
}
}
}
},
duplicates: {
mappings: {
testType: {
'baz': {
full_name: 'baz',
mapping: {
bar: {
type: 'date'
}
},
'foo.bar': {
full_name: 'foo.bar',
mapping: {
bar: {
type: 'string'
}
}
}
}
}
};
});
},
duplicates: {
mappings: {
testType: {
'baz': {
full_name: 'baz',
mapping: {
bar: {
type: 'date'
}
}
}
}
}
}
};
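
The two index entries above intentionally disagree on the type of 'baz' ('long' under test, 'date' under duplicates). A sketch of how a consumer could collapse that disagreement into the 'conflict' pseudo-type seen in the logstash_fields fixture; the import path and traversal are assumptions, not the actual field-mapper code:

import _ from 'lodash';
import mappings from 'fixtures/field_mapping'; // path assumed
function typeAcrossIndices(fieldName) {
  var types = _(mappings)
    .map(function (index) {
      return _.get(index, ['mappings', 'testType', fieldName, 'mapping', 'bar', 'type']);
    })
    .compact().uniq().value();
  return types.length > 1 ? 'conflict' : types[0];
}
typeAcrossIndices('baz'); // => 'conflict'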

View file

@ -1,17 +1,16 @@
define(function (require) {
var _ = require('lodash');
var sinon = require('auto-release-sinon');
import _ from 'lodash';
import sinon from 'auto-release-sinon';
import FixturesStubbedLogstashIndexPatternProvider from 'fixtures/stubbed_logstash_index_pattern';
return function (Private, Promise) {
var indexPatterns = Private(require('fixtures/stubbed_logstash_index_pattern'));
var getIndexPatternStub = sinon.stub();
getIndexPatternStub.returns(Promise.resolve(indexPatterns));
export default function (Private, Promise) {
var indexPatterns = Private(FixturesStubbedLogstashIndexPatternProvider);
var getIndexPatternStub = sinon.stub();
getIndexPatternStub.returns(Promise.resolve(indexPatterns));
var courier = {
indexPatterns: { get: getIndexPatternStub },
getStub: getIndexPatternStub
};
return courier;
var courier = {
indexPatterns: { get: getIndexPatternStub },
getStub: getIndexPatternStub
};
});
return courier;
};
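
The stub hands back its sinon stub as getStub so a test can assert on the lookup as well as consume the resolved index pattern. A usage sketch, assuming the usual Private/expect.js test harness is in scope (provider name illustrative):

var courier = Private(FixturesStubbedCourierProvider); // name assumed
courier.indexPatterns.get('logstash-*').then(function (pattern) {
  expect(pattern.id).to.be('logstash-*');
});
expect(courier.getStub.callCount).to.be(1);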

View file

@ -1,19 +1,17 @@
define(function (require) {
var _ = require('lodash');
var sinon = require('auto-release-sinon');
import _ from 'lodash';
import sinon from 'auto-release-sinon';
function MockState(defaults) {
this.on = _.noop;
this.off = _.noop;
this.save = sinon.stub();
this.replace = sinon.stub();
_.assign(this, defaults);
}
function MockState(defaults) {
this.on = _.noop;
this.off = _.noop;
this.save = sinon.stub();
this.replace = sinon.stub();
_.assign(this, defaults);
}
MockState.prototype.resetStub = function () {
this.save = sinon.stub();
return this;
};
MockState.prototype.resetStub = function () {
this.save = sinon.stub();
return this;
};
return MockState;
});
export default MockState;
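
save and replace are plain sinon stubs, so call counts accumulate across assertions; resetStub swaps in a fresh save stub when a test needs a clean slate. A sketch:

var state = new MockState({ index: 'logstash-*' });
state.save();
expect(state.save.callCount).to.be(1);
state.resetStub(); // only save is replaced; the replace stub keeps its history
expect(state.save.callCount).to.be(0);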

View file

@ -1,15 +1,13 @@
define(function (require) {
var _ = require('lodash');
var keys = {};
return {
get: function (path, def) {
return keys[path] == null ? def : keys[path];
},
set: function (path, val) {
keys[path] = val;
return val;
},
on: _.noop,
off: _.noop
}
})
import _ from 'lodash';
var keys = {};
export default {
get: function (path, def) {
return keys[path] == null ? def : keys[path];
},
set: function (path, val) {
keys[path] = val;
return val;
},
on: _.noop,
off: _.noop
};
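
Note the == null check: both an unset key and an explicitly stored null fall back to the caller's default, and set returns the stored value. A usage sketch (import path assumed):

import storage from 'fixtures/storage'; // path assumed
storage.get('vis:height', 500); // => 500, key unset
storage.set('vis:height', 800); // => 800
storage.get('vis:height', 500); // => 800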

View file

@ -1,227 +1,225 @@
define(function (require) {
/*
Extensions:
gif: 5
html: 8
php: 5 (thus 5 with phpmemory fields)
png: 2
/*
Extensions:
gif: 5
html: 8
php: 5 (thus 5 with phpmemory fields)
png: 2
_type:
apache: 18
nginx: 2
_type:
apache: 18
nginx: 2
Bytes (all unique except):
374: 2
Bytes (all unique except):
374: 2
All have the same index, ids are unique
*/
All have the same index, ids are unique
*/
return [
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '61',
'_score': 1,
'_source': {
'extension': 'html',
'bytes': 360.20000000000005
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '388',
'_score': 1,
'_source': {
'extension': 'gif',
'bytes': 5848.700000000001
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '403',
'_score': 1,
'_source': {
'extension': 'png',
'bytes': 841.6
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '415',
'_score': 1,
'_source': {
'extension': 'html',
'bytes': 1626.4
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '460',
'_score': 1,
'_source': {
'extension': 'php',
'bytes': 2070.6,
'phpmemory': 276080
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '496',
'_score': 1,
'_source': {
'extension': 'gif',
'bytes': 8421.6
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '511',
'_score': 1,
'_source': {
'extension': 'html',
'bytes': 994.8000000000001
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '701',
'_score': 1,
'_source': {
'extension': 'html',
'bytes': 374
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '838',
'_score': 1,
'_source': {
'extension': 'php',
'bytes': 506.09999999999997,
'phpmemory': 67480
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '890',
'_score': 1,
'_source': {
'extension': 'php',
'bytes': 506.09999999999997,
'phpmemory': 67480
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'nginx',
'_id': '927',
'_score': 1,
'_source': {
'extension': 'php',
'bytes': 2591.1,
'phpmemory': 345480
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '1034',
'_score': 1,
'_source': {
'extension': 'html',
'bytes': 1450
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '1142',
'_score': 1,
'_source': {
'extension': 'php',
'bytes': 1803.8999999999999,
'phpmemory': 240520
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '1180',
'_score': 1,
'_source': {
'extension': 'html',
'bytes': 1626.4
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'nginx',
'_id': '1224',
'_score': 1,
'_source': {
'extension': 'gif',
'bytes': 10617.2
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '1243',
'_score': 1,
'_source': {
'extension': 'gif',
'bytes': 10961.5
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '1510',
'_score': 1,
'_source': {
'extension': 'html',
'bytes': 382.8
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '1628',
'_score': 1,
'_source': {
'extension': 'html',
'bytes': 374
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '1729',
'_score': 1,
'_source': {
'extension': 'png',
'bytes': 3059.2000000000003
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '1945',
'_score': 1,
'_source': {
'extension': 'gif',
'bytes': 10617.2
}
export default [
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '61',
'_score': 1,
'_source': {
'extension': 'html',
'bytes': 360.20000000000005
}
];
});
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '388',
'_score': 1,
'_source': {
'extension': 'gif',
'bytes': 5848.700000000001
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '403',
'_score': 1,
'_source': {
'extension': 'png',
'bytes': 841.6
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '415',
'_score': 1,
'_source': {
'extension': 'html',
'bytes': 1626.4
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '460',
'_score': 1,
'_source': {
'extension': 'php',
'bytes': 2070.6,
'phpmemory': 276080
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '496',
'_score': 1,
'_source': {
'extension': 'gif',
'bytes': 8421.6
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '511',
'_score': 1,
'_source': {
'extension': 'html',
'bytes': 994.8000000000001
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '701',
'_score': 1,
'_source': {
'extension': 'html',
'bytes': 374
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '838',
'_score': 1,
'_source': {
'extension': 'php',
'bytes': 506.09999999999997,
'phpmemory': 67480
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '890',
'_score': 1,
'_source': {
'extension': 'php',
'bytes': 506.09999999999997,
'phpmemory': 67480
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'nginx',
'_id': '927',
'_score': 1,
'_source': {
'extension': 'php',
'bytes': 2591.1,
'phpmemory': 345480
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '1034',
'_score': 1,
'_source': {
'extension': 'html',
'bytes': 1450
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '1142',
'_score': 1,
'_source': {
'extension': 'php',
'bytes': 1803.8999999999999,
'phpmemory': 240520
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '1180',
'_score': 1,
'_source': {
'extension': 'html',
'bytes': 1626.4
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'nginx',
'_id': '1224',
'_score': 1,
'_source': {
'extension': 'gif',
'bytes': 10617.2
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '1243',
'_score': 1,
'_source': {
'extension': 'gif',
'bytes': 10961.5
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '1510',
'_score': 1,
'_source': {
'extension': 'html',
'bytes': 382.8
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '1628',
'_score': 1,
'_source': {
'extension': 'html',
'bytes': 374
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '1729',
'_score': 1,
'_source': {
'extension': 'png',
'bytes': 3059.2000000000003
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '1945',
'_score': 1,
'_source': {
'extension': 'gif',
'bytes': 10617.2
}
}
];
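
The distribution documented in the comment at the top of this fixture is easy to spot-check; a sketch:

import _ from 'lodash';
import hits from 'fixtures/real_hits';
_.countBy(hits, function (hit) { return hit._source.extension; });
// => { html: 8, gif: 5, php: 5, png: 2 }
_.countBy(hits, '_type');
// => { apache: 18, nginx: 2 }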

View file

@ -1,18 +1,16 @@
define(function (require) {
var hits = require('fixtures/real_hits');
import hits from 'fixtures/real_hits';
return {
took: 73,
timed_out: false,
_shards: {
total: 144,
successful: 144,
failed: 0
},
hits: {
total : 49487,
max_score : 1.0,
hits: hits
}
};
});
export default {
took: 73,
timed_out: false,
_shards: {
total: 144,
successful: 144,
failed: 0
},
hits: {
total : 49487,
max_score : 1.0,
hits: hits
}
};

View file

@ -1,22 +1,22 @@
define(function (require) {
function stubbedDocSourceResponse(Private) {
var mockLogstashFields = Private(require('fixtures/logstash_fields'));
import FixturesLogstashFieldsProvider from 'fixtures/logstash_fields';
return function (id, index) {
index = index || '.kibana';
return {
_id: id,
_index: index,
_type: 'index-pattern',
_version: 2,
found: true,
_source: {
customFormats: '{}',
fields: JSON.stringify(mockLogstashFields)
}
};
function stubbedDocSourceResponse(Private) {
var mockLogstashFields = Private(FixturesLogstashFieldsProvider);
return function (id, index) {
index = index || '.kibana';
return {
_id: id,
_index: index,
_type: 'index-pattern',
_version: 2,
found: true,
_source: {
customFormats: '{}',
fields: JSON.stringify(mockLogstashFields)
}
};
}
};
}
return stubbedDocSourceResponse;
});
export default stubbedDocSourceResponse;
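
A usage sketch, again assuming the Private harness: the factory defaults the index to '.kibana' and serializes the logstash field fixtures into _source.fields, so consumers must JSON.parse them back out (provider name illustrative):

var stubbedDocSourceResponse = Private(FixturesStubbedDocSourceResponseProvider); // name assumed
var resp = stubbedDocSourceResponse('logstash-*');
resp._index; // => '.kibana' (default)
JSON.parse(resp._source.fields).length; // the stubbed logstash fields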

View file

@ -1,24 +1,25 @@
define(function (require) {
return function stubbedLogstashIndexPatternService(Private) {
var StubIndexPattern = Private(require('testUtils/stub_index_pattern'));
var fieldTypes = Private(require('ui/index_patterns/_field_types'));
var mockLogstashFields = Private(require('fixtures/logstash_fields'));
import _ from 'lodash';
import TestUtilsStubIndexPatternProvider from 'testUtils/stub_index_pattern';
import IndexPatternsFieldTypesProvider from 'ui/index_patterns/_field_types';
import FixturesLogstashFieldsProvider from 'fixtures/logstash_fields';
export default function stubbedLogstashIndexPatternService(Private) {
var StubIndexPattern = Private(TestUtilsStubIndexPatternProvider);
var fieldTypes = Private(IndexPatternsFieldTypesProvider);
var mockLogstashFields = Private(FixturesLogstashFieldsProvider);
var _ = require('lodash');
var fields = mockLogstashFields.map(function (field) {
field.displayName = field.name;
var type = fieldTypes.byName[field.type];
if (!type) throw new TypeError('unknown type ' + field.type);
if (!_.has(field, 'sortable')) field.sortable = type.sortable;
if (!_.has(field, 'filterable')) field.filterable = type.filterable;
return field;
});
var fields = mockLogstashFields.map(function (field) {
field.displayName = field.name;
var type = fieldTypes.byName[field.type];
if (!type) throw new TypeError('unknown type ' + field.type);
if (!_.has(field, 'sortable')) field.sortable = type.sortable;
if (!_.has(field, 'filterable')) field.filterable = type.filterable;
return field;
});
var indexPattern = new StubIndexPattern('logstash-*', 'time', fields);
indexPattern.id = 'logstash-*';
var indexPattern = new StubIndexPattern('logstash-*', 'time', fields);
indexPattern.id = 'logstash-*';
return indexPattern;
return indexPattern;
};
});
};
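
All of these fixtures lean on Kibana's Private convention: the default export is a provider function whose arguments are injected, and Private caches one instance per provider, so every test that asks gets the same stub. A minimal sketch of writing one:

import FixturesStubbedLogstashIndexPatternProvider from 'fixtures/stubbed_logstash_index_pattern';
export default function MyFixtureProvider(Private, Promise) {
  var indexPattern = Private(FixturesStubbedLogstashIndexPatternProvider);
  return function load() {
    return Promise.resolve(indexPattern); // same cached instance every call
  };
}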

View file

@ -1,39 +1,38 @@
define(function (require) {
var sinon = require('auto-release-sinon');
var searchResponse = require('fixtures/search_response');
import sinon from 'auto-release-sinon';
import searchResponse from 'fixtures/search_response';
import FixturesStubbedLogstashIndexPatternProvider from 'fixtures/stubbed_logstash_index_pattern';
return function stubSearchSource(Private, $q, Promise) {
var deferredResult = $q.defer();
var indexPattern = Private(require('fixtures/stubbed_logstash_index_pattern'));
export default function stubSearchSource(Private, $q, Promise) {
var deferredResult = $q.defer();
var indexPattern = Private(FixturesStubbedLogstashIndexPatternProvider);
return {
sort: sinon.spy(),
size: sinon.spy(),
fetch: sinon.spy(),
destroy: sinon.spy(),
get: function (param) {
switch (param) {
case 'index':
return indexPattern;
default:
throw new Error('Param "' + param + '" is not implemented in the stubbed search source');
}
},
crankResults: function () {
deferredResult.resolve(searchResponse);
deferredResult = $q.defer();
},
onResults: function () {
// Up to the test to resolve this manually
// For example:
// someHandler.resolve(require('fixtures/search_response'))
return deferredResult.promise;
},
onError: function () { return $q.defer().promise; },
_flatten: function () {
return Promise.resolve({ index: indexPattern, body: {} });
return {
sort: sinon.spy(),
size: sinon.spy(),
fetch: sinon.spy(),
destroy: sinon.spy(),
get: function (param) {
switch (param) {
case 'index':
return indexPattern;
default:
throw new Error('Param "' + param + '" is not implemented in the stubbed search source');
}
};
},
crankResults: function () {
deferredResult.resolve(searchResponse);
deferredResult = $q.defer();
},
onResults: function () {
// Up to the test to resolve this manually
// For example:
// someHandler.resolve(require('fixtures/search_response'))
return deferredResult.promise;
},
onError: function () { return $q.defer().promise; },
_flatten: function () {
return Promise.resolve({ index: indexPattern, body: {} });
}
};
});
};
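
crankResults resolves the pending onResults promise and immediately re-arms a fresh deferred, letting a test step through "fetches" one at a time. A sketch, assuming an Angular test where $rootScope.$apply flushes $q (provider name illustrative):

var searchSource = Private(FixturesStubbedSearchSourceProvider); // name assumed
searchSource.onResults().then(function (resp) {
  expect(resp.hits.total).to.be(49487);
});
searchSource.crankResults(); // resolve with the canned search_response
$rootScope.$apply(); // $q promises only settle inside a digest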

View file

@ -1,21 +1,19 @@
define(function (require) {
var sinon = require('auto-release-sinon');
import sinon from 'auto-release-sinon';
function MockMap(container, chartData, params) {
this.container = container;
this.chartData = chartData;
this.params = params;
function MockMap(container, chartData, params) {
this.container = container;
this.chartData = chartData;
this.params = params;
// stub required methods
this.addStubs();
}
// stub required methods
this.addStubs();
}
MockMap.prototype.addStubs = function () {
this.addTitle = sinon.stub();
this.addFitControl = sinon.stub();
this.addBoundingControl = sinon.stub();
this.destroy = sinon.stub();
};
MockMap.prototype.addStubs = function () {
this.addTitle = sinon.stub();
this.addFitControl = sinon.stub();
this.addBoundingControl = sinon.stub();
this.destroy = sinon.stub();
};
return MockMap;
});
export default MockMap;

View file

@ -1,7 +1,20 @@
var $ = require('jquery');
var _ = require('lodash');
import _ from 'lodash';
import $ from 'jquery';
import VislibVisProvider from 'ui/vislib/vis';
var $visCanvas = $('<div>')
.attr('id', 'vislib-vis-fixtures')
.css({
height: '500px',
width: '1024px',
display: 'flex',
position: 'fixed',
top: '0px',
left: '0px',
overflow: 'hidden'
})
.appendTo('body');
var $visCanvas = $('<div>').attr('id', 'vislib-vis-fixtures').appendTo('body');
var count = 0;
var visHeight = $visCanvas.height();
@ -19,7 +32,7 @@ afterEach(function () {
module.exports = function VislibFixtures(Private) {
return function (visLibParams) {
var Vis = Private(require('ui/vislib/vis'));
var Vis = Private(VislibVisProvider);
return new Vis($visCanvas.new(), _.defaults({}, visLibParams || {}, {
shareYAxis: true,
addTooltip: true,
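
_.defaults only fills keys missing from earlier arguments, so per-test visLibParams always win over the baseline config above; a sketch:

import _ from 'lodash';
_.defaults({}, { addTooltip: false }, { shareYAxis: true, addTooltip: true });
// => { addTooltip: false, shareYAxis: true }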

View file

@ -1,4 +1,4 @@
var moment = require('moment');
import moment from 'moment';
module.exports = {
'columns': [

View file

@ -1,4 +1,4 @@
var moment = require('moment');
import moment from 'moment';
module.exports = {
'rows': [

View file

@ -1,4 +1,4 @@
var moment = require('moment');
import moment from 'moment';
module.exports = {
'label': '',

View file

@ -1,4 +1,4 @@
var moment = require('moment');
import moment from 'moment';
module.exports = {
'label': '',

View file

@ -1,4 +1,4 @@
var moment = require('moment');
import moment from 'moment';
module.exports = {
'label': '',

View file

@ -1,4 +1,4 @@
var moment = require('moment');
import moment from 'moment';
module.exports = {
'label': '',

View file

@ -1,4 +1,4 @@
var _ = require('lodash');
import _ from 'lodash';
module.exports = {
'columns': [

View file

@ -1,4 +1,4 @@
var _ = require('lodash');
import _ from 'lodash';
module.exports = {
'rows': [

View file

@ -1,4 +1,4 @@
var _ = require('lodash');
import _ from 'lodash';
module.exports = {
'label': '',

View file

@ -1,4 +1,4 @@
var _ = require('lodash');
import _ from 'lodash';
module.exports = {
'columns': [
{

View file

@ -1,4 +1,4 @@
var _ = require('lodash');
import _ from 'lodash';
module.exports = {
'valueFormatter': _.identity,

View file

@ -1,4 +1,4 @@
var _ = require('lodash');
import _ from 'lodash';
module.exports = {
'rows': [

View file

@ -1,4 +1,4 @@
var _ = require('lodash');
import _ from 'lodash';
module.exports = {
'columns': [

View file

@ -1,4 +1,4 @@
var _ = require('lodash');
import _ from 'lodash';
module.exports = {
'rows': [

View file

@ -1,4 +1,4 @@
var _ = require('lodash');
import _ from 'lodash';
module.exports = {
'label': '',

View file

@ -1,4 +1,4 @@
var _ = require('lodash');
import _ from 'lodash';
module.exports = {
'label': '',

View file

@ -1,4 +1,4 @@
var _ = require('lodash');
import _ from 'lodash';
module.exports = {
'label': '',

View file

@ -1,4 +1,4 @@
var _ = require('lodash');
import _ from 'lodash';
module.exports = {
'columns': [

View file

@ -1,4 +1,4 @@
var _ = require('lodash');
import _ from 'lodash';
module.exports = {
'rows': [

View file

@ -1,4 +1,4 @@
var _ = require('lodash');
import _ from 'lodash';
module.exports = {
'label': '',

View file

@ -1,4 +1,4 @@
var _ = require('lodash');
import _ from 'lodash';
module.exports = {
'columns': [

View file

@ -1,4 +1,4 @@
var _ = require('lodash');
import _ from 'lodash';
module.exports = {
'rows': [

View file

@ -1,4 +1,4 @@
var _ = require('lodash');
import _ from 'lodash';
module.exports = {
'label': '',

View file

@ -1,4 +1,4 @@
var moment = require('moment');
import moment from 'moment';
module.exports = {
'label': '',

View file

@ -1,4 +1,4 @@
var _ = require('lodash');
import _ from 'lodash';
module.exports = {
'columns': [

View file

@ -1,4 +1,4 @@
var _ = require('lodash');
import _ from 'lodash';
module.exports = {
'rows': [

View file

@ -1,4 +1,4 @@
var _ = require('lodash');
import _ from 'lodash';
module.exports = {
'label': '',

View file

@ -1,17 +1,17 @@
let { inherits } = require('util');
let { defaults } = require('lodash');
let { resolve } = require('path');
let { writeFile } = require('fs');
let webpack = require('webpack');
var Boom = require('boom');
let DirectoryNameAsMain = require('webpack-directory-name-as-main');
let ExtractTextPlugin = require('extract-text-webpack-plugin');
var CommonsChunkPlugin = require('webpack/lib/optimize/CommonsChunkPlugin');
import webpack from 'webpack';
import Boom from 'boom';
import DirectoryNameAsMain from 'webpack-directory-name-as-main';
import ExtractTextPlugin from 'extract-text-webpack-plugin';
import CommonsChunkPlugin from 'webpack/lib/optimize/CommonsChunkPlugin';
let utils = require('requirefrom')('src/utils');
let fromRoot = utils('fromRoot');
let babelOptions = require('./babelOptions');
import fromRoot from '../utils/fromRoot';
import babelOptions from './babelOptions';
import { inherits } from 'util';
import { defaults, transform } from 'lodash';
import { resolve } from 'path';
import { writeFile } from 'fs';
let babelExclude = [/[\/\\](webpackShims|node_modules|bower_components)[\/\\]/];
import pkg from '../../package.json';
class BaseOptimizer {
constructor(opts) {
@ -133,12 +133,20 @@ class BaseOptimizer {
resolve: {
extensions: ['.js', '.json', '.jsx', '.less', ''],
postfixes: [''],
modulesDirectories: ['webpackShims', 'node_modules'],
modulesDirectories: ['webpackShims', 'node_modules', fromRoot('webpackShims'), fromRoot('node_modules')],
loaderPostfixes: ['-loader', ''],
root: fromRoot('.'),
alias: this.env.aliases,
unsafeCache: this.unsafeCache,
},
resolveLoader: {
alias: transform(pkg.dependencies, function (aliases, version, name) {
if (name.endsWith('-loader')) {
aliases[name.replace(/-loader$/, '')] = require.resolve(name);
}
}, {})
}
};
}
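
The new resolveLoader block walks package.json dependencies and aliases every '*-loader' package to its bare name, so loader configs can say 'babel' instead of 'babel-loader'. A sketch of the reduction (versions illustrative; require.resolve assumes the package is installed):

import { transform } from 'lodash';
var deps = { 'babel-loader': '5.8.38', lodash: '3.10.1' };
transform(deps, function (aliases, version, name) {
  if (name.endsWith('-loader')) {
    aliases[name.replace(/-loader$/, '')] = require.resolve(name);
  }
}, {});
// => { babel: '/abs/path/node_modules/babel-loader/index.js' }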

View file

@ -1,8 +1,8 @@
let { fromNode } = require('bluebird');
let { writeFile } = require('fs');
let BaseOptimizer = require('./BaseOptimizer');
let fromRoot = require('../utils/fromRoot');
import BaseOptimizer from './BaseOptimizer';
import fromRoot from '../utils/fromRoot';
import { fromNode } from 'bluebird';
import { writeFile } from 'fs';
module.exports = class FsOptimizer extends BaseOptimizer {
async init() {

View file

@ -1,4 +1,4 @@
var cloneDeep = require('lodash').cloneDeep;
import { cloneDeep } from 'lodash';
var fromRoot = require('path').resolve.bind(null, __dirname, '../../');
if (!process.env.BABEL_CACHE_PATH) {

View file

@ -12,3 +12,5 @@ exports.webpack = {
};
exports.node = cloneDeep(exports.webpack);
exports.node.optional = ['asyncToGenerator'];
exports.node.blacklist = ['regenerator'];

View file

@ -1,3 +1,4 @@
import FsOptimizer from './FsOptimizer';
module.exports = async (kbnServer, server, config) => {
if (!config.get('optimize.enabled')) return;
@ -33,7 +34,6 @@ module.exports = async (kbnServer, server, config) => {
}
// only require the FsOptimizer when we need to
let FsOptimizer = require('./FsOptimizer');
let optimizer = new FsOptimizer({
env: bundles.env,
bundles: bundles,

View file

@ -1,9 +1,9 @@
let { once, pick, size } = require('lodash');
let { join } = require('path');
let Boom = require('boom');
import Boom from 'boom';
let BaseOptimizer = require('../BaseOptimizer');
let WeirdControlFlow = require('./WeirdControlFlow');
import BaseOptimizer from '../BaseOptimizer';
import WeirdControlFlow from './WeirdControlFlow';
import { once, pick, size } from 'lodash';
import { join } from 'path';
module.exports = class LazyOptimizer extends BaseOptimizer {
constructor(opts) {

View file

@ -1,7 +1,7 @@
let { Server } = require('hapi');
let { fromNode } = require('bluebird');
let Boom = require('boom');
import Boom from 'boom';
import { Server } from 'hapi';
import { fromNode } from 'bluebird';
module.exports = class LazyServer {

View file

@ -1,5 +1,5 @@
import { fromNode } from 'bluebird';
let { fromNode } = require('bluebird');
module.exports = class WeirdControlFlow {
constructor(work) {

View file

@ -1,6 +1,6 @@
import { isWorker } from 'cluster';
module.exports = async (kbnServer, server, config) => {
let { isWorker } = require('cluster');
if (!isWorker) {
throw new Error(`lazy optimization is only available in "watch" mode`);

View file

@ -1,10 +1,8 @@
module.exports = async (kbnServer, kibanaHapiServer, config) => {
let src = require('requirefrom')('src');
let fromRoot = src('utils/fromRoot');
let LazyServer = require('./LazyServer');
let LazyOptimizer = require('./LazyOptimizer');
import LazyServer from './LazyServer';
import LazyOptimizer from './LazyOptimizer';
import fromRoot from '../../utils/fromRoot';
export default async (kbnServer, kibanaHapiServer, config) => {
let server = new LazyServer(
config.get('optimize.lazyHost'),
config.get('optimize.lazyPort'),
@ -20,7 +18,6 @@ module.exports = async (kbnServer, kibanaHapiServer, config) => {
})
);
let ready = false;
let sendReady = () => {

View file

@ -1,5 +1,5 @@
let { fromNode } = require('bluebird');
let { get, once } = require('lodash');
import { fromNode } from 'bluebird';
import { get, once } from 'lodash';
module.exports = (kbnServer, server, config) => {

View file

@ -1,4 +1,7 @@
var angular = require('angular');
import angular from 'angular';
import 'angular-mocks';
import 'mocha';
if (angular.mocks) {
throw new Error(
'Don\'t require angular-mocks directly or the tests ' +
@ -6,5 +9,4 @@ if (angular.mocks) {
);
}
require('angular-mocks');
module.exports = angular.mock;

View file

@ -1,20 +1,19 @@
define(function (require) {
// register the spy mode or it won't show up in the spies
require('ui/registry/spy_modes').register(VisDetailsSpyProvider);
import visDebugSpyPanelTemplate from 'plugins/devMode/visDebugSpyPanel.html';
// register the spy mode or it won't show up in the spies
require('ui/registry/spy_modes').register(VisDetailsSpyProvider);
function VisDetailsSpyProvider(Notifier, $filter, $rootScope, config) {
return {
name: 'debug',
display: 'Debug',
template: require('plugins/devMode/visDebugSpyPanel.html'),
order: 5,
link: function ($scope, $el) {
$scope.$watch('vis.getState() | json', function (json) {
$scope.visStateJson = json;
});
}
};
}
function VisDetailsSpyProvider(Notifier, $filter, $rootScope, config) {
return {
name: 'debug',
display: 'Debug',
template: visDebugSpyPanelTemplate,
order: 5,
link: function ($scope, $el) {
$scope.$watch('vis.getState() | json', function (json) {
$scope.visStateJson = json;
});
}
};
}
return VisDetailsSpyProvider;
});
export default VisDetailsSpyProvider;
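
Registration happens as a module side effect while the provider is also default-exported for direct use in tests. A sketch of adding another spy mode under the same contract (the name, display, and order are illustrative; a real mode would also supply a template):

import spyModes from 'ui/registry/spy_modes';
function ReqTimingSpyProvider() {
  return {
    name: 'timing',
    display: 'Timing',
    order: 6,
    link: function ($scope, $el) {
      // copy whatever the panel needs onto $scope here
    }
  };
}
spyModes.register(ReqTimingSpyProvider);
export default ReqTimingSpyProvider;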

Some files were not shown because too many files have changed in this diff.