Mirror of https://github.com/elastic/kibana.git (synced 2025-04-23 09:19:04 -04:00)

Commit 723931356c: Merge branch 'master' into testVisualize

202 changed files with 2574 additions and 2240 deletions
@@ -1 +1 @@
-0.12.7
+0.12.9
.travis.yml (28 lines removed)

@@ -1,28 +0,0 @@
-language: node_js
-node_js: '0.12.7'
-install:
-  - npm install
-script: ./node_modules/.bin/grunt travis
-sudo: false
-addons:
-  firefox: "40.0"
-cache:
-  directories:
-    - esvm
-    - node_modules
-    - selenium
-before_cache:
-  - rm -rf esvm/*/logs esvm/data_dir
-before_script:
-  - export DISPLAY=:99.0
-  - sh -e /etc/init.d/xvfb start
-notifications:
-  email:
-    - rashid.khan@elastic.co
-  hipchat:
-    rooms:
-      secure: UKrVR+5KztHarodQruQe97UJfwftutD6RNdXlVkr+oIr2GqccisDIIN9pAzS/kxl+eAnP1uT6VHzc9YI/jgbrmiSkz3DHViw+MwDwY2aIDgI8aHEbd/4B2ihtb15+OYTVbb+lytyz4+W8A8hSmbkTR/P/uFIJ+EYcBeYZfw1elo=
-    format: html
-    on_success: change
-    template:
-      - ! '%{repository_slug}/%{branch} by %{author}: %{commit_message} (<a href="%{build_url}">open</a>)'
@@ -31,6 +31,8 @@ Please make sure you have signed the [Contributor License Agreement](http://www.

- Start elasticsearch

Note: you need to have a java binary in `PATH` or set `JAVA_HOME`.

```sh
npm run elasticsearch
```
@@ -100,6 +102,12 @@ The standard `npm run test` task runs several sub tasks and can take several min
    <br>
    <img src="http://i.imgur.com/DwHxgfq.png">
  </dd>
+
+  <dt><code>npm run mocha [test file or dir]</code> or <code>npm run mocha:debug [test file or dir]</code></dt>
+  <dd>
+    Run a one off test with the local project version of mocha, babel compilation, and optional debugging. Great
+    for development and fixing individual tests.
+  </dd>
</dl>

### Functional UI Testing
@@ -113,15 +121,20 @@ The standard `npm run test` task runs several sub tasks and can take several min

*The Selenium server that is started currently only runs the tests in Firefox*

-To runt the functional UI tests, execute the following command:
+To run the functional UI tests use the following commands

-`npm run test:ui`
+<dl>

-The task above takes a little time to start the servers. You can also start the servers and leave them running, and then run the tests separately:
+<dt><code>npm run test:ui</code></dt>
+<dd>Run the functional UI tests one time and exit. This is used by the CI systems and is great for quickly checking that things pass. It is essentially a combination of the next two tasks.</dd>

-`npm run test:ui:server` will start the server required to run the selenium tests, leave this open
+<dt><code>npm run test:ui:server</code></dt>
+<dd>Start the server required for the <code>test:ui:runner</code> tasks. Once the server is started <code>test:ui:runner</code> can be run multiple times without waiting for the server to start.</dd>

-`npm run test:ui:runner` will run the frontend tests and close when complete
+<dt><code>npm run test:ui:runner</code></dt>
+<dd>Execute the front-end selenium tests. This requires the server started by the <code>test:ui:server</code> task.</dd>
+
+</dl>

#### Running tests locally with your existing (and already running) ElasticSearch, Kibana, and Selenium Server:

@@ -129,7 +142,9 @@ Set your es and kibana ports in `test/intern.js` to 9220 and 5620, respecitively

Once you've got the services running, execute the following:

-`npm run test:ui:runner`
+```sh
+npm run test:ui:runner
+```

#### General notes:
@@ -1,7 +1,5 @@
# Kibana 5.0.0-snapshot

-[](https://travis-ci.org/elastic/kibana?branch=master)
-
Kibana is an open source ([Apache Licensed](https://github.com/elastic/kibana/blob/master/LICENSE.md)), browser based analytics and search dashboard for Elasticsearch. Kibana is a snap to setup and start using. Kibana strives to be easy to get started with, while also being flexible and powerful, just like Elasticsearch.

## Requirements
@@ -21,5 +21,4 @@ if [ ! -x "$NODE" ]; then
  exit 1
fi

-exec "${NODE}" "${DIR}/src/cli" ${@}
-
+exec "${NODE}" $NODE_OPTIONS "${DIR}/src/cli" ${@}
@@ -6,7 +6,11 @@ set SCRIPT_DIR=%~dp0
for %%I in ("%SCRIPT_DIR%..") do set DIR=%%~dpfI

set NODE=%DIR%\node\node.exe
-for /f "delims=" %%i in ('WHERE node') do set SYS_NODE=%%i
+
+WHERE /Q node
+IF %ERRORLEVEL% EQU 0 (
+  for /f "delims=" %%i in ('WHERE node') do set SYS_NODE=%%i
+)

If Not Exist "%NODE%" (
  IF Exist "%SYS_NODE%" (

@@ -18,7 +22,7 @@ If Not Exist "%NODE%" (
)

TITLE Kibana Server
-"%NODE%" "%DIR%\src\cli" %*
+"%NODE%" %NODE_OPTIONS% "%DIR%\src\cli" %*

:finally
@@ -4,10 +4,6 @@
# The host to bind the server to.
# server.host: "0.0.0.0"

-# A value to use as a XSRF token. This token is sent back to the server on each request
-# and required if you want to execute requests from other clients (like curl).
-# server.xsrf.token: ""
-
# If you are running kibana behind a proxy, and want to mount it at a path,
# specify that path here. The basePath can't end in a slash.
# server.basePath: ""
package.json (10 lines changed)

@@ -49,11 +49,13 @@
    "test:coverage": "grunt test:coverage",
    "build": "grunt build",
    "start": "./bin/kibana --dev",
-   "precommit": "grunt lintStagedFiles",
+   "precommit": "grunt precommit",
    "karma": "karma start",
    "elasticsearch": "grunt esvm:dev:keepalive",
    "lint": "grunt eslint:source",
-   "lintroller": "grunt eslint:fixSource"
+   "lintroller": "grunt eslint:fixSource",
+   "mocha": "mocha --compilers js:babel/register",
+   "mocha:debug": "mocha --debug-brk --compilers js:babel/register"
  },
  "repository": {
    "type": "git",

@@ -65,6 +67,7 @@
    "@spalger/leaflet-draw": "0.2.3",
    "@spalger/leaflet-heat": "0.1.3",
    "@spalger/numeral": "^2.0.0",
+   "@spalger/test-subj-selector": "0.2.1",
    "@spalger/ui-ace": "0.2.3",
    "angular": "1.4.7",
    "angular-bootstrap-colorpicker": "3.0.19",

@@ -130,7 +133,6 @@
    "whatwg-fetch": "0.9.0"
  },
  "devDependencies": {
-   "@spalger/test-subj-selector": "0.2.1",
    "Nonsense": "0.1.2",
    "angular-mocks": "1.4.7",
    "auto-release-sinon": "1.0.3",

@@ -178,7 +180,7 @@
    "wreck": "6.2.0"
  },
  "engines": {
-   "node": "0.12",
+   "node": "0.12.9",
    "npm": "2.14.3"
  }
}
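To show what the new `mocha` and `mocha:debug` scripts above are for, here is a minimal, hypothetical spec file (not part of this commit) that could be run with `npm run mocha <path>`; it only assumes mocha's `describe`/`it` globals and the `expect.js` assertion library the project's tests already use:

```js
// example_spec.js (hypothetical) -- run with: npm run mocha example_spec.js
var expect = require('expect.js');

describe('example suite', function () {
  it('adds numbers', function () {
    expect(1 + 2).to.be(3);
  });
});
```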
@@ -2,7 +2,7 @@ var expect = require('expect.js');
var sinon = require('sinon');

var plugin = require('../plugin');
-var installer = require('../pluginInstaller');
+var installer = require('../plugin_installer');
var remover = require('../pluginRemover');
var settingParser = require('../settingParser');

@@ -3,7 +3,7 @@ var sinon = require('sinon');
var fs = require('fs');
var rimraf = require('rimraf');

-var pluginCleaner = require('../pluginCleaner');
+var pluginCleaner = require('../plugin_cleaner');
var pluginLogger = require('../pluginLogger');

describe('kibana cli', function () {

@@ -7,7 +7,7 @@ var { join } = require('path');
var Promise = require('bluebird');

var pluginLogger = require('../pluginLogger');
-var pluginInstaller = require('../pluginInstaller');
+var pluginInstaller = require('../plugin_installer');

describe('kibana cli', function () {

@@ -2,7 +2,7 @@ var utils = require('requirefrom')('src/utils');
var fromRoot = utils('fromRoot');

var settingParser = require('./settingParser');
-var installer = require('./pluginInstaller');
+var installer = require('./plugin_installer');
var remover = require('./pluginRemover');
var pluginLogger = require('./pluginLogger');
@@ -28,7 +28,10 @@ module.exports = function (settings, logger) {
    // delete the working directory.
    // At this point we're bailing, so swallow any errors on delete.

-   try { rimraf.sync(settings.workingPath); }
+   try {
+     rimraf.sync(settings.workingPath);
+     rimraf.sync(settings.pluginPath);
+   }
    catch (e) {} // eslint-disable-line no-empty
  }
@@ -2,9 +2,9 @@ let _ = require('lodash');
var utils = require('requirefrom')('src/utils');
var fromRoot = utils('fromRoot');
var pluginDownloader = require('./pluginDownloader');
-var pluginCleaner = require('./pluginCleaner');
+var pluginCleaner = require('./plugin_cleaner');
var KbnServer = require('../../server/KbnServer');
-var readYamlConfig = require('../serve/readYamlConfig');
+var readYamlConfig = require('../serve/read_yaml_config');
var fs = require('fs');

module.exports = {

@@ -30,6 +30,9 @@ function install(settings, logger) {
  .then(function () {
    return downloader.download();
  })
+ .then(function () {
+   fs.renameSync(settings.workingPath, settings.pluginPath);
+ })
  .then(async function() {
    logger.log('Optimizing and caching browser bundles...');
    let serverConfig = _.merge(

@@ -49,8 +52,7 @@ function install(settings, logger) {
      },
      plugins: {
        initialize: false,
-       scanDirs: [settings.pluginDir, fromRoot('src/plugins')],
-       paths: [settings.workingPath]
+       scanDirs: [settings.pluginDir, fromRoot('src/plugins')]
      }
    }
  );

@@ -60,7 +62,6 @@ function install(settings, logger) {
    await kbnServer.close();
  })
  .then(function () {
-   fs.renameSync(settings.workingPath, settings.pluginPath);
    logger.log('Plugin installation complete');
  })
  .catch(function (e) {
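The net effect of the two `fs.renameSync` hunks above is that the plugin is moved from its temporary working path into the plugin directory before the optimizer runs, so the KbnServer config no longer needs an explicit `paths: [settings.workingPath]` entry. A hedged sketch of that merge with illustrative values (only lodash is assumed; `fromRoot` is stubbed here):

```js
var _ = require('lodash');

// illustrative stand-ins for the real installer settings and src/utils fromRoot
var settings = { pluginDir: '/opt/kibana/installedPlugins', workingPath: '/tmp/kibana-plugin' };
var fromRoot = function (p) { return '/opt/kibana/' + p; };

var serverConfig = _.merge(
  {},            // the real base options (logging, optimize, ...) are elided in this sketch
  {
    plugins: {
      initialize: false,
      // the plugin already lives inside pluginDir by the time this runs,
      // so scanDirs alone is enough and paths: [settings.workingPath] was dropped
      scanDirs: [settings.pluginDir, fromRoot('src/plugins')]
    }
  }
);

console.log(serverConfig.plugins.scanDirs);
// [ '/opt/kibana/installedPlugins', '/opt/kibana/src/plugins' ]
```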
@@ -35,6 +35,10 @@ let legacySettingMap = {
  verify_ssl: 'elasticsearch.ssl.verify',
};

+const deprecatedSettings = {
+  'server.xsrf.token': 'server.xsrf.token is deprecated. It is no longer used when providing xsrf protection.'
+};
+
module.exports = function (path) {
  if (!path) return {};

@@ -50,6 +54,10 @@ module.exports = function (path) {
    }
  }

+ _.each(deprecatedSettings, function (message, setting) {
+   if (_.has(file, setting)) console.error(message);
+ });
+
  // transform legeacy options into new namespaced versions
  return _.transform(file, function (config, val, key) {
    if (legacySettingMap.hasOwnProperty(key)) {

@@ -76,7 +76,7 @@ module.exports = function (program) {
    return;
  }

- let readYamlConfig = require('./readYamlConfig');
+ let readYamlConfig = require('./read_yaml_config');
  let KbnServer = src('server/KbnServer');

  let settings = readYamlConfig(opts.config);
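The deprecation check added above only needs lodash, so its behaviour is easy to see in isolation; in this sketch an inline object stands in for the YAML that read_yaml_config would normally parse:

```js
var _ = require('lodash');

var deprecatedSettings = {
  'server.xsrf.token': 'server.xsrf.token is deprecated. It is no longer used when providing xsrf protection.'
};

// illustrative parsed-config object; the real one comes from the user's kibana.yml
var file = { server: { xsrf: { token: 'abc123' } } };

_.each(deprecatedSettings, function (message, setting) {
  if (_.has(file, setting)) console.error(message);
});
// prints the deprecation warning because server.xsrf.token is present
```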
src/fixtures/mock_ui_state.js (new file, 15 lines)

@@ -0,0 +1,15 @@
+define(function (require) {
+  var _ = require('lodash');
+  var keys = {};
+  return {
+    get: function (path, def) {
+      return keys[path] == null ? def : keys[path];
+    },
+    set: function (path, val) {
+      keys[path] = val;
+      return val;
+    },
+    on: _.noop,
+    off: _.noop
+  }
+})
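For context, this fixture stands in for a persisted ui state object wherever a test needs a `ui-state` binding (the visualize integration test later in this commit assigns it to `$rootScope.uiState`). A hedged, plain-Node sketch of the contract it provides, with the AMD wrapper stripped for illustration:

```js
var _ = require('lodash');

// same get/set/on/off surface as src/fixtures/mock_ui_state.js, recreated inline
var keys = {};
var mockUiState = {
  get: function (path, def) { return keys[path] == null ? def : keys[path]; },
  set: function (path, val) { keys[path] = val; return val; },
  on: _.noop,
  off: _.noop
};

console.log(mockUiState.get('vis.colors', {}));   // {} -- default, nothing stored yet
mockUiState.set('vis.colors', { Count: '#00f' });
console.log(mockUiState.get('vis.colors', {}));   // { Count: '#00f' }
```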
@@ -1,6 +1,6 @@
define(function (require) {
  return function stubbedLogstashIndexPatternService(Private) {
-   var StubIndexPattern = Private(require('testUtils/stubIndexPattern'));
+   var StubIndexPattern = Private(require('testUtils/stub_index_pattern'));
    var fieldTypes = Private(require('ui/index_patterns/_field_types'));
    var mockLogstashFields = Private(require('fixtures/logstash_fields'));

@@ -2,8 +2,9 @@ define(function (require) {
  var sinon = require('auto-release-sinon');
  var searchResponse = require('fixtures/search_response');

- return function stubSearchSource(Private, $q) {
+ return function stubSearchSource(Private, $q, Promise) {
    var deferedResult = $q.defer();
+   var indexPattern = Private(require('fixtures/stubbed_logstash_index_pattern'));

    return {
      sort: sinon.spy(),

@@ -13,7 +14,7 @@ define(function (require) {
      get: function (param) {
        switch (param) {
          case 'index':
-           return Private(require('fixtures/stubbed_logstash_index_pattern'));
+           return indexPattern;
          default:
            throw new Error('Param "' + param + '" is not implemented in the stubbed search source');
        }

@@ -29,7 +30,9 @@ define(function (require) {
        return deferedResult.promise;
      },
      onError: function () { return $q.defer().promise; },
+     _flatten: function () {
+       return Promise.resolve({ index: indexPattern, body: {} });
+     }
    };
  };

@@ -27,7 +27,8 @@ module.exports = function VislibFixtures(Private) {
    defaultYExtents: false,
    setYExtents: false,
    yAxis: {},
-   type: 'histogram'
+   type: 'histogram',
+   hasTimeField: true
  }));
};
|
@ -20,6 +20,7 @@ module.exports = {
|
|||
},
|
||||
'series': [
|
||||
{
|
||||
'label': 'Count',
|
||||
'values': [
|
||||
{
|
||||
'x': 1415826600000,
|
||||
|
@ -167,6 +168,7 @@ module.exports = {
|
|||
},
|
||||
'series': [
|
||||
{
|
||||
'label': 'Count',
|
||||
'values': [
|
||||
{
|
||||
'x': 1415826630000,
|
||||
|
@ -222,6 +224,7 @@ module.exports = {
|
|||
},
|
||||
'series': [
|
||||
{
|
||||
'label': 'Count',
|
||||
'values': [
|
||||
{
|
||||
'x': 1415826660000,
|
||||
|
|
|
@ -12,6 +12,7 @@ module.exports = {
|
|||
'yAxisLabel': 'Count of documents',
|
||||
'series': [
|
||||
{
|
||||
'label': 'Count',
|
||||
'values': [
|
||||
{
|
||||
'x': 1411761450000,
|
||||
|
|
|
@ -12,6 +12,7 @@ module.exports = {
|
|||
'yAxisLabel': 'Count of documents',
|
||||
'series': [
|
||||
{
|
||||
'label': 'Count',
|
||||
'values': [
|
||||
{
|
||||
'x': 1411761450000,
|
||||
|
|
|
@ -12,6 +12,7 @@ module.exports = {
|
|||
'yAxisLabel': 'Count of documents',
|
||||
'series': [
|
||||
{
|
||||
'label': 'Count',
|
||||
'values': [
|
||||
{
|
||||
'x': 1411761450000,
|
||||
|
|
|
@ -8,6 +8,7 @@ module.exports = {
|
|||
'yAxisLabel': 'Count of documents',
|
||||
'series': [
|
||||
{
|
||||
'label': 'Count',
|
||||
'values': [
|
||||
{
|
||||
'x': 'css',
|
||||
|
@ -41,6 +42,7 @@ module.exports = {
|
|||
'yAxisLabel': 'Count of documents',
|
||||
'series': [
|
||||
{
|
||||
'label': 'Count',
|
||||
'values': [
|
||||
{
|
||||
'x': 'css',
|
||||
|
@ -74,6 +76,7 @@ module.exports = {
|
|||
'yAxisLabel': 'Count of documents',
|
||||
'series': [
|
||||
{
|
||||
'label': 'Count',
|
||||
'values': [
|
||||
{
|
||||
'x': 'css',
|
||||
|
|
|
@ -8,6 +8,7 @@ module.exports = {
|
|||
'yAxisLabel': 'Count of documents',
|
||||
'series': [
|
||||
{
|
||||
'label': 'Count',
|
||||
'values': [
|
||||
{
|
||||
'x': 'css',
|
||||
|
@ -41,6 +42,7 @@ module.exports = {
|
|||
'yAxisLabel': 'Count of documents',
|
||||
'series': [
|
||||
{
|
||||
'label': 'Count',
|
||||
'values': [
|
||||
{
|
||||
'x': 'css',
|
||||
|
@ -74,6 +76,7 @@ module.exports = {
|
|||
'yAxisLabel': 'Count of documents',
|
||||
'series': [
|
||||
{
|
||||
'label': 'Count',
|
||||
'values': [
|
||||
{
|
||||
'x': 'css',
|
||||
|
|
|
@ -6,6 +6,7 @@ module.exports = {
|
|||
'yAxisLabel': 'Count of documents',
|
||||
'series': [
|
||||
{
|
||||
'label': 'Count',
|
||||
'values': [
|
||||
{
|
||||
'x': 'css',
|
||||
|
|
|
@ -11,6 +11,7 @@ module.exports = {
|
|||
'yAxisLabel': 'Count of documents',
|
||||
'series': [
|
||||
{
|
||||
'label': 'Count',
|
||||
'values': [
|
||||
{
|
||||
'x': 2147483600,
|
||||
|
@ -124,6 +125,7 @@ module.exports = {
|
|||
'yAxisLabel': 'Count of documents',
|
||||
'series': [
|
||||
{
|
||||
'label': 'Count',
|
||||
'values': [
|
||||
{
|
||||
'x': 2147483600,
|
||||
|
@ -237,6 +239,7 @@ module.exports = {
|
|||
'yAxisLabel': 'Count of documents',
|
||||
'series': [
|
||||
{
|
||||
'label': 'Count',
|
||||
'values': [
|
||||
{
|
||||
'x': 2147483600,
|
||||
|
|
|
@ -11,6 +11,7 @@ module.exports = {
|
|||
'yAxisLabel': 'Count of documents',
|
||||
'series': [
|
||||
{
|
||||
'label': 'Count',
|
||||
'values': [
|
||||
{
|
||||
'x': 2147483600,
|
||||
|
@ -59,6 +60,7 @@ module.exports = {
|
|||
'yAxisLabel': 'Count of documents',
|
||||
'series': [
|
||||
{
|
||||
'label': 'Count',
|
||||
'values': [
|
||||
{
|
||||
'x': 3221225400,
|
||||
|
@ -159,6 +161,7 @@ module.exports = {
|
|||
'yAxisLabel': 'Count of documents',
|
||||
'series': [
|
||||
{
|
||||
'label': 'Count',
|
||||
'values': [
|
||||
{
|
||||
'x': 10737418200,
|
||||
|
|
|
@ -9,6 +9,7 @@ module.exports = {
|
|||
'yAxisLabel': 'Count of documents',
|
||||
'series': [
|
||||
{
|
||||
'label': 'Count',
|
||||
'values': [
|
||||
{
|
||||
'x': 3221225400,
|
||||
|
|
|
@ -6,6 +6,7 @@ module.exports = {
|
|||
'yAxisLabel': 'Count of documents',
|
||||
'series': [
|
||||
{
|
||||
'label': 'Count',
|
||||
'values': [
|
||||
{
|
||||
'x': '_all',
|
||||
|
|
|
@ -8,6 +8,7 @@ module.exports = {
|
|||
'yAxisLabel': 'Count of documents',
|
||||
'series': [
|
||||
{
|
||||
'label': 'Count',
|
||||
'values': [
|
||||
{
|
||||
'x': '0.0-1000.0',
|
||||
|
@ -27,6 +28,7 @@ module.exports = {
|
|||
'yAxisLabel': 'Count of documents',
|
||||
'series': [
|
||||
{
|
||||
'label': 'Count',
|
||||
'values': [
|
||||
{
|
||||
'x': '0.0-1000.0',
|
||||
|
|
|
@ -8,6 +8,7 @@ module.exports = {
|
|||
'yAxisLabel': 'Count of documents',
|
||||
'series': [
|
||||
{
|
||||
'label': 'Count',
|
||||
'values': [
|
||||
{
|
||||
'x': '0.0-1000.0',
|
||||
|
@ -29,6 +30,7 @@ module.exports = {
|
|||
'yAxisLabel': 'Count of documents',
|
||||
'series': [
|
||||
{
|
||||
'label': 'Count',
|
||||
'values': [
|
||||
{
|
||||
'x': '0.0-1000.0',
|
||||
|
@ -50,6 +52,7 @@ module.exports = {
|
|||
'yAxisLabel': 'Count of documents',
|
||||
'series': [
|
||||
{
|
||||
'label': 'Count',
|
||||
'values': [
|
||||
{
|
||||
'x': '0.0-1000.0',
|
||||
|
|
|
@ -6,6 +6,7 @@ module.exports = {
|
|||
'yAxisLabel': 'Count of documents',
|
||||
'series': [
|
||||
{
|
||||
'label': 'Count',
|
||||
'values': [
|
||||
{
|
||||
'x': '0.0-1000.0',
|
||||
|
|
|
@ -8,6 +8,7 @@ module.exports = {
|
|||
'yAxisLabel': 'Count of documents',
|
||||
'series': [
|
||||
{
|
||||
'label': 'Count',
|
||||
'values': [
|
||||
{
|
||||
'x': 'success',
|
||||
|
@ -53,6 +54,7 @@ module.exports = {
|
|||
'yAxisLabel': 'Count of documents',
|
||||
'series': [
|
||||
{
|
||||
'label': 'Count',
|
||||
'values': [
|
||||
{
|
||||
'x': 'success',
|
||||
|
@ -98,6 +100,7 @@ module.exports = {
|
|||
'yAxisLabel': 'Count of documents',
|
||||
'series': [
|
||||
{
|
||||
'label': 'Count',
|
||||
'values': [
|
||||
{
|
||||
'x': 'success',
|
||||
|
@ -143,6 +146,7 @@ module.exports = {
|
|||
'yAxisLabel': 'Count of documents',
|
||||
'series': [
|
||||
{
|
||||
'label': 'Count',
|
||||
'values': [
|
||||
{
|
||||
'x': 'success',
|
||||
|
@ -188,6 +192,7 @@ module.exports = {
|
|||
'yAxisLabel': 'Count of documents',
|
||||
'series': [
|
||||
{
|
||||
'label': 'Count',
|
||||
'values': [
|
||||
{
|
||||
'x': 'success',
|
||||
|
|
|
@ -8,6 +8,7 @@ module.exports = {
|
|||
'yAxisLabel': 'Count of documents',
|
||||
'series': [
|
||||
{
|
||||
'label': 'Count',
|
||||
'values': [
|
||||
{
|
||||
'x': 'success',
|
||||
|
@ -53,6 +54,7 @@ module.exports = {
|
|||
'yAxisLabel': 'Count of documents',
|
||||
'series': [
|
||||
{
|
||||
'label': 'Count',
|
||||
'values': [
|
||||
{
|
||||
'x': 'success',
|
||||
|
@ -98,6 +100,7 @@ module.exports = {
|
|||
'yAxisLabel': 'Count of documents',
|
||||
'series': [
|
||||
{
|
||||
'label': 'Count',
|
||||
'values': [
|
||||
{
|
||||
'x': 'success',
|
||||
|
@ -143,6 +146,7 @@ module.exports = {
|
|||
'yAxisLabel': 'Count of documents',
|
||||
'series': [
|
||||
{
|
||||
'label': 'Count',
|
||||
'values': [
|
||||
{
|
||||
'x': 'success',
|
||||
|
@ -188,6 +192,7 @@ module.exports = {
|
|||
'yAxisLabel': 'Count of documents',
|
||||
'series': [
|
||||
{
|
||||
'label': 'Count',
|
||||
'values': [
|
||||
{
|
||||
'x': 'success',
|
||||
|
|
|
@ -6,6 +6,7 @@ module.exports = {
|
|||
'yAxisLabel': 'Count of documents',
|
||||
'series': [
|
||||
{
|
||||
'label': 'Count',
|
||||
'values': [
|
||||
{
|
||||
'x': 'success',
|
||||
|
|
|
@ -8,6 +8,7 @@ module.exports = {
|
|||
'yAxisLabel': 'Count of documents',
|
||||
'series': [
|
||||
{
|
||||
'label': 'Count',
|
||||
'values': [
|
||||
{
|
||||
'x': 'jpg',
|
||||
|
@ -53,6 +54,7 @@ module.exports = {
|
|||
'yAxisLabel': 'Count of documents',
|
||||
'series': [
|
||||
{
|
||||
'label': 'Count',
|
||||
'values': [
|
||||
{
|
||||
'x': 'jpg',
|
||||
|
@ -98,6 +100,7 @@ module.exports = {
|
|||
'yAxisLabel': 'Count of documents',
|
||||
'series': [
|
||||
{
|
||||
'label': 'Count',
|
||||
'values': [
|
||||
{
|
||||
'x': 'jpg',
|
||||
|
|
|
@ -8,6 +8,7 @@ module.exports = {
|
|||
'yAxisLabel': 'Count of documents',
|
||||
'series': [
|
||||
{
|
||||
'label': 'Count',
|
||||
'values': [
|
||||
{
|
||||
'x': 'jpg',
|
||||
|
@ -53,6 +54,7 @@ module.exports = {
|
|||
'yAxisLabel': 'Count of documents',
|
||||
'series': [
|
||||
{
|
||||
'label': 'Count',
|
||||
'values': [
|
||||
{
|
||||
'x': 'jpg',
|
||||
|
|
|
@ -6,6 +6,7 @@ module.exports = {
|
|||
'yAxisLabel': 'Count of documents',
|
||||
'series': [
|
||||
{
|
||||
'label': 'Count',
|
||||
'values': [
|
||||
{
|
||||
'x': 'jpg',
|
||||
|
|
|
@@ -24,7 +24,7 @@ module.exports = function (kibana) {
        key: Joi.string()
      }).default(),
      apiVersion: Joi.string().default('2.0'),
-     minimumVersion: Joi.string().default('2.1.0')
+     engineVersion: Joi.string().valid('^2.1.0').default('^2.1.0')
    }).default();
  },

@@ -38,6 +38,7 @@ module.exports = function (kibana) {
    createProxy(server, 'POST', '/{index}/_search');
    createProxy(server, 'POST', '/{index}/_field_stats');
    createProxy(server, 'POST', '/_msearch');
+   createProxy(server, 'POST', '/_search/scroll');

    function noBulkCheck(request, reply) {
      if (/\/_bulk/.test(request.path)) {
|
@ -10,7 +10,7 @@ describe('plugins/elasticsearch', function () {
|
|||
var plugin;
|
||||
|
||||
beforeEach(function () {
|
||||
var get = sinon.stub().withArgs('elasticserach.minimumVersion').returns('1.4.3');
|
||||
var get = sinon.stub().withArgs('elasticsearch.engineVersion').returns('^1.4.3');
|
||||
var config = function () { return { get: get }; };
|
||||
server = {
|
||||
log: _.noop,
|
||||
|
|
|
@ -55,7 +55,7 @@ describe('plugins/elasticsearch', function () {
|
|||
});
|
||||
|
||||
it('should set the cluster green if everything is ready', function () {
|
||||
get.withArgs('elasticsearch.minimumVersion').returns('1.4.4');
|
||||
get.withArgs('elasticsearch.engineVersion').returns('^1.4.4');
|
||||
get.withArgs('kibana.index').returns('.my-kibana');
|
||||
client.ping.returns(Promise.resolve());
|
||||
client.cluster.health.returns(Promise.resolve({ timed_out: false, status: 'green' }));
|
||||
|
@ -74,7 +74,7 @@ describe('plugins/elasticsearch', function () {
|
|||
it('should set the cluster red if the ping fails, then to green', function () {
|
||||
|
||||
get.withArgs('elasticsearch.url').returns('http://localhost:9210');
|
||||
get.withArgs('elasticsearch.minimumVersion').returns('1.4.4');
|
||||
get.withArgs('elasticsearch.engineVersion').returns('^1.4.4');
|
||||
get.withArgs('kibana.index').returns('.my-kibana');
|
||||
client.ping.onCall(0).returns(Promise.reject(new NoConnections()));
|
||||
client.ping.onCall(1).returns(Promise.resolve());
|
||||
|
@ -98,7 +98,7 @@ describe('plugins/elasticsearch', function () {
|
|||
|
||||
it('should set the cluster red if the health check status is red, then to green', function () {
|
||||
get.withArgs('elasticsearch.url').returns('http://localhost:9210');
|
||||
get.withArgs('elasticsearch.minimumVersion').returns('1.4.4');
|
||||
get.withArgs('elasticsearch.engineVersion').returns('^1.4.4');
|
||||
get.withArgs('kibana.index').returns('.my-kibana');
|
||||
client.ping.returns(Promise.resolve());
|
||||
client.cluster.health.onCall(0).returns(Promise.resolve({ timed_out: false, status: 'red' }));
|
||||
|
@ -121,7 +121,7 @@ describe('plugins/elasticsearch', function () {
|
|||
|
||||
it('should set the cluster yellow if the health check timed_out and create index', function () {
|
||||
get.withArgs('elasticsearch.url').returns('http://localhost:9210');
|
||||
get.withArgs('elasticsearch.minimumVersion').returns('1.4.4');
|
||||
get.withArgs('elasticsearch.engineVersion').returns('^1.4.4');
|
||||
get.withArgs('kibana.index').returns('.my-kibana');
|
||||
client.ping.returns(Promise.resolve());
|
||||
client.cluster.health.onCall(0).returns(Promise.resolve({ timed_out: true, status: 'red' }));
|
||||
|
|
|
@ -1,134 +0,0 @@
|
|||
var _ = require('lodash');
|
||||
var versionMath = require('../version_math');
|
||||
var expect = require('expect.js');
|
||||
var versions = [
|
||||
'1.1.12',
|
||||
'1.1.12',
|
||||
'1.1.12',
|
||||
'1.1.12',
|
||||
'0.90.0',
|
||||
'0.90.1',
|
||||
'1.0.0',
|
||||
'1.0',
|
||||
'1.2.3',
|
||||
'2.0.0',
|
||||
'2.0.1',
|
||||
'2.3.1'
|
||||
];
|
||||
|
||||
describe('plugins/elasticsearch', function () {
|
||||
describe('lib/version_math', function () {
|
||||
describe('version math (0.90.0 - 2.3.1)', function () {
|
||||
var methods = 'max,min,eq,is,lt,lte,gt,gte'.split(',');
|
||||
describe('methods', function () {
|
||||
it('should have ' + methods.join(', ') + ' methods', function () {
|
||||
_.each(methods, function (method) {
|
||||
expect(versionMath[method]).to.be.a(Function);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('min & max', function () {
|
||||
it('has a max of 2.3.1', function () {
|
||||
expect(versionMath.max(versions)).to.be('2.3.1');
|
||||
});
|
||||
|
||||
it('has a min of 0.90.0', function () {
|
||||
expect(versionMath.min(versions)).to.be('0.90.0');
|
||||
});
|
||||
});
|
||||
|
||||
describe('eq / lowest version', function () {
|
||||
it('should be true for 0.90.0', function () {
|
||||
expect(versionMath.eq('0.90.0', versions)).to.be(true);
|
||||
});
|
||||
|
||||
it('should be false for 1.0', function () {
|
||||
expect(versionMath.eq('1.0', versions)).to.be(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('gt / lowest version', function () {
|
||||
it('is > 0.20.3', function () {
|
||||
expect(versionMath.gt('0.20.3', versions)).to.be(true);
|
||||
});
|
||||
|
||||
it('is not > 0.90.0', function () {
|
||||
expect(versionMath.gt('0.90.0', versions)).to.be(false);
|
||||
});
|
||||
|
||||
it('is not > 1.0.0', function () {
|
||||
expect(versionMath.gt('1.0.0', versions)).to.be(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('gte / lowest version', function () {
|
||||
it('is >= 0.20.3', function () {
|
||||
expect(versionMath.gte('0.20.3', versions)).to.be(true);
|
||||
});
|
||||
|
||||
it('is >= 0.90.0', function () {
|
||||
expect(versionMath.gte('0.90.0', versions)).to.be(true);
|
||||
});
|
||||
|
||||
it('is not >= 1.0.0', function () {
|
||||
expect(versionMath.gte('1.0.0', versions)).to.be(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('lt / highest version', function () {
|
||||
it('is not < 0.20.3', function () {
|
||||
expect(versionMath.lt('0.20.3', versions)).to.be(false);
|
||||
});
|
||||
|
||||
it('is not < 2.3.1', function () {
|
||||
expect(versionMath.lt('2.3.1', versions)).to.be(false);
|
||||
});
|
||||
|
||||
it('is < 2.5', function () {
|
||||
expect(versionMath.lt('2.5', versions)).to.be(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('lte / highest version', function () {
|
||||
it('is not =< 0.20.3', function () {
|
||||
expect(versionMath.lte('0.20.3', versions)).to.be(false);
|
||||
});
|
||||
|
||||
it('is =< 2.3.1', function () {
|
||||
expect(versionMath.lte('2.3.1', versions)).to.be(true);
|
||||
});
|
||||
|
||||
it('is =< 2.5', function () {
|
||||
expect(versionMath.lte('2.5', versions)).to.be(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('is', function () {
|
||||
it('exactly, <, <=, >, >=', function () {
|
||||
expect(versionMath.is('0.90.0', versions)).to.be(true);
|
||||
expect(versionMath.is('0.20.0', versions)).to.be(false);
|
||||
|
||||
expect(versionMath.is('>0.20.0', versions)).to.be(true);
|
||||
expect(versionMath.is('>0.90.0', versions)).to.be(false);
|
||||
expect(versionMath.is('>0.90.1', versions)).to.be(false);
|
||||
|
||||
expect(versionMath.is('>=0.20.0', versions)).to.be(true);
|
||||
expect(versionMath.is('>=0.90.0', versions)).to.be(true);
|
||||
expect(versionMath.is('>=0.90.1', versions)).to.be(false);
|
||||
|
||||
expect(versionMath.is('<2.5', versions)).to.be(true);
|
||||
expect(versionMath.is('<2.3.1', versions)).to.be(false);
|
||||
expect(versionMath.is('<0.90.1', versions)).to.be(false);
|
||||
|
||||
expect(versionMath.is('<=2.5', versions)).to.be(true);
|
||||
expect(versionMath.is('<=2.3.1', versions)).to.be(true);
|
||||
expect(versionMath.is('<=0.90.1', versions)).to.be(false);
|
||||
});
|
||||
});
|
||||
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
|
src/plugins/elasticsearch/lib/__tests__/version_satisfies.js (new file, 45 lines)

@@ -0,0 +1,45 @@
+var versionSatisfies = require('../version_satisfies');
+var expect = require('expect.js');
+
+var versionChecks = [
+  // order is: ['actual version', 'match expression', satisfied (true/false)]
+  ['0.90.0', '>=0.90.0', true],
+  ['1.2.0', '>=1.2.1 <2.0.0', false],
+  ['1.2.1', '>=1.2.1 <2.0.0', true],
+  ['1.4.4', '>=1.2.1 <2.0.0', true],
+  ['1.7.4', '>=1.3.1 <2.0.0', true],
+  ['2.0.0', '>=1.3.1 <2.0.0', false],
+  ['1.4.3', '^1.4.3', true],
+  ['1.4.3-Beta1', '^1.4.3', true],
+  ['1.4.4', '^1.4.3', true],
+  ['1.1.12', '^1.0.0', true],
+  ['1.1.12', '~1.0.0', false],
+  ['1.6.1-SNAPSHOT', '1.6.1', true],
+  ['1.6.1-SNAPSHOT', '1.6.2', false],
+  ['1.7.1-SNAPSHOT', '^1.3.1', true],
+  ['1.3.4', '^1.4.0', false],
+  ['2.0.1', '^2.0.0', true],
+  ['2.1.1', '^2.1.0', true],
+  ['2.2.0', '^2.1.0', true],
+  ['3.0.0-snapshot', '^2.1.0', false],
+  ['3.0.0', '^2.1.0', false],
+  ['2.10.20-snapshot', '^2.10.20', true],
+  ['2.10.999', '^2.10.20', true],
+];
+
+describe('plugins/elasticsearch', function () {
+  describe('lib/version_satisfies', function () {
+    versionChecks.forEach(function (spec) {
+      var actual = spec[0];
+      var match = spec[1];
+      var satisfied = spec[2];
+      var desc = actual + ' satisfies ' + match;
+
+      describe(desc, function () {
+        it('should be ' + satisfied, function () {
+          expect(versionSatisfies(actual, match)).to.be(satisfied);
+        });
+      });
+    });
+  });
+});
@@ -1,13 +1,13 @@
var _ = require('lodash');
var esBool = require('./es_bool');
-var versionMath = require('./version_math');
+var versionSatisfies = require('./version_satisfies');
var SetupError = require('./setup_error');

module.exports = function (server) {
  server.log(['plugin', 'debug'], 'Checking Elasticsearch version');

  var client = server.plugins.elasticsearch.client;
- var minimumElasticsearchVersion = server.config().get('elasticsearch.minimumVersion');
+ var engineVersion = server.config().get('elasticsearch.engineVersion');

  return client.nodes.info()
  .then(function (info) {

@@ -18,9 +18,8 @@ module.exports = function (server) {
        return false;
      }

-     // remove nodes that are gte the min version
-     var v = node.version.split('-')[0];
-     return !versionMath.gte(minimumElasticsearchVersion, v);
+     // remove nodes that satisfy the required engine version
+     return !versionSatisfies(node.version, engineVersion);
    });

    if (!badNodes.length) return true;

@@ -30,7 +29,7 @@ module.exports = function (server) {
    });

    var message = `This version of Kibana requires Elasticsearch ` +
-     `${minimumElasticsearchVersion} or higher on all nodes. I found ` +
+     `${engineVersion} on all nodes. I found ` +
      `the following incompatible nodes in your cluster: ${badNodeNames.join(',')}`;

    throw new SetupError(server, message);
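To make the new filter concrete, here is a small self-contained sketch; it calls `semver.satisfies` directly where the real code goes through the `version_satisfies` helper added later in this commit, and the node list is purely illustrative:

```js
var semver = require('semver');
var engineVersion = '^2.1.0'; // the new elasticsearch.engineVersion default

// illustrative subset of what client.nodes.info() returns
var nodes = {
  n1: { name: 'node-a', version: '2.1.1' },
  n2: { name: 'node-b', version: '1.7.4' }
};

var badNodes = Object.keys(nodes)
  .map(function (id) { return nodes[id]; })
  .filter(function (node) {
    // keep only the nodes that do NOT satisfy the required engine version
    return !semver.satisfies(node.version, engineVersion);
  });

console.log(badNodes.map(function (n) { return n.name + ' v' + n.version; }));
// [ 'node-b v1.7.4' ] -- these are the nodes that would trigger the SetupError above
```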
@ -67,6 +67,7 @@ module.exports = function (server) {
|
|||
server.expose('client', client);
|
||||
server.expose('createClient', createClient);
|
||||
server.expose('callWithRequest', callWithRequest(noAuthClient));
|
||||
server.expose('errors', elasticsearch.errors);
|
||||
|
||||
return client;
|
||||
|
||||
|
|
|
@ -1,139 +0,0 @@
|
|||
var _ = require('lodash');
|
||||
|
||||
function VersionMathException(message) {
|
||||
this.message = message;
|
||||
this.name = 'VersionMathException';
|
||||
}
|
||||
|
||||
// Determine if a specific version meets the minimum requirement
|
||||
var compare = function (required, installed) {
|
||||
if (_.isUndefined(installed)) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!required || !installed) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
var a = installed.split('.');
|
||||
var b = required.split('.');
|
||||
var i;
|
||||
|
||||
// leave suffixes as is ("RC1 or -SNAPSHOT")
|
||||
for (i = 0; i < Math.min(a.length, 3); ++i) {
|
||||
a[i] = Number(a[i]);
|
||||
}
|
||||
for (i = 0; i < Math.min(b.length, 3); ++i) {
|
||||
b[i] = Number(b[i]);
|
||||
}
|
||||
if (a.length === 2) {
|
||||
a[2] = 0;
|
||||
}
|
||||
|
||||
if (a[0] > b[0]) { return true; }
|
||||
if (a[0] < b[0]) { return false; }
|
||||
|
||||
if (a[1] > b[1]) { return true; }
|
||||
if (a[1] < b[1]) { return false; }
|
||||
|
||||
if (a[2] > b[2]) { return true; }
|
||||
if (a[2] < b[2]) { return false; }
|
||||
|
||||
if (a.length > 3) {
|
||||
// rc/beta suffix
|
||||
if (b.length <= 3) {
|
||||
return false;
|
||||
} // no suffix on b -> a<b
|
||||
return a[3] >= b[3];
|
||||
}
|
||||
if (b.length > 3) {
|
||||
// b has a suffix but a not -> a>b
|
||||
return true;
|
||||
}
|
||||
|
||||
return true;
|
||||
};
|
||||
|
||||
// Sort versions from lowest to highest
|
||||
var sortVersions = function (versions) {
|
||||
if (!_.isArray(versions)) versions = [versions];
|
||||
|
||||
return _.uniq(versions).sort(function (a, b) {
|
||||
return compare(a, b) ? -1 : 1;
|
||||
});
|
||||
};
|
||||
|
||||
// Get the max version in this cluster
|
||||
var max = function (versions) {
|
||||
return sortVersions(versions).pop();
|
||||
};
|
||||
|
||||
// Return the lowest version in the cluster
|
||||
var min = function (versions) {
|
||||
return sortVersions(versions).shift();
|
||||
};
|
||||
|
||||
// Check if the lowest version in the cluster is >= to `version`
|
||||
var gte = function (version, versions) {
|
||||
var _versions = sortVersions(versions);
|
||||
return compare(version, min(_versions));
|
||||
};
|
||||
|
||||
// Check if the highest version in the cluster is <= to `version`
|
||||
var lte = function (version, versions) {
|
||||
var _versions = sortVersions(versions);
|
||||
return compare(max(_versions), version);
|
||||
};
|
||||
|
||||
// check if lowest version in cluster = `version`
|
||||
var eq = function (version, versions) {
|
||||
var _versions = sortVersions(versions);
|
||||
return version === min(_versions) ? true : false;
|
||||
};
|
||||
|
||||
// version > lowest version in cluster?
|
||||
var gt = function (version, versions) {
|
||||
var _versions = sortVersions(versions);
|
||||
return version === min(_versions) ? false : gte(version, _versions);
|
||||
};
|
||||
|
||||
// version < highest version in cluster?
|
||||
var lt = function (version, versions) {
|
||||
var _versions = sortVersions(versions);
|
||||
return version === max(_versions) ? false : lte(version, _versions);
|
||||
};
|
||||
|
||||
/*
|
||||
Takes a version string with one of the following optional comparison prefixes: >,>=,<.<=
|
||||
and evaluates if the cluster meets the requirement. If the prefix is omitted exact match
|
||||
is assumed
|
||||
*/
|
||||
var is = function (equation, versions) {
|
||||
var _versions = sortVersions(versions);
|
||||
var _v = equation;
|
||||
var _cf;
|
||||
|
||||
if (_v.charAt(0) === '>') {
|
||||
_cf = _v.charAt(1) === '=' ? gte(_v.slice(2), _versions) : gt(_v.slice(1), _versions);
|
||||
} else if (_v.charAt(0) === '<') {
|
||||
_cf = _v.charAt(1) === '=' ? lte(_v.slice(2), _versions) : lt(_v.slice(1), _versions);
|
||||
} else {
|
||||
_cf = eq(_v, _versions);
|
||||
}
|
||||
|
||||
return _cf;
|
||||
};
|
||||
|
||||
|
||||
|
||||
module.exports = {
|
||||
min: min,
|
||||
max: max,
|
||||
is: is,
|
||||
eq: eq,
|
||||
gt: gt,
|
||||
gte: gte,
|
||||
lt: lt,
|
||||
lte: lte
|
||||
};
|
||||
|
src/plugins/elasticsearch/lib/version_satisfies.js (new file, 16 lines)

@@ -0,0 +1,16 @@
+var semver = require('semver');
+
+module.exports = function (actual, expected) {
+  try {
+    var ver = cleanVersion(actual);
+    return semver.satisfies(ver, expected);
+  } catch (err) {
+    return false;
+  }
+
+  function cleanVersion(version) {
+    var match = version.match(/\d+\.\d+\.\d+/);
+    if (!match) return version;
+    return match[0];
+  }
+};
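A quick hedged sketch of why the helper strips the suffix before calling semver, using the `-SNAPSHOT` case exercised by the new test file above (only the `semver` package is assumed):

```js
var semver = require('semver');

// same cleaning idea as version_satisfies.js: reduce '1.6.1-SNAPSHOT' to '1.6.1'
// so Elasticsearch pre-release builds are compared like their base version
function cleanVersion(version) {
  var match = version.match(/\d+\.\d+\.\d+/);
  return match ? match[0] : version;
}

console.log(semver.satisfies('1.6.1-SNAPSHOT', '1.6.1'));               // false -- a prerelease does not satisfy the plain range
console.log(semver.satisfies(cleanVersion('1.6.1-SNAPSHOT'), '1.6.1')); // true -- matches the test table above
```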
@ -5,10 +5,4 @@
|
|||
Show Tooltip
|
||||
</label>
|
||||
</div>
|
||||
<div class="vis-option-item">
|
||||
<label>
|
||||
<input type="checkbox" ng-model="vis.params.addLegend">
|
||||
Show Legend
|
||||
</label>
|
||||
</div>
|
||||
</div>
|
||||
|
|
|
@ -1,16 +1,16 @@
|
|||
<div class="panel panel-default" ng-switch on="panel.type" ng-if="savedObj || error">
|
||||
<div class="panel-heading">
|
||||
<span class="panel-title">
|
||||
<span class="panel-title" title="{{::savedObj.title}}">
|
||||
<i
|
||||
class="fa"
|
||||
ng-class="savedObj.vis.type.icon"
|
||||
aria-label="{{savedObj.vis.type.title}} Icon"
|
||||
title="{{savedObj.vis.type.title}}">
|
||||
aria-label="{{::savedObj.vis.type.title}} Icon"
|
||||
title="{{::savedObj.vis.type.title}}">
|
||||
</i>
|
||||
{{savedObj.title}}
|
||||
{{::savedObj.title}}
|
||||
</span>
|
||||
<div class="btn-group">
|
||||
<a aria-label="Edit" ng-show="chrome.getVisible() && editUrl" ng-href="{{editUrl}}">
|
||||
<a aria-label="Edit" ng-show="chrome.getVisible() && editUrl" ng-href="{{::editUrl}}">
|
||||
<i aria-hidden="true" class="fa fa-pencil"></i>
|
||||
</a>
|
||||
<a aria-label="Remove" ng-show="chrome.getVisible()" ng-click="remove()">
|
||||
|
@ -25,7 +25,8 @@
|
|||
<span ng-bind="error"></span>
|
||||
</div>
|
||||
|
||||
<visualize ng-switch-when="visualization"
|
||||
<visualize
|
||||
ng-switch-when="visualization"
|
||||
vis="savedObj.vis"
|
||||
search-source="savedObj.searchSource"
|
||||
show-spy-panel="chrome.getVisible()"
|
||||
|
|
|
@ -1,6 +1,8 @@
|
|||
define(function (require) {
|
||||
var module = require('ui/modules').get('app/dashboard');
|
||||
var _ = require('lodash');
|
||||
var Scanner = require('ui/utils/scanner');
|
||||
|
||||
// bring in the factory
|
||||
require('plugins/kibana/dashboard/services/_saved_dashboard');
|
||||
|
||||
|
@ -14,6 +16,11 @@ define(function (require) {
|
|||
|
||||
// This is the only thing that gets injected into controllers
|
||||
module.service('savedDashboards', function (Promise, SavedDashboard, kbnIndex, es, kbnUrl) {
|
||||
var scanner = new Scanner(es, {
|
||||
index: kbnIndex,
|
||||
type: 'dashboard'
|
||||
});
|
||||
|
||||
this.type = SavedDashboard.type;
|
||||
this.Class = SavedDashboard;
|
||||
|
||||
|
@ -41,8 +48,21 @@ define(function (require) {
|
|||
});
|
||||
};
|
||||
|
||||
this.scanAll = function (queryString, pageSize = 1000) {
|
||||
return scanner.scanAndMap(queryString, {
|
||||
pageSize,
|
||||
docCount: Infinity
|
||||
}, (hit) => this.mapHits(hit));
|
||||
};
|
||||
|
||||
this.mapHits = function (hit) {
|
||||
var source = hit._source;
|
||||
source.id = hit._id;
|
||||
source.url = this.urlFor(hit._id);
|
||||
return source;
|
||||
};
|
||||
|
||||
this.find = function (searchString, size = 100) {
|
||||
var self = this;
|
||||
var body;
|
||||
if (searchString) {
|
||||
body = {
|
||||
|
@ -64,15 +84,10 @@ define(function (require) {
|
|||
body: body,
|
||||
size: size
|
||||
})
|
||||
.then(function (resp) {
|
||||
.then((resp) => {
|
||||
return {
|
||||
total: resp.hits.total,
|
||||
hits: resp.hits.hits.map(function (hit) {
|
||||
var source = hit._source;
|
||||
source.id = hit._id;
|
||||
source.url = self.urlFor(hit._id);
|
||||
return source;
|
||||
})
|
||||
hits: resp.hits.hits.map((hit) => this.mapHits(hit))
|
||||
};
|
||||
});
|
||||
};
|
||||
|
|
|
@ -110,6 +110,8 @@ define(function (require) {
|
|||
}
|
||||
|
||||
var $state = $scope.state = new AppState(getStateDefaults());
|
||||
$scope.uiState = $state.makeStateful('uiState');
|
||||
|
||||
function getStateDefaults() {
|
||||
return {
|
||||
query: $scope.searchSource.get('query') || '',
|
||||
|
@ -337,14 +339,15 @@ define(function (require) {
|
|||
}());
|
||||
|
||||
var sortFn = null;
|
||||
if (sortBy === 'non-time') {
|
||||
if (sortBy !== 'implicit') {
|
||||
sortFn = new HitSortFn(sort[1]);
|
||||
}
|
||||
|
||||
$scope.updateTime();
|
||||
if (sort[0] === '_score') segmented.setMaxSegments(1);
|
||||
segmented.setDirection(sortBy === 'time' ? (sort[1] || 'desc') : 'desc');
|
||||
segmented.setSize(sortBy === 'time' ? $scope.opts.sampleSize : false);
|
||||
segmented.setSortFn(sortFn);
|
||||
segmented.setSize($scope.opts.sampleSize);
|
||||
|
||||
// triggered when the status updated
|
||||
segmented.on('status', function (status) {
|
||||
|
@ -362,30 +365,30 @@ define(function (require) {
|
|||
return failure.index + failure.shard + failure.reason;
|
||||
});
|
||||
}
|
||||
}));
|
||||
|
||||
segmented.on('mergedSegment', function (merged) {
|
||||
$scope.mergedEsResp = merged;
|
||||
$scope.hits = merged.hits.total;
|
||||
|
||||
var rows = $scope.rows;
|
||||
var indexPattern = $scope.searchSource.get('index');
|
||||
|
||||
// merge the rows and the hits, use a new array to help watchers
|
||||
rows = $scope.rows = rows.concat(resp.hits.hits);
|
||||
|
||||
if (sortFn) {
|
||||
notify.event('resort rows', function () {
|
||||
rows.sort(sortFn);
|
||||
rows = $scope.rows = rows.slice(0, totalSize);
|
||||
$scope.fieldCounts = {};
|
||||
});
|
||||
}
|
||||
// the merge rows, use a new array to help watchers
|
||||
$scope.rows = merged.hits.hits.slice();
|
||||
|
||||
notify.event('flatten hit and count fields', function () {
|
||||
var counts = $scope.fieldCounts;
|
||||
|
||||
// if we haven't counted yet, or need a fresh count because we are sorting, reset the counts
|
||||
if (!counts || sortFn) counts = $scope.fieldCounts = {};
|
||||
|
||||
$scope.rows.forEach(function (hit) {
|
||||
// skip this work if we have already done it and we are NOT sorting.
|
||||
// ---
|
||||
// skip this work if we have already done it
|
||||
if (hit.$$_counted) return;
|
||||
|
||||
// when we are sorting results, we need to redo the counts each time because the
|
||||
// "top 500" may change with each response
|
||||
if (hit.$$_counted && !sortFn) return;
|
||||
hit.$$_counted = true;
|
||||
// "top 500" may change with each response, so don't mark this as counted
|
||||
if (!sortFn) hit.$$_counted = true;
|
||||
|
||||
var fields = _.keys(indexPattern.flattenHit(hit));
|
||||
var n = fields.length;
|
||||
|
@ -396,13 +399,6 @@ define(function (require) {
|
|||
}
|
||||
});
|
||||
});
|
||||
|
||||
}));
|
||||
|
||||
segmented.on('mergedSegment', function (merged) {
|
||||
$scope.mergedEsResp = merged;
|
||||
$scope.hits = merged.hits.total;
|
||||
|
||||
});
|
||||
|
||||
segmented.on('complete', function () {
|
||||
|
@ -489,6 +485,7 @@ define(function (require) {
|
|||
}
|
||||
|
||||
$scope.vis = new Vis($scope.indexPattern, {
|
||||
title: savedSearch.title,
|
||||
type: 'histogram',
|
||||
params: {
|
||||
addLegend: false,
|
||||
|
|
|
@ -176,7 +176,13 @@
|
|||
|
||||
</header>
|
||||
|
||||
<visualize ng-if="vis && rows.length != 0" vis="vis" es-resp="mergedEsResp" search-source="searchSource"></visualize>
|
||||
<visualize
|
||||
ng-if="vis && rows.length != 0"
|
||||
vis="vis"
|
||||
ui-state="uiState"
|
||||
es-resp="mergedEsResp"
|
||||
search-source="searchSource">
|
||||
</visualize>
|
||||
</div>
|
||||
|
||||
<div class="discover-table" fixed-scroll>
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
define(function (require) {
|
||||
var _ = require('lodash');
|
||||
var Scanner = require('ui/utils/scanner');
|
||||
|
||||
require('plugins/kibana/discover/saved_searches/_saved_search');
|
||||
require('ui/notify');
|
||||
|
@ -16,7 +17,10 @@ define(function (require) {
|
|||
});
|
||||
|
||||
module.service('savedSearches', function (Promise, config, kbnIndex, es, createNotifier, SavedSearch, kbnUrl) {
|
||||
|
||||
var scanner = new Scanner(es, {
|
||||
index: kbnIndex,
|
||||
type: 'search'
|
||||
});
|
||||
|
||||
var notify = createNotifier({
|
||||
location: 'Saved Searches'
|
||||
|
@ -31,6 +35,15 @@ define(function (require) {
|
|||
nouns: 'saved searches'
|
||||
};
|
||||
|
||||
|
||||
this.scanAll = function (queryString, pageSize = 1000) {
|
||||
return scanner.scanAndMap(queryString, {
|
||||
pageSize,
|
||||
docCount: Infinity
|
||||
}, (hit) => this.mapHits(hit));
|
||||
};
|
||||
|
||||
|
||||
this.get = function (id) {
|
||||
return (new SavedSearch(id)).init();
|
||||
};
|
||||
|
@ -46,8 +59,14 @@ define(function (require) {
|
|||
});
|
||||
};
|
||||
|
||||
this.mapHits = function (hit) {
|
||||
var source = hit._source;
|
||||
source.id = hit._id;
|
||||
source.url = this.urlFor(hit._id);
|
||||
return source;
|
||||
};
|
||||
|
||||
this.find = function (searchString, size = 100) {
|
||||
var self = this;
|
||||
var body;
|
||||
if (searchString) {
|
||||
body = {
|
||||
|
@ -69,15 +88,10 @@ define(function (require) {
|
|||
body: body,
|
||||
size: size
|
||||
})
|
||||
.then(function (resp) {
|
||||
.then((resp) => {
|
||||
return {
|
||||
total: resp.hits.total,
|
||||
hits: resp.hits.hits.map(function (hit) {
|
||||
var source = hit._source;
|
||||
source.id = hit._id;
|
||||
source.url = self.urlFor(hit._id);
|
||||
return source;
|
||||
})
|
||||
hits: resp.hits.hits.map((hit) => this.mapHits(hit))
|
||||
};
|
||||
});
|
||||
};
|
||||
|
|
|
@ -94,13 +94,14 @@ define(function (require) {
|
|||
|
||||
$scope.exportAll = () => {
|
||||
Promise.map($scope.services, (service) =>
|
||||
service.service.find('', MAX_SIZE).then((results) =>
|
||||
service.service.scanAll('').then((results) =>
|
||||
results.hits.map((hit) => _.extend(hit, {type: service.type}))
|
||||
)
|
||||
).then((results) => retrieveAndExportDocs(_.flattenDeep(results)));
|
||||
};
|
||||
|
||||
function retrieveAndExportDocs(objs) {
|
||||
if (!objs.length) return notify.error('No saved objects to export.');
|
||||
es.mget({
|
||||
index: kbnIndex,
|
||||
body: {docs: objs.map(transformToMget)}
|
||||
|
|
|
@ -138,7 +138,13 @@
|
|||
</div>
|
||||
</div>
|
||||
|
||||
<visualize vis="vis" ui-state="uiState" show-spy-panel="chrome.getVisible()" search-source="savedVis.searchSource" editable-vis="editableVis"></visualize>
|
||||
<visualize
|
||||
vis="vis"
|
||||
ui-state="uiState"
|
||||
show-spy-panel="chrome.getVisible()"
|
||||
editable-vis="editableVis"
|
||||
search-source="savedVis.searchSource">
|
||||
</visualize>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
|
|
@ -49,7 +49,7 @@ define(function (require) {
|
|||
|
||||
var angular = require('angular');
|
||||
var ConfigTemplate = require('ui/ConfigTemplate');
|
||||
var Notifier = require('ui/notify/Notifier');
|
||||
var Notifier = require('ui/notify/notifier');
|
||||
var docTitle = Private(require('ui/doc_title'));
|
||||
var brushEvent = Private(require('ui/utils/brush_event'));
|
||||
var queryFilter = Private(require('ui/filter_bar/query_filter'));
|
||||
|
|
|
@ -12,7 +12,7 @@ define(function (require) {
|
|||
vis: '=',
|
||||
},
|
||||
link: function ($scope, $el) {
|
||||
var $optionContainer = $('.visualization-options');
|
||||
var $optionContainer = $el.find('.visualization-options');
|
||||
var $editor = $compile($scope.vis.type.params.editor)($scope);
|
||||
$optionContainer.append($editor);
|
||||
|
||||
|
|
|
@ -103,6 +103,7 @@ define(function (require) {
|
|||
self.visState = Vis.convertOldState(self.typeName, JSON.parse(self.stateJSON));
|
||||
}
|
||||
|
||||
self.visState.title = self.title;
|
||||
self.vis = new Vis(
|
||||
self.searchSource.get('index'),
|
||||
self.visState
|
||||
|
@ -115,6 +116,7 @@ define(function (require) {
|
|||
var self = this;
|
||||
|
||||
self.vis.indexPattern = self.searchSource.get('index');
|
||||
self.visState.title = self.title;
|
||||
self.vis.setState(self.visState);
|
||||
};
|
||||
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
define(function (require) {
|
||||
var app = require('ui/modules').get('app/visualize');
|
||||
var _ = require('lodash');
|
||||
var Scanner = require('ui/utils/scanner');
|
||||
|
||||
require('plugins/kibana/visualize/saved_visualizations/_saved_vis');
|
||||
|
||||
|
@ -13,6 +14,12 @@ define(function (require) {
|
|||
|
||||
app.service('savedVisualizations', function (Promise, es, kbnIndex, SavedVis, Private, Notifier, kbnUrl) {
|
||||
var visTypes = Private(require('ui/registry/vis_types'));
|
||||
|
||||
var scanner = new Scanner(es, {
|
||||
index: kbnIndex,
|
||||
type: 'visualization'
|
||||
});
|
||||
|
||||
var notify = new Notifier({
|
||||
location: 'Saved Visualization Service'
|
||||
});
|
||||
|
@ -41,8 +48,36 @@ define(function (require) {
|
|||
});
|
||||
};
|
||||
|
||||
this.scanAll = function (queryString, pageSize = 1000) {
|
||||
return scanner.scanAndMap(queryString, {
|
||||
pageSize,
|
||||
docCount: Infinity
|
||||
}, (hit) => this.mapHits(hit));
|
||||
};
|
||||
|
||||
this.mapHits = function (hit) {
|
||||
var source = hit._source;
|
||||
source.id = hit._id;
|
||||
source.url = this.urlFor(hit._id);
|
||||
|
||||
var typeName = source.typeName;
|
||||
if (source.visState) {
|
||||
try { typeName = JSON.parse(source.visState).type; }
|
||||
catch (e) { /* missing typename handled below */ } // eslint-disable-line no-empty
|
||||
}
|
||||
|
||||
if (!typeName || !visTypes.byName[typeName]) {
|
||||
if (!typeName) notify.error('Visualization type is missing. Please add a type to this visualization.', hit);
|
||||
else notify.error('Visualization type of "' + typeName + '" is invalid. Please change to a valid type.', hit);
|
||||
return kbnUrl.redirect('/settings/objects/savedVisualizations/{{id}}', {id: source.id});
|
||||
}
|
||||
|
||||
source.type = visTypes.byName[typeName];
|
||||
source.icon = source.type.icon;
|
||||
return source;
|
||||
};
|
||||
|
||||
this.find = function (searchString, size = 100) {
|
||||
var self = this;
|
||||
var body;
|
||||
if (searchString) {
|
||||
body = {
|
||||
|
@ -64,30 +99,10 @@ define(function (require) {
|
|||
body: body,
|
||||
size: size
|
||||
})
|
||||
.then(function (resp) {
|
||||
.then((resp) => {
|
||||
return {
|
||||
total: resp.hits.total,
|
||||
hits: _.transform(resp.hits.hits, function (hits, hit) {
|
||||
var source = hit._source;
|
||||
source.id = hit._id;
|
||||
source.url = self.urlFor(hit._id);
|
||||
|
||||
var typeName = source.typeName;
|
||||
if (source.visState) {
|
||||
try { typeName = JSON.parse(source.visState).type; }
|
||||
catch (e) { /* missing typename handled below */ } // eslint-disable-line no-empty
|
||||
}
|
||||
|
||||
if (!typeName || !visTypes.byName[typeName]) {
|
||||
if (!typeName) notify.error('Visualization type is missing. Please add a type to this visualization.', hit);
|
||||
else notify.error('Visualization type of "' + typeName + '" is invalid. Please change to a valid type.', hit);
|
||||
return kbnUrl.redirect('/settings/objects/savedVisualizations/{{id}}', {id: source.id});
|
||||
}
|
||||
|
||||
source.type = visTypes.byName[typeName];
|
||||
source.icon = source.type.icon;
|
||||
hits.push(source);
|
||||
}, [])
|
||||
hits: resp.hits.hits.map((hit) => this.mapHits(hit))
|
||||
};
|
||||
});
|
||||
};
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
<div ng-controller="KbnMetricVisController" class="metric-vis">
|
||||
<div class="metric-container" ng-repeat="metric in metrics">
|
||||
<div class="metric-value" ng-style="{'font-size': vis.params.fontSize+'pt'}">{{metric.value}}</div>
|
||||
<div>{{metric.label}}</div>
|
||||
</div>
|
||||
<div class="metric-container" ng-repeat="metric in metrics">
|
||||
<div class="metric-value" ng-style="{'font-size': vis.params.fontSize+'pt'}">{{metric.value}}</div>
|
||||
<div>{{metric.label}}</div>
|
||||
</div>
|
||||
</div>
|
||||
|
|
|
@ -23,6 +23,7 @@ define(function (require) {
|
|||
template: require('plugins/metric_vis/metric_vis.html'),
|
||||
params: {
|
||||
defaults: {
|
||||
handleNoResults: true,
|
||||
fontSize: 60
|
||||
},
|
||||
editor: require('plugins/metric_vis/metric_vis_params.html')
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
define(function (require) {
|
||||
var _ = require('lodash');
|
||||
// get the kibana/metric_vis module, and make sure that it requires the "kibana" module if it
|
||||
// didn't already
|
||||
var module = require('ui/modules').get('kibana/metric_vis', ['kibana']);
|
||||
|
@ -8,13 +9,21 @@ define(function (require) {
|
|||
|
||||
var metrics = $scope.metrics = [];
|
||||
|
||||
function isInvalid(val) {
|
||||
return _.isUndefined(val) || _.isNull(val) || _.isNaN(val);
|
||||
}
|
||||
|
||||
$scope.processTableGroups = function (tableGroups) {
|
||||
tableGroups.tables.forEach(function (table) {
|
||||
table.columns.forEach(function (column, i) {
|
||||
var fieldFormatter = table.aggConfig(column).fieldFormatter();
|
||||
var value = table.rows[0][i];
|
||||
|
||||
value = isInvalid(value) ? '?' : fieldFormatter(value);
|
||||
|
||||
metrics.push({
|
||||
label: column.title,
|
||||
value: fieldFormatter(table.rows[0][i])
|
||||
value: value
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@ -1 +1,5 @@
|
|||
<kbn-agg-table table="table" per-page="editableVis.params.spyPerPage"></kbn-agg-table>
|
||||
<kbn-agg-table
|
||||
table="table"
|
||||
export-title="vis.title"
|
||||
per-page="editableVis.params.spyPerPage">
|
||||
</kbn-agg-table>
|
||||
|
|
|
@ -30,7 +30,8 @@ describe('Integration', function () {
|
|||
|
||||
$rootScope.vis = vis;
|
||||
$rootScope.esResponse = esResponse;
|
||||
$el = $('<visualize vis="vis" es-resp="esResponse">');
|
||||
$rootScope.uiState = require('fixtures/mock_ui_state');
|
||||
$el = $('<visualize vis="vis" es-resp="esResponse" ui-state="uiState">');
|
||||
$compile($el)($rootScope);
|
||||
$rootScope.$apply();
|
||||
|
||||
|
|
|
@ -5,6 +5,10 @@
|
|||
</div>
|
||||
|
||||
<div ng-if="tableGroups" class="table-vis-container">
|
||||
<kbn-agg-table-group group="tableGroups" per-page="vis.params.perPage"></kbn-agg-table-group>
|
||||
<kbn-agg-table-group
|
||||
group="tableGroups"
|
||||
export-title="vis.title"
|
||||
per-page="vis.params.perPage">
|
||||
</kbn-agg-table-group>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
|
|
@ -32,7 +32,6 @@ define(function (require) {
|
|||
if (hasSomeRows) {
|
||||
$scope.tableGroups = tableGroups;
|
||||
}
|
||||
|
||||
});
|
||||
});
|
||||
|
||||
|
|
|
@ -43,8 +43,8 @@ module.exports = () => Joi.object({
|
|||
otherwise: Joi.boolean().default(false)
|
||||
}),
|
||||
xsrf: Joi.object({
|
||||
token: Joi.string().default(randomBytes(32).toString('hex')),
|
||||
disableProtection: Joi.boolean().default(false),
|
||||
token: Joi.string().optional().notes('Deprecated')
|
||||
}).default(),
|
||||
}).default(),
|
||||
|
||||
|
|
|
@ -4,7 +4,9 @@ import KbnServer from '../../KbnServer';
|
|||
describe('cookie validation', function () {
|
||||
let kbnServer;
|
||||
beforeEach(function () {
|
||||
kbnServer = new KbnServer();
|
||||
kbnServer = new KbnServer({
|
||||
server: { autoListen: false }
|
||||
});
|
||||
return kbnServer.ready();
|
||||
});
|
||||
afterEach(function () {
|
||||
|
|
|
@ -8,6 +8,9 @@ const nonDestructiveMethods = ['GET'];
|
|||
const destructiveMethods = ['POST', 'PUT', 'DELETE'];
|
||||
const src = resolve.bind(null, __dirname, '../../../../src');
|
||||
|
||||
const xsrfHeader = 'kbn-version';
|
||||
const version = require(src('../package.json')).version;
|
||||
|
||||
describe('xsrf request filter', function () {
|
||||
function inject(kbnServer, opts) {
|
||||
return fn(cb => {
|
||||
|
@ -17,9 +20,9 @@ describe('xsrf request filter', function () {
|
|||
});
|
||||
}
|
||||
|
||||
const makeServer = async function (token) {
|
||||
const makeServer = async function () {
|
||||
const kbnServer = new KbnServer({
|
||||
server: { autoListen: false, xsrf: { token } },
|
||||
server: { autoListen: false },
|
||||
plugins: { scanDirs: [src('plugins')] },
|
||||
logging: { quiet: true },
|
||||
optimize: { enabled: false },
|
||||
|
@ -41,108 +44,75 @@ describe('xsrf request filter', function () {
|
|||
return kbnServer;
|
||||
};
|
||||
|
||||
describe('issuing tokens', function () {
|
||||
const token = 'secur3';
|
||||
let kbnServer;
|
||||
beforeEach(async () => kbnServer = await makeServer(token));
|
||||
afterEach(async () => await kbnServer.close());
|
||||
let kbnServer;
|
||||
beforeEach(async () => kbnServer = await makeServer());
|
||||
afterEach(async () => await kbnServer.close());
|
||||
|
||||
it('sends a token when rendering an app', async function () {
|
||||
var resp = await inject(kbnServer, {
|
||||
method: 'GET',
|
||||
url: '/app/kibana',
|
||||
for (const method of nonDestructiveMethods) {
|
||||
context(`nonDestructiveMethod: ${method}`, function () { // eslint-disable-line no-loop-func
|
||||
it('accepts requests without a token', async function () {
|
||||
const resp = await inject(kbnServer, {
|
||||
url: '/xsrf/test/route',
|
||||
method: method
|
||||
});
|
||||
|
||||
expect(resp.statusCode).to.be(200);
|
||||
expect(resp.payload).to.be('ok');
|
||||
});
|
||||
|
||||
expect(resp.payload).to.contain(`"xsrfToken":"${token}"`);
|
||||
      it('fails on invalid tokens', async function () {
|
||||
const resp = await inject(kbnServer, {
|
||||
url: '/xsrf/test/route',
|
||||
method: method,
|
||||
headers: {
|
||||
[xsrfHeader]: `invalid:${version}`,
|
||||
},
|
||||
});
|
||||
|
||||
expect(resp.statusCode).to.be(400);
|
||||
expect(resp.headers).to.have.property(xsrfHeader, version);
|
||||
expect(resp.payload).to.match(/"Browser client is out of date/);
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
context('without configured token', function () {
|
||||
let kbnServer;
|
||||
beforeEach(async () => kbnServer = await makeServer());
|
||||
afterEach(async () => await kbnServer.close());
|
||||
for (const method of destructiveMethods) {
|
||||
context(`destructiveMethod: ${method}`, function () { // eslint-disable-line no-loop-func
|
||||
it('accepts requests with the correct token', async function () {
|
||||
const resp = await inject(kbnServer, {
|
||||
url: '/xsrf/test/route',
|
||||
method: method,
|
||||
headers: {
|
||||
[xsrfHeader]: version,
|
||||
},
|
||||
});
|
||||
|
||||
it('responds with a random token', async function () {
|
||||
var resp = await inject(kbnServer, {
|
||||
method: 'GET',
|
||||
url: '/app/kibana',
|
||||
expect(resp.statusCode).to.be(200);
|
||||
expect(resp.payload).to.be('ok');
|
||||
});
|
||||
|
||||
expect(resp.payload).to.match(/"xsrfToken":".{64}"/);
|
||||
it('rejects requests without a token', async function () {
|
||||
const resp = await inject(kbnServer, {
|
||||
url: '/xsrf/test/route',
|
||||
method: method
|
||||
});
|
||||
|
||||
expect(resp.statusCode).to.be(400);
|
||||
expect(resp.payload).to.match(/"Missing kbn-version header/);
|
||||
});
|
||||
|
||||
it('rejects requests with an invalid token', async function () {
|
||||
const resp = await inject(kbnServer, {
|
||||
url: '/xsrf/test/route',
|
||||
method: method,
|
||||
headers: {
|
||||
[xsrfHeader]: `invalid:${version}`,
|
||||
},
|
||||
});
|
||||
|
||||
expect(resp.statusCode).to.be(400);
|
||||
expect(resp.payload).to.match(/"Browser client is out of date/);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
context('with configured token', function () {
|
||||
const token = 'mytoken';
|
||||
let kbnServer;
|
||||
beforeEach(async () => kbnServer = await makeServer(token));
|
||||
afterEach(async () => await kbnServer.close());
|
||||
|
||||
for (const method of nonDestructiveMethods) {
|
||||
context(`nonDestructiveMethod: ${method}`, function () { // eslint-disable-line no-loop-func
|
||||
it('accepts requests without a token', async function () {
|
||||
const resp = await inject(kbnServer, {
|
||||
url: '/xsrf/test/route',
|
||||
method: method
|
||||
});
|
||||
|
||||
expect(resp.statusCode).to.be(200);
|
||||
expect(resp.payload).to.be('ok');
|
||||
});
|
||||
|
||||
it('ignores invalid tokens', async function () {
|
||||
const resp = await inject(kbnServer, {
|
||||
url: '/xsrf/test/route',
|
||||
method: method,
|
||||
headers: {
|
||||
'kbn-xsrf-token': `invalid:${token}`,
|
||||
},
|
||||
});
|
||||
|
||||
expect(resp.statusCode).to.be(200);
|
||||
expect(resp.headers).to.not.have.property('kbn-xsrf-token');
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
for (const method of destructiveMethods) {
|
||||
context(`destructiveMethod: ${method}`, function () { // eslint-disable-line no-loop-func
|
||||
it('accepts requests with the correct token', async function () {
|
||||
const resp = await inject(kbnServer, {
|
||||
url: '/xsrf/test/route',
|
||||
method: method,
|
||||
headers: {
|
||||
'kbn-xsrf-token': token,
|
||||
},
|
||||
});
|
||||
|
||||
expect(resp.statusCode).to.be(200);
|
||||
expect(resp.payload).to.be('ok');
|
||||
});
|
||||
|
||||
it('rejects requests without a token', async function () {
|
||||
const resp = await inject(kbnServer, {
|
||||
url: '/xsrf/test/route',
|
||||
method: method
|
||||
});
|
||||
|
||||
expect(resp.statusCode).to.be(403);
|
||||
expect(resp.payload).to.match(/"Missing XSRF token"/);
|
||||
});
|
||||
|
||||
it('rejects requests with an invalid token', async function () {
|
||||
const resp = await inject(kbnServer, {
|
||||
url: '/xsrf/test/route',
|
||||
method: method,
|
||||
headers: {
|
||||
'kbn-xsrf-token': `invalid:${token}`,
|
||||
},
|
||||
});
|
||||
|
||||
expect(resp.statusCode).to.be(403);
|
||||
expect(resp.payload).to.match(/"Invalid XSRF token"/);
|
||||
});
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
|
|
|
@ -116,11 +116,11 @@ module.exports = function (kbnServer, server, config) {
|
|||
let response = req.response;
|
||||
|
||||
if (response.isBoom) {
|
||||
response.output.headers['x-app-name'] = kbnServer.name;
|
||||
response.output.headers['x-app-version'] = kbnServer.version;
|
||||
response.output.headers['kbn-name'] = kbnServer.name;
|
||||
response.output.headers['kbn-version'] = kbnServer.version;
|
||||
} else {
|
||||
response.header('x-app-name', kbnServer.name);
|
||||
response.header('x-app-version', kbnServer.version);
|
||||
response.header('kbn-name', kbnServer.name);
|
||||
response.header('kbn-version', kbnServer.version);
|
||||
}
|
||||
|
||||
return reply.continue();
|
||||
|
|
|
@ -1,19 +1,22 @@
|
|||
import { forbidden } from 'boom';
|
||||
import { badRequest } from 'boom';
|
||||
|
||||
export default function (kbnServer, server, config) {
|
||||
const token = config.get('server.xsrf.token');
|
||||
const version = config.get('pkg.version');
|
||||
const disabled = config.get('server.xsrf.disableProtection');
|
||||
|
||||
server.decorate('reply', 'issueXsrfToken', function () {
|
||||
return token;
|
||||
});
|
||||
const header = 'kbn-version';
|
||||
|
||||
server.ext('onPostAuth', function (req, reply) {
|
||||
if (disabled || req.method === 'get') return reply.continue();
|
||||
const noHeaderGet = req.method === 'get' && !req.headers[header];
|
||||
if (disabled || noHeaderGet) return reply.continue();
|
||||
|
||||
const attempt = req.headers['kbn-xsrf-token'];
|
||||
if (!attempt) return reply(forbidden('Missing XSRF token'));
|
||||
if (attempt !== token) return reply(forbidden('Invalid XSRF token'));
|
||||
const submission = req.headers[header];
|
||||
if (!submission) return reply(badRequest(`Missing ${header} header`));
|
||||
if (submission !== version) {
|
||||
return reply(badRequest('Browser client is out of date, please refresh the page', {
|
||||
expected: version,
|
||||
got: submission
|
||||
}));
|
||||
}
|
||||
|
||||
return reply.continue();
|
||||
});
|
||||
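For reviewers skimming the hunk above: the new protection amounts to "any request carrying a `kbn-version` header, and every non-GET request, must match the server's package version or it is rejected with a 400". A minimal standalone sketch of that rule, using plain request objects rather than Hapi's request/reply API (the hard-coded version string and the request shape are illustrative assumptions, not part of this commit):

```js
// Sketch of the version-header check introduced above (not the Hapi plugin itself).
// `version` stands in for config.get('pkg.version').
const version = '4.3.0';
const header = 'kbn-version';

function checkRequest(req) {
  // GET requests without the header pass through untouched.
  const noHeaderGet = req.method === 'get' && !req.headers[header];
  if (noHeaderGet) return { ok: true };

  const submission = req.headers[header];
  if (!submission) {
    return { ok: false, status: 400, error: `Missing ${header} header` };
  }
  if (submission !== version) {
    return { ok: false, status: 400, error: 'Browser client is out of date, please refresh the page' };
  }
  return { ok: true };
}

// Examples:
checkRequest({ method: 'get', headers: {} });                          // { ok: true }
checkRequest({ method: 'post', headers: {} });                         // 400, missing header
checkRequest({ method: 'post', headers: { 'kbn-version': '4.3.0' } }); // { ok: true }
```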
|
|
|
@ -23,6 +23,13 @@ define(function (require) {
|
|||
this.routes = IndexPattern.prototype.routes;
|
||||
|
||||
this.toIndexList = _.constant(Promise.resolve([pattern]));
|
||||
this.toDetailedIndexList = _.constant(Promise.resolve([
|
||||
{
|
||||
index: pattern,
|
||||
min: 0,
|
||||
max: 1
|
||||
}
|
||||
]));
|
||||
this.getComputedFields = _.bind(getComputedFields, this);
|
||||
this.flattenHit = flattenHit(this);
|
||||
this.formatHit = formatHit(this, fieldFormats.getDefaultInstance('string'));
|
|
@ -77,7 +77,6 @@ module.exports = async (kbnServer, server, config) => {
|
|||
buildSha: config.get('pkg.buildSha'),
|
||||
basePath: config.get('server.basePath'),
|
||||
vars: defaults(app.getInjectedVars(), defaultInjectedVars),
|
||||
xsrfToken: this.issueXsrfToken(),
|
||||
};
|
||||
|
||||
return this.view(app.templateName, {
|
||||
|
|
|
@ -9,9 +9,10 @@ define(function (require) {
|
|||
* @param {Vis} vis - the vis object that contains all configuration data required to render the vis
|
||||
* @param {jQuery<DOMElement>} $el - a jQuery wrapped element to render into
|
||||
*/
|
||||
function Renderbot(vis, $el) {
|
||||
function Renderbot(vis, $el, uiState) {
|
||||
this.vis = vis;
|
||||
this.$el = $el;
|
||||
this.uiState = uiState;
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -66,6 +66,7 @@ define(function (require) {
|
|||
Vis.prototype.type = 'histogram';
|
||||
|
||||
Vis.prototype.setState = function (state) {
|
||||
this.title = state.title || '';
|
||||
this.type = state.type || this.type;
|
||||
if (_.isString(this.type)) this.type = visTypes.byName[this.type];
|
||||
|
||||
|
@ -80,6 +81,7 @@ define(function (require) {
|
|||
|
||||
Vis.prototype.getState = function () {
|
||||
return {
|
||||
title: this.title,
|
||||
type: this.type.name,
|
||||
params: this.params,
|
||||
aggs: this.aggs.map(function (agg) {
|
||||
|
|
|
@ -15,12 +15,14 @@ describe('renderbot', function () {
|
|||
var vis;
|
||||
var $el;
|
||||
var renderbot;
|
||||
var uiState;
|
||||
|
||||
beforeEach(init);
|
||||
beforeEach(function () {
|
||||
vis = { hello: 'world' };
|
||||
$el = 'element';
|
||||
renderbot = new Renderbot(vis, $el);
|
||||
uiState = {};
|
||||
renderbot = new Renderbot(vis, $el, uiState);
|
||||
});
|
||||
|
||||
it('should have expected methods', function () {
|
||||
|
|
|
@ -180,5 +180,22 @@ describe('AggTable Directive', function () {
|
|||
});
|
||||
expect(call.args[1]).to.be('somefilename.csv');
|
||||
});
|
||||
|
||||
it('should use the export-title attribute', function () {
|
||||
var expected = 'export file name';
|
||||
var $el = $compile(`<kbn-agg-table table="table" export-title="exportTitle">`)($scope);
|
||||
$scope.$digest();
|
||||
|
||||
var $tableScope = $el.isolateScope();
|
||||
var aggTable = $tableScope.aggTable;
|
||||
$tableScope.table = {
|
||||
columns: [],
|
||||
rows: []
|
||||
};
|
||||
$tableScope.exportTitle = expected;
|
||||
$scope.$digest();
|
||||
|
||||
expect(aggTable.csv.filename).to.equal(`${expected}.csv`);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@ -13,7 +13,8 @@ define(function (require) {
|
|||
template: require('ui/agg_table/agg_table.html'),
|
||||
scope: {
|
||||
table: '=',
|
||||
perPage: '=?'
|
||||
perPage: '=?',
|
||||
exportTitle: '=?'
|
||||
},
|
||||
controllerAs: 'aggTable',
|
||||
compile: function ($el) {
|
||||
|
@ -75,7 +76,7 @@ define(function (require) {
|
|||
return;
|
||||
}
|
||||
|
||||
self.csv.filename = (table.title() || 'table') + '.csv';
|
||||
self.csv.filename = ($scope.exportTitle || table.title() || 'table') + '.csv';
|
||||
$scope.rows = table.rows;
|
||||
$scope.formattedColumns = table.columns.map(function (col, i) {
|
||||
var agg = $scope.table.aggConfig(col);
|
||||
|
|
|
@ -10,7 +10,12 @@
|
|||
<tr>
|
||||
<td>
|
||||
<kbn-agg-table-group ng-if="table.tables" group="table" per-page="perPage"></kbn-agg-table-group>
|
||||
<kbn-agg-table ng-if="table.rows" table="table" per-page="perPage"></kbn-agg-table>
|
||||
<kbn-agg-table
|
||||
ng-if="table.rows"
|
||||
table="table"
|
||||
export-title="exportTitle"
|
||||
per-page="perPage">
|
||||
</kbn-agg-table>
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
|
@ -28,7 +33,12 @@
|
|||
<tr>
|
||||
<td ng-repeat="table in columns">
|
||||
<kbn-agg-table-group ng-if="table.tables" group="table" per-page="perPage"></kbn-agg-table-group>
|
||||
<kbn-agg-table ng-if="table.rows" table="table" per-page="perPage"></kbn-agg-table>
|
||||
<kbn-agg-table
|
||||
ng-if="table.rows"
|
||||
table="table"
|
||||
export-title="exportTitle"
|
||||
per-page="perPage">
|
||||
</kbn-agg-table>
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
|
|
|
@ -10,7 +10,8 @@ define(function (require) {
|
|||
template: require('ui/agg_table/agg_table_group.html'),
|
||||
scope: {
|
||||
group: '=',
|
||||
perPage: '=?'
|
||||
perPage: '=?',
|
||||
exportTitle: '=?'
|
||||
},
|
||||
compile: function ($el) {
|
||||
// Use the compile function from the RecursionHelper,
|
||||
|
@ -25,7 +26,7 @@ define(function (require) {
|
|||
|
||||
var firstTable = group.tables[0];
|
||||
var params = firstTable.aggConfig && firstTable.aggConfig.params;
|
||||
// render groups that have Table children as if they were rows, because itteration is cleaner
|
||||
// render groups that have Table children as if they were rows, because iteration is cleaner
|
||||
var childLayout = (params && !params.row) ? 'columns' : 'rows';
|
||||
|
||||
$scope[childLayout] = group.tables;
|
||||
|
|
|
@ -19,10 +19,17 @@ define(function (require) {
|
|||
/**
|
||||
* Read the values for this metric from the
|
||||
* @param {[type]} bucket [description]
|
||||
* @return {[type]} [description]
|
||||
* @return {*} [description]
|
||||
*/
|
||||
MetricAggType.prototype.getValue = function (agg, bucket) {
|
||||
return bucket[agg.id].value;
|
||||
// Metric types where an empty set equals `zero`
|
||||
var isSettableToZero = ['cardinality', 'sum'].indexOf(agg.__type.name) !== -1;
|
||||
|
||||
// Return proper values when no buckets are present
|
||||
// `Count` handles empty sets properly
|
||||
if (!bucket[agg.id] && isSettableToZero) return 0;
|
||||
|
||||
return bucket[agg.id] && bucket[agg.id].value;
|
||||
};
|
||||
|
||||
/**
|
||||
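Restating the `getValue` change above in isolation: metric types whose empty result meaningfully equals zero (`cardinality` and `sum`) now report `0` when the bucket is missing, while other metrics safely return `undefined` instead of throwing. A rough sketch with plain objects (the `agg.type` field is a stand-in for the real `agg.__type.name`):

```js
// Simplified version of the empty-bucket handling added above.
function getMetricValue(agg, bucket) {
  // Metric types where an empty set equals zero.
  var isSettableToZero = ['cardinality', 'sum'].indexOf(agg.type) !== -1;

  // No result for this agg in the bucket.
  if (!bucket[agg.id] && isSettableToZero) return 0;

  // Returns undefined (rendered as '?' by the metric vis) when the bucket is missing.
  return bucket[agg.id] && bucket[agg.id].value;
}

getMetricValue({ id: '1', type: 'sum' }, {});                     // 0
getMetricValue({ id: '1', type: 'avg' }, {});                     // undefined
getMetricValue({ id: '1', type: 'avg' }, { '1': { value: 42 } }); // 42
```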
|
|
|
@ -49,7 +49,7 @@ define(function (require) {
|
|||
getValue: function (agg, bucket) {
|
||||
// values for 1, 5, and 10 will come back as 1.0, 5.0, and 10.0 so we
|
||||
// parse the keys and respond with the value that matches
|
||||
return _.find(bucket[agg.parentId].values, function (value, key) {
|
||||
return _.find(bucket[agg.parentId] && bucket[agg.parentId].values, function (value, key) {
|
||||
return agg.key === parseFloat(key);
|
||||
}) / 100;
|
||||
}
|
||||
|
|
|
@ -44,7 +44,7 @@ define(function (require) {
|
|||
getValue: function (agg, bucket) {
|
||||
// percentiles for 1, 5, and 10 will come back as 1.0, 5.0, and 10.0 so we
|
||||
// parse the keys and respond with the value that matches
|
||||
return _.find(bucket[agg.parentId].values, function (value, key) {
|
||||
return _.find(bucket[agg.parentId] && bucket[agg.parentId].values, function (value, key) {
|
||||
return agg.key === parseFloat(key);
|
||||
});
|
||||
}
|
||||
|
|
|
@ -5,43 +5,42 @@ import ngMock from 'ngMock';
|
|||
|
||||
import xsrfChromeApi from '../xsrf';
|
||||
|
||||
const xsrfHeader = 'kbn-xsrf-token';
|
||||
const xsrfToken = 'xsrfToken';
|
||||
const xsrfHeader = 'kbn-version';
|
||||
const { version } = require('../../../../../../package.json');
|
||||
|
||||
describe('chrome xsrf apis', function () {
|
||||
describe('#getXsrfToken()', function () {
|
||||
it('exposes the token', function () {
|
||||
const chrome = {};
|
||||
xsrfChromeApi(chrome, { xsrfToken });
|
||||
expect(chrome.getXsrfToken()).to.be(xsrfToken);
|
||||
xsrfChromeApi(chrome, { version });
|
||||
expect(chrome.getXsrfToken()).to.be(version);
|
||||
});
|
||||
});
|
||||
|
||||
context('jQuery support', function () {
|
||||
it('adds a global jQuery prefilter', function () {
|
||||
stub($, 'ajaxPrefilter');
|
||||
xsrfChromeApi({}, {});
|
||||
xsrfChromeApi({}, { version });
|
||||
expect($.ajaxPrefilter.callCount).to.be(1);
|
||||
});
|
||||
|
||||
context('jQuery prefilter', function () {
|
||||
let prefilter;
|
||||
const xsrfToken = 'xsrfToken';
|
||||
|
||||
beforeEach(function () {
|
||||
stub($, 'ajaxPrefilter');
|
||||
xsrfChromeApi({}, { xsrfToken });
|
||||
xsrfChromeApi({}, { version });
|
||||
prefilter = $.ajaxPrefilter.args[0][0];
|
||||
});
|
||||
|
||||
it('sets the kbn-xsrf-token header', function () {
|
||||
it(`sets the ${xsrfHeader} header`, function () {
|
||||
const setHeader = stub();
|
||||
prefilter({}, {}, { setRequestHeader: setHeader });
|
||||
|
||||
expect(setHeader.callCount).to.be(1);
|
||||
expect(setHeader.args[0]).to.eql([
|
||||
xsrfHeader,
|
||||
xsrfToken
|
||||
version
|
||||
]);
|
||||
});
|
||||
|
||||
|
@ -60,7 +59,7 @@ describe('chrome xsrf apis', function () {
|
|||
beforeEach(function () {
|
||||
stub($, 'ajaxPrefilter');
|
||||
const chrome = {};
|
||||
xsrfChromeApi(chrome, { xsrfToken });
|
||||
xsrfChromeApi(chrome, { version });
|
||||
ngMock.module(chrome.$setupXsrfRequestInterceptor);
|
||||
});
|
||||
|
||||
|
@ -78,9 +77,9 @@ describe('chrome xsrf apis', function () {
|
|||
$httpBackend.verifyNoOutstandingRequest();
|
||||
});
|
||||
|
||||
it('injects a kbn-xsrf-token header on every request', function () {
|
||||
it(`injects a ${xsrfHeader} header on every request`, function () {
|
||||
$httpBackend.expectPOST('/api/test', undefined, function (headers) {
|
||||
return headers[xsrfHeader] === xsrfToken;
|
||||
return headers[xsrfHeader] === version;
|
||||
}).respond(200, '');
|
||||
|
||||
$http.post('/api/test');
|
||||
|
@ -113,10 +112,10 @@ describe('chrome xsrf apis', function () {
|
|||
$httpBackend.flush();
|
||||
});
|
||||
|
||||
it('accepts alternate tokens to use', function () {
|
||||
const customToken = `custom:${xsrfToken}`;
|
||||
it('treats the kbnXsrfToken option as boolean-y', function () {
|
||||
const customToken = `custom:${version}`;
|
||||
$httpBackend.expectPOST('/api/test', undefined, function (headers) {
|
||||
return headers[xsrfHeader] === customToken;
|
||||
return headers[xsrfHeader] === version;
|
||||
}).respond(200, '');
|
||||
|
||||
$http({
|
||||
|
|
|
@ -4,12 +4,12 @@ import { set } from 'lodash';
|
|||
export default function (chrome, internals) {
|
||||
|
||||
chrome.getXsrfToken = function () {
|
||||
return internals.xsrfToken;
|
||||
return internals.version;
|
||||
};
|
||||
|
||||
$.ajaxPrefilter(function ({ kbnXsrfToken = internals.xsrfToken }, originalOptions, jqXHR) {
|
||||
$.ajaxPrefilter(function ({ kbnXsrfToken = true }, originalOptions, jqXHR) {
|
||||
if (kbnXsrfToken) {
|
||||
jqXHR.setRequestHeader('kbn-xsrf-token', kbnXsrfToken);
|
||||
jqXHR.setRequestHeader('kbn-version', internals.version);
|
||||
}
|
||||
});
|
||||
|
||||
|
@ -17,9 +17,9 @@ export default function (chrome, internals) {
|
|||
$httpProvider.interceptors.push(function () {
|
||||
return {
|
||||
request: function (opts) {
|
||||
const { kbnXsrfToken = internals.xsrfToken } = opts;
|
||||
const { kbnXsrfToken = true } = opts;
|
||||
if (kbnXsrfToken) {
|
||||
set(opts, ['headers', 'kbn-xsrf-token'], kbnXsrfToken);
|
||||
set(opts, ['headers', 'kbn-version'], internals.version);
|
||||
}
|
||||
return opts;
|
||||
}
|
||||
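On the client side, the prefilter and interceptor above now always attach `kbn-version` unless a request explicitly opts out, and the per-request `kbnXsrfToken` option is treated as a plain boolean rather than a token value. A hedged usage sketch (the endpoint URL is made up, and jQuery plus the chrome prefilter are assumed to be loaded):

```js
import $ from 'jquery';

// Default: the prefilter registered above adds the kbn-version header automatically.
$.ajax({ method: 'POST', url: '/api/some-endpoint' });

// Per-request opt-out: kbnXsrfToken is boolean-y now, so `false` skips the header.
$.ajax({ method: 'POST', url: '/api/some-endpoint', kbnXsrfToken: false });
```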
|
|
|
@ -93,7 +93,12 @@ define(function (require) {
|
|||
ignore_unavailable: true,
|
||||
preference: sessionId,
|
||||
body: body
|
||||
}));
|
||||
}))
|
||||
.catch(function (err) {
|
||||
return strategy.handleResponseError
|
||||
? strategy.handleResponseError(executable, err)
|
||||
: Promise.reject(err);
|
||||
});
|
||||
})
|
||||
.then(function (clientResp) {
|
||||
return strategy.getResponses(clientResp);
|
||||
|
|
|
@ -47,9 +47,12 @@ describe('ui/courier/fetch/request/segmented', () => {
|
|||
}
|
||||
|
||||
function mockIndexPattern() {
|
||||
const queue = [1, 2, 3];
|
||||
return {
|
||||
toIndexList: sinon.stub().returns(Promise.resolve(queue))
|
||||
toDetailedIndexList: sinon.stub().returns(Promise.resolve([
|
||||
{ index: 1, min: 0, max: 1 },
|
||||
{ index: 2, min: 0, max: 1 },
|
||||
{ index: 3, min: 0, max: 1 },
|
||||
]))
|
||||
};
|
||||
}
|
||||
});
|
||||
|
|
|
@ -34,11 +34,11 @@ describe('ui/courier/fetch/request/segmented/_createQueue', () => {
|
|||
expect(req._queueCreated).to.be(true);
|
||||
});
|
||||
|
||||
it('relies on indexPattern.toIndexList to generate queue', async function () {
|
||||
it('relies on indexPattern.toDetailedIndexList to generate queue', async function () {
|
||||
const source = new MockSource();
|
||||
const ip = source.get('index');
|
||||
const indices = [1,2,3];
|
||||
sinon.stub(ip, 'toIndexList').returns(Promise.resolve(indices));
|
||||
sinon.stub(ip, 'toDetailedIndexList').returns(Promise.resolve(indices));
|
||||
|
||||
const req = new SegmentedReq(source);
|
||||
const output = await req._createQueue();
|
||||
|
@ -49,14 +49,14 @@ describe('ui/courier/fetch/request/segmented/_createQueue', () => {
|
|||
const source = new MockSource();
|
||||
const ip = source.get('index');
|
||||
const req = new SegmentedReq(source);
|
||||
sinon.stub(ip, 'toIndexList').returns(Promise.resolve([1,2,3]));
|
||||
sinon.stub(ip, 'toDetailedIndexList').returns(Promise.resolve([1,2,3]));
|
||||
|
||||
req.setDirection('asc');
|
||||
await req._createQueue();
|
||||
expect(ip.toIndexList.lastCall.args[2]).to.be('asc');
|
||||
expect(ip.toDetailedIndexList.lastCall.args[2]).to.be('asc');
|
||||
|
||||
req.setDirection('desc');
|
||||
await req._createQueue();
|
||||
expect(ip.toIndexList.lastCall.args[2]).to.be('desc');
|
||||
expect(ip.toDetailedIndexList.lastCall.args[2]).to.be('desc');
|
||||
});
|
||||
});
|
|
@ -0,0 +1,129 @@
|
|||
import ngMock from 'ngMock';
|
||||
import expect from 'expect.js';
|
||||
import { times } from 'lodash';
|
||||
import sinon from 'auto-release-sinon';
|
||||
|
||||
import HitSortFnProv from 'plugins/kibana/discover/_hit_sort_fn';
|
||||
import NoDigestPromises from 'testUtils/noDigestPromises';
|
||||
|
||||
describe('Segmented Request Index Selection', function () {
|
||||
let Promise;
|
||||
let $rootScope;
|
||||
let SegmentedReq;
|
||||
let MockSource;
|
||||
let HitSortFn;
|
||||
|
||||
NoDigestPromises.activateForSuite();
|
||||
|
||||
beforeEach(ngMock.module('kibana'));
|
||||
beforeEach(ngMock.inject((Private, $injector) => {
|
||||
Promise = $injector.get('Promise');
|
||||
HitSortFn = Private(HitSortFnProv);
|
||||
$rootScope = $injector.get('$rootScope');
|
||||
SegmentedReq = Private(require('ui/courier/fetch/request/segmented'));
|
||||
|
||||
const StubbedSearchSourceProvider = require('fixtures/stubbed_search_source');
|
||||
MockSource = class {
|
||||
constructor() {
|
||||
return $injector.invoke(StubbedSearchSourceProvider);
|
||||
}
|
||||
};
|
||||
}));
|
||||
|
||||
it('queries with size until all 500 docs returned', async function () {
|
||||
const search = new MockSource();
|
||||
const indexPattern = search.get('index');
|
||||
sinon.stub(indexPattern, 'toDetailedIndexList').returns(Promise.resolve([
|
||||
{ index: 'one', min: 0, max: 1 },
|
||||
{ index: 'two', min: 0, max: 1 },
|
||||
{ index: 'three', min: 0, max: 1 },
|
||||
{ index: 'four', min: 0, max: 1 },
|
||||
{ index: 'five', min: 0, max: 1 },
|
||||
]));
|
||||
|
||||
const req = new SegmentedReq(search);
|
||||
req._handle.setDirection('desc');
|
||||
req._handle.setSortFn(new HitSortFn('desc'));
|
||||
req._handle.setSize(500);
|
||||
await req.start();
|
||||
|
||||
// first 200
|
||||
expect((await req.getFetchParams()).body.size).to.be(500);
|
||||
await req.handleResponse({
|
||||
hits: { total: 1000, hits: times(200, (i) => ({ i })) }
|
||||
});
|
||||
|
||||
// total = 400
|
||||
expect((await req.getFetchParams()).body.size).to.be(500);
|
||||
await req.handleResponse({
|
||||
hits: { total: 1000, hits: times(200, (i) => ({ i })) }
|
||||
});
|
||||
|
||||
// total = 600
|
||||
expect((await req.getFetchParams()).body.size).to.be(500);
|
||||
await req.handleResponse({
|
||||
hits: { total: 1000, hits: times(200, (i) => ({ i })) }
|
||||
});
|
||||
|
||||
expect((await req.getFetchParams()).body.size).to.be(0);
|
||||
await req.handleResponse({
|
||||
hits: { total: 1000, hits: times(200, (i) => ({ i })) }
|
||||
});
|
||||
|
||||
expect((await req.getFetchParams()).body.size).to.be(0);
|
||||
await req.handleResponse({
|
||||
hits: { total: 1000, hits: times(200, (i) => ({ i })) }
|
||||
});
|
||||
});
|
||||
|
||||
  it(`sets size 0 for indices that couldn't produce hits`, async function () {
|
||||
const search = new MockSource();
|
||||
const indexPattern = search.get('index');
|
||||
|
||||
// the segreq is looking for 10 documents, and we will give it ten docs with time:5 in the first response.
|
||||
// on the second index it should still request 10 documents because it could produce documents with time:5.
|
||||
    // the next two indices will get size 0, since they couldn't produce documents with time:5
|
||||
// the final index will get size:10, because it too can produce docs with time:5
|
||||
sinon.stub(indexPattern, 'toDetailedIndexList').returns(Promise.resolve([
|
||||
{ index: 'one', min: 0, max: 10 },
|
||||
{ index: 'two', min: 0, max: 10 },
|
||||
{ index: 'three', min: 12, max: 20 },
|
||||
{ index: 'four', min: 15, max: 20 },
|
||||
{ index: 'five', min: 5, max: 50 },
|
||||
]));
|
||||
|
||||
const req = new SegmentedReq(search);
|
||||
req._handle.setDirection('desc');
|
||||
req._handle.setSortFn(new HitSortFn('desc'));
|
||||
req._handle.setSize(10);
|
||||
await req.start();
|
||||
|
||||
// first 10
|
||||
expect((await req.getFetchParams()).body.size).to.be(10);
|
||||
await req.handleResponse({
|
||||
hits: { total: 1000, hits: times(10, () => ({ _source: { time: 5 } })) }
|
||||
});
|
||||
|
||||
// total = 400
|
||||
expect((await req.getFetchParams()).body.size).to.be(10);
|
||||
await req.handleResponse({
|
||||
hits: { total: 1000, hits: times(10, () => ({ _source: { time: 5 } })) }
|
||||
});
|
||||
|
||||
// total = 600
|
||||
expect((await req.getFetchParams()).body.size).to.be(0);
|
||||
await req.handleResponse({
|
||||
hits: { total: 1000, hits: [] }
|
||||
});
|
||||
|
||||
expect((await req.getFetchParams()).body.size).to.be(0);
|
||||
await req.handleResponse({
|
||||
hits: { total: 1000, hits: [] }
|
||||
});
|
||||
|
||||
expect((await req.getFetchParams()).body.size).to.be(10);
|
||||
await req.handleResponse({
|
||||
hits: { total: 1000, hits: times(10, () => ({ _source: { time: 5 } })) }
|
||||
});
|
||||
});
|
||||
});
|
|
@ -0,0 +1,55 @@
|
|||
import ngMock from 'ngMock';
|
||||
import expect from 'expect.js';
|
||||
import { times } from 'lodash';
|
||||
import sinon from 'auto-release-sinon';
|
||||
|
||||
import HitSortFnProv from 'plugins/kibana/discover/_hit_sort_fn';
|
||||
import NoDigestPromises from 'testUtils/noDigestPromises';
|
||||
|
||||
describe('Segmented Request Size Picking', function () {
|
||||
let Promise;
|
||||
let $rootScope;
|
||||
let SegmentedReq;
|
||||
let MockSource;
|
||||
let HitSortFn;
|
||||
|
||||
NoDigestPromises.activateForSuite();
|
||||
|
||||
beforeEach(ngMock.module('kibana'));
|
||||
beforeEach(ngMock.inject((Private, $injector) => {
|
||||
Promise = $injector.get('Promise');
|
||||
HitSortFn = Private(HitSortFnProv);
|
||||
$rootScope = $injector.get('$rootScope');
|
||||
SegmentedReq = Private(require('ui/courier/fetch/request/segmented'));
|
||||
|
||||
const StubbedSearchSourceProvider = require('fixtures/stubbed_search_source');
|
||||
MockSource = class {
|
||||
constructor() {
|
||||
return $injector.invoke(StubbedSearchSourceProvider);
|
||||
}
|
||||
};
|
||||
}));
|
||||
|
||||
context('without a size', function () {
|
||||
it('does not set the request size', async function () {
|
||||
const req = new SegmentedReq(new MockSource());
|
||||
req._handle.setDirection('desc');
|
||||
req._handle.setSortFn(new HitSortFn('desc'));
|
||||
await req.start();
|
||||
|
||||
expect((await req.getFetchParams()).body).to.not.have.property('size');
|
||||
});
|
||||
});
|
||||
|
||||
context('with a size', function () {
|
||||
it('sets the request size to the entire desired size', async function () {
|
||||
const req = new SegmentedReq(new MockSource());
|
||||
req._handle.setDirection('desc');
|
||||
req._handle.setSize(555);
|
||||
req._handle.setSortFn(new HitSortFn('desc'));
|
||||
await req.start();
|
||||
|
||||
expect((await req.getFetchParams()).body).to.have.property('size', 555);
|
||||
});
|
||||
});
|
||||
});
|
|
@ -14,7 +14,7 @@ define(function (require) {
|
|||
});
|
||||
|
||||
if (!myHandlers.length) {
|
||||
notify.fatal(new Error('unhandled error ' + (error.stack || error.message)));
|
||||
notify.fatal(new Error(`unhandled courier request error: ${ notify.describeError(error) }`));
|
||||
} else {
|
||||
myHandlers.forEach(function (handler) {
|
||||
handler.defer.resolve(error);
|
||||
|
|
|
@ -22,6 +22,7 @@ define(function (require) {
|
|||
this.setDirection = _.bindKey(req, 'setDirection');
|
||||
this.setSize = _.bindKey(req, 'setSize');
|
||||
this.setMaxSegments = _.bindKey(req, 'setMaxSegments');
|
||||
this.setSortFn = _.bindKey(req, 'setSortFn');
|
||||
}
|
||||
|
||||
return SegmentedHandle;
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
define(function (require) {
|
||||
return function CourierSegmentedReqProvider(es, Private, Promise, Notifier, timefilter, config) {
|
||||
var _ = require('lodash');
|
||||
var isNumber = require('lodash').isNumber;
|
||||
var SearchReq = Private(require('ui/courier/fetch/request/search'));
|
||||
var SegmentedHandle = Private(require('ui/courier/fetch/request/_segmented_handle'));
|
||||
|
||||
|
@ -16,13 +17,16 @@ define(function (require) {
|
|||
|
||||
// segmented request specific state
|
||||
this._initFn = initFn;
|
||||
this._desiredSize = false;
|
||||
|
||||
this._desiredSize = null;
|
||||
this._maxSegments = config.get('courier:maxSegmentCount');
|
||||
this._hitsReceived = 0;
|
||||
this._direction = 'desc';
|
||||
this._sortFn = null;
|
||||
this._queueCreated = false;
|
||||
this._handle = new SegmentedHandle(this);
|
||||
|
||||
this._hitWindow = null;
|
||||
|
||||
// prevent the source from changing between requests,
|
||||
// all calls will return the same promise
|
||||
this._getFlattenedSource = _.once(this._getFlattenedSource);
|
||||
|
@ -70,8 +74,7 @@ define(function (require) {
|
|||
SegmentedReq.prototype.getFetchParams = function () {
|
||||
var self = this;
|
||||
|
||||
return self._getFlattenedSource()
|
||||
.then(function (flatSource) {
|
||||
return self._getFlattenedSource().then(function (flatSource) {
|
||||
var params = _.cloneDeep(flatSource);
|
||||
|
||||
// calculate the number of indices to fetch in this request in order to prevent
|
||||
|
@ -81,10 +84,12 @@ define(function (require) {
|
|||
// request, making sure the first request returns faster.
|
||||
var remainingSegments = self._maxSegments - self._segments.length;
|
||||
var indexCount = Math.max(1, Math.floor(self._queue.length / remainingSegments));
|
||||
params.index = self._active = self._queue.splice(0, indexCount);
|
||||
|
||||
if (self._desiredSize !== false) {
|
||||
params.body.size = Math.max(self._desiredSize - self._hitsReceived, 0);
|
||||
var indices = self._active = self._queue.splice(0, indexCount);
|
||||
params.index = _.pluck(indices, 'index');
|
||||
|
||||
if (isNumber(self._desiredSize)) {
|
||||
params.body.size = self._pickSizeForIndices(indices);
|
||||
}
|
||||
|
||||
if (params.body.size === 0) params.search_type = 'count';
|
||||
|
@ -149,6 +154,15 @@ define(function (require) {
|
|||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Set the function that will be used to sort the rows
|
||||
*
|
||||
* @param {fn}
|
||||
*/
|
||||
SegmentedReq.prototype.setSortFn = function (sortFn) {
|
||||
this._sortFn = sortFn;
|
||||
};
|
||||
|
||||
/**
|
||||
* Set the sort total number of documents to
|
||||
* emit
|
||||
|
@ -160,7 +174,8 @@ define(function (require) {
|
|||
* @param {number|false}
|
||||
*/
|
||||
SegmentedReq.prototype.setSize = function (totalSize) {
|
||||
this._desiredSize = _.parseInt(totalSize) || false;
|
||||
this._desiredSize = _.parseInt(totalSize);
|
||||
if (isNaN(this._desiredSize)) this._desiredSize = null;
|
||||
};
|
||||
|
||||
SegmentedReq.prototype._createQueue = function () {
|
||||
|
@ -169,7 +184,7 @@ define(function (require) {
|
|||
var indexPattern = self.source.get('index');
|
||||
self._queueCreated = false;
|
||||
|
||||
return indexPattern.toIndexList(timeBounds.min, timeBounds.max, self._direction)
|
||||
return indexPattern.toDetailedIndexList(timeBounds.min, timeBounds.max, self._direction)
|
||||
.then(function (queue) {
|
||||
if (!_.isArray(queue)) queue = [queue];
|
||||
|
||||
|
@ -205,13 +220,30 @@ define(function (require) {
|
|||
|
||||
this._mergeSegment(seg);
|
||||
this.resp = _.omit(this._mergedResp, '_bucketIndex');
|
||||
this._hitsReceived += seg.hits.hits.length;
|
||||
|
||||
if (firstHits) this._handle.emit('first', seg);
|
||||
if (gotHits) this._handle.emit('segment', seg);
|
||||
if (haveHits) this._handle.emit('mergedSegment', this.resp);
|
||||
};
|
||||
|
||||
SegmentedReq.prototype._mergeHits = function (hits) {
|
||||
var mergedHits = this._mergedResp.hits.hits;
|
||||
var desiredSize = this._desiredSize;
|
||||
var sortFn = this._sortFn;
|
||||
|
||||
_.pushAll(hits, mergedHits);
|
||||
|
||||
if (sortFn) {
|
||||
notify.event('resort rows', function () {
|
||||
mergedHits.sort(sortFn);
|
||||
});
|
||||
}
|
||||
|
||||
if (isNumber(desiredSize)) {
|
||||
mergedHits = this._mergedResp.hits.hits = mergedHits.slice(0, desiredSize);
|
||||
}
|
||||
};
|
||||
|
||||
SegmentedReq.prototype._mergeSegment = notify.timed('merge response segment', function (seg) {
|
||||
var merged = this._mergedResp;
|
||||
|
||||
|
@ -220,7 +252,11 @@ define(function (require) {
|
|||
merged.took += seg.took;
|
||||
merged.hits.total += seg.hits.total;
|
||||
merged.hits.max_score = Math.max(merged.hits.max_score, seg.hits.max_score);
|
||||
[].push.apply(merged.hits.hits, seg.hits.hits);
|
||||
|
||||
if (_.size(seg.hits.hits)) {
|
||||
this._mergeHits(seg.hits.hits);
|
||||
this._detectHitsWindow(merged.hits.hits);
|
||||
}
|
||||
|
||||
if (!seg.aggregations) return;
|
||||
|
||||
|
@ -251,6 +287,51 @@ define(function (require) {
|
|||
});
|
||||
});
|
||||
|
||||
SegmentedReq.prototype._detectHitsWindow = function (hits) {
|
||||
hits = hits || [];
|
||||
var indexPattern = this.source.get('index');
|
||||
var desiredSize = this._desiredSize;
|
||||
|
||||
var size = _.size(hits);
|
||||
if (!isNumber(desiredSize) || size < desiredSize) {
|
||||
this._hitWindow = {
|
||||
size: size,
|
||||
min: -Infinity,
|
||||
max: Infinity
|
||||
};
|
||||
return;
|
||||
}
|
||||
|
||||
let min;
|
||||
let max;
|
||||
|
||||
hits.forEach(function (deepHit) {
|
||||
var hit = indexPattern.flattenHit(deepHit);
|
||||
var time = hit[indexPattern.timeFieldName];
|
||||
if (min == null || time < min) min = time;
|
||||
if (max == null || time > max) max = time;
|
||||
});
|
||||
|
||||
this._hitWindow = { size, min, max };
|
||||
};
|
||||
|
||||
SegmentedReq.prototype._pickSizeForIndices = function (indices) {
|
||||
var hitWindow = this._hitWindow;
|
||||
var desiredSize = this._desiredSize;
|
||||
|
||||
if (!isNumber(desiredSize)) return null;
|
||||
// we don't have any hits yet, get us more info!
|
||||
if (!hitWindow) return desiredSize;
|
||||
// the order of documents isn't important, just get us more
|
||||
if (!this._sortFn) return Math.max(desiredSize - hitWindow.size, 0);
|
||||
// if all of the documents in every index fall outside of our current doc set, we can ignore them.
|
||||
var someOverlap = indices.some(function (index) {
|
||||
return index.min <= hitWindow.max && hitWindow.min <= index.max;
|
||||
});
|
||||
|
||||
return someOverlap ? desiredSize : 0;
|
||||
};
|
||||
|
||||
return SegmentedReq;
|
||||
};
|
||||
});
|
||||
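The two methods added above drive the behaviour exercised by the new "Segmented Request Index Selection" tests: once a full window of sorted hits has been collected, an index whose `[min, max]` time range cannot overlap that window is fetched with `size: 0` (a count-only request). A small standalone sketch of the overlap decision, with plain numbers and no SearchSource wiring (function and parameter names here are illustrative):

```js
// Simplified version of _pickSizeForIndices: `indices` come from
// indexPattern.toDetailedIndexList(), `hitWindow` from the hits merged so far.
function pickSizeForIndices(indices, hitWindow, desiredSize, sorted) {
  if (typeof desiredSize !== 'number') return null;    // no size limit was set
  if (!hitWindow) return desiredSize;                   // no hits yet, ask for everything
  if (!sorted) return Math.max(desiredSize - hitWindow.size, 0);

  // With a sort fn, indices that cannot produce hits inside the current
  // window are only counted, not fetched.
  var someOverlap = indices.some(function (index) {
    return index.min <= hitWindow.max && hitWindow.min <= index.max;
  });
  return someOverlap ? desiredSize : 0;
}

pickSizeForIndices([{ index: 'three', min: 12, max: 20 }], { size: 10, min: 0, max: 10 }, 10, true); // 0
pickSizeForIndices([{ index: 'five', min: 5, max: 50 }], { size: 10, min: 0, max: 10 }, 10, true);   // 10
```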
|
|
|
@ -31,6 +31,57 @@ describe('ui/courier/fetch/strategy/search', () => {
|
|||
});
|
||||
});
|
||||
|
||||
describe('#handleResponseError()', () => {
|
||||
let error;
|
||||
beforeEach(() => {
|
||||
error = { status: 404, body: { error: { index: '[-*]' } } };
|
||||
});
|
||||
|
||||
it('recovers 404 for index -* with empty response', () => {
|
||||
let resp;
|
||||
search.handleResponseError(reqsFetchParams, error).then(val => resp = val);
|
||||
$rootScope.$apply();
|
||||
|
||||
expect(resp.responses).not.to.be(undefined);
|
||||
});
|
||||
|
||||
it('mocks all of the bundled searches', () => {
|
||||
let resp;
|
||||
reqsFetchParams.push({});
|
||||
search.handleResponseError(reqsFetchParams, error).then(val => resp = val);
|
||||
$rootScope.$apply();
|
||||
|
||||
expect(Array.isArray(resp.responses)).to.be(true);
|
||||
expect(resp.responses.length).to.be(2);
|
||||
resp.responses.forEach(res => {
|
||||
expect(res.hits.total).to.be(0);
|
||||
expect(res.hits.hits.length).to.be(0);
|
||||
});
|
||||
});
|
||||
|
||||
context('when not a 404', () => {
|
||||
it('rejects with the original response', () => {
|
||||
error.status = 403;
|
||||
let err;
|
||||
search.handleResponseError(reqsFetchParams, error).catch(val => err = val);
|
||||
$rootScope.$apply();
|
||||
|
||||
expect(err).to.be(error);
|
||||
});
|
||||
});
|
||||
|
||||
context('when not for -* index', () => {
|
||||
it('rejects with the original response', () => {
|
||||
error.body.error.index = '[foo-*]';
|
||||
let err;
|
||||
search.handleResponseError(reqsFetchParams, error).catch(val => err = val);
|
||||
$rootScope.$apply();
|
||||
|
||||
expect(err).to.be(error);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('#reqsFetchParamsToBody()', () => {
|
||||
it('filters out any body properties that begin with $', () => {
|
||||
let value;
|
||||
|
|
|
@ -4,9 +4,32 @@ define(function (require) {
|
|||
var angular = require('angular');
|
||||
var toJson = require('ui/utils/aggressive_parse').toJson;
|
||||
|
||||
function emptyResponse() {
|
||||
return { hits: { total: 0, hits: [] } };
|
||||
};
|
||||
|
||||
return {
|
||||
clientMethod: 'msearch',
|
||||
|
||||
/**
|
||||
* Recover from a 404 when searching against no indexes
|
||||
*
|
||||
* If we get a 404 while intentionally searching for no indexes, we can
|
||||
* simply mock an empty result since that is ultimately what kibana cares
|
||||
* about.
|
||||
*
|
||||
* @param {object} response - the client response from elasticsearch
|
||||
* @return {Promise} - fulfilled by mock or rejected with original error
|
||||
*/
|
||||
handleResponseError: function (requests, response) {
|
||||
var is404 = _.get(response, 'status') === 404;
|
||||
var isEmptyIndexList = _.get(response, 'body.error.index') === '[-*]';
|
||||
|
||||
return is404 && isEmptyIndexList
|
||||
? Promise.resolve({ responses: requests.map(emptyResponse) })
|
||||
: Promise.reject(response);
|
||||
},
|
||||
|
||||
/**
|
||||
    * Flatten a series of requests into an ES request body
|
||||
*
|
||||
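The effect of the new `handleResponseError` hook can be checked in isolation: a 404 against the intentional empty index list `[-*]` is converted into one empty response per bundled request, and anything else is re-rejected. A sketch of that behaviour in plain Promise code, without the courier wiring (the lodash `_.get` calls are replaced with explicit property checks here):

```js
// Standalone re-statement of the recovery logic added above.
function emptyResponse() {
  return { hits: { total: 0, hits: [] } };
}

function handleResponseError(requests, response) {
  var is404 = response && response.status === 404;
  var isEmptyIndexList = response && response.body &&
    response.body.error && response.body.error.index === '[-*]';

  return is404 && isEmptyIndexList
    ? Promise.resolve({ responses: requests.map(emptyResponse) })
    : Promise.reject(response);
}

// Recovered: two bundled searches get two empty result sets.
handleResponseError([{}, {}], { status: 404, body: { error: { index: '[-*]' } } })
  .then(function (resp) { console.log(resp.responses.length); }); // 2

// Not recovered: a 403 is passed through as a rejection.
handleResponseError([{}], { status: 403, body: { error: { index: '[-*]' } } })
  .catch(function (err) { console.log(err.status); }); // 403
```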
|
|
|
@ -1,6 +1,6 @@
|
|||
define(function (require) {
|
||||
var _ = require('lodash');
|
||||
var Notifier = require('ui/notify/Notifier');
|
||||
var Notifier = require('ui/notify/notifier');
|
||||
|
||||
return function EventsProvider(Private, Promise) {
|
||||
var SimpleEmitter = require('ui/utils/SimpleEmitter');
|
||||
|
|
|
@ -2,21 +2,11 @@ define(function (require) {
|
|||
var _ = require('lodash');
|
||||
var dedupFilters = require('./lib/dedupFilters');
|
||||
var uniqFilters = require('./lib/uniqFilters');
|
||||
|
||||
// given an object or array of objects, return the value of the passed param
|
||||
// if the param is missing, return undefined
|
||||
function findByParam(values, param) {
|
||||
if (_.isArray(values)) { // point series chart
|
||||
var index = _.findIndex(values, param);
|
||||
if (index === -1) return;
|
||||
return values[index][param];
|
||||
}
|
||||
return values[param]; // pie chart
|
||||
}
|
||||
var findByParam = require('ui/utils/find_by_param');
|
||||
|
||||
return function (Notifier) {
|
||||
return function ($state) {
|
||||
return function (event) {
|
||||
return function (event, simulate) {
|
||||
var notify = new Notifier({
|
||||
location: 'Filter bar'
|
||||
});
|
||||
|
@ -58,9 +48,20 @@ define(function (require) {
|
|||
|
||||
if (!filters.length) return;
|
||||
|
||||
if (event.negate) {
|
||||
_.each(filters, function (filter) {
|
||||
filter.meta = filter.meta || {};
|
||||
filter.meta.negate = true;
|
||||
});
|
||||
}
|
||||
|
||||
filters = dedupFilters($state.filters, uniqFilters(filters));
|
||||
// We need to add a bunch of filter deduping here.
|
||||
$state.$newFilters = filters;
|
||||
if (!simulate) {
|
||||
$state.$newFilters = filters;
|
||||
}
|
||||
|
||||
return filters;
|
||||
}
|
||||
};
|
||||
};
|
||||
|
|
|
@ -44,13 +44,19 @@ describe('index pattern', function () {
|
|||
|
||||
// stub calculateIndices
|
||||
calculateIndices = sinon.spy(function () {
|
||||
return $injector.get('Promise').resolve(['foo', 'bar']);
|
||||
return $injector.get('Promise').resolve([
|
||||
{ index: 'foo', max: Infinity, min: -Infinity },
|
||||
{ index: 'bar', max: Infinity, min: -Infinity }
|
||||
]);
|
||||
});
|
||||
Private.stub(require('ui/index_patterns/_calculate_indices'), calculateIndices);
|
||||
|
||||
// spy on intervals
|
||||
intervals = Private(require('ui/index_patterns/_intervals'));
|
||||
sinon.stub(intervals, 'toIndexList').returns(['foo', 'bar']);
|
||||
sinon.stub(intervals, 'toIndexList').returns([
|
||||
{ index: 'foo', max: Infinity, min: -Infinity },
|
||||
{ index: 'bar', max: Infinity, min: -Infinity }
|
||||
]);
|
||||
|
||||
IndexPattern = Private(require('ui/index_patterns/_index_pattern'));
|
||||
}));
|
||||
|
@ -290,78 +296,53 @@ describe('index pattern', function () {
|
|||
|
||||
describe('#toIndexList', function () {
|
||||
context('when index pattern is an interval', function () {
|
||||
require('testUtils/noDigestPromises').activateForSuite();
|
||||
|
||||
var interval;
|
||||
beforeEach(function () {
|
||||
interval = 'result:getInterval';
|
||||
sinon.stub(indexPattern, 'getInterval').returns(interval);
|
||||
});
|
||||
|
||||
it('invokes interval toIndexList with given start/stop times', function () {
|
||||
indexPattern.toIndexList(1, 2);
|
||||
$rootScope.$apply();
|
||||
|
||||
it('invokes interval toIndexList with given start/stop times', async function () {
|
||||
await indexPattern.toIndexList(1, 2);
|
||||
var id = indexPattern.id;
|
||||
expect(intervals.toIndexList.calledWith(id, interval, 1, 2)).to.be(true);
|
||||
});
|
||||
it('is fulfilled by the result of interval toIndexList', function () {
|
||||
var indexList;
|
||||
indexPattern.toIndexList().then(function (val) {
|
||||
indexList = val;
|
||||
});
|
||||
$rootScope.$apply();
|
||||
|
||||
it('is fulfilled by the result of interval toIndexList', async function () {
|
||||
var indexList = await indexPattern.toIndexList();
|
||||
expect(indexList[0]).to.equal('foo');
|
||||
expect(indexList[1]).to.equal('bar');
|
||||
});
|
||||
|
||||
context('with sort order', function () {
|
||||
require('testUtils/noDigestPromises').activateForSuite();
|
||||
|
||||
context('undefined', function () {
|
||||
        it('provides the index list intact', async function () {
|
||||
const indexList = await indexPattern.toIndexList();
|
||||
expect(indexList).to.eql(['foo', 'bar']);
|
||||
});
|
||||
});
|
||||
|
||||
context('asc', function () {
|
||||
        it('provides the index list intact', async function () {
|
||||
const indexList = await indexPattern.toIndexList(1, 2, 'asc');
|
||||
expect(indexList).to.eql(['foo', 'bar']);
|
||||
});
|
||||
});
|
||||
|
||||
context('desc', function () {
|
||||
it('reverses the index list', async function () {
|
||||
const indexList = await indexPattern.toIndexList(1, 2, 'desc');
|
||||
expect(indexList).to.eql(['bar', 'foo']);
|
||||
it('passes the sort order to the intervals module', function () {
|
||||
return indexPattern.toIndexList(1, 2, 'SORT_DIRECTION')
|
||||
.then(function () {
|
||||
expect(intervals.toIndexList.callCount).to.be(1);
|
||||
expect(intervals.toIndexList.getCall(0).args[4]).to.be('SORT_DIRECTION');
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
context('when index pattern is a time-base wildcard', function () {
|
||||
require('testUtils/noDigestPromises').activateForSuite();
|
||||
beforeEach(function () {
|
||||
sinon.stub(indexPattern, 'getInterval').returns(false);
|
||||
sinon.stub(indexPattern, 'hasTimeField').returns(true);
|
||||
sinon.stub(indexPattern, 'isWildcard').returns(true);
|
||||
});
|
||||
|
||||
it('invokes calculateIndices with given start/stop times and sortOrder', function () {
|
||||
indexPattern.toIndexList(1, 2, 'sortOrder');
|
||||
$rootScope.$apply();
|
||||
|
||||
it('invokes calculateIndices with given start/stop times and sortOrder', async function () {
|
||||
await indexPattern.toIndexList(1, 2, 'sortOrder');
|
||||
var id = indexPattern.id;
|
||||
var field = indexPattern.timeFieldName;
|
||||
expect(calculateIndices.calledWith(id, field, 1, 2, 'sortOrder')).to.be(true);
|
||||
});
|
||||
it('is fulfilled by the result of calculateIndices', function () {
|
||||
var indexList;
|
||||
indexPattern.toIndexList().then(function (val) {
|
||||
indexList = val;
|
||||
});
|
||||
$rootScope.$apply();
|
||||
|
||||
it('is fulfilled by the result of calculateIndices', async function () {
|
||||
var indexList = await indexPattern.toIndexList();
|
||||
expect(indexList[0]).to.equal('foo');
|
||||
expect(indexList[1]).to.equal('bar');
|
||||
});
|
||||
|
|
Some files were not shown because too many files have changed in this diff.