Mirror of https://github.com/elastic/kibana.git (synced 2025-04-23 17:28:26 -04:00)

Commit eeedc54ce8: Merge branch 'master' into gh-6484
542 changed files with 4493 additions and 4348 deletions

@@ -36,9 +36,7 @@ include::color-picker.asciidoc[]
You can click the *Advanced* link to display more customization options for your metrics or bucket aggregation:

*Exclude Pattern*:: Specify a pattern in this field to exclude from the results.
*Exclude Pattern Flags*:: A standard set of Java flags for the exclusion pattern.
*Include Pattern*:: Specify a pattern in this field to include in the results.
*Include Pattern Flags*:: A standard set of Java flags for the inclusion pattern.
*JSON Input*:: A text field where you can add specific JSON-formatted properties to merge with the aggregation
definition, as in the following example:

@@ -50,9 +50,7 @@ Enter a string in the *Custom Label* field to change the display label.
You can click the *Advanced* link to display more customization options for your metrics or bucket aggregation:

*Exclude Pattern*:: Specify a pattern in this field to exclude from the results.
*Exclude Pattern Flags*:: A standard set of Java flags for the exclusion pattern.
*Include Pattern*:: Specify a pattern in this field to include in the results.
*Include Pattern Flags*:: A standard set of Java flags for the inclusion pattern.
*JSON Input*:: A text field where you can add specific JSON-formatted properties to merge with the aggregation
definition, as in the following example:

@@ -64,9 +64,7 @@ Enter a string in the *Custom Label* field to change the display label.
You can click the *Advanced* link to display more customization options for your metrics or bucket aggregation:

*Exclude Pattern*:: Specify a pattern in this field to exclude from the results.
*Exclude Pattern Flags*:: A standard set of Java flags for the exclusion pattern.
*Include Pattern*:: Specify a pattern in this field to include in the results.
*Include Pattern Flags*:: A standard set of Java flags for the inclusion pattern.
*JSON Input*:: A text field where you can add specific JSON-formatted properties to merge with the aggregation
definition, as in the following example:

@@ -78,9 +78,7 @@ Enter a string in the *Custom Label* field to change the display label.
You can click the *Advanced* link to display more customization options for your metrics or bucket aggregation:

*Exclude Pattern*:: Specify a pattern in this field to exclude from the results.
*Exclude Pattern Flags*:: A standard set of Java flags for the exclusion pattern.
*Include Pattern*:: Specify a pattern in this field to include in the results.
*Include Pattern Flags*:: A standard set of Java flags for the inclusion pattern.
*JSON Input*:: A text field where you can add specific JSON-formatted properties to merge with the aggregation
definition, as in the following example:

@@ -43,9 +43,7 @@ Enter a string in the *Custom Label* field to change the display label.
You can click the *Advanced* link to display more customization options for your metrics or bucket aggregation:

*Exclude Pattern*:: Specify a pattern in this field to exclude from the results.
*Exclude Pattern Flags*:: A standard set of Java flags for the exclusion pattern.
*Include Pattern*:: Specify a pattern in this field to include in the results.
*Include Pattern Flags*:: A standard set of Java flags for the inclusion pattern.
*JSON Input*:: A text field where you can add specific JSON-formatted properties to merge with the aggregation
definition, as in the following example:
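The example that follows the *JSON Input* description is not captured in this extract of the diff. As an illustration only (the exact snippet in the rendered docs may differ), the kind of property one might merge into a bucket aggregation definition is `{ "shard_size": 0 }`.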
@@ -16,7 +16,6 @@
    "number": 8467,
    "sha": "6cb7fec4e154faa0a4a3fee4b33dfef91b9870d9"
  },
  "main": "src/server/KbnServer.js",
  "homepage": "https://www.elastic.co/products/kibana",
  "bugs": {
    "url": "http://github.com/elastic/kibana/issues"

src/cli/serve/__tests__/deprecated_config.js  (new file, 48 lines)
@@ -0,0 +1,48 @@
import expect from 'expect.js';
import { set } from 'lodash';
import { checkForDeprecatedConfig } from '../deprecated_config';
import sinon from 'auto-release-sinon';

describe('cli/serve/deprecated_config', function () {
  it('passes original config through', function () {
    const config = {};
    set(config, 'server.xsrf.token', 'xxtokenxx');
    const output = checkForDeprecatedConfig(config);
    expect(output).to.be(config);
    expect(output.server).to.be(config.server);
    expect(output.server.xsrf).to.be(config.server.xsrf);
    expect(output.server.xsrf.token).to.be(config.server.xsrf.token);
  });

  it('logs warnings about deprecated config values', function () {
    const log = sinon.stub();
    const config = {};
    set(config, 'server.xsrf.token', 'xxtokenxx');
    checkForDeprecatedConfig(config, log);
    sinon.assert.calledOnce(log);
    expect(log.firstCall.args[0]).to.match(/server\.xsrf\.token.+deprecated/);
  });

  describe('does not support compound.keys', function () {
    it('ignores fully compound keys', function () {
      const log = sinon.stub();
      const config = { 'server.xsrf.token': 'xxtokenxx' };
      checkForDeprecatedConfig(config, log);
      sinon.assert.notCalled(log);
    });

    it('ignores partially compound keys', function () {
      const log = sinon.stub();
      const config = { server: { 'xsrf.token': 'xxtokenxx' } };
      checkForDeprecatedConfig(config, log);
      sinon.assert.notCalled(log);
    });

    it('ignores partially compound keys', function () {
      const log = sinon.stub();
      const config = { 'server.xsrf': { token: 'xxtokenxx' } };
      checkForDeprecatedConfig(config, log);
      sinon.assert.notCalled(log);
    });
  });
});

src/cli/serve/__tests__/fixtures/deprecated.yml  (new file, 1 line)
@@ -0,0 +1 @@
server.xsrf.token: token

src/cli/serve/__tests__/fixtures/legacy.yml  (new file, 1 line)
@@ -0,0 +1 @@
kibana_index: indexname

src/cli/serve/__tests__/fixtures/one.yml  (new file, 2 lines)
@@ -0,0 +1,2 @@
foo: 1
bar: true

src/cli/serve/__tests__/fixtures/two.yml  (new file, 2 lines)
@@ -0,0 +1,2 @@
foo: 2
baz: bonkers

src/cli/serve/__tests__/legacy_config.js  (new file, 28 lines)
@@ -0,0 +1,28 @@
import expect from 'expect.js';
import { rewriteLegacyConfig } from '../legacy_config';
import sinon from 'auto-release-sinon';

describe('cli/serve/legacy_config', function () {
  it('returns a clone of the input', function () {
    const file = {};
    const output = rewriteLegacyConfig(file);
    expect(output).to.not.be(file);
  });

  it('rewrites legacy config values with literal path replacement', function () {
    const file = { port: 4000, host: 'kibana.com' };
    const output = rewriteLegacyConfig(file);
    expect(output).to.not.be(file);
    expect(output).to.eql({
      'server.port': 4000,
      'server.host': 'kibana.com',
    });
  });

  it('logs warnings when legacy config properties are encountered', function () {
    const log = sinon.stub();
    rewriteLegacyConfig({ port: 5555 }, log);
    sinon.assert.calledOnce(log);
    expect(log.firstCall.args[0]).to.match(/port.+deprecated.+server\.port/);
  });
});

src/cli/serve/__tests__/read_yaml_config.js  (new file, 102 lines)
@@ -0,0 +1,102 @@
import expect from 'expect.js';
import { join, relative, resolve } from 'path';
import readYamlConfig from '../read_yaml_config';
import sinon from 'auto-release-sinon';

function fixture(name) {
  return resolve(__dirname, 'fixtures', name);
}

describe('cli/serve/read_yaml_config', function () {
  it('reads a single config file', function () {
    const config = readYamlConfig(fixture('one.yml'));

    expect(config).to.eql({
      foo: 1,
      bar: true,
    });
  });

  it('reads and merges multiple config files', function () {
    const config = readYamlConfig([
      fixture('one.yml'),
      fixture('two.yml')
    ]);

    expect(config).to.eql({
      foo: 2,
      bar: true,
      baz: 'bonkers'
    });
  });

  context('different cwd()', function () {
    const oldCwd = process.cwd();
    const newCwd = join(oldCwd, '..');

    before(function () {
      process.chdir(newCwd);
    });

    it('resolves relative files based on the cwd', function () {
      const relativePath = relative(newCwd, fixture('one.yml'));
      const config = readYamlConfig(relativePath);
      expect(config).to.eql({
        foo: 1,
        bar: true,
      });
    });

    it('fails to load relative paths, not found because of the cwd', function () {
      expect(function () {
        readYamlConfig(relative(oldCwd, fixture('one.yml')));
      }).to.throwException(/ENOENT/);
    });

    after(function () {
      process.chdir(oldCwd);
    });
  });

  context('stubbed stdout', function () {
    let stub;

    beforeEach(function () {
      stub = sinon.stub(process.stdout, 'write');
    });

    context('deprecated settings', function () {
      it('warns about deprecated settings', function () {
        readYamlConfig(fixture('deprecated.yml'));
        sinon.assert.calledOnce(stub);
        expect(stub.firstCall.args[0]).to.match(/deprecated/);
        stub.restore();
      });

      it('only warns once about deprecated settings', function () {
        readYamlConfig(fixture('deprecated.yml'));
        readYamlConfig(fixture('deprecated.yml'));
        readYamlConfig(fixture('deprecated.yml'));
        sinon.assert.notCalled(stub); // already logged in previous test
        stub.restore();
      });
    });

    context('legacy settings', function () {
      it('warns about deprecated settings', function () {
        readYamlConfig(fixture('legacy.yml'));
        sinon.assert.calledOnce(stub);
        expect(stub.firstCall.args[0]).to.match(/has been replaced/);
        stub.restore();
      });

      it('only warns once about legacy settings', function () {
        readYamlConfig(fixture('legacy.yml'));
        readYamlConfig(fixture('legacy.yml'));
        readYamlConfig(fixture('legacy.yml'));
        sinon.assert.notCalled(stub); // already logged in previous test
        stub.restore();
      });
    });
  });
});

src/cli/serve/deprecated_config.js  (new file, 16 lines)
@@ -0,0 +1,16 @@
import { forOwn, has, noop } from 'lodash';

// deprecated settings are still allowed, but will be removed at a later time. They
// are checked for after the config object is prepared and known, so legacySettings
// will have already been transformed.
export const deprecatedSettings = new Map([
  [['server', 'xsrf', 'token'], 'server.xsrf.token is deprecated. It is no longer used when providing xsrf protection.']
]);

// check for and warn about deprecated settings
export function checkForDeprecatedConfig(object, log = noop) {
  for (const [key, msg] of deprecatedSettings.entries()) {
    if (has(object, key)) log(msg);
  }
  return object;
}

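Because checkForDeprecatedConfig walks the already-nested config object with lodash's has, compound string keys such as 'server.xsrf.token' are deliberately not matched, which is exactly what the new tests assert. A minimal usage sketch (the console.warn logger here is only a stand-in for the caller's logger, not part of the module):

```js
import { checkForDeprecatedConfig } from './deprecated_config';

const config = { server: { xsrf: { token: 'xxtokenxx' } } };

// Logs the deprecation message and returns the same object untouched.
const checked = checkForDeprecatedConfig(config, msg => console.warn(msg));
console.log(checked === config); // true
```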
src/cli/serve/legacy_config.js  (new file, 47 lines)
@@ -0,0 +1,47 @@
import { noop, transform } from 'lodash';

// legacySettings allow kibana 4.2+ to accept the same config file that people
// used for kibana 4.0 and 4.1. These settings are transformed to their modern
// equivalents at the very beginning of the process
export const legacySettings = {
  // server
  port: 'server.port',
  host: 'server.host',
  pid_file: 'pid.file',
  ssl_cert_file: 'server.ssl.cert',
  ssl_key_file: 'server.ssl.key',

  // logging
  log_file: 'logging.dest',

  // kibana
  kibana_index: 'kibana.index',
  default_app_id: 'kibana.defaultAppId',

  // es
  ca: 'elasticsearch.ssl.ca',
  elasticsearch_preserve_host: 'elasticsearch.preserveHost',
  elasticsearch_url: 'elasticsearch.url',
  kibana_elasticsearch_client_crt: 'elasticsearch.ssl.cert',
  kibana_elasticsearch_client_key: 'elasticsearch.ssl.key',
  kibana_elasticsearch_password: 'elasticsearch.password',
  kibana_elasticsearch_username: 'elasticsearch.username',
  ping_timeout: 'elasticsearch.pingTimeout',
  request_timeout: 'elasticsearch.requestTimeout',
  shard_timeout: 'elasticsearch.shardTimeout',
  startup_timeout: 'elasticsearch.startupTimeout',
  verify_ssl: 'elasticsearch.ssl.verify',
};

// transform legacy options into new namespaced versions
export function rewriteLegacyConfig(object, log = noop) {
  return transform(object, (clone, val, key) => {
    if (legacySettings.hasOwnProperty(key)) {
      const replacement = legacySettings[key];
      log(`Config key "${key}" is deprecated. It has been replaced with "${replacement}"`);
      clone[replacement] = val;
    } else {
      clone[key] = val;
    }
  }, {});
}

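A short sketch of what the rewrite produces for a Kibana 4.0/4.1-style file (the values are invented for illustration; the key mapping comes from the legacySettings table above):

```js
import { rewriteLegacyConfig } from './legacy_config';

const legacy = { port: 5601, kibana_index: '.kibana' };

// Returns a new object with flat, namespaced keys; the input is not mutated.
const rewritten = rewriteLegacyConfig(legacy, msg => console.warn(msg));
// => { 'server.port': 5601, 'kibana.index': '.kibana' }
```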
|
@ -1,75 +1,40 @@
|
|||
import _ from 'lodash';
|
||||
import fs from 'fs';
|
||||
import yaml from 'js-yaml';
|
||||
import { chain, isArray, isPlainObject, forOwn, memoize, set, transform } from 'lodash';
|
||||
import { readFileSync as read } from 'fs';
|
||||
import { safeLoad } from 'js-yaml';
|
||||
import { red } from 'ansicolors';
|
||||
|
||||
import { fromRoot } from '../../utils';
|
||||
import { rewriteLegacyConfig } from './legacy_config';
|
||||
import { checkForDeprecatedConfig } from './deprecated_config';
|
||||
|
||||
let legacySettingMap = {
|
||||
// server
|
||||
port: 'server.port',
|
||||
host: 'server.host',
|
||||
pid_file: 'pid.file',
|
||||
ssl_cert_file: 'server.ssl.cert',
|
||||
ssl_key_file: 'server.ssl.key',
|
||||
const log = memoize(function (message) {
|
||||
console.log(red('WARNING:'), message);
|
||||
});
|
||||
|
||||
// logging
|
||||
log_file: 'logging.dest',
|
||||
export function merge(sources) {
|
||||
return transform(sources, (merged, source) => {
|
||||
forOwn(source, function apply(val, key) {
|
||||
if (isPlainObject(val)) {
|
||||
forOwn(val, function (subVal, subKey) {
|
||||
apply(subVal, key + '.' + subKey);
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// kibana
|
||||
kibana_index: 'kibana.index',
|
||||
default_app_id: 'kibana.defaultAppId',
|
||||
if (isArray(val)) {
|
||||
set(merged, key, []);
|
||||
val.forEach((subVal, i) => apply(subVal, key + '.' + i));
|
||||
return;
|
||||
}
|
||||
|
||||
// es
|
||||
ca: 'elasticsearch.ssl.ca',
|
||||
elasticsearch_preserve_host: 'elasticsearch.preserveHost',
|
||||
elasticsearch_url: 'elasticsearch.url',
|
||||
kibana_elasticsearch_client_crt: 'elasticsearch.ssl.cert',
|
||||
kibana_elasticsearch_client_key: 'elasticsearch.ssl.key',
|
||||
kibana_elasticsearch_password: 'elasticsearch.password',
|
||||
kibana_elasticsearch_username: 'elasticsearch.username',
|
||||
ping_timeout: 'elasticsearch.pingTimeout',
|
||||
request_timeout: 'elasticsearch.requestTimeout',
|
||||
shard_timeout: 'elasticsearch.shardTimeout',
|
||||
startup_timeout: 'elasticsearch.startupTimeout',
|
||||
verify_ssl: 'elasticsearch.ssl.verify',
|
||||
};
|
||||
|
||||
const deprecatedSettings = {
|
||||
'server.xsrf.token': 'server.xsrf.token is deprecated. It is no longer used when providing xsrf protection.'
|
||||
};
|
||||
|
||||
module.exports = function (path) {
|
||||
if (!path) return {};
|
||||
|
||||
let file = yaml.safeLoad(fs.readFileSync(path, 'utf8'));
|
||||
|
||||
function apply(config, val, key) {
|
||||
if (_.isPlainObject(val)) {
|
||||
_.forOwn(val, function (subVal, subKey) {
|
||||
apply(config, subVal, key + '.' + subKey);
|
||||
});
|
||||
}
|
||||
else if (_.isArray(val)) {
|
||||
config[key] = [];
|
||||
val.forEach((subVal, i) => {
|
||||
apply(config, subVal, key + '.' + i);
|
||||
});
|
||||
}
|
||||
else {
|
||||
_.set(config, key, val);
|
||||
}
|
||||
}
|
||||
|
||||
_.each(deprecatedSettings, function (message, setting) {
|
||||
if (_.has(file, setting)) console.error(message);
|
||||
});
|
||||
|
||||
// transform legeacy options into new namespaced versions
|
||||
return _.transform(file, function (config, val, key) {
|
||||
if (legacySettingMap.hasOwnProperty(key)) {
|
||||
key = legacySettingMap[key];
|
||||
}
|
||||
|
||||
apply(config, val, key);
|
||||
set(merged, key, val);
|
||||
});
|
||||
}, {});
|
||||
};
|
||||
}
|
||||
|
||||
export default function (paths) {
|
||||
const files = [].concat(paths || []);
|
||||
const yamls = files.map(path => safeLoad(read(path, 'utf8')));
|
||||
const config = merge(yamls.map(file => rewriteLegacyConfig(file, log)));
|
||||
return checkForDeprecatedConfig(config, log);
|
||||
}
|
||||
|
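Taken together, the rewritten read_yaml_config.js above now accepts a single path or an array of paths, rewrites legacy keys, merges the loaded files in order so that later files win on conflicting keys, and then runs the deprecation check, warning at most once per message because the logger is memoized. A minimal sketch of the new call shape (the paths are shown only for illustration):

```js
import readYamlConfig from './read_yaml_config';

// Later files override earlier ones on conflicting keys,
// as the one.yml / two.yml fixtures in the tests demonstrate.
const settings = readYamlConfig([
  'config/kibana.yml',
  'config/kibana.dev.yml',
]);
```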
|
|
@ -1,9 +1,11 @@
|
|||
import _ from 'lodash';
|
||||
const { isWorker } = require('cluster');
|
||||
const { resolve } = require('path');
|
||||
import { isWorker } from 'cluster';
|
||||
import { resolve } from 'path';
|
||||
|
||||
import readYamlConfig from './read_yaml_config';
|
||||
import { fromRoot } from '../../utils';
|
||||
|
||||
const cwd = process.cwd();
|
||||
import { fromRoot } from '../../utils';
|
||||
|
||||
let canCluster;
|
||||
try {
|
||||
|
@ -21,22 +23,17 @@ const pathCollector = function () {
|
|||
};
|
||||
};
|
||||
|
||||
const configPathCollector = pathCollector();
|
||||
const pluginDirCollector = pathCollector();
|
||||
const pluginPathCollector = pathCollector();
|
||||
|
||||
function initServerSettings(opts, extraCliOptions) {
|
||||
const readYamlConfig = require('./read_yaml_config');
|
||||
const settings = readYamlConfig(opts.config);
|
||||
const set = _.partial(_.set, settings);
|
||||
const get = _.partial(_.get, settings);
|
||||
const has = _.partial(_.has, settings);
|
||||
const merge = _.partial(_.merge, settings);
|
||||
|
||||
if (opts.dev) {
|
||||
try { merge(readYamlConfig(fromRoot('config/kibana.dev.yml'))); }
|
||||
catch (e) { null; }
|
||||
}
|
||||
|
||||
if (opts.dev) {
|
||||
set('env', 'development');
|
||||
set('optimize.lazy', true);
|
||||
|
@ -79,8 +76,11 @@ module.exports = function (program) {
|
|||
.option('-e, --elasticsearch <uri>', 'Elasticsearch instance')
|
||||
.option(
|
||||
'-c, --config <path>',
|
||||
'Path to the config file, can be changed with the CONFIG_PATH environment variable as well',
|
||||
process.env.CONFIG_PATH || fromRoot('config/kibana.yml'))
|
||||
'Path to the config file, can be changed with the CONFIG_PATH environment variable as well. ' +
|
||||
'Use mulitple --config args to include multiple config files.',
|
||||
configPathCollector,
|
||||
[ process.env.CONFIG_PATH || fromRoot('config/kibana.yml') ]
|
||||
)
|
||||
.option('-p, --port <port>', 'The port to bind to', parseInt)
|
||||
.option('-q, --quiet', 'Prevent all logging except errors')
|
||||
.option('-Q, --silent', 'Prevent all logging')
|
||||
|
@ -116,6 +116,10 @@ module.exports = function (program) {
|
|||
|
||||
command
|
||||
.action(async function (opts) {
|
||||
if (opts.dev) {
|
||||
opts.config.push(fromRoot('config/kibana.dev.yml'));
|
||||
}
|
||||
|
||||
const settings = initServerSettings(opts, this.getUnknownOptions());
|
||||
|
||||
if (canCluster && opts.dev && !isWorker) {
|
||||
|
|
|
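With the `-c, --config` option now backed by a path collector, the flag can be repeated on the command line, for example `bin/kibana --config config/kibana.yml --config config/kibana.dev.yml` (paths shown only as an illustration); each file is read and merged in order by readYamlConfig, and `--dev` additionally pushes config/kibana.dev.yml onto the list.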
@ -262,16 +262,6 @@
|
|||
}
|
||||
}
|
||||
|
||||
.regex .flags {
|
||||
.docs {
|
||||
text-align: right;
|
||||
}
|
||||
|
||||
a {
|
||||
color: @vis-editor-agg-editor-flags-color;
|
||||
}
|
||||
}
|
||||
|
||||
&-advanced-toggle {
|
||||
text-align: right;
|
||||
}
|
||||
|
|
|
@ -7,7 +7,7 @@ import * as kbnTestServer from '../../../test/utils/kbn_server';
|
|||
describe('UiExports', function () {
|
||||
describe('#find()', function () {
|
||||
it('finds exports based on the passed export names', function () {
|
||||
var uiExports = new UiExports({});
|
||||
let uiExports = new UiExports({});
|
||||
uiExports.aliases.foo = ['a', 'b', 'c'];
|
||||
uiExports.aliases.bar = ['d', 'e', 'f'];
|
||||
|
||||
|
@ -17,7 +17,7 @@ describe('UiExports', function () {
|
|||
});
|
||||
|
||||
it('allows query types that match nothing', function () {
|
||||
var uiExports = new UiExports({});
|
||||
let uiExports = new UiExports({});
|
||||
uiExports.aliases.foo = ['a', 'b', 'c'];
|
||||
|
||||
expect(uiExports.find(['foo'])).to.eql(['a', 'b', 'c']);
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
module.exports = function ({env, bundle}) {
|
||||
|
||||
let pluginSlug = env.pluginInfo.sort()
|
||||
const pluginSlug = env.pluginInfo.sort()
|
||||
.map(p => ' * - ' + p)
|
||||
.join('\n');
|
||||
|
||||
let requires = bundle.modules
|
||||
const requires = bundle.modules
|
||||
.map(m => `require('${m}');`)
|
||||
.join('\n');
|
||||
|
||||
|
|
|
@ -10,13 +10,13 @@ import UiBundleCollection from './ui_bundle_collection';
|
|||
import UiBundlerEnv from './ui_bundler_env';
|
||||
module.exports = async (kbnServer, server, config) => {
|
||||
|
||||
let loadingGif = readFile(fromRoot('src/ui/public/loading.gif'), { encoding: 'base64'});
|
||||
const loadingGif = readFile(fromRoot('src/ui/public/loading.gif'), { encoding: 'base64'});
|
||||
|
||||
let uiExports = kbnServer.uiExports = new UiExports({
|
||||
const uiExports = kbnServer.uiExports = new UiExports({
|
||||
urlBasePath: config.get('server.basePath')
|
||||
});
|
||||
|
||||
let bundlerEnv = new UiBundlerEnv(config.get('optimize.bundleDir'));
|
||||
const bundlerEnv = new UiBundlerEnv(config.get('optimize.bundleDir'));
|
||||
bundlerEnv.addContext('env', config.get('env.name'));
|
||||
bundlerEnv.addContext('urlBasePath', config.get('server.basePath'));
|
||||
bundlerEnv.addContext('sourceMaps', config.get('optimize.sourceMaps'));
|
||||
|
@ -28,14 +28,14 @@ module.exports = async (kbnServer, server, config) => {
|
|||
uiExports.consumePlugin(plugin);
|
||||
}
|
||||
|
||||
let bundles = kbnServer.bundles = new UiBundleCollection(bundlerEnv, config.get('optimize.bundleFilter'));
|
||||
const bundles = kbnServer.bundles = new UiBundleCollection(bundlerEnv, config.get('optimize.bundleFilter'));
|
||||
|
||||
for (let app of uiExports.getAllApps()) {
|
||||
bundles.addApp(app);
|
||||
}
|
||||
|
||||
for (let gen of uiExports.getBundleProviders()) {
|
||||
let bundle = await gen(UiBundle, bundlerEnv, uiExports.getAllApps(), kbnServer.plugins);
|
||||
const bundle = await gen(UiBundle, bundlerEnv, uiExports.getAllApps(), kbnServer.plugins);
|
||||
if (bundle) bundles.add(bundle);
|
||||
}
|
||||
|
||||
|
@ -47,8 +47,8 @@ module.exports = async (kbnServer, server, config) => {
|
|||
path: '/app/{id}',
|
||||
method: 'GET',
|
||||
handler: function (req, reply) {
|
||||
let id = req.params.id;
|
||||
let app = uiExports.apps.byId[id];
|
||||
const id = req.params.id;
|
||||
const app = uiExports.apps.byId[id];
|
||||
if (!app) return reply(Boom.notFound('Unknown app ' + id));
|
||||
|
||||
if (kbnServer.status.isGreen()) {
|
||||
|
|
|
@ -17,8 +17,8 @@ describe('GeoJson Agg Response Converter', function () {
|
|||
|
||||
beforeEach(ngMock.module('kibana'));
|
||||
beforeEach(ngMock.inject(function (Private) {
|
||||
var Vis = Private(VisProvider);
|
||||
var indexPattern = Private(FixturesStubbedLogstashIndexPatternProvider);
|
||||
let Vis = Private(VisProvider);
|
||||
let indexPattern = Private(FixturesStubbedLogstashIndexPatternProvider);
|
||||
|
||||
esResponse = Private(FixturesAggRespGeohashGridProvider);
|
||||
tabify = Private(AggResponseTabifyTabifyProvider);
|
||||
|
@ -60,8 +60,8 @@ describe('GeoJson Agg Response Converter', function () {
|
|||
|
||||
describe('with table ' + JSON.stringify(tableOpts), function () {
|
||||
it('outputs a chart', function () {
|
||||
var table = makeTable();
|
||||
var chart = makeSingleChart(table);
|
||||
let table = makeTable();
|
||||
let chart = makeSingleChart(table);
|
||||
expect(chart).to.only.have.keys(
|
||||
'title',
|
||||
'tooltipFormatter',
|
||||
|
@ -78,9 +78,9 @@ describe('GeoJson Agg Response Converter', function () {
|
|||
});
|
||||
|
||||
it('outputs geohash points as features in a feature collection', function () {
|
||||
var table = makeTable();
|
||||
var chart = makeSingleChart(table);
|
||||
var geoJson = chart.geoJson;
|
||||
let table = makeTable();
|
||||
let chart = makeSingleChart(table);
|
||||
let geoJson = chart.geoJson;
|
||||
|
||||
expect(geoJson.type).to.be('FeatureCollection');
|
||||
expect(geoJson.features).to.be.an('array');
|
||||
|
@ -88,8 +88,8 @@ describe('GeoJson Agg Response Converter', function () {
|
|||
});
|
||||
|
||||
it('exports a bunch of properties about the geo hash grid', function () {
|
||||
var geoJson = makeGeoJson();
|
||||
var props = geoJson.properties;
|
||||
let geoJson = makeGeoJson();
|
||||
let props = geoJson.properties;
|
||||
|
||||
// props
|
||||
expect(props).to.be.an('object');
|
||||
|
@ -122,7 +122,7 @@ describe('GeoJson Agg Response Converter', function () {
|
|||
|
||||
it('should be geoJson format', function () {
|
||||
table.rows.forEach(function (row, i) {
|
||||
var feature = chart.geoJson.features[i];
|
||||
let feature = chart.geoJson.features[i];
|
||||
expect(feature).to.have.property('geometry');
|
||||
expect(feature.geometry).to.be.an('object');
|
||||
expect(feature).to.have.property('properties');
|
||||
|
@ -132,7 +132,7 @@ describe('GeoJson Agg Response Converter', function () {
|
|||
|
||||
it('should have valid geometry data', function () {
|
||||
table.rows.forEach(function (row, i) {
|
||||
var geometry = chart.geoJson.features[i].geometry;
|
||||
let geometry = chart.geoJson.features[i].geometry;
|
||||
expect(geometry.type).to.be('Point');
|
||||
expect(geometry).to.have.property('coordinates');
|
||||
expect(geometry.coordinates).to.be.an('array');
|
||||
|
@ -144,8 +144,8 @@ describe('GeoJson Agg Response Converter', function () {
|
|||
|
||||
it('should have value properties data', function () {
|
||||
table.rows.forEach(function (row, i) {
|
||||
var props = chart.geoJson.features[i].properties;
|
||||
var keys = ['value', 'geohash', 'aggConfigResult', 'rectangle', 'center'];
|
||||
let props = chart.geoJson.features[i].properties;
|
||||
let keys = ['value', 'geohash', 'aggConfigResult', 'rectangle', 'center'];
|
||||
expect(props).to.be.an('object');
|
||||
expect(props).to.only.have.keys(keys);
|
||||
expect(props.geohash).to.be.a('string');
|
||||
|
@ -155,15 +155,15 @@ describe('GeoJson Agg Response Converter', function () {
|
|||
|
||||
it('should use latLng in properties and lngLat in geometry', function () {
|
||||
table.rows.forEach(function (row, i) {
|
||||
var geometry = chart.geoJson.features[i].geometry;
|
||||
var props = chart.geoJson.features[i].properties;
|
||||
let geometry = chart.geoJson.features[i].geometry;
|
||||
let props = chart.geoJson.features[i].properties;
|
||||
expect(props.center).to.eql(geometry.coordinates.slice(0).reverse());
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle both AggConfig and non-AggConfig results', function () {
|
||||
table.rows.forEach(function (row, i) {
|
||||
var props = chart.geoJson.features[i].properties;
|
||||
let props = chart.geoJson.features[i].properties;
|
||||
if (tableOpts.asAggConfigResults) {
|
||||
expect(props.aggConfigResult).to.be(row[metricColI]);
|
||||
expect(props.value).to.be(row[metricColI].value);
|
||||
|
|
|
@ -3,20 +3,20 @@ import _ from 'lodash';
|
|||
import RegistryFieldFormatsProvider from 'ui/registry/field_formats';
|
||||
export default function TileMapTooltipFormatter($compile, $rootScope, Private) {
|
||||
|
||||
var fieldFormats = Private(RegistryFieldFormatsProvider);
|
||||
var $tooltipScope = $rootScope.$new();
|
||||
var $el = $('<div>').html(require('ui/agg_response/geo_json/_tooltip.html'));
|
||||
let fieldFormats = Private(RegistryFieldFormatsProvider);
|
||||
let $tooltipScope = $rootScope.$new();
|
||||
let $el = $('<div>').html(require('ui/agg_response/geo_json/_tooltip.html'));
|
||||
$compile($el)($tooltipScope);
|
||||
|
||||
return function tooltipFormatter(feature) {
|
||||
if (!feature) return '';
|
||||
|
||||
var value = feature.properties.value;
|
||||
var acr = feature.properties.aggConfigResult;
|
||||
var vis = acr.aggConfig.vis;
|
||||
let value = feature.properties.value;
|
||||
let acr = feature.properties.aggConfigResult;
|
||||
let vis = acr.aggConfig.vis;
|
||||
|
||||
var metricAgg = acr.aggConfig;
|
||||
var geoFormat = _.get(vis.aggs, 'byTypeName.geohash_grid[0].format');
|
||||
let metricAgg = acr.aggConfig;
|
||||
let geoFormat = _.get(vis.aggs, 'byTypeName.geohash_grid[0].format');
|
||||
if (!geoFormat) geoFormat = fieldFormats.getDefaultInstance('geo_point');
|
||||
|
||||
$tooltipScope.details = [
|
||||
|
|
|
@ -3,7 +3,7 @@ import rowsToFeatures from 'ui/agg_response/geo_json/rows_to_features';
|
|||
import AggResponseGeoJsonTooltipFormatterProvider from 'ui/agg_response/geo_json/_tooltip_formatter';
|
||||
export default function TileMapConverterFn(Private, timefilter, $compile, $rootScope) {
|
||||
|
||||
var tooltipFormatter = Private(AggResponseGeoJsonTooltipFormatterProvider);
|
||||
let tooltipFormatter = Private(AggResponseGeoJsonTooltipFormatterProvider);
|
||||
|
||||
return function (vis, table) {
|
||||
|
||||
|
@ -13,13 +13,13 @@ export default function TileMapConverterFn(Private, timefilter, $compile, $rootS
|
|||
});
|
||||
}
|
||||
|
||||
var geoI = columnIndex('segment');
|
||||
var metricI = columnIndex('metric');
|
||||
var geoAgg = _.get(table.columns, [geoI, 'aggConfig']);
|
||||
var metricAgg = _.get(table.columns, [metricI, 'aggConfig']);
|
||||
let geoI = columnIndex('segment');
|
||||
let metricI = columnIndex('metric');
|
||||
let geoAgg = _.get(table.columns, [geoI, 'aggConfig']);
|
||||
let metricAgg = _.get(table.columns, [metricI, 'aggConfig']);
|
||||
|
||||
var features = rowsToFeatures(table, geoI, metricI);
|
||||
var values = features.map(function (feature) {
|
||||
let features = rowsToFeatures(table, geoI, metricI);
|
||||
let values = features.map(function (feature) {
|
||||
return feature.properties.value;
|
||||
});
|
||||
|
||||
|
|
|
@ -12,19 +12,19 @@ function unwrap(val) {
|
|||
|
||||
function convertRowsToFeatures(table, geoI, metricI) {
|
||||
return _.transform(table.rows, function (features, row) {
|
||||
var geohash = unwrap(row[geoI]);
|
||||
let geohash = unwrap(row[geoI]);
|
||||
if (!geohash) return;
|
||||
|
||||
// fetch latLn of northwest and southeast corners, and center point
|
||||
var location = decodeGeoHash(geohash);
|
||||
let location = decodeGeoHash(geohash);
|
||||
|
||||
var centerLatLng = [
|
||||
let centerLatLng = [
|
||||
location.latitude[2],
|
||||
location.longitude[2]
|
||||
];
|
||||
|
||||
// order is nw, ne, se, sw
|
||||
var rectangle = [
|
||||
let rectangle = [
|
||||
[location.latitude[0], location.longitude[0]],
|
||||
[location.latitude[0], location.longitude[1]],
|
||||
[location.latitude[1], location.longitude[1]],
|
||||
|
|
|
@ -35,7 +35,7 @@ describe('buildHierarchicalData', function () {
|
|||
let results;
|
||||
|
||||
beforeEach(function () {
|
||||
var id = 1;
|
||||
let id = 1;
|
||||
vis = new Vis(indexPattern, {
|
||||
type: 'pie',
|
||||
aggs: [
|
||||
|
@ -48,7 +48,7 @@ describe('buildHierarchicalData', function () {
|
|||
});
|
||||
|
||||
it('should set the slices with one child to a consistent label', function () {
|
||||
var checkLabel = 'Count';
|
||||
let checkLabel = 'Count';
|
||||
expect(results).to.have.property('slices');
|
||||
expect(results.slices).to.have.property('children');
|
||||
expect(results.slices.children).to.have.length(1);
|
||||
|
@ -67,8 +67,8 @@ describe('buildHierarchicalData', function () {
|
|||
describe('rows and columns', function () {
|
||||
|
||||
it('should set the rows', function () {
|
||||
var id = 1;
|
||||
var vis = new Vis(indexPattern, {
|
||||
let id = 1;
|
||||
let vis = new Vis(indexPattern, {
|
||||
type: 'pie',
|
||||
aggs: [
|
||||
{ type: 'avg', schema: 'metric', params: { field: 'bytes' } },
|
||||
|
@ -79,13 +79,13 @@ describe('buildHierarchicalData', function () {
|
|||
});
|
||||
// We need to set the aggs to a known value.
|
||||
_.each(vis.aggs, function (agg) { agg.id = 'agg_' + id++; });
|
||||
var results = buildHierarchicalData(vis, fixtures.threeTermBuckets);
|
||||
let results = buildHierarchicalData(vis, fixtures.threeTermBuckets);
|
||||
expect(results).to.have.property('rows');
|
||||
});
|
||||
|
||||
it('should set the columns', function () {
|
||||
var id = 1;
|
||||
var vis = new Vis(indexPattern, {
|
||||
let id = 1;
|
||||
let vis = new Vis(indexPattern, {
|
||||
type: 'pie',
|
||||
aggs: [
|
||||
{ type: 'avg', schema: 'metric', params: { field: 'bytes' } },
|
||||
|
@ -96,7 +96,7 @@ describe('buildHierarchicalData', function () {
|
|||
});
|
||||
// We need to set the aggs to a known value.
|
||||
_.each(vis.aggs, function (agg) { agg.id = 'agg_' + id++; });
|
||||
var results = buildHierarchicalData(vis, fixtures.threeTermBuckets);
|
||||
let results = buildHierarchicalData(vis, fixtures.threeTermBuckets);
|
||||
expect(results).to.have.property('columns');
|
||||
});
|
||||
|
||||
|
@ -107,7 +107,7 @@ describe('buildHierarchicalData', function () {
|
|||
let results;
|
||||
|
||||
beforeEach(function () {
|
||||
var id = 1;
|
||||
let id = 1;
|
||||
vis = new Vis(indexPattern, {
|
||||
type: 'pie',
|
||||
aggs: [
|
||||
|
@ -149,7 +149,7 @@ describe('buildHierarchicalData', function () {
|
|||
let results;
|
||||
|
||||
beforeEach(function () {
|
||||
var id = 1;
|
||||
let id = 1;
|
||||
vis = new Vis(indexPattern, {
|
||||
type: 'pie',
|
||||
aggs: [
|
||||
|
@ -181,7 +181,7 @@ describe('buildHierarchicalData', function () {
|
|||
let results;
|
||||
|
||||
beforeEach(function () {
|
||||
var id = 1;
|
||||
let id = 1;
|
||||
vis = new Vis(indexPattern, {
|
||||
type: 'pie',
|
||||
aggs: [
|
||||
|
@ -222,7 +222,7 @@ describe('buildHierarchicalData', function () {
|
|||
let results;
|
||||
|
||||
beforeEach(function () {
|
||||
var id = 1;
|
||||
let id = 1;
|
||||
vis = new Vis(indexPattern, {
|
||||
type: 'pie',
|
||||
aggs: [
|
||||
|
@ -258,7 +258,7 @@ describe('buildHierarchicalData', function () {
|
|||
let results;
|
||||
|
||||
beforeEach(function () {
|
||||
var id = 1;
|
||||
let id = 1;
|
||||
vis = new Vis(indexPattern, {
|
||||
type: 'pie',
|
||||
aggs: [
|
||||
|
@ -281,7 +281,7 @@ describe('buildHierarchicalData', function () {
|
|||
});
|
||||
|
||||
it('should set the hits attribute for the results', function () {
|
||||
var errCall = Notifier.prototype.error.getCall(0);
|
||||
let errCall = Notifier.prototype.error.getCall(0);
|
||||
expect(errCall).to.be.ok();
|
||||
expect(errCall.args[0]).to.contain('not supported');
|
||||
|
||||
|
|
|
@ -4,7 +4,7 @@ import collectBranch from 'ui/agg_response/hierarchical/_collect_branch';
|
|||
import expect from 'expect.js';
|
||||
describe('collectBranch()', function () {
|
||||
let results;
|
||||
var convert = function (name) {
|
||||
let convert = function (name) {
|
||||
return 'converted:' + name;
|
||||
};
|
||||
|
||||
|
|
|
@ -27,7 +27,7 @@ describe('buildHierarchicalData()', function () {
|
|||
}));
|
||||
|
||||
beforeEach(function () {
|
||||
var id = 1;
|
||||
let id = 1;
|
||||
vis = new Vis(indexPattern, {
|
||||
type: 'pie',
|
||||
aggs: [
|
||||
|
@ -37,7 +37,7 @@ describe('buildHierarchicalData()', function () {
|
|||
{ type: 'terms', schema: 'segment', params: { field: 'geo.src' }}
|
||||
]
|
||||
});
|
||||
var buckets = arrayToLinkedList(vis.aggs.bySchemaGroup.buckets);
|
||||
let buckets = arrayToLinkedList(vis.aggs.bySchemaGroup.buckets);
|
||||
// We need to set the aggs to a known value.
|
||||
_.each(vis.aggs, function (agg) { agg.id = 'agg_' + id++; });
|
||||
results = createRawData(vis, fixtures.threeTermBuckets);
|
||||
|
@ -48,7 +48,7 @@ describe('buildHierarchicalData()', function () {
|
|||
expect(results.columns).to.have.length(6);
|
||||
_.each(results.columns, function (column) {
|
||||
expect(column).to.have.property('aggConfig');
|
||||
var agg = column.aggConfig;
|
||||
let agg = column.aggConfig;
|
||||
expect(column).to.have.property('categoryName', agg.schema.name);
|
||||
expect(column).to.have.property('id', agg.id);
|
||||
expect(column).to.have.property('aggType', agg.type);
|
||||
|
|
|
@ -7,14 +7,14 @@ describe('buildHierarchicalData()', function () {
|
|||
|
||||
it('should normalize a bucket object into an array', function () {
|
||||
|
||||
var bucket = {
|
||||
let bucket = {
|
||||
buckets: {
|
||||
foo: { doc_count: 1 },
|
||||
bar: { doc_count: 2 }
|
||||
}
|
||||
};
|
||||
|
||||
var buckets = extractBuckets(bucket);
|
||||
let buckets = extractBuckets(bucket);
|
||||
expect(buckets).to.be.an(Array);
|
||||
expect(buckets).to.have.length(2);
|
||||
expect(buckets[0]).to.have.property('key', 'foo');
|
||||
|
@ -24,19 +24,19 @@ describe('buildHierarchicalData()', function () {
|
|||
});
|
||||
|
||||
it('should return an empty array for undefined buckets', function () {
|
||||
var buckets = extractBuckets();
|
||||
let buckets = extractBuckets();
|
||||
expect(buckets).to.be.an(Array);
|
||||
expect(buckets).to.have.length(0);
|
||||
});
|
||||
|
||||
it('should return the bucket array', function () {
|
||||
var bucket = {
|
||||
let bucket = {
|
||||
buckets: [
|
||||
{ key: 'foo', doc_count: 1 },
|
||||
{ key: 'bar', doc_count: 2 }
|
||||
]
|
||||
};
|
||||
var buckets = extractBuckets(bucket);
|
||||
let buckets = extractBuckets(bucket);
|
||||
expect(buckets).to.be.an(Array);
|
||||
expect(buckets).to.be(bucket.buckets);
|
||||
});
|
||||
|
|
|
@ -42,17 +42,17 @@ describe('buildHierarchicalData()', function () {
|
|||
});
|
||||
|
||||
it('relies on metricAgg#getValue() for the size of the children', function () {
|
||||
var aggData = {
|
||||
let aggData = {
|
||||
buckets: [
|
||||
{ key: 'foo' },
|
||||
{ key: 'bar' }
|
||||
]
|
||||
};
|
||||
|
||||
var football = {};
|
||||
let football = {};
|
||||
fixture.metric.getValue = _.constant(football);
|
||||
|
||||
var children = transform(fixture.agg, fixture.metric, aggData);
|
||||
let children = transform(fixture.agg, fixture.metric, aggData);
|
||||
expect(children).to.be.an(Array);
|
||||
expect(children).to.have.length(2);
|
||||
expect(children[0]).to.have.property('size', football);
|
||||
|
@ -60,7 +60,7 @@ describe('buildHierarchicalData()', function () {
|
|||
});
|
||||
|
||||
it('should create two levels of metrics', function () {
|
||||
var children = transform(fixture.agg, fixture.metric, fixture.aggData);
|
||||
let children = transform(fixture.agg, fixture.metric, fixture.aggData);
|
||||
fixture.metric.getValue = function (b) { return b.doc_count; };
|
||||
|
||||
expect(children).to.be.an(Array);
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
import collectKeys from 'ui/agg_response/hierarchical/_collect_keys';
|
||||
import AggResponseHierarchicalTransformAggregationProvider from 'ui/agg_response/hierarchical/_transform_aggregation';
|
||||
export default function biuldSplitProvider(Private) {
|
||||
var transformer = Private(AggResponseHierarchicalTransformAggregationProvider);
|
||||
let transformer = Private(AggResponseHierarchicalTransformAggregationProvider);
|
||||
return function (agg, metric, aggData) {
|
||||
// Ceate the split structure
|
||||
var split = { label: '', slices: { children: [] } };
|
||||
let split = { label: '', slices: { children: [] } };
|
||||
|
||||
// Transform the aggData into splits
|
||||
split.slices.children = transformer(agg, metric, aggData);
|
||||
|
|
|
@ -3,13 +3,13 @@ define(function () {
|
|||
// walk up the branch for each parent
|
||||
function walk(item, memo) {
|
||||
// record the the depth
|
||||
var depth = item.depth - 1;
|
||||
let depth = item.depth - 1;
|
||||
|
||||
// Using the aggConfig determine what the field name is. If the aggConfig
|
||||
// doesn't exist (which means it's an _all agg) then use the level for
|
||||
// the field name
|
||||
var col = item.aggConfig;
|
||||
var field = (col && col.params && col.params.field && col.params.field.displayName)
|
||||
let col = item.aggConfig;
|
||||
let field = (col && col.params && col.params.field && col.params.field.displayName)
|
||||
|| (col && col.label)
|
||||
|| ('level ' + item.depth);
|
||||
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
import _ from 'lodash';
|
||||
export default function collectKeys(children) {
|
||||
var nextChildren = _.pluck(children, 'children');
|
||||
var keys = _.pluck(children, 'name');
|
||||
let nextChildren = _.pluck(children, 'children');
|
||||
let keys = _.pluck(children, 'name');
|
||||
return _(nextChildren)
|
||||
.map(collectKeys)
|
||||
.flattenDeep()
|
||||
|
|
|
@ -3,12 +3,12 @@ import extractBuckets from 'ui/agg_response/hierarchical/_extract_buckets';
|
|||
export default function (vis, resp) {
|
||||
|
||||
// Create the initial results structure
|
||||
var results = { rows: [] };
|
||||
let results = { rows: [] };
|
||||
|
||||
// Create a reference to the buckets and metrics
|
||||
var metrics = vis.aggs.bySchemaGroup.metrics;
|
||||
var buckets = vis.aggs.bySchemaGroup.buckets;
|
||||
var aggs = [];
|
||||
let metrics = vis.aggs.bySchemaGroup.metrics;
|
||||
let buckets = vis.aggs.bySchemaGroup.buckets;
|
||||
let aggs = [];
|
||||
|
||||
if (buckets) {
|
||||
_.each(buckets, function (bucket) {
|
||||
|
@ -37,7 +37,7 @@ export default function (vis, resp) {
|
|||
|
||||
// if there are no buckets then we need to just set the value and return
|
||||
if (!buckets) {
|
||||
var value = resp.aggregations
|
||||
let value = resp.aggregations
|
||||
&& resp.aggregations[metrics[0].id]
|
||||
&& resp.aggregations[metrics[0].id].value
|
||||
|| resp.hits.total;
|
||||
|
@ -60,9 +60,9 @@ export default function (vis, resp) {
|
|||
// iterate through all the buckets
|
||||
_.each(extractBuckets(data[agg.id], agg), function (bucket) {
|
||||
|
||||
var _record = _.flattenDeep([record, bucket.key]);
|
||||
let _record = _.flattenDeep([record, bucket.key]);
|
||||
_.each(metrics, function (metric) {
|
||||
var value = bucket.doc_count;
|
||||
let value = bucket.doc_count;
|
||||
if (bucket[metric.id] && !_.isUndefined(bucket[metric.id].value)) {
|
||||
value = bucket[metric.id].value;
|
||||
}
|
||||
|
@ -73,7 +73,7 @@ export default function (vis, resp) {
|
|||
// buckets. If it does then we need to keep on walking the tree.
|
||||
// This is where the recursion happens.
|
||||
if (agg._next) {
|
||||
var nextBucket = bucket[agg._next.id];
|
||||
let nextBucket = bucket[agg._next.id];
|
||||
if (nextBucket && nextBucket.buckets) {
|
||||
walkBuckets(agg._next, bucket, _record);
|
||||
}
|
||||
|
|
|
@ -2,7 +2,7 @@ import _ from 'lodash';
|
|||
export default function (bucket, agg) {
|
||||
if (bucket && _.isPlainObject(bucket.buckets)) {
|
||||
return _.map(bucket.buckets, function (value, key) {
|
||||
var item = _.cloneDeep(value);
|
||||
let item = _.cloneDeep(value);
|
||||
item.key = agg ? agg.getKey(value, key) : key;
|
||||
return item;
|
||||
});
|
||||
|
|
|
@ -3,19 +3,19 @@ import $ from 'jquery';
|
|||
import collectBranch from 'ui/agg_response/hierarchical/_collect_branch';
|
||||
import numeral from 'numeral';
|
||||
export default function HierarchicalTooltipFormaterProvider($rootScope, $compile, $sce) {
|
||||
var $tooltip = $(require('ui/agg_response/hierarchical/_tooltip.html'));
|
||||
var $tooltipScope = $rootScope.$new();
|
||||
let $tooltip = $(require('ui/agg_response/hierarchical/_tooltip.html'));
|
||||
let $tooltipScope = $rootScope.$new();
|
||||
|
||||
$compile($tooltip)($tooltipScope);
|
||||
|
||||
return function (columns) {
|
||||
return function (event) {
|
||||
var datum = event.datum;
|
||||
let datum = event.datum;
|
||||
|
||||
// Collect the current leaf and parents into an array of values
|
||||
$tooltipScope.rows = collectBranch(datum);
|
||||
|
||||
var metricCol = $tooltipScope.metricCol = _.find(columns, { categoryName: 'metric' });
|
||||
let metricCol = $tooltipScope.metricCol = _.find(columns, { categoryName: 'metric' });
|
||||
|
||||
// Map those values to what the tooltipSource.rows format.
|
||||
_.forEachRight($tooltipScope.rows, function (row, i, rows) {
|
||||
|
|
|
@ -4,14 +4,14 @@ import AggConfigResult from 'ui/vis/agg_config_result';
|
|||
export default function transformAggregationProvider(Private) {
|
||||
return function transformAggregation(agg, metric, aggData, parent) {
|
||||
return _.map(extractBuckets(aggData, agg), function (bucket) {
|
||||
var aggConfigResult = new AggConfigResult(
|
||||
let aggConfigResult = new AggConfigResult(
|
||||
agg,
|
||||
parent && parent.aggConfigResult,
|
||||
metric.getValue(bucket),
|
||||
agg.getKey(bucket)
|
||||
);
|
||||
|
||||
var branch = {
|
||||
let branch = {
|
||||
name: agg.fieldFormatter()(bucket.key),
|
||||
size: aggConfigResult.value,
|
||||
aggConfig: agg,
|
||||
|
@ -27,7 +27,7 @@ export default function transformAggregationProvider(Private) {
|
|||
// If the next bucket exists and it has children the we need to
|
||||
// transform it as well. This is where the recursion happens.
|
||||
if (agg._next) {
|
||||
var nextBucket = bucket[agg._next.id];
|
||||
let nextBucket = bucket[agg._next.id];
|
||||
if (nextBucket && nextBucket.buckets) {
|
||||
branch.children = transformAggregation(agg._next, metric, nextBucket, branch);
|
||||
}
|
||||
|
|
|
@ -6,34 +6,34 @@ import AggConfigResult from 'ui/vis/agg_config_result';
|
|||
import AggResponseHierarchicalBuildSplitProvider from 'ui/agg_response/hierarchical/_build_split';
|
||||
import AggResponseHierarchicalHierarchicalTooltipFormatterProvider from 'ui/agg_response/hierarchical/_hierarchical_tooltip_formatter';
|
||||
export default function buildHierarchicalDataProvider(Private, Notifier) {
|
||||
var buildSplit = Private(AggResponseHierarchicalBuildSplitProvider);
|
||||
var tooltipFormatter = Private(AggResponseHierarchicalHierarchicalTooltipFormatterProvider);
|
||||
let buildSplit = Private(AggResponseHierarchicalBuildSplitProvider);
|
||||
let tooltipFormatter = Private(AggResponseHierarchicalHierarchicalTooltipFormatterProvider);
|
||||
|
||||
|
||||
var notify = new Notifier({
|
||||
let notify = new Notifier({
|
||||
location: 'Pie chart response converter'
|
||||
});
|
||||
|
||||
return function (vis, resp) {
|
||||
// Create a refrenece to the buckets
|
||||
var buckets = vis.aggs.bySchemaGroup.buckets;
|
||||
let buckets = vis.aggs.bySchemaGroup.buckets;
|
||||
|
||||
|
||||
// Find the metric so it's easier to reference.
|
||||
// TODO: Change this to support multiple metrics.
|
||||
var metric = vis.aggs.bySchemaGroup.metrics[0];
|
||||
let metric = vis.aggs.bySchemaGroup.metrics[0];
|
||||
|
||||
// Link each agg to the next agg. This will be
|
||||
// to identify the next bucket aggregation
|
||||
buckets = arrayToLinkedList(buckets);
|
||||
|
||||
// Create the raw data to be used in the spy panel
|
||||
var raw = createRawData(vis, resp);
|
||||
let raw = createRawData(vis, resp);
|
||||
|
||||
// If buckets is falsy then we should just return the aggs
|
||||
if (!buckets) {
|
||||
var label = 'Count';
|
||||
var value = resp.aggregations
|
||||
let label = 'Count';
|
||||
let value = resp.aggregations
|
||||
&& resp.aggregations[metric.id]
|
||||
&& resp.aggregations[metric.id].value
|
||||
|| resp.hits.total;
|
||||
|
@ -50,8 +50,8 @@ export default function buildHierarchicalDataProvider(Private, Notifier) {
|
|||
};
|
||||
}
|
||||
|
||||
var firstAgg = buckets[0];
|
||||
var aggData = resp.aggregations[firstAgg.id];
|
||||
let firstAgg = buckets[0];
|
||||
let aggData = resp.aggregations[firstAgg.id];
|
||||
|
||||
if (!firstAgg._next && firstAgg.schema.name === 'split') {
|
||||
notify.error('Splitting charts without splitting slices is not supported. Pretending that we are just splitting slices.');
|
||||
|
@ -59,7 +59,7 @@ export default function buildHierarchicalDataProvider(Private, Notifier) {
|
|||
|
||||
// start with splitting slices
|
||||
if (!firstAgg._next || firstAgg.schema.name === 'segment') {
|
||||
var split = buildSplit(firstAgg, metric, aggData);
|
||||
let split = buildSplit(firstAgg, metric, aggData);
|
||||
split.hits = resp.hits.total;
|
||||
split.raw = raw;
|
||||
split.tooltipFormatter = tooltipFormatter(raw.columns);
|
||||
|
@ -67,17 +67,17 @@ export default function buildHierarchicalDataProvider(Private, Notifier) {
|
|||
}
|
||||
|
||||
// map the split aggregations into rows.
|
||||
var rows = _.map(extractBuckets(aggData, firstAgg), function (bucket) {
|
||||
var agg = firstAgg._next;
|
||||
var split = buildSplit(agg, metric, bucket[agg.id]);
|
||||
let rows = _.map(extractBuckets(aggData, firstAgg), function (bucket) {
|
||||
let agg = firstAgg._next;
|
||||
let split = buildSplit(agg, metric, bucket[agg.id]);
|
||||
// Since splits display labels we need to set it.
|
||||
split.label = firstAgg.fieldFormatter()(agg.getKey(bucket));
|
||||
|
||||
var displayName = firstAgg.fieldDisplayName();
|
||||
let displayName = firstAgg.fieldDisplayName();
|
||||
if (!_.isEmpty(displayName)) split.label += ': ' + displayName;
|
||||
|
||||
split.tooltipFormatter = tooltipFormatter(raw.columns);
|
||||
var aggConfigResult = new AggConfigResult(firstAgg, null, null, firstAgg.getKey(bucket));
|
||||
let aggConfigResult = new AggConfigResult(firstAgg, null, null, firstAgg.getKey(bucket));
|
||||
split.split = { aggConfig: firstAgg, aggConfigResult: aggConfigResult, key: bucket.key };
|
||||
_.each(split.slices.children, function (child) {
|
||||
child.aggConfigResult.$parent = aggConfigResult;
|
||||
|
@ -85,7 +85,7 @@ export default function buildHierarchicalDataProvider(Private, Notifier) {
|
|||
return split;
|
||||
});
|
||||
|
||||
var result = { hits: resp.hits.total, raw: raw };
|
||||
let result = { hits: resp.hits.total, raw: raw };
|
||||
if (firstAgg.params.row) {
|
||||
result.rows = rows;
|
||||
} else {
|
||||
|
|
|
@ -10,9 +10,9 @@ describe('addToSiri', function () {
|
|||
}));
|
||||
|
||||
it('creates a new series the first time it sees an id', function () {
|
||||
var series = new Map();
|
||||
var point = {};
|
||||
var id = 'id';
|
||||
let series = new Map();
|
||||
let point = {};
|
||||
let id = 'id';
|
||||
addToSiri(series, point, id);
|
||||
|
||||
expect(series.has(id)).to.be(true);
|
||||
|
@ -23,13 +23,13 @@ describe('addToSiri', function () {
|
|||
});
|
||||
|
||||
it('adds points to existing series if id has been seen', function () {
|
||||
var series = new Map();
|
||||
var id = 'id';
|
||||
let series = new Map();
|
||||
let id = 'id';
|
||||
|
||||
var point = {};
|
||||
let point = {};
|
||||
addToSiri(series, point, id);
|
||||
|
||||
var point2 = {};
|
||||
let point2 = {};
|
||||
addToSiri(series, point2, id);
|
||||
|
||||
expect(series.has(id)).to.be(true);
|
||||
|
@ -41,10 +41,10 @@ describe('addToSiri', function () {
|
|||
});
|
||||
|
||||
it('allows overriding the series label', function () {
|
||||
var series = new Map();
|
||||
var id = 'id';
|
||||
var label = 'label';
|
||||
var point = {};
|
||||
let series = new Map();
|
||||
let id = 'id';
|
||||
let label = 'label';
|
||||
let point = {};
|
||||
addToSiri(series, point, id, label);
|
||||
|
||||
expect(series.has(id)).to.be(true);
|
||||
|
|
|
@ -22,8 +22,8 @@ describe('makeFakeXAspect', function () {
|
|||
}));
|
||||
|
||||
it('creates an object that looks like an aspect', function () {
|
||||
var vis = new Vis(indexPattern, { type: 'histogram' });
|
||||
var aspect = makeFakeXAspect(vis);
|
||||
let vis = new Vis(indexPattern, { type: 'histogram' });
|
||||
let aspect = makeFakeXAspect(vis);
|
||||
|
||||
expect(aspect)
|
||||
.to.have.property('i', -1)
|
||||
|
|
|
@ -27,7 +27,7 @@ describe('getAspects', function () {
|
|||
let vis;
|
||||
let table;
|
||||
|
||||
var date = _.memoize(function (n) {
|
||||
let date = _.memoize(function (n) {
|
||||
return moment().startOf('day').add(n, 'hour').valueOf();
|
||||
});
|
||||
|
||||
|
@ -50,7 +50,7 @@ describe('getAspects', function () {
|
|||
|
||||
function init(group, x, y) {
|
||||
// map args to indicies that should be removed
|
||||
var filter = filterByIndex([
|
||||
let filter = filterByIndex([
|
||||
x > 0,
|
||||
x > 1,
|
||||
group > 0,
|
||||
|
@ -92,7 +92,7 @@ describe('getAspects', function () {
|
|||
].map(filter)
|
||||
};
|
||||
|
||||
var aggs = vis.aggs.splice(0, vis.aggs.length);
|
||||
let aggs = vis.aggs.splice(0, vis.aggs.length);
|
||||
filter(aggs).forEach(function (filter) {
|
||||
vis.aggs.push(filter);
|
||||
});
|
||||
|
@ -101,7 +101,7 @@ describe('getAspects', function () {
|
|||
it('produces an aspect object for each of the aspect types found in the columns', function () {
|
||||
init(1, 1, 1);
|
||||
|
||||
var aspects = getAspects(vis, table);
|
||||
let aspects = getAspects(vis, table);
|
||||
validate(aspects.x, 0);
|
||||
validate(aspects.series, 1);
|
||||
validate(aspects.y, 2);
|
||||
|
@ -110,7 +110,7 @@ describe('getAspects', function () {
|
|||
it('uses arrays only when there are more than one aspect of a specific type', function () {
|
||||
init(0, 1, 2);
|
||||
|
||||
var aspects = getAspects(vis, table);
|
||||
let aspects = getAspects(vis, table);
|
||||
|
||||
validate(aspects.x, 0);
|
||||
expect(aspects.series == null).to.be(true);
|
||||
|
@ -139,7 +139,7 @@ describe('getAspects', function () {
|
|||
it('creates a fake x aspect if the column does not exist', function () {
|
||||
init(0, 0, 1);
|
||||
|
||||
var aspects = getAspects(vis, table);
|
||||
let aspects = getAspects(vis, table);
|
||||
|
||||
expect(aspects.x)
|
||||
.to.be.an('object')
|
||||
|
|
|
@ -7,8 +7,8 @@ describe('getPoint', function () {
|
|||
|
||||
let getPoint;
|
||||
|
||||
var truthFormatted = { fieldFormatter: _.constant(_.constant(true)) };
|
||||
var identFormatted = { fieldFormatter: _.constant(_.identity) };
|
||||
let truthFormatted = { fieldFormatter: _.constant(_.constant(true)) };
|
||||
let identFormatted = { fieldFormatter: _.constant(_.identity) };
|
||||
|
||||
beforeEach(ngMock.module('kibana'));
|
||||
beforeEach(ngMock.inject(function (Private) {
|
||||
|
@ -29,9 +29,9 @@ describe('getPoint', function () {
|
|||
});
|
||||
|
||||
it('properly unwraps and scales values', function () {
|
||||
var row = [ { value: 1 }, { value: 2 }, { value: 3 } ];
|
||||
var zAspect = { i: 2 };
|
||||
var point = getPoint(xAspect, seriesAspect, yScale, row, yAspect, zAspect);
|
||||
let row = [ { value: 1 }, { value: 2 }, { value: 3 } ];
|
||||
let zAspect = { i: 2 };
|
||||
let point = getPoint(xAspect, seriesAspect, yScale, row, yAspect, zAspect);
|
||||
|
||||
expect(point)
|
||||
.to.have.property('x', 1)
|
||||
|
@ -42,8 +42,8 @@ describe('getPoint', function () {
|
|||
});
|
||||
|
||||
it('ignores points with a y value of NaN', function () {
|
||||
var row = [ { value: 1 }, { value: 'NaN' }];
|
||||
var point = getPoint(xAspect, seriesAspect, yScale, row, yAspect);
|
||||
let row = [ { value: 1 }, { value: 'NaN' }];
|
||||
let point = getPoint(xAspect, seriesAspect, yScale, row, yAspect);
|
||||
expect(point).to.be(void 0);
|
||||
});
|
||||
});
|
||||
|
@ -62,8 +62,8 @@ describe('getPoint', function () {
|
|||
});
|
||||
|
||||
it('properly unwraps and scales values', function () {
|
||||
var seriesAspect = { i: 1, agg: identFormatted };
|
||||
var point = getPoint(xAspect, seriesAspect, yScale, row, yAspect);
|
||||
let seriesAspect = { i: 1, agg: identFormatted };
|
||||
let point = getPoint(xAspect, seriesAspect, yScale, row, yAspect);
|
||||
|
||||
expect(point)
|
||||
.to.have.property('x', 1)
|
||||
|
@ -73,8 +73,8 @@ describe('getPoint', function () {
|
|||
});
|
||||
|
||||
it('properly formats series values', function () {
|
||||
var seriesAspect = { i: 1, agg: truthFormatted };
|
||||
var point = getPoint(xAspect, seriesAspect, yScale, row, yAspect);
|
||||
let seriesAspect = { i: 1, agg: truthFormatted };
|
||||
let point = getPoint(xAspect, seriesAspect, yScale, row, yAspect);
|
||||
|
||||
expect(point)
|
||||
.to.have.property('x', 1)
|
||||
|
@ -84,8 +84,8 @@ describe('getPoint', function () {
|
|||
});
|
||||
|
||||
it ('adds the aggConfig to the points', function () {
|
||||
var seriesAspect = { i: 1, agg: truthFormatted};
|
||||
var point = getPoint(xAspect, seriesAspect, yScale, row, yAspect);
|
||||
let seriesAspect = { i: 1, agg: truthFormatted};
|
||||
let point = getPoint(xAspect, seriesAspect, yScale, row, yAspect);
|
||||
|
||||
expect(point).to.have.property('aggConfig', truthFormatted);
|
||||
});
|
||||
|
|
|
@ -5,7 +5,7 @@ import AggResponsePointSeriesGetSeriesProvider from 'ui/agg_response/point_serie
|
|||
describe('getSeries', function () {
|
||||
let getSeries;
|
||||
|
||||
var agg = { fieldFormatter: _.constant(_.identity) };
|
||||
let agg = { fieldFormatter: _.constant(_.identity) };
|
||||
|
||||
beforeEach(ngMock.module('kibana'));
|
||||
beforeEach(ngMock.inject(function (Private) {
|
||||
|
@ -19,7 +19,7 @@ describe('getSeries', function () {
|
|||
}
|
||||
|
||||
it('produces a single series with points for each row', function () {
|
||||
var rows = [
|
||||
let rows = [
|
||||
[1, 2, 3],
|
||||
[1, 2, 3],
|
||||
[1, 2, 3],
|
||||
|
@ -27,7 +27,7 @@ describe('getSeries', function () {
|
|||
[1, 2, 3]
|
||||
].map(wrapRows);
|
||||
|
||||
var chart = {
|
||||
let chart = {
|
||||
aspects: {
|
||||
x: { i: 0 },
|
||||
y: { i: 1 },
|
||||
|
@ -35,13 +35,13 @@ describe('getSeries', function () {
|
|||
}
|
||||
};
|
||||
|
||||
var series = getSeries(rows, chart);
|
||||
let series = getSeries(rows, chart);
|
||||
|
||||
expect(series)
|
||||
.to.be.an('array')
|
||||
.and.to.have.length(1);
|
||||
|
||||
var siri = series[0];
|
||||
let siri = series[0];
|
||||
expect(siri)
|
||||
.to.be.an('object')
|
||||
.and.have.property('label', '')
|
||||
|
@ -60,7 +60,7 @@ describe('getSeries', function () {
|
|||
});
|
||||
|
||||
it('produces multiple series if there are multiple y aspects', function () {
|
||||
var rows = [
|
||||
let rows = [
|
||||
[1, 2, 3],
|
||||
[1, 2, 3],
|
||||
[1, 2, 3],
|
||||
|
@ -68,7 +68,7 @@ describe('getSeries', function () {
|
|||
[1, 2, 3]
|
||||
].map(wrapRows);
|
||||
|
||||
var chart = {
|
||||
let chart = {
|
||||
aspects: {
|
||||
x: { i: 0 },
|
||||
y: [
|
||||
|
@ -78,7 +78,7 @@ describe('getSeries', function () {
|
|||
}
|
||||
};
|
||||
|
||||
var series = getSeries(rows, chart);
|
||||
let series = getSeries(rows, chart);
|
||||
|
||||
expect(series)
|
||||
.to.be.an('array')
|
||||
|
@ -103,7 +103,7 @@ describe('getSeries', function () {
|
|||
});
|
||||
|
||||
it('produces multiple series if there is a series aspect', function () {
|
||||
var rows = [
|
||||
let rows = [
|
||||
['0', 3],
|
||||
['1', 3],
|
||||
['1', 'NaN'],
|
||||
|
@ -114,7 +114,7 @@ describe('getSeries', function () {
|
|||
['1', 3]
|
||||
].map(wrapRows);
|
||||
|
||||
var chart = {
|
||||
let chart = {
|
||||
aspects: {
|
||||
x: { i: -1 },
|
||||
series: { i: 0, agg: agg },
|
||||
|
@ -122,7 +122,7 @@ describe('getSeries', function () {
|
|||
}
|
||||
};
|
||||
|
||||
var series = getSeries(rows, chart);
|
||||
let series = getSeries(rows, chart);
|
||||
|
||||
expect(series)
|
||||
.to.be.an('array')
|
||||
|
@ -147,7 +147,7 @@ describe('getSeries', function () {
|
|||
});
|
||||
|
||||
it('produces multiple series if there is a series aspect and multiple y aspects', function () {
|
||||
var rows = [
|
||||
let rows = [
|
||||
['0', 3, 4],
|
||||
['1', 3, 4],
|
||||
['0', 3, 4],
|
||||
|
@ -156,7 +156,7 @@ describe('getSeries', function () {
|
|||
['1', 3, 4]
|
||||
].map(wrapRows);
|
||||
|
||||
var chart = {
|
||||
let chart = {
|
||||
aspects: {
|
||||
x: { i: -1 },
|
||||
series: { i: 0, agg: agg },
|
||||
|
@ -167,7 +167,7 @@ describe('getSeries', function () {
|
|||
}
|
||||
};
|
||||
|
||||
var series = getSeries(rows, chart);
|
||||
let series = getSeries(rows, chart);
|
||||
|
||||
expect(series)
|
||||
.to.be.an('array')
|
||||
|
@ -197,7 +197,7 @@ describe('getSeries', function () {
|
|||
});
|
||||
|
||||
it('produces a series list in the same order as its corresponding metric column', function () {
|
||||
var rows = [
|
||||
let rows = [
|
||||
['0', 3, 4],
|
||||
['1', 3, 4],
|
||||
['0', 3, 4],
|
||||
|
@ -206,7 +206,7 @@ describe('getSeries', function () {
|
|||
['1', 3, 4]
|
||||
].map(wrapRows);
|
||||
|
||||
var chart = {
|
||||
let chart = {
|
||||
aspects: {
|
||||
x: { i: -1 },
|
||||
series: { i: 0, agg: agg },
|
||||
|
@ -217,7 +217,7 @@ describe('getSeries', function () {
|
|||
}
|
||||
};
|
||||
|
||||
var series = getSeries(rows, chart);
|
||||
let series = getSeries(rows, chart);
|
||||
expect(series[0]).to.have.property('label', '0: 0');
|
||||
expect(series[1]).to.have.property('label', '0: 1');
|
||||
expect(series[2]).to.have.property('label', '1: 0');
|
||||
|
@ -230,7 +230,7 @@ describe('getSeries', function () {
|
|||
y.i = i;
|
||||
});
|
||||
|
||||
var series2 = getSeries(rows, chart);
|
||||
let series2 = getSeries(rows, chart);
|
||||
expect(series2[0]).to.have.property('label', '0: 1');
|
||||
expect(series2[1]).to.have.property('label', '0: 0');
|
||||
expect(series2[2]).to.have.property('label', '1: 1');
|
||||
|
|
|
@ -11,7 +11,7 @@ describe('initXAxis', function () {
|
|||
initXAxis = Private(AggResponsePointSeriesInitXAxisProvider);
|
||||
}));
|
||||
|
||||
var baseChart = {
|
||||
let baseChart = {
|
||||
aspects: {
|
||||
x: {
|
||||
agg: {
|
||||
|
@ -27,7 +27,7 @@ describe('initXAxis', function () {
|
|||
};
|
||||
|
||||
it('sets the xAxisFormatter if the agg is not ordered', function () {
|
||||
var chart = _.cloneDeep(baseChart);
|
||||
let chart = _.cloneDeep(baseChart);
|
||||
initXAxis(chart);
|
||||
expect(chart)
|
||||
.to.have.property('xAxisLabel', 'label')
|
||||
|
@ -35,7 +35,7 @@ describe('initXAxis', function () {
|
|||
});
|
||||
|
||||
it('makes the chart ordered if the agg is ordered', function () {
|
||||
var chart = _.cloneDeep(baseChart);
|
||||
let chart = _.cloneDeep(baseChart);
|
||||
chart.aspects.x.agg.type.ordered = true;
|
||||
|
||||
initXAxis(chart);
|
||||
|
@ -50,7 +50,7 @@ describe('initXAxis', function () {
|
|||
});
|
||||
|
||||
it('reads the interval param from the x agg', function () {
|
||||
var chart = _.cloneDeep(baseChart);
|
||||
let chart = _.cloneDeep(baseChart);
|
||||
chart.aspects.x.agg.type.ordered = true;
|
||||
chart.aspects.x.agg.write = _.constant({ params: { interval: 10 } });
|
||||
|
||||
|
|
|
@ -19,7 +19,7 @@ describe('initYAxis', function () {
|
|||
};
|
||||
}
|
||||
|
||||
var baseChart = {
|
||||
let baseChart = {
|
||||
aspects: {
|
||||
y: [
|
||||
{ agg: agg(), col: { title: 'y1' } },
|
||||
|
@ -33,17 +33,17 @@ describe('initYAxis', function () {
|
|||
};
|
||||
|
||||
describe('with a single y aspect', function () {
|
||||
var singleYBaseChart = _.cloneDeep(baseChart);
|
||||
let singleYBaseChart = _.cloneDeep(baseChart);
|
||||
singleYBaseChart.aspects.y = singleYBaseChart.aspects.y[0];
|
||||
|
||||
it('sets the yAxisFormatter to the field formats convert fn', function () {
|
||||
var chart = _.cloneDeep(singleYBaseChart);
|
||||
let chart = _.cloneDeep(singleYBaseChart);
|
||||
initYAxis(chart);
|
||||
expect(chart).to.have.property('yAxisFormatter', chart.aspects.y.agg.fieldFormatter());
|
||||
});
|
||||
|
||||
it('sets the yAxisLabel', function () {
|
||||
var chart = _.cloneDeep(singleYBaseChart);
|
||||
let chart = _.cloneDeep(singleYBaseChart);
|
||||
initYAxis(chart);
|
||||
expect(chart).to.have.property('yAxisLabel', 'y1');
|
||||
});
|
||||
|
@ -51,7 +51,7 @@ describe('initYAxis', function () {
|
|||
|
||||
describe('with multiple y aspects', function () {
|
||||
it('sets the yAxisFormatter to the field formats convert fn for the first y aspect', function () {
|
||||
var chart = _.cloneDeep(baseChart);
|
||||
let chart = _.cloneDeep(baseChart);
|
||||
initYAxis(chart);
|
||||
|
||||
expect(chart).to.have.property('yAxisFormatter');
|
||||
|
@ -61,7 +61,7 @@ describe('initYAxis', function () {
|
|||
});
|
||||
|
||||
it('does not set the yAxisLabel, it does not make sense to put multiple labels on the same axis', function () {
|
||||
var chart = _.cloneDeep(baseChart);
|
||||
let chart = _.cloneDeep(baseChart);
|
||||
initYAxis(chart);
|
||||
expect(chart).to.have.property('yAxisLabel', '');
|
||||
});
|
||||
|
|
|
@ -25,20 +25,20 @@ describe('pointSeriesChartDataFromTable', function () {
|
|||
}));
|
||||
|
||||
it('handles a table with just a count', function () {
|
||||
var vis = new Vis(indexPattern, { type: 'histogram' });
|
||||
var agg = vis.aggs[0];
|
||||
var result = new AggConfigResult(vis.aggs[0], void 0, 100, 100);
|
||||
let vis = new Vis(indexPattern, { type: 'histogram' });
|
||||
let agg = vis.aggs[0];
|
||||
let result = new AggConfigResult(vis.aggs[0], void 0, 100, 100);
|
||||
|
||||
var table = new Table();
|
||||
let table = new Table();
|
||||
table.columns = [ { aggConfig: agg } ];
|
||||
table.rows.push([ result ]);
|
||||
|
||||
var chartData = pointSeriesChartDataFromTable(vis, table);
|
||||
let chartData = pointSeriesChartDataFromTable(vis, table);
|
||||
|
||||
expect(chartData).to.be.an('object');
|
||||
expect(chartData.series).to.be.an('array');
|
||||
expect(chartData.series).to.have.length(1);
|
||||
var series = chartData.series[0];
|
||||
let series = chartData.series[0];
|
||||
expect(series.values).to.have.length(1);
|
||||
expect(series.values[0])
|
||||
.to.have.property('x', '_all')
|
||||
|
@ -47,7 +47,7 @@ describe('pointSeriesChartDataFromTable', function () {
|
|||
});
|
||||
|
||||
it('handles a table with x and y column', function () {
|
||||
var vis = new Vis(indexPattern, {
|
||||
let vis = new Vis(indexPattern, {
|
||||
type: 'histogram',
|
||||
aggs: [
|
||||
{ type: 'count', schema: 'metric' },
|
||||
|
@ -55,32 +55,32 @@ describe('pointSeriesChartDataFromTable', function () {
|
|||
]
|
||||
});
|
||||
|
||||
var y = {
|
||||
let y = {
|
||||
agg: vis.aggs[0],
|
||||
col: { aggConfig: vis.aggs[0] },
|
||||
at: function (i) { return 100 * i; }
|
||||
};
|
||||
|
||||
var x = {
|
||||
let x = {
|
||||
agg: vis.aggs[1],
|
||||
col: { aggConfig: vis.aggs[1] },
|
||||
at: function (i) { return moment().startOf('day').add(i, 'day').valueOf(); }
|
||||
};
|
||||
|
||||
var rowCount = 3;
|
||||
var table = new Table();
|
||||
let rowCount = 3;
|
||||
let table = new Table();
|
||||
table.columns = [ x.col, y.col ];
|
||||
_.times(rowCount, function (i) {
|
||||
var date = new AggConfigResult(x.agg, void 0, x.at(i));
|
||||
let date = new AggConfigResult(x.agg, void 0, x.at(i));
|
||||
table.rows.push([date, new AggConfigResult(y.agg, date, y.at(i))]);
|
||||
});
|
||||
|
||||
var chartData = pointSeriesChartDataFromTable(vis, table);
|
||||
let chartData = pointSeriesChartDataFromTable(vis, table);
|
||||
|
||||
expect(chartData).to.be.an('object');
|
||||
expect(chartData.series).to.be.an('array');
|
||||
expect(chartData.series).to.have.length(1);
|
||||
var series = chartData.series[0];
|
||||
let series = chartData.series[0];
|
||||
expect(series.values).to.have.length(rowCount);
|
||||
series.values.forEach(function (point, i) {
|
||||
expect(point)
|
||||
|
@ -100,7 +100,7 @@ describe('pointSeriesChartDataFromTable', function () {
|
|||
});
|
||||
|
||||
it('handles a table with an x and two y aspects', function () {
|
||||
var vis = new Vis(indexPattern, {
|
||||
let vis = new Vis(indexPattern, {
|
||||
type: 'histogram',
|
||||
aggs: [
|
||||
{ type: 'avg', schema: 'metric', params: { field: 'bytes' } },
|
||||
|
@ -109,40 +109,40 @@ describe('pointSeriesChartDataFromTable', function () {
|
|||
]
|
||||
});
|
||||
|
||||
var avg = {
|
||||
let avg = {
|
||||
agg: vis.aggs[0],
|
||||
col: { title: 'average', aggConfig: vis.aggs[0] },
|
||||
at: function (i) { return 75.444 * (i + 1); }
|
||||
};
|
||||
|
||||
var date = {
|
||||
let date = {
|
||||
agg: vis.aggs[1],
|
||||
col: { title: 'date', aggConfig: vis.aggs[1] },
|
||||
at: function (i) { return moment().startOf('day').add(i, 'day').valueOf(); }
|
||||
};
|
||||
|
||||
var max = {
|
||||
let max = {
|
||||
agg: vis.aggs[2],
|
||||
col: { title: 'maximum', aggConfig: vis.aggs[2] },
|
||||
at: function (i) { return 100 * (i + 1); }
|
||||
};
|
||||
|
||||
var rowCount = 3;
|
||||
var table = new Table();
|
||||
let rowCount = 3;
|
||||
let table = new Table();
|
||||
table.columns = [ date.col, avg.col, max.col ];
|
||||
_.times(rowCount, function (i) {
|
||||
var dateResult = new AggConfigResult(date.agg, void 0, date.at(i));
|
||||
var avgResult = new AggConfigResult(avg.agg, dateResult, avg.at(i));
|
||||
var maxResult = new AggConfigResult(max.agg, dateResult, max.at(i));
|
||||
let dateResult = new AggConfigResult(date.agg, void 0, date.at(i));
|
||||
let avgResult = new AggConfigResult(avg.agg, dateResult, avg.at(i));
|
||||
let maxResult = new AggConfigResult(max.agg, dateResult, max.at(i));
|
||||
table.rows.push([dateResult, avgResult, maxResult]);
|
||||
});
|
||||
|
||||
var chartData = pointSeriesChartDataFromTable(vis, table);
|
||||
let chartData = pointSeriesChartDataFromTable(vis, table);
|
||||
expect(chartData).to.be.an('object');
|
||||
expect(chartData.series).to.be.an('array');
|
||||
expect(chartData.series).to.have.length(2);
|
||||
chartData.series.forEach(function (siri, i) {
|
||||
var metric = i === 0 ? avg : max;
|
||||
let metric = i === 0 ? avg : max;
|
||||
|
||||
expect(siri).to.have.property('label', metric.col.label);
|
||||
expect(siri.values).to.have.length(rowCount);
|
||||
|
@ -170,7 +170,7 @@ describe('pointSeriesChartDataFromTable', function () {
|
|||
});
|
||||
|
||||
it('handles a table with an x, a series, and two y aspects', function () {
|
||||
var vis = new Vis(indexPattern, {
|
||||
let vis = new Vis(indexPattern, {
|
||||
type: 'histogram',
|
||||
aggs: [
|
||||
{ type: 'terms', schema: 'group', params: { field: 'extension' } },
|
||||
|
@ -180,53 +180,53 @@ describe('pointSeriesChartDataFromTable', function () {
|
|||
]
|
||||
});
|
||||
|
||||
var extensions = ['php', 'jpg', 'gif', 'css'];
|
||||
var term = {
|
||||
let extensions = ['php', 'jpg', 'gif', 'css'];
|
||||
let term = {
|
||||
agg: vis.aggs[0],
|
||||
col: { title: 'extensions', aggConfig: vis.aggs[0] },
|
||||
at: function (i) { return extensions[i % extensions.length]; }
|
||||
};
|
||||
|
||||
var avg = {
|
||||
let avg = {
|
||||
agg: vis.aggs[1],
|
||||
col: { title: 'average', aggConfig: vis.aggs[1] },
|
||||
at: function (i) { return 75.444 * (i + 1); }
|
||||
};
|
||||
|
||||
var date = {
|
||||
let date = {
|
||||
agg: vis.aggs[2],
|
||||
col: { title: 'date', aggConfig: vis.aggs[2] },
|
||||
at: function (i) { return moment().startOf('day').add(i, 'day').valueOf(); }
|
||||
};
|
||||
|
||||
var max = {
|
||||
let max = {
|
||||
agg: vis.aggs[3],
|
||||
col: { title: 'maximum', aggConfig: vis.aggs[3] },
|
||||
at: function (i) { return 100 * (i + 1); }
|
||||
};
|
||||
|
||||
var metricCount = 2;
|
||||
var rowsPerSegment = 2;
|
||||
var rowCount = extensions.length * rowsPerSegment;
|
||||
var table = new Table();
|
||||
let metricCount = 2;
|
||||
let rowsPerSegment = 2;
|
||||
let rowCount = extensions.length * rowsPerSegment;
|
||||
let table = new Table();
|
||||
table.columns = [ date.col, term.col, avg.col, max.col ];
|
||||
_.times(rowCount, function (i) {
|
||||
var dateResult = new AggConfigResult(date.agg, void 0, date.at(i));
|
||||
var termResult = new AggConfigResult(term.agg, dateResult, term.at(i));
|
||||
var avgResult = new AggConfigResult(avg.agg, termResult, avg.at(i));
|
||||
var maxResult = new AggConfigResult(max.agg, termResult, max.at(i));
|
||||
let dateResult = new AggConfigResult(date.agg, void 0, date.at(i));
|
||||
let termResult = new AggConfigResult(term.agg, dateResult, term.at(i));
|
||||
let avgResult = new AggConfigResult(avg.agg, termResult, avg.at(i));
|
||||
let maxResult = new AggConfigResult(max.agg, termResult, max.at(i));
|
||||
table.rows.push([dateResult, termResult, avgResult, maxResult]);
|
||||
});
|
||||
|
||||
var chartData = pointSeriesChartDataFromTable(vis, table);
|
||||
let chartData = pointSeriesChartDataFromTable(vis, table);
|
||||
expect(chartData).to.be.an('object');
|
||||
expect(chartData.series).to.be.an('array');
|
||||
// one series for each extension, and then one for each metric inside
|
||||
expect(chartData.series).to.have.length(extensions.length * metricCount);
|
||||
chartData.series.forEach(function (siri, i) {
|
||||
// figure out the metric used to create this series
|
||||
var metricAgg = siri.values[0].aggConfigResult.aggConfig;
|
||||
var metric = avg.agg === metricAgg ? avg : max;
|
||||
let metricAgg = siri.values[0].aggConfigResult.aggConfig;
|
||||
let metric = avg.agg === metricAgg ? avg : max;
|
||||
|
||||
expect(siri.values).to.have.length(rowsPerSegment);
|
||||
siri.values.forEach(function (point) {
|
||||
|
|
|
@ -6,7 +6,7 @@ import ngMock from 'ng_mock';
|
|||
import AggResponsePointSeriesOrderedDateAxisProvider from 'ui/agg_response/point_series/_ordered_date_axis';
|
||||
describe('orderedDateAxis', function () {
|
||||
|
||||
var baseArgs = {
|
||||
let baseArgs = {
|
||||
vis: {
|
||||
indexPattern: {
|
||||
timeFieldName: '@timestamp'
|
||||
|
@ -37,7 +37,7 @@ describe('orderedDateAxis', function () {
|
|||
|
||||
describe('xAxisFormatter', function () {
|
||||
it('sets the xAxisFormatter', function () {
|
||||
var args = _.cloneDeep(baseArgs);
|
||||
let args = _.cloneDeep(baseArgs);
|
||||
orderedDateAxis(args.vis, args.chart);
|
||||
|
||||
expect(args.chart).to.have.property('xAxisFormatter');
|
||||
|
@ -45,10 +45,10 @@ describe('orderedDateAxis', function () {
|
|||
});
|
||||
|
||||
it('formats values using moment, and returns strings', function () {
|
||||
var args = _.cloneDeep(baseArgs);
|
||||
let args = _.cloneDeep(baseArgs);
|
||||
orderedDateAxis(args.vis, args.chart);
|
||||
|
||||
var val = '2014-08-06T12:34:01';
|
||||
let val = '2014-08-06T12:34:01';
|
||||
expect(args.chart.xAxisFormatter(val))
|
||||
.to.be(moment(val).format('hh:mm:ss'));
|
||||
});
|
||||
|
@ -56,7 +56,7 @@ describe('orderedDateAxis', function () {
|
|||
|
||||
describe('ordered object', function () {
|
||||
it('sets date: true', function () {
|
||||
var args = _.cloneDeep(baseArgs);
|
||||
let args = _.cloneDeep(baseArgs);
|
||||
orderedDateAxis(args.vis, args.chart);
|
||||
|
||||
expect(args.chart)
|
||||
|
@ -67,21 +67,21 @@ describe('orderedDateAxis', function () {
|
|||
});
|
||||
|
||||
it('relies on agg.buckets for the interval', function () {
|
||||
var args = _.cloneDeep(baseArgs);
|
||||
var spy = sinon.spy(args.chart.aspects.x.agg.buckets, 'getInterval');
|
||||
let args = _.cloneDeep(baseArgs);
|
||||
let spy = sinon.spy(args.chart.aspects.x.agg.buckets, 'getInterval');
|
||||
orderedDateAxis(args.vis, args.chart);
|
||||
expect(spy).to.have.property('callCount', 1);
|
||||
});
|
||||
|
||||
it('sets the min/max when the buckets are bounded', function () {
|
||||
var args = _.cloneDeep(baseArgs);
|
||||
let args = _.cloneDeep(baseArgs);
|
||||
orderedDateAxis(args.vis, args.chart);
|
||||
expect(moment.isMoment(args.chart.ordered.min)).to.be(true);
|
||||
expect(moment.isMoment(args.chart.ordered.max)).to.be(true);
|
||||
});
|
||||
|
||||
it('does not set the min/max when the buckets are unbounded', function () {
|
||||
var args = _.cloneDeep(baseArgs);
|
||||
let args = _.cloneDeep(baseArgs);
|
||||
args.chart.aspects.x.agg.buckets.getBounds = _.constant();
|
||||
orderedDateAxis(args.vis, args.chart);
|
||||
expect(args.chart.ordered).to.not.have.property('min');
|
||||
|
|
|
@ -23,7 +23,7 @@ describe('tooltipFormatter', function () {
|
|||
return $row.eq(i).text().trim();
|
||||
}
|
||||
|
||||
var baseEvent = {
|
||||
let baseEvent = {
|
||||
datum: {
|
||||
aggConfigResult: {
|
||||
aggConfig: agg('inner'),
|
||||
|
@ -42,20 +42,20 @@ describe('tooltipFormatter', function () {
|
|||
};
|
||||
|
||||
it('returns html based on the mouse event', function () {
|
||||
var event = _.cloneDeep(baseEvent);
|
||||
var $el = $(tooltipFormatter(event));
|
||||
var $rows = $el.find('tr');
|
||||
let event = _.cloneDeep(baseEvent);
|
||||
let $el = $(tooltipFormatter(event));
|
||||
let $rows = $el.find('tr');
|
||||
expect($rows.size()).to.be(3);
|
||||
|
||||
var $row1 = $rows.eq(0).find('td');
|
||||
let $row1 = $rows.eq(0).find('td');
|
||||
expect(cell($row1, 0)).to.be('inner');
|
||||
expect(cell($row1, 1)).to.be('(3)');
|
||||
|
||||
var $row2 = $rows.eq(1).find('td');
|
||||
let $row2 = $rows.eq(1).find('td');
|
||||
expect(cell($row2, 0)).to.be('middle');
|
||||
expect(cell($row2, 1)).to.be('(2)');
|
||||
|
||||
var $row3 = $rows.eq(2).find('td');
|
||||
let $row3 = $rows.eq(2).find('td');
|
||||
expect(cell($row3, 0)).to.be('top');
|
||||
expect(cell($row3, 1)).to.be('(1)');
|
||||
});
|
||||
|
|
|
@ -2,10 +2,10 @@ import VisAggConfigProvider from 'ui/vis/agg_config';
|
|||
import AggTypesAggTypeProvider from 'ui/agg_types/agg_type';
|
||||
|
||||
export default function PointSeriesFakeXAxis(Private) {
|
||||
var AggConfig = Private(VisAggConfigProvider);
|
||||
var AggType = Private(AggTypesAggTypeProvider);
|
||||
let AggConfig = Private(VisAggConfigProvider);
|
||||
let AggType = Private(AggTypesAggTypeProvider);
|
||||
|
||||
var allAgg = new AggType({
|
||||
let allAgg = new AggType({
|
||||
name: 'all',
|
||||
title: 'All docs',
|
||||
ordered: false,
|
||||
|
@ -13,7 +13,7 @@ export default function PointSeriesFakeXAxis(Private) {
|
|||
});
|
||||
|
||||
return function makeFakeXAxis(vis) {
|
||||
var fake = new AggConfig(vis, {
|
||||
let fake = new AggConfig(vis, {
|
||||
type: allAgg,
|
||||
schema: vis.type.schemas.all.byName.segment
|
||||
});
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
import _ from 'lodash';
|
||||
import AggResponsePointSeriesFakeXAspectProvider from 'ui/agg_response/point_series/_fake_x_aspect';
|
||||
export default function PointSeriesGetAspects(Private) {
|
||||
var fakeXAspect = Private(AggResponsePointSeriesFakeXAspectProvider);
|
||||
let fakeXAspect = Private(AggResponsePointSeriesFakeXAspectProvider);
|
||||
|
||||
var map = {
|
||||
let map = {
|
||||
segment: 'x',
|
||||
metric: 'y',
|
||||
radius: 'z',
|
||||
|
@ -12,12 +12,12 @@ export default function PointSeriesGetAspects(Private) {
|
|||
};
|
||||
|
||||
function columnToAspect(aspects, col, i) {
|
||||
var schema = col.aggConfig.schema.name;
|
||||
let schema = col.aggConfig.schema.name;
|
||||
|
||||
var name = map[schema];
|
||||
let name = map[schema];
|
||||
if (!name) throw new TypeError('unknown schema name "' + schema + '"');
|
||||
|
||||
var aspect = {
|
||||
let aspect = {
|
||||
i: i,
|
||||
col: col,
|
||||
agg: col.aggConfig
|
||||
|
@ -36,7 +36,7 @@ export default function PointSeriesGetAspects(Private) {
|
|||
* may be undefined, a single aspect, or an array of aspects.
|
||||
*/
|
||||
return function getAspects(vis, table) {
|
||||
var aspects = _(table.columns)
|
||||
let aspects = _(table.columns)
|
||||
// write each column into the aspects under its group
|
||||
.transform(columnToAspect, {})
|
||||
// unwrap groups that only have one value, and validate groups that have more
|
||||
|
|
|
@ -5,10 +5,10 @@ export default function PointSeriesGetPoint() {
|
|||
}
|
||||
|
||||
return function getPoint(x, series, yScale, row, y, z) {
|
||||
var zRow = z && row[z.i];
|
||||
var xRow = row[x.i];
|
||||
let zRow = z && row[z.i];
|
||||
let xRow = row[x.i];
|
||||
|
||||
var point = {
|
||||
let point = {
|
||||
x: unwrap(xRow, '_all'),
|
||||
xi: xRow && xRow.$order,
|
||||
y: unwrap(row[y.i]),
|
||||
|
|
|
@ -2,30 +2,30 @@ import _ from 'lodash';
|
|||
import AggResponsePointSeriesGetPointProvider from 'ui/agg_response/point_series/_get_point';
|
||||
import AggResponsePointSeriesAddToSiriProvider from 'ui/agg_response/point_series/_add_to_siri';
|
||||
export default function PointSeriesGetSeries(Private) {
|
||||
var getPoint = Private(AggResponsePointSeriesGetPointProvider);
|
||||
var addToSiri = Private(AggResponsePointSeriesAddToSiriProvider);
|
||||
let getPoint = Private(AggResponsePointSeriesGetPointProvider);
|
||||
let addToSiri = Private(AggResponsePointSeriesAddToSiriProvider);
|
||||
|
||||
return function getSeries(rows, chart) {
|
||||
var aspects = chart.aspects;
|
||||
var multiY = _.isArray(aspects.y);
|
||||
var yScale = chart.yScale;
|
||||
var partGetPoint = _.partial(getPoint, aspects.x, aspects.series, yScale);
|
||||
let aspects = chart.aspects;
|
||||
let multiY = _.isArray(aspects.y);
|
||||
let yScale = chart.yScale;
|
||||
let partGetPoint = _.partial(getPoint, aspects.x, aspects.series, yScale);
|
||||
|
||||
var series = _(rows)
|
||||
let series = _(rows)
|
||||
.transform(function (series, row) {
|
||||
if (!multiY) {
|
||||
var point = partGetPoint(row, aspects.y, aspects.z);
|
||||
let point = partGetPoint(row, aspects.y, aspects.z);
|
||||
if (point) addToSiri(series, point, point.series);
|
||||
return;
|
||||
}
|
||||
|
||||
aspects.y.forEach(function (y) {
|
||||
var point = partGetPoint(row, y, aspects.z);
|
||||
let point = partGetPoint(row, y, aspects.z);
|
||||
if (!point) return;
|
||||
|
||||
var prefix = point.series ? point.series + ': ' : '';
|
||||
var seriesId = prefix + y.agg.id;
|
||||
var seriesLabel = prefix + y.col.title;
|
||||
let prefix = point.series ? point.series + ': ' : '';
|
||||
let seriesId = prefix + y.agg.id;
|
||||
let seriesLabel = prefix + y.col.title;
|
||||
|
||||
addToSiri(series, point, seriesId, seriesLabel);
|
||||
});
|
||||
|
@ -36,11 +36,11 @@ export default function PointSeriesGetSeries(Private) {
|
|||
|
||||
if (multiY) {
|
||||
series = _.sortBy(series, function (siri) {
|
||||
var firstVal = siri.values[0];
|
||||
let firstVal = siri.values[0];
|
||||
let y;
|
||||
|
||||
if (firstVal) {
|
||||
var agg = firstVal.aggConfigResult.aggConfig;
|
||||
let agg = firstVal.aggConfigResult.aggConfig;
|
||||
y = _.find(aspects.y, function (y) {
|
||||
return y.agg === agg;
|
||||
});
|
||||
|
|
|
@ -1,14 +1,14 @@
|
|||
define(function () {
|
||||
return function PointSeriesInitX() {
|
||||
return function initXAxis(chart) {
|
||||
var x = chart.aspects.x;
|
||||
let x = chart.aspects.x;
|
||||
chart.xAxisFormatter = x.agg ? x.agg.fieldFormatter() : String;
|
||||
chart.xAxisLabel = x.col.title;
|
||||
|
||||
if (!x.agg || !x.agg.type.ordered) return;
|
||||
|
||||
chart.ordered = {};
|
||||
var xAggOutput = x.agg.write();
|
||||
let xAggOutput = x.agg.write();
|
||||
if (xAggOutput.params.interval) {
|
||||
chart.ordered.interval = xAggOutput.params.interval;
|
||||
}
|
||||
|
|
|
@ -2,8 +2,8 @@ import _ from 'lodash';
|
|||
export default function PointSeriesInitYAxis() {
|
||||
|
||||
return function initYAxis(chart) {
|
||||
var y = chart.aspects.y;
|
||||
var x = chart.aspects.x;
|
||||
let y = chart.aspects.y;
|
||||
let x = chart.aspects.x;
|
||||
|
||||
if (_.isArray(y)) {
|
||||
// TODO: vis option should allow choosing this format
|
||||
|
@ -14,7 +14,7 @@ export default function PointSeriesInitYAxis() {
|
|||
chart.yAxisLabel = y.col.title;
|
||||
}
|
||||
|
||||
var xAggOutput = x.agg.write();
|
||||
let xAggOutput = x.agg.write();
|
||||
chart.yScale = xAggOutput.metricScale || null;
|
||||
};
|
||||
};
|
||||
|
|
|
@ -2,9 +2,9 @@ import moment from 'moment';
|
|||
export default function PointSeriesOrderedDateAxis(timefilter) {
|
||||
|
||||
return function orderedDateAxis(vis, chart) {
|
||||
var xAgg = chart.aspects.x.agg;
|
||||
var buckets = xAgg.buckets;
|
||||
var format = buckets.getScaledDateFormat();
|
||||
let xAgg = chart.aspects.x.agg;
|
||||
let buckets = xAgg.buckets;
|
||||
let format = buckets.getScaledDateFormat();
|
||||
|
||||
chart.xAxisFormatter = function (val) {
|
||||
return moment(val).format(format);
|
||||
|
@ -15,8 +15,8 @@ export default function PointSeriesOrderedDateAxis(timefilter) {
|
|||
interval: buckets.getInterval(),
|
||||
};
|
||||
|
||||
var axisOnTimeField = xAgg.fieldIsTimeField();
|
||||
var bounds = buckets.getBounds();
|
||||
let axisOnTimeField = xAgg.fieldIsTimeField();
|
||||
let bounds = buckets.getBounds();
|
||||
if (bounds && axisOnTimeField) {
|
||||
chart.ordered.min = bounds.min;
|
||||
chart.ordered.max = bounds.max;
|
||||
|
|
|
@ -1,22 +1,22 @@
|
|||
import $ from 'jquery';
|
||||
export default function PointSeriesTooltipFormatter($compile, $rootScope) {
|
||||
|
||||
var $tooltipScope = $rootScope.$new();
|
||||
var $tooltip = $(require('ui/agg_response/point_series/_tooltip.html'));
|
||||
let $tooltipScope = $rootScope.$new();
|
||||
let $tooltip = $(require('ui/agg_response/point_series/_tooltip.html'));
|
||||
$compile($tooltip)($tooltipScope);
|
||||
|
||||
return function tooltipFormatter(event) {
|
||||
var datum = event.datum;
|
||||
let datum = event.datum;
|
||||
if (!datum || !datum.aggConfigResult) return '';
|
||||
|
||||
var details = $tooltipScope.details = [];
|
||||
var result = { $parent: datum.aggConfigResult };
|
||||
let details = $tooltipScope.details = [];
|
||||
let result = { $parent: datum.aggConfigResult };
|
||||
|
||||
function addDetail(result) {
|
||||
var agg = result.aggConfig;
|
||||
var value = result.value;
|
||||
let agg = result.aggConfig;
|
||||
let value = result.value;
|
||||
|
||||
var detail = {
|
||||
let detail = {
|
||||
value: agg.fieldFormatter()(value),
|
||||
label: agg.makeLabel()
|
||||
};
|
||||
|
|
|
@ -7,23 +7,23 @@ import AggResponsePointSeriesOrderedDateAxisProvider from 'ui/agg_response/point
|
|||
import AggResponsePointSeriesTooltipFormatterProvider from 'ui/agg_response/point_series/_tooltip_formatter';
|
||||
export default function PointSeriesProvider(Private) {
|
||||
|
||||
var getSeries = Private(AggResponsePointSeriesGetSeriesProvider);
|
||||
var getAspects = Private(AggResponsePointSeriesGetAspectsProvider);
|
||||
var initYAxis = Private(AggResponsePointSeriesInitYAxisProvider);
|
||||
var initXAxis = Private(AggResponsePointSeriesInitXAxisProvider);
|
||||
var setupOrderedDateXAxis = Private(AggResponsePointSeriesOrderedDateAxisProvider);
|
||||
var tooltipFormatter = Private(AggResponsePointSeriesTooltipFormatterProvider);
|
||||
let getSeries = Private(AggResponsePointSeriesGetSeriesProvider);
|
||||
let getAspects = Private(AggResponsePointSeriesGetAspectsProvider);
|
||||
let initYAxis = Private(AggResponsePointSeriesInitYAxisProvider);
|
||||
let initXAxis = Private(AggResponsePointSeriesInitXAxisProvider);
|
||||
let setupOrderedDateXAxis = Private(AggResponsePointSeriesOrderedDateAxisProvider);
|
||||
let tooltipFormatter = Private(AggResponsePointSeriesTooltipFormatterProvider);
|
||||
|
||||
return function pointSeriesChartDataFromTable(vis, table) {
|
||||
var chart = {};
|
||||
var aspects = chart.aspects = getAspects(vis, table);
|
||||
let chart = {};
|
||||
let aspects = chart.aspects = getAspects(vis, table);
|
||||
|
||||
chart.tooltipFormatter = tooltipFormatter;
|
||||
|
||||
initXAxis(chart);
|
||||
initYAxis(chart);
|
||||
|
||||
var datedX = aspects.x.agg.type.ordered && aspects.x.agg.type.ordered.date;
|
||||
let datedX = aspects.x.agg.type.ordered && aspects.x.agg.type.ordered.date;
|
||||
if (datedX) {
|
||||
setupOrderedDateXAxis(vis, chart);
|
||||
}
|
||||
|
|
|
@ -12,13 +12,13 @@ describe('Buckets wrapper', function () {
|
|||
|
||||
function test(aggResp, count, keys) {
|
||||
it('reads the length', function () {
|
||||
var buckets = new Buckets(aggResp);
|
||||
let buckets = new Buckets(aggResp);
|
||||
expect(buckets).to.have.length(count);
|
||||
});
|
||||
|
||||
it('iterates properly, passing in the key', function () {
|
||||
var buckets = new Buckets(aggResp);
|
||||
var keysSent = [];
|
||||
let buckets = new Buckets(aggResp);
|
||||
let keysSent = [];
|
||||
buckets.forEach(function (bucket, key) {
|
||||
keysSent.push(key);
|
||||
});
|
||||
|
@ -29,7 +29,7 @@ describe('Buckets wrapper', function () {
|
|||
}
|
||||
|
||||
describe('with object style buckets', function () {
|
||||
var aggResp = {
|
||||
let aggResp = {
|
||||
buckets: {
|
||||
'0-100': {},
|
||||
'100-200': {},
|
||||
|
@ -37,14 +37,14 @@ describe('Buckets wrapper', function () {
|
|||
}
|
||||
};
|
||||
|
||||
var count = 3;
|
||||
var keys = ['0-100', '100-200', '200-300'];
|
||||
let count = 3;
|
||||
let keys = ['0-100', '100-200', '200-300'];
|
||||
|
||||
test(aggResp, count, keys);
|
||||
});
|
||||
|
||||
describe('with array style buckets', function () {
|
||||
var aggResp = {
|
||||
let aggResp = {
|
||||
buckets: [
|
||||
{ key: '0-100', value: {} },
|
||||
{ key: '100-200', value: {} },
|
||||
|
@ -52,8 +52,8 @@ describe('Buckets wrapper', function () {
|
|||
]
|
||||
};
|
||||
|
||||
var count = 3;
|
||||
var keys = ['0-100', '100-200', '200-300'];
|
||||
let count = 3;
|
||||
let keys = ['0-100', '100-200', '200-300'];
|
||||
|
||||
test(aggResp, count, keys);
|
||||
});
|
||||
|
|
|
@ -16,11 +16,11 @@ describe('get columns', function () {
|
|||
}));
|
||||
|
||||
it('should inject a count metric if no aggs exist', function () {
|
||||
var vis = new Vis(indexPattern, {
|
||||
let vis = new Vis(indexPattern, {
|
||||
type: 'pie'
|
||||
});
|
||||
while (vis.aggs.length) vis.aggs.pop();
|
||||
var columns = getColumns(vis);
|
||||
let columns = getColumns(vis);
|
||||
|
||||
expect(columns).to.have.length(1);
|
||||
expect(columns[0]).to.have.property('aggConfig');
|
||||
|
@ -28,14 +28,14 @@ describe('get columns', function () {
|
|||
});
|
||||
|
||||
it('should inject a count metric if only buckets exist', function () {
|
||||
var vis = new Vis(indexPattern, {
|
||||
let vis = new Vis(indexPattern, {
|
||||
type: 'pie',
|
||||
aggs: [
|
||||
{ type: 'date_histogram', schema: 'segment', params: { field: '@timestamp' } }
|
||||
]
|
||||
});
|
||||
|
||||
var columns = getColumns(vis);
|
||||
let columns = getColumns(vis);
|
||||
|
||||
expect(columns).to.have.length(2);
|
||||
expect(columns[1]).to.have.property('aggConfig');
|
||||
|
@ -43,7 +43,7 @@ describe('get columns', function () {
|
|||
});
|
||||
|
||||
it('should inject the metric after each bucket if the vis is hierarchical', function () {
|
||||
var vis = new Vis(indexPattern, {
|
||||
let vis = new Vis(indexPattern, {
|
||||
type: 'pie',
|
||||
aggs: [
|
||||
{ type: 'date_histogram', schema: 'segment', params: { field: '@timestamp' } },
|
||||
|
@ -53,7 +53,7 @@ describe('get columns', function () {
|
|||
]
|
||||
});
|
||||
|
||||
var columns = getColumns(vis);
|
||||
let columns = getColumns(vis);
|
||||
|
||||
expect(columns).to.have.length(8);
|
||||
columns.forEach(function (column, i) {
|
||||
|
@ -63,7 +63,7 @@ describe('get columns', function () {
|
|||
});
|
||||
|
||||
it('should inject the multiple metrics after each bucket if the vis is hierarchical', function () {
|
||||
var vis = new Vis(indexPattern, {
|
||||
let vis = new Vis(indexPattern, {
|
||||
type: 'pie',
|
||||
aggs: [
|
||||
{ type: 'date_histogram', schema: 'segment', params: { field: '@timestamp' } },
|
||||
|
@ -75,7 +75,7 @@ describe('get columns', function () {
|
|||
]
|
||||
});
|
||||
|
||||
var columns = getColumns(vis);
|
||||
let columns = getColumns(vis);
|
||||
|
||||
function checkColumns(column, i) {
|
||||
expect(column).to.have.property('aggConfig');
|
||||
|
@ -93,14 +93,14 @@ describe('get columns', function () {
|
|||
}
|
||||
|
||||
expect(columns).to.have.length(12);
|
||||
for (var i = 0; i < columns.length; i += 3) {
|
||||
var counts = { buckets: 0, metrics: 0 };
|
||||
for (let i = 0; i < columns.length; i += 3) {
|
||||
let counts = { buckets: 0, metrics: 0 };
|
||||
columns.slice(i, i + 3).forEach(checkColumns);
|
||||
}
|
||||
});
|
||||
|
||||
it('should put all metrics at the end of the columns if the vis is not hierarchical', function () {
|
||||
var vis = new Vis(indexPattern, {
|
||||
let vis = new Vis(indexPattern, {
|
||||
type: 'histogram',
|
||||
aggs: [
|
||||
{ type: 'date_histogram', schema: 'segment', params: { field: '@timestamp' } },
|
||||
|
@ -112,7 +112,7 @@ describe('get columns', function () {
|
|||
]
|
||||
});
|
||||
|
||||
var columns = getColumns(vis);
|
||||
let columns = getColumns(vis);
|
||||
expect(columns).to.have.length(6);
|
||||
|
||||
// sum should be last
|
||||
|
|
|
@ -26,13 +26,13 @@ describe('tabifyAggResponse Integration', function () {
|
|||
}
|
||||
|
||||
it('transforms a simple response properly', function () {
|
||||
var vis = new Vis(indexPattern, {
|
||||
let vis = new Vis(indexPattern, {
|
||||
type: 'histogram',
|
||||
aggs: []
|
||||
});
|
||||
normalizeIds(vis);
|
||||
|
||||
var resp = tabifyAggResponse(vis, fixtures.metricOnly, { canSplit: false });
|
||||
let resp = tabifyAggResponse(vis, fixtures.metricOnly, { canSplit: false });
|
||||
|
||||
expect(resp).to.not.have.property('tables');
|
||||
expect(resp).to.have.property('rows').and.property('columns');
|
||||
|
@ -81,7 +81,7 @@ describe('tabifyAggResponse Integration', function () {
|
|||
function expectRootGroup(rootTableGroup, expectInnerTables) {
|
||||
expect(rootTableGroup).to.have.property('tables');
|
||||
|
||||
var tables = rootTableGroup.tables;
|
||||
let tables = rootTableGroup.tables;
|
||||
expect(tables).to.be.an('array').and.have.length(3);
|
||||
expectExtensionSplit(tables[0], 'png', expectInnerTables);
|
||||
expectExtensionSplit(tables[1], 'css', expectInnerTables);
|
||||
|
@ -155,7 +155,7 @@ describe('tabifyAggResponse Integration', function () {
|
|||
// only complete rows, and only put the metrics at the end.
|
||||
|
||||
vis.isHierarchical = _.constant(false);
|
||||
var tabbed = tabifyAggResponse(vis, esResp);
|
||||
let tabbed = tabifyAggResponse(vis, esResp);
|
||||
|
||||
expectRootGroup(tabbed, function expectTable(table, splitKey) {
|
||||
expectColumns(table, [src, os, avg]);
|
||||
|
@ -181,7 +181,7 @@ describe('tabifyAggResponse Integration', function () {
|
|||
// the existing bucket and its metric
|
||||
|
||||
vis.isHierarchical = _.constant(true);
|
||||
var tabbed = tabifyAggResponse(vis, esResp, {
|
||||
let tabbed = tabifyAggResponse(vis, esResp, {
|
||||
partialRows: true
|
||||
});
|
||||
|
||||
|
@ -215,7 +215,7 @@ describe('tabifyAggResponse Integration', function () {
|
|||
// the end
|
||||
|
||||
vis.isHierarchical = _.constant(true);
|
||||
var tabbed = tabifyAggResponse(vis, esResp, {
|
||||
let tabbed = tabifyAggResponse(vis, esResp, {
|
||||
partialRows: true,
|
||||
minimalColumns: true
|
||||
});
|
||||
|
@ -247,7 +247,7 @@ describe('tabifyAggResponse Integration', function () {
|
|||
// create metric columns after each bucket
|
||||
|
||||
vis.isHierarchical = _.constant(false);
|
||||
var tabbed = tabifyAggResponse(vis, esResp, {
|
||||
let tabbed = tabifyAggResponse(vis, esResp, {
|
||||
minimalColumns: false
|
||||
});
|
||||
|
||||
|
|
|
@ -42,15 +42,15 @@ describe('ResponseWriter class', function () {
|
|||
defineSetup(true);
|
||||
|
||||
it('gets the columns for the vis', function () {
|
||||
var vis = new Vis(indexPattern, { type: 'histogram', aggs: [] });
|
||||
var writer = new ResponseWriter(vis);
|
||||
let vis = new Vis(indexPattern, { type: 'histogram', aggs: [] });
|
||||
let writer = new ResponseWriter(vis);
|
||||
|
||||
expect(getColumns).to.have.property('callCount', 1);
|
||||
expect(getColumns.firstCall.args[0]).to.be(vis);
|
||||
});
|
||||
|
||||
it('collects the aggConfigs from each column in aggStack', function () {
|
||||
var aggs = [
|
||||
let aggs = [
|
||||
{ type: 'date_histogram', schema: 'segment', params: { field: '@timestamp' } },
|
||||
{ type: 'terms', schema: 'segment', params: { field: 'extension' } },
|
||||
{ type: 'avg', schema: 'metric', params: { field: '@timestamp' } }
|
||||
|
@ -60,12 +60,12 @@ describe('ResponseWriter class', function () {
|
|||
return { aggConfig: agg };
|
||||
}));
|
||||
|
||||
var vis = new Vis(indexPattern, {
|
||||
let vis = new Vis(indexPattern, {
|
||||
type: 'histogram',
|
||||
aggs: aggs
|
||||
});
|
||||
|
||||
var writer = new ResponseWriter(vis);
|
||||
let writer = new ResponseWriter(vis);
|
||||
expect(writer.aggStack).to.be.an('array');
|
||||
expect(writer.aggStack).to.have.length(aggs.length);
|
||||
writer.aggStack.forEach(function (agg, i) {
|
||||
|
@ -74,40 +74,40 @@ describe('ResponseWriter class', function () {
|
|||
});
|
||||
|
||||
it('sets canSplit=true by default', function () {
|
||||
var vis = new Vis(indexPattern, { type: 'histogram', aggs: [] });
|
||||
var writer = new ResponseWriter(vis);
|
||||
let vis = new Vis(indexPattern, { type: 'histogram', aggs: [] });
|
||||
let writer = new ResponseWriter(vis);
|
||||
expect(writer).to.have.property('canSplit', true);
|
||||
});
|
||||
|
||||
it('sets canSplit=false when config says to', function () {
|
||||
var vis = new Vis(indexPattern, { type: 'histogram', aggs: [] });
|
||||
var writer = new ResponseWriter(vis, { canSplit: false });
|
||||
let vis = new Vis(indexPattern, { type: 'histogram', aggs: [] });
|
||||
let writer = new ResponseWriter(vis, { canSplit: false });
|
||||
expect(writer).to.have.property('canSplit', false);
|
||||
});
|
||||
|
||||
describe('sets partialRows', function () {
|
||||
it('to the value of the config if set', function () {
|
||||
var vis = new Vis(indexPattern, { type: 'histogram', aggs: [] });
|
||||
var partial = Boolean(Math.round(Math.random()));
|
||||
let vis = new Vis(indexPattern, { type: 'histogram', aggs: [] });
|
||||
let partial = Boolean(Math.round(Math.random()));
|
||||
|
||||
var writer = new ResponseWriter(vis, { partialRows: partial });
|
||||
let writer = new ResponseWriter(vis, { partialRows: partial });
|
||||
expect(writer).to.have.property('partialRows', partial);
|
||||
});
|
||||
|
||||
it('to the value of vis.isHierarchical if no config', function () {
|
||||
var vis = new Vis(indexPattern, { type: 'histogram', aggs: [] });
|
||||
var hierarchical = Boolean(Math.round(Math.random()));
|
||||
let vis = new Vis(indexPattern, { type: 'histogram', aggs: [] });
|
||||
let hierarchical = Boolean(Math.round(Math.random()));
|
||||
sinon.stub(vis, 'isHierarchical').returns(hierarchical);
|
||||
|
||||
var writer = new ResponseWriter(vis, {});
|
||||
let writer = new ResponseWriter(vis, {});
|
||||
expect(writer).to.have.property('partialRows', hierarchical);
|
||||
});
|
||||
});
|
||||
|
||||
it('starts off with a root TableGroup', function () {
|
||||
var vis = new Vis(indexPattern, { type: 'histogram', aggs: [] });
|
||||
let vis = new Vis(indexPattern, { type: 'histogram', aggs: [] });
|
||||
|
||||
var writer = new ResponseWriter(vis);
|
||||
let writer = new ResponseWriter(vis);
|
||||
expect(writer.root).to.be.a(TableGroup);
|
||||
expect(writer.splitStack).to.be.an('array');
|
||||
expect(writer.splitStack).to.have.length(1);
|
||||
|
@ -120,29 +120,29 @@ describe('ResponseWriter class', function () {
|
|||
|
||||
describe('#response()', function () {
|
||||
it('returns the root TableGroup if splitting', function () {
|
||||
var vis = new Vis(indexPattern, { type: 'histogram', aggs: [] });
|
||||
var writer = new ResponseWriter(vis);
|
||||
let vis = new Vis(indexPattern, { type: 'histogram', aggs: [] });
|
||||
let writer = new ResponseWriter(vis);
|
||||
expect(writer.response()).to.be(writer.root);
|
||||
});
|
||||
|
||||
it('returns the first table if not splitting', function () {
|
||||
var vis = new Vis(indexPattern, { type: 'histogram', aggs: [] });
|
||||
var writer = new ResponseWriter(vis, { canSplit: false });
|
||||
var table = writer._table();
|
||||
let vis = new Vis(indexPattern, { type: 'histogram', aggs: [] });
|
||||
let writer = new ResponseWriter(vis, { canSplit: false });
|
||||
let table = writer._table();
|
||||
expect(writer.response()).to.be(table);
|
||||
});
|
||||
|
||||
it('adds columns to all of the tables', function () {
|
||||
var vis = new Vis(indexPattern, {
|
||||
let vis = new Vis(indexPattern, {
|
||||
type: 'histogram',
|
||||
aggs: [
|
||||
{ type: 'terms', params: { field: '_type' }, schema: 'split' },
|
||||
{ type: 'count', schema: 'metric' }
|
||||
]
|
||||
});
|
||||
var buckets = new Buckets({ buckets: [ { key: 'nginx' }, { key: 'apache' } ] });
|
||||
var writer = new ResponseWriter(vis);
|
||||
var tables = [];
|
||||
let buckets = new Buckets({ buckets: [ { key: 'nginx' }, { key: 'apache' } ] });
|
||||
let writer = new ResponseWriter(vis);
|
||||
let tables = [];
|
||||
|
||||
writer.split(vis.aggs[0], buckets, function () {
|
||||
writer.cell(vis.aggs[1], 100, function () {
|
||||
|
@ -154,11 +154,11 @@ describe('ResponseWriter class', function () {
|
|||
expect(table.columns == null).to.be(true);
|
||||
});
|
||||
|
||||
var resp = writer.response();
|
||||
let resp = writer.response();
|
||||
expect(resp).to.be.a(TableGroup);
|
||||
expect(resp.tables).to.have.length(2);
|
||||
|
||||
var nginx = resp.tables.shift();
|
||||
let nginx = resp.tables.shift();
|
||||
expect(nginx).to.have.property('aggConfig', vis.aggs[0]);
|
||||
expect(nginx).to.have.property('key', 'nginx');
|
||||
expect(nginx.tables).to.have.length(1);
|
||||
|
@ -166,7 +166,7 @@ describe('ResponseWriter class', function () {
|
|||
expect(_.contains(tables, table)).to.be(true);
|
||||
});
|
||||
|
||||
var apache = resp.tables.shift();
|
||||
let apache = resp.tables.shift();
|
||||
expect(apache).to.have.property('aggConfig', vis.aggs[0]);
|
||||
expect(apache).to.have.property('key', 'apache');
|
||||
expect(apache.tables).to.have.length(1);
|
||||
|
@ -184,16 +184,16 @@ describe('ResponseWriter class', function () {
|
|||
|
||||
describe('#split()', function () {
|
||||
it('will break if the user has specified that splitting is to be disabled', function () {
|
||||
var vis = new Vis(indexPattern, {
|
||||
let vis = new Vis(indexPattern, {
|
||||
type: 'histogram',
|
||||
aggs: [
|
||||
{ type: 'terms', schema: 'split', params: { field: '_type' } },
|
||||
{ type: 'count', schema: 'metric' }
|
||||
]
|
||||
});
|
||||
var agg = vis.aggs.bySchemaName.split[0];
|
||||
var buckets = new Buckets({ buckets: [ { key: 'apache' } ]});
|
||||
var writer = new ResponseWriter(vis, { canSplit: false });
|
||||
let agg = vis.aggs.bySchemaName.split[0];
|
||||
let buckets = new Buckets({ buckets: [ { key: 'apache' } ]});
|
||||
let writer = new ResponseWriter(vis, { canSplit: false });
|
||||
|
||||
expect(function () {
|
||||
writer.split(agg, buckets, _.noop);
|
||||
|
@ -201,7 +201,7 @@ describe('ResponseWriter class', function () {
|
|||
});
|
||||
|
||||
it('forks the acrStack and rewrites the parents', function () {
|
||||
var vis = new Vis(indexPattern, {
|
||||
let vis = new Vis(indexPattern, {
|
||||
type: 'histogram',
|
||||
aggs: [
|
||||
{ type: 'terms', params: { field: 'extension' }, schema: 'segment' },
|
||||
|
@ -211,10 +211,10 @@ describe('ResponseWriter class', function () {
|
|||
]
|
||||
});
|
||||
|
||||
var writer = new ResponseWriter(vis, { asAggConfigResults: true });
|
||||
var extensions = new Buckets({ buckets: [ { key: 'jpg' }, { key: 'png' } ] });
|
||||
var types = new Buckets({ buckets: [ { key: 'nginx' }, { key: 'apache' } ] });
|
||||
var os = new Buckets({ buckets: [ { key: 'window' }, { key: 'osx' } ] });
|
||||
let writer = new ResponseWriter(vis, { asAggConfigResults: true });
|
||||
let extensions = new Buckets({ buckets: [ { key: 'jpg' }, { key: 'png' } ] });
|
||||
let types = new Buckets({ buckets: [ { key: 'nginx' }, { key: 'apache' } ] });
|
||||
let os = new Buckets({ buckets: [ { key: 'window' }, { key: 'osx' } ] });
|
||||
|
||||
extensions.forEach(function (b, extension) {
|
||||
writer.cell(vis.aggs[0], extension, function () {
|
||||
|
@ -230,11 +230,11 @@ describe('ResponseWriter class', function () {
|
|||
});
|
||||
});
|
||||
|
||||
var tables = _.flattenDeep(_.pluck(writer.response().tables, 'tables'));
|
||||
let tables = _.flattenDeep(_.pluck(writer.response().tables, 'tables'));
|
||||
expect(tables.length).to.be(types.length);
|
||||
|
||||
// collect the far left acr from each table
|
||||
var leftAcrs = _.pluck(tables, 'rows[0][0]');
|
||||
let leftAcrs = _.pluck(tables, 'rows[0][0]');
|
||||
|
||||
leftAcrs.forEach(function (acr, i, acrs) {
|
||||
expect(acr.aggConfig).to.be(vis.aggs[0]);
|
||||
|
@ -243,7 +243,7 @@ describe('ResponseWriter class', function () {
|
|||
|
||||
// for all but the last acr, compare to the next
|
||||
if (i + 1 >= acrs.length) return;
|
||||
var acr2 = leftAcrs[i + 1];
|
||||
let acr2 = leftAcrs[i + 1];
|
||||
|
||||
expect(acr.key).to.be(acr2.key);
|
||||
expect(acr.value).to.be(acr2.value);
|
||||
|
@ -258,8 +258,8 @@ describe('ResponseWriter class', function () {
|
|||
describe('#cell()', function () {
|
||||
it('logs a cell in the ResponseWriters row buffer, calls the block arg, then removes the value from the buffer',
|
||||
function () {
|
||||
var vis = new Vis(indexPattern, { type: 'histogram', aggs: [] });
|
||||
var writer = new ResponseWriter(vis);
|
||||
let vis = new Vis(indexPattern, { type: 'histogram', aggs: [] });
|
||||
let writer = new ResponseWriter(vis);
|
||||
|
||||
expect(writer.rowBuffer).to.have.length(0);
|
||||
writer.cell({}, 500, function () {
|
||||
|
@ -272,10 +272,10 @@ describe('ResponseWriter class', function () {
|
|||
|
||||
describe('#row()', function () {
|
||||
it('writes the ResponseWriters internal rowBuffer into a table', function () {
|
||||
var vis = new Vis(indexPattern, { type: 'histogram', aggs: [] });
|
||||
var writer = new ResponseWriter(vis);
|
||||
let vis = new Vis(indexPattern, { type: 'histogram', aggs: [] });
|
||||
let writer = new ResponseWriter(vis);
|
||||
|
||||
var table = writer._table();
|
||||
let table = writer._table();
|
||||
writer.cell({}, 1, function () {
|
||||
writer.cell({}, 2, function () {
|
||||
writer.cell({}, 3, function () {
|
||||
|
@ -289,7 +289,7 @@ describe('ResponseWriter class', function () {
|
|||
});
|
||||
|
||||
it('always writes to the table group at the top of the split stack', function () {
|
||||
var vis = new Vis(indexPattern, {
|
||||
let vis = new Vis(indexPattern, {
|
||||
type: 'histogram',
|
||||
aggs: [
|
||||
{ type: 'terms', schema: 'split', params: { field: '_type' } },
|
||||
|
@ -298,20 +298,20 @@ describe('ResponseWriter class', function () {
|
|||
{ type: 'count', schema: 'metric' }
|
||||
]
|
||||
});
|
||||
var splits = vis.aggs.bySchemaName.split;
|
||||
let splits = vis.aggs.bySchemaName.split;
|
||||
|
||||
var type = splits[0];
|
||||
var typeBuckets = new Buckets({ buckets: [ { key: 'nginx' }, { key: 'apache' } ] });
|
||||
let type = splits[0];
|
||||
let typeBuckets = new Buckets({ buckets: [ { key: 'nginx' }, { key: 'apache' } ] });
|
||||
|
||||
var ext = splits[1];
|
||||
var extBuckets = new Buckets({ buckets: [ { key: 'jpg' }, { key: 'png' } ] });
|
||||
let ext = splits[1];
|
||||
let extBuckets = new Buckets({ buckets: [ { key: 'jpg' }, { key: 'png' } ] });
|
||||
|
||||
var os = splits[2];
|
||||
var osBuckets = new Buckets({ buckets: [ { key: 'windows' }, { key: 'mac' } ] });
|
||||
let os = splits[2];
|
||||
let osBuckets = new Buckets({ buckets: [ { key: 'windows' }, { key: 'mac' } ] });
|
||||
|
||||
var count = vis.aggs[3];
|
||||
let count = vis.aggs[3];
|
||||
|
||||
var writer = new ResponseWriter(vis);
|
||||
let writer = new ResponseWriter(vis);
|
||||
writer.split(type, typeBuckets, function () {
|
||||
writer.split(ext, extBuckets, function () {
|
||||
writer.split(os, osBuckets, function (bucket, key) {
|
||||
|
@ -322,9 +322,9 @@ describe('ResponseWriter class', function () {
|
|||
});
|
||||
});
|
||||
|
||||
var resp = writer.response();
|
||||
var sum = 0;
|
||||
var tables = 0;
|
||||
let resp = writer.response();
|
||||
let sum = 0;
|
||||
let tables = 0;
|
||||
(function recurse(t) {
|
||||
if (t.tables) {
|
||||
// table group
|
||||
|
@ -347,7 +347,7 @@ describe('ResponseWriter class', function () {
|
|||
});
|
||||
|
||||
it('writes partial rows for hierarchical vis', function () {
|
||||
var vis = new Vis(indexPattern, {
|
||||
let vis = new Vis(indexPattern, {
|
||||
type: 'pie',
|
||||
aggs: [
|
||||
{ type: 'terms', schema: 'segment', params: { field: '_type' }},
|
||||
|
@ -355,8 +355,8 @@ describe('ResponseWriter class', function () {
|
|||
]
|
||||
});
|
||||
|
||||
var writer = new ResponseWriter(vis);
|
||||
var table = writer._table();
|
||||
let writer = new ResponseWriter(vis);
|
||||
let table = writer._table();
|
||||
writer.cell(vis.aggs[0], 'apache', function () {
|
||||
writer.row();
|
||||
});
|
||||
|
@ -366,7 +366,7 @@ describe('ResponseWriter class', function () {
|
|||
});
|
||||
|
||||
it('skips partial rows for non-hierarchical vis', function () {
|
||||
var vis = new Vis(indexPattern, {
|
||||
let vis = new Vis(indexPattern, {
|
||||
type: 'histogram',
|
||||
aggs: [
|
||||
{ type: 'terms', schema: 'segment', params: { field: '_type' }},
|
||||
|
@ -374,8 +374,8 @@ describe('ResponseWriter class', function () {
|
|||
]
|
||||
});
|
||||
|
||||
var writer = new ResponseWriter(vis);
|
||||
var table = writer._table();
|
||||
let writer = new ResponseWriter(vis);
|
||||
let table = writer._table();
|
||||
writer.cell(vis.aggs[0], 'apache', function () {
|
||||
writer.row();
|
||||
});
|
||||
|
|
|
@ -12,16 +12,16 @@ describe('Table class', function () {
|
|||
}));
|
||||
|
||||
it('exposes rows array, but not the columns', function () {
|
||||
var table = new Table();
|
||||
let table = new Table();
|
||||
expect(table.rows).to.be.an('array');
|
||||
expect(table.columns == null).to.be.ok();
|
||||
});
|
||||
|
||||
describe('#aggConfig', function () {
|
||||
it('accepts a column from the table and returns its agg config', function () {
|
||||
var table = new Table();
|
||||
var football = {};
|
||||
var column = {
|
||||
let table = new Table();
|
||||
let football = {};
|
||||
let column = {
|
||||
aggConfig: football
|
||||
};
|
||||
|
||||
|
@ -30,7 +30,7 @@ describe('Table class', function () {
|
|||
|
||||
it('throws a TypeError if the column is malformed', function () {
|
||||
expect(function () {
|
||||
var notAColumn = {};
|
||||
let notAColumn = {};
|
||||
(new Table()).aggConfig(notAColumn);
|
||||
}).to.throwException(TypeError);
|
||||
});
|
||||
|
@ -38,12 +38,12 @@ describe('Table class', function () {
|
|||
|
||||
describe('#title', function () {
|
||||
it('returns nothing if the table is not part of a table group', function () {
|
||||
var table = new Table();
|
||||
let table = new Table();
|
||||
expect(table.title()).to.be('');
|
||||
});
|
||||
|
||||
it('returns the title of the TableGroup if the table is part of one', function () {
|
||||
var table = new Table();
|
||||
let table = new Table();
|
||||
table.$parent = {
|
||||
title: 'TableGroup Title',
|
||||
tables: [table]
|
||||
|
@ -55,9 +55,9 @@ describe('Table class', function () {
|
|||
|
||||
describe('#field', function () {
|
||||
it('calls the columns aggConfig#field() method', function () {
|
||||
var table = new Table();
|
||||
var football = {};
|
||||
var column = {
|
||||
let table = new Table();
|
||||
let football = {};
|
||||
let column = {
|
||||
aggConfig: {
|
||||
field: _.constant(football)
|
||||
}
|
||||
|
@ -69,9 +69,9 @@ describe('Table class', function () {
|
|||
|
||||
describe('#fieldFormatter', function () {
|
||||
it('calls the columns aggConfig#fieldFormatter() method', function () {
|
||||
var table = new Table();
|
||||
var football = {};
|
||||
var column = {
|
||||
let table = new Table();
|
||||
let football = {};
|
||||
let column = {
|
||||
aggConfig: {
|
||||
fieldFormatter: _.constant(football)
|
||||
}
|
||||
|
|
|
@ -10,7 +10,7 @@ describe('Table Group class', function () {
|
|||
}));
|
||||
|
||||
it('exposes tables array and empty aggConfig, key and title', function () {
|
||||
var tableGroup = new TableGroup();
|
||||
let tableGroup = new TableGroup();
|
||||
expect(tableGroup.tables).to.be.an('array');
|
||||
expect(tableGroup.aggConfig).to.be(null);
|
||||
expect(tableGroup.key).to.be(null);
|
||||
|
|
|
@ -15,7 +15,7 @@ export default function AggResponseBucketsProvider() {
|
|||
}
|
||||
|
||||
Buckets.prototype.forEach = function (fn) {
|
||||
var buckets = this.buckets;
|
||||
let buckets = this.buckets;
|
||||
|
||||
if (this.objectMode) {
|
||||
this._keys.forEach(function (key) {
|
||||
|
|
|
@@ -1,10 +1,10 @@
import _ from 'lodash';
import VisAggConfigProvider from 'ui/vis/agg_config';
export default function GetColumnsProvider(Private) {
var AggConfig = Private(VisAggConfigProvider);
let AggConfig = Private(VisAggConfigProvider);

return function getColumns(vis, minimal) {
var aggs = vis.aggs.getResponseAggs();
let aggs = vis.aggs.getResponseAggs();

if (minimal == null) minimal = !vis.isHierarchical();

@@ -23,10 +23,10 @@ export default function GetColumnsProvider(Private) {
}

// supposed to be bucket,...metrics,bucket,...metrics
var columns = [];
let columns = [];

// seperate the metrics
var grouped = _.groupBy(aggs, function (agg) {
let grouped = _.groupBy(aggs, function (agg) {
return agg.schema.group;
});

@@ -4,9 +4,9 @@ import AggResponseTabifyTableProvider from 'ui/agg_response/tabify/_table';
import AggResponseTabifyTableGroupProvider from 'ui/agg_response/tabify/_table_group';
import AggResponseTabifyGetColumnsProvider from 'ui/agg_response/tabify/_get_columns';
export default function TabbedAggResponseWriterProvider(Private) {
var Table = Private(AggResponseTabifyTableProvider);
var TableGroup = Private(AggResponseTabifyTableGroupProvider);
var getColumns = Private(AggResponseTabifyGetColumnsProvider);
let Table = Private(AggResponseTabifyTableProvider);
let TableGroup = Private(AggResponseTabifyTableGroupProvider);
let getColumns = Private(AggResponseTabifyGetColumnsProvider);


_.class(SplitAcr).inherits(AggConfigResult);

@@ -25,7 +25,7 @@ export default function TabbedAggResponseWriterProvider(Private) {
this.opts = opts || {};
this.rowBuffer = [];

var visIsHier = vis.isHierarchical();
let visIsHier = vis.isHierarchical();

// do the options allow for splitting? we will only split if true and
// tabify calls the split method.

@@ -67,9 +67,9 @@ export default function TabbedAggResponseWriterProvider(Private) {
* @return {Table/TableGroup} table - the created table
*/
TabbedAggResponseWriter.prototype._table = function (group, agg, key) {
var Class = (group) ? TableGroup : Table;
var table = new Class();
var parent = this.splitStack[0];
let Class = (group) ? TableGroup : Table;
let table = new Class();
let parent = this.splitStack[0];

if (group) {
table.aggConfig = agg;

@@ -95,7 +95,7 @@ export default function TabbedAggResponseWriterProvider(Private) {
* @param {function} block - a function to execute for each sub bucket
*/
TabbedAggResponseWriter.prototype.split = function (agg, buckets, block) {
var self = this;
let self = this;

if (!self.canSplit) {
throw new Error('attempted to split when splitting is disabled');

@@ -105,11 +105,11 @@ export default function TabbedAggResponseWriterProvider(Private) {

buckets.forEach(function (bucket, key) {
// find the existing split that we should extend
var tableGroup = _.find(self.splitStack[0].tables, { aggConfig: agg, key: key });
let tableGroup = _.find(self.splitStack[0].tables, { aggConfig: agg, key: key });
// create the split if it doesn't exist yet
if (!tableGroup) tableGroup = self._table(true, agg, key);

var splitAcr = false;
let splitAcr = false;
if (self.asAggConfigResults) {
splitAcr = self._injectParentSplit(agg, key);
}

@@ -127,7 +127,7 @@ export default function TabbedAggResponseWriterProvider(Private) {
};

TabbedAggResponseWriter.prototype._removeAggFromColumns = function (agg) {
var i = _.findIndex(this.columns, function (col) {
let i = _.findIndex(this.columns, function (col) {
return col.aggConfig === agg;
});

@@ -140,8 +140,8 @@ export default function TabbedAggResponseWriterProvider(Private) {

// hierarchical vis creats additional columns for each bucket
// we will remove those too
var mCol = this.columns.splice(i, 1).pop();
var mI = _.findIndex(this.aggStack, function (agg) {
let mCol = this.columns.splice(i, 1).pop();
let mI = _.findIndex(this.aggStack, function (agg) {
return agg === mCol.aggConfig;
});

@@ -160,12 +160,12 @@ export default function TabbedAggResponseWriterProvider(Private) {
* @return {SplitAcr} - the AggConfigResult created for the split bucket
*/
TabbedAggResponseWriter.prototype._injectParentSplit = function (agg, key) {
var oldList = this.acrStack;
var newList = this.acrStack = [];
let oldList = this.acrStack;
let newList = this.acrStack = [];

// walk from right to left through the old stack
// and move things to the new stack
var injected = false;
let injected = false;

if (!oldList.length) {
injected = new SplitAcr(agg, null, key);

@@ -175,7 +175,7 @@ export default function TabbedAggResponseWriterProvider(Private) {

// walk from right to left, emptying the previous list
while (oldList.length) {
var acr = oldList.pop();
let acr = oldList.pop();

// ignore other splits
if (acr instanceof SplitAcr) {

@@ -189,11 +189,11 @@ export default function TabbedAggResponseWriterProvider(Private) {
newList.unshift(injected);
}

var newAcr = new AggConfigResult(acr.aggConfig, newList[0], acr.value, acr.aggConfig.getKey(acr));
let newAcr = new AggConfigResult(acr.aggConfig, newList[0], acr.value, acr.aggConfig.getKey(acr));
newList.unshift(newAcr);

// and replace the acr in the row buffer if its there
var rowI = this.rowBuffer.indexOf(acr);
let rowI = this.rowBuffer.indexOf(acr);
if (rowI > -1) {
this.rowBuffer[rowI] = newAcr;
}

@@ -215,7 +215,7 @@ export default function TabbedAggResponseWriterProvider(Private) {
value = new AggConfigResult(agg, this.acrStack[0], value, value);
}

var staskResult = this.asAggConfigResults && value.type === 'bucket';
let staskResult = this.asAggConfigResults && value.type === 'bucket';

this.rowBuffer.push(value);
if (staskResult) this.acrStack.unshift(value);

@@ -237,14 +237,14 @@ export default function TabbedAggResponseWriterProvider(Private) {
* @return {undefined}
*/
TabbedAggResponseWriter.prototype.row = function (buffer) {
var cells = buffer || this.rowBuffer.slice(0);
let cells = buffer || this.rowBuffer.slice(0);

if (!this.partialRows && cells.length < this.columns.length) {
return;
}

var split = this.splitStack[0];
var table = split.tables[0] || this._table(false);
let split = this.splitStack[0];
let table = split.tables[0] || this._table(false);

while (cells.length < this.columns.length) cells.push('');
table.rows.push(cells);

@@ -257,7 +257,7 @@ export default function TabbedAggResponseWriterProvider(Private) {
* @return {object} - the final table-tree
*/
TabbedAggResponseWriter.prototype.response = function () {
var columns = this.columns;
let columns = this.columns;

// give the columns some metadata
columns.map(function (col) {

@@ -272,7 +272,7 @@ export default function TabbedAggResponseWriterProvider(Private) {

if (this.canSplit) return this.root;

var table = this.root.tables[0];
let table = this.root.tables[0];
if (!table) return;

delete table.$parent;

@@ -4,15 +4,15 @@ import AggResponseTabifyResponseWriterProvider from 'ui/agg_response/tabify/_res
import AggResponseTabifyBucketsProvider from 'ui/agg_response/tabify/_buckets';
export default function tabifyAggResponseProvider(Private, Notifier) {

var AggConfig = Private(VisAggConfigProvider);
var TabbedAggResponseWriter = Private(AggResponseTabifyResponseWriterProvider);
var Buckets = Private(AggResponseTabifyBucketsProvider);
var notify = new Notifier({ location: 'agg_response/tabify'});
let AggConfig = Private(VisAggConfigProvider);
let TabbedAggResponseWriter = Private(AggResponseTabifyResponseWriterProvider);
let Buckets = Private(AggResponseTabifyBucketsProvider);
let notify = new Notifier({ location: 'agg_response/tabify'});

function tabifyAggResponse(vis, esResponse, respOpts) {
var write = new TabbedAggResponseWriter(vis, respOpts);
let write = new TabbedAggResponseWriter(vis, respOpts);

var topLevelBucket = _.assign({}, esResponse.aggregations, {
let topLevelBucket = _.assign({}, esResponse.aggregations, {
doc_count: esResponse.hits.total
});

@@ -31,13 +31,13 @@ export default function tabifyAggResponseProvider(Private, Notifier) {
* @returns {undefined}
*/
function collectBucket(write, bucket, key) {
var agg = write.aggStack.shift();
let agg = write.aggStack.shift();

switch (agg.schema.group) {
case 'buckets':
var buckets = new Buckets(bucket[agg.id]);
let buckets = new Buckets(bucket[agg.id]);
if (buckets.length) {
var splitting = write.canSplit && agg.schema.name === 'split';
let splitting = write.canSplit && agg.schema.name === 'split';
if (splitting) {
write.split(agg, buckets, function forEachBucket(subBucket, key) {
collectBucket(write, subBucket, agg.getKey(subBucket), key);

@@ -63,7 +63,7 @@ export default function tabifyAggResponseProvider(Private, Notifier) {
}
break;
case 'metrics':
var value = agg.getValue(bucket);
let value = agg.getValue(bucket);
write.cell(agg, value, function () {
if (!write.aggStack.length) {
// row complete

@@ -82,7 +82,7 @@ export default function tabifyAggResponseProvider(Private, Notifier) {
// write empty values for each bucket agg, then write
// the metrics from the initial bucket using collectBucket()
function passEmptyBuckets(write, bucket, key) {
var agg = write.aggStack.shift();
let agg = write.aggStack.shift();

switch (agg.schema.group) {
case 'metrics':

@@ -34,9 +34,9 @@ describe('AggTableGroup Directive', function () {


it('renders a simple split response properly', function () {
var vis = new Vis(indexPattern, 'table');
let vis = new Vis(indexPattern, 'table');
$scope.group = tabifyAggResponse(vis, fixtures.metricOnly);
var $el = $('<kbn-agg-table-group group="group"></kbn-agg-table-group>');
let $el = $('<kbn-agg-table-group group="group"></kbn-agg-table-group>');

$compile($el)($scope);
$scope.$digest();

@@ -46,7 +46,7 @@ describe('AggTableGroup Directive', function () {
});

it('renders nothing if the table list is empty', function () {
var $el = $('<kbn-agg-table-group group="group"></kbn-agg-table-group>');
let $el = $('<kbn-agg-table-group group="group"></kbn-agg-table-group>');

$scope.group = {
tables: []

@@ -55,12 +55,12 @@ describe('AggTableGroup Directive', function () {
$compile($el)($scope);
$scope.$digest();

var $subTables = $el.find('kbn-agg-table');
let $subTables = $el.find('kbn-agg-table');
expect($subTables.size()).to.be(0);
});

it('renders a complex response properly', function () {
var vis = new Vis(indexPattern, {
let vis = new Vis(indexPattern, {
type: 'pie',
aggs: [
{ type: 'avg', schema: 'metric', params: { field: 'bytes' } },

@@ -73,15 +73,15 @@ describe('AggTableGroup Directive', function () {
agg.id = 'agg_' + (i + 1);
});

var group = $scope.group = tabifyAggResponse(vis, fixtures.threeTermBuckets);
var $el = $('<kbn-agg-table-group group="group"></kbn-agg-table-group>');
let group = $scope.group = tabifyAggResponse(vis, fixtures.threeTermBuckets);
let $el = $('<kbn-agg-table-group group="group"></kbn-agg-table-group>');
$compile($el)($scope);
$scope.$digest();

var $subTables = $el.find('kbn-agg-table');
let $subTables = $el.find('kbn-agg-table');
expect($subTables.size()).to.be(3);

var $subTableHeaders = $el.find('.agg-table-group-header');
let $subTableHeaders = $el.find('.agg-table-group-header');
expect($subTableHeaders.size()).to.be(3);

$subTableHeaders.each(function (i) {

@ -35,10 +35,10 @@ describe('AggTable Directive', function () {
|
|||
|
||||
|
||||
it('renders a simple response properly', function () {
|
||||
var vis = new Vis(indexPattern, 'table');
|
||||
let vis = new Vis(indexPattern, 'table');
|
||||
$scope.table = tabifyAggResponse(vis, fixtures.metricOnly, { canSplit: false });
|
||||
|
||||
var $el = $compile('<kbn-agg-table table="table"></kbn-agg-table>')($scope);
|
||||
let $el = $compile('<kbn-agg-table table="table"></kbn-agg-table>')($scope);
|
||||
$scope.$digest();
|
||||
|
||||
expect($el.find('tbody').size()).to.be(1);
|
||||
|
@ -48,14 +48,14 @@ describe('AggTable Directive', function () {
|
|||
|
||||
it('renders nothing if the table is empty', function () {
|
||||
$scope.table = null;
|
||||
var $el = $compile('<kbn-agg-table table="table"></kbn-agg-table>')($scope);
|
||||
let $el = $compile('<kbn-agg-table table="table"></kbn-agg-table>')($scope);
|
||||
$scope.$digest();
|
||||
|
||||
expect($el.find('tbody').size()).to.be(0);
|
||||
});
|
||||
|
||||
it('renders a complex response properly', function () {
|
||||
var vis = new Vis(indexPattern, {
|
||||
let vis = new Vis(indexPattern, {
|
||||
type: 'pie',
|
||||
aggs: [
|
||||
{ type: 'avg', schema: 'metric', params: { field: 'bytes' } },
|
||||
|
@ -69,27 +69,27 @@ describe('AggTable Directive', function () {
|
|||
});
|
||||
|
||||
$scope.table = tabifyAggResponse(vis, fixtures.threeTermBuckets, { canSplit: false });
|
||||
var $el = $('<kbn-agg-table table="table"></kbn-agg-table>');
|
||||
let $el = $('<kbn-agg-table table="table"></kbn-agg-table>');
|
||||
$compile($el)($scope);
|
||||
$scope.$digest();
|
||||
|
||||
expect($el.find('tbody').size()).to.be(1);
|
||||
|
||||
var $rows = $el.find('tbody tr');
|
||||
let $rows = $el.find('tbody tr');
|
||||
expect($rows.size()).to.be.greaterThan(0);
|
||||
|
||||
function validBytes(str) {
|
||||
expect(str).to.match(/^\d+$/);
|
||||
var bytesAsNum = _.parseInt(str);
|
||||
let bytesAsNum = _.parseInt(str);
|
||||
expect(bytesAsNum === 0 || bytesAsNum > 1000).to.be.ok();
|
||||
}
|
||||
|
||||
$rows.each(function (i) {
|
||||
// 6 cells in every row
|
||||
var $cells = $(this).find('td');
|
||||
let $cells = $(this).find('td');
|
||||
expect($cells.size()).to.be(6);
|
||||
|
||||
var txts = $cells.map(function () {
|
||||
let txts = $cells.map(function () {
|
||||
return $(this).text().trim();
|
||||
});
|
||||
|
||||
|
@ -109,11 +109,11 @@ describe('AggTable Directive', function () {
|
|||
|
||||
describe('aggTable.toCsv()', function () {
|
||||
it('escapes and formats the rows and columns properly', function () {
|
||||
var $el = $compile('<kbn-agg-table table="table">')($scope);
|
||||
let $el = $compile('<kbn-agg-table table="table">')($scope);
|
||||
$scope.$digest();
|
||||
|
||||
var $tableScope = $el.isolateScope();
|
||||
var aggTable = $tableScope.aggTable;
|
||||
let $tableScope = $el.isolateScope();
|
||||
let aggTable = $tableScope.aggTable;
|
||||
|
||||
$tableScope.table = {
|
||||
columns: [
|
||||
|
@ -150,13 +150,13 @@ describe('AggTable Directive', function () {
|
|||
});
|
||||
|
||||
it('calls _saveAs properly', function () {
|
||||
var $el = $compile('<kbn-agg-table table="table">')($scope);
|
||||
let $el = $compile('<kbn-agg-table table="table">')($scope);
|
||||
$scope.$digest();
|
||||
|
||||
var $tableScope = $el.isolateScope();
|
||||
var aggTable = $tableScope.aggTable;
|
||||
let $tableScope = $el.isolateScope();
|
||||
let aggTable = $tableScope.aggTable;
|
||||
|
||||
var saveAs = sinon.stub(aggTable, '_saveAs');
|
||||
let saveAs = sinon.stub(aggTable, '_saveAs');
|
||||
$tableScope.table = {
|
||||
columns: [
|
||||
{ title: 'one' },
|
||||
|
@ -172,7 +172,7 @@ describe('AggTable Directive', function () {
|
|||
aggTable.exportAsCsv();
|
||||
|
||||
expect(saveAs.callCount).to.be(1);
|
||||
var call = saveAs.getCall(0);
|
||||
let call = saveAs.getCall(0);
|
||||
expect(call.args[0]).to.be.a(FakeBlob);
|
||||
expect(call.args[0].slices).to.eql([
|
||||
'one,two,"with double-quotes("")"' + '\r\n' +
|
||||
|
@ -185,12 +185,12 @@ describe('AggTable Directive', function () {
|
|||
});
|
||||
|
||||
it('should use the export-title attribute', function () {
|
||||
var expected = 'export file name';
|
||||
var $el = $compile(`<kbn-agg-table table="table" export-title="exportTitle">`)($scope);
|
||||
let expected = 'export file name';
|
||||
let $el = $compile(`<kbn-agg-table table="table" export-title="exportTitle">`)($scope);
|
||||
$scope.$digest();
|
||||
|
||||
var $tableScope = $el.isolateScope();
|
||||
var aggTable = $tableScope.aggTable;
|
||||
let $tableScope = $el.isolateScope();
|
||||
let aggTable = $tableScope.aggTable;
|
||||
$tableScope.table = {
|
||||
columns: [],
|
||||
rows: []
|
||||
|
|
|
@ -24,7 +24,7 @@ uiModules
|
|||
return compileRecursiveDirective.compile($el);
|
||||
},
|
||||
controller: function ($scope) {
|
||||
var self = this;
|
||||
let self = this;
|
||||
|
||||
self.sort = null;
|
||||
self._saveAs = require('@spalger/filesaver').saveAs;
|
||||
|
@ -34,15 +34,15 @@ uiModules
|
|||
};
|
||||
|
||||
self.exportAsCsv = function (formatted) {
|
||||
var csv = new Blob([self.toCsv(formatted)], { type: 'text/plain' });
|
||||
let csv = new Blob([self.toCsv(formatted)], { type: 'text/plain' });
|
||||
self._saveAs(csv, self.csv.filename);
|
||||
};
|
||||
|
||||
self.toCsv = function (formatted) {
|
||||
var rows = $scope.table.rows;
|
||||
var columns = formatted ? $scope.formattedColumns : $scope.table.columns;
|
||||
var nonAlphaNumRE = /[^a-zA-Z0-9]/;
|
||||
var allDoubleQuoteRE = /"/g;
|
||||
let rows = $scope.table.rows;
|
||||
let columns = formatted ? $scope.formattedColumns : $scope.table.columns;
|
||||
let nonAlphaNumRE = /[^a-zA-Z0-9]/;
|
||||
let allDoubleQuoteRE = /"/g;
|
||||
|
||||
function escape(val) {
|
||||
if (!formatted && _.isObject(val)) val = val.valueOf();
|
||||
|
@ -54,7 +54,7 @@ uiModules
|
|||
}
|
||||
|
||||
// escape each cell in each row
|
||||
var csvRows = rows.map(function (row) {
|
||||
let csvRows = rows.map(function (row) {
|
||||
return row.map(escape);
|
||||
});
|
||||
|
||||
|
@ -69,7 +69,7 @@ uiModules
|
|||
};
|
||||
|
||||
$scope.$watch('table', function () {
|
||||
var table = $scope.table;
|
||||
let table = $scope.table;
|
||||
|
||||
if (!table) {
|
||||
$scope.rows = null;
|
||||
|
@ -80,14 +80,14 @@ uiModules
|
|||
self.csv.filename = ($scope.exportTitle || table.title() || 'table') + '.csv';
|
||||
$scope.rows = table.rows;
|
||||
$scope.formattedColumns = table.columns.map(function (col, i) {
|
||||
var agg = $scope.table.aggConfig(col);
|
||||
var field = agg.field();
|
||||
var formattedColumn = {
|
||||
let agg = $scope.table.aggConfig(col);
|
||||
let field = agg.field();
|
||||
let formattedColumn = {
|
||||
title: col.title,
|
||||
filterable: field && field.filterable && agg.schema.group === 'buckets'
|
||||
};
|
||||
|
||||
var last = i === (table.columns.length - 1);
|
||||
let last = i === (table.columns.length - 1);
|
||||
|
||||
if (last || (agg.schema.group === 'metrics')) {
|
||||
formattedColumn.class = 'visualize-table-right';
|
||||
|
|
|
@ -25,10 +25,10 @@ uiModules
|
|||
|
||||
if (!group || !group.tables.length) return;
|
||||
|
||||
var firstTable = group.tables[0];
|
||||
var params = firstTable.aggConfig && firstTable.aggConfig.params;
|
||||
let firstTable = group.tables[0];
|
||||
let params = firstTable.aggConfig && firstTable.aggConfig.params;
|
||||
// render groups that have Table children as if they were rows, because iteration is cleaner
|
||||
var childLayout = (params && !params.row) ? 'columns' : 'rows';
|
||||
let childLayout = (params && !params.row) ? 'columns' : 'rows';
|
||||
|
||||
$scope[childLayout] = group.tables;
|
||||
});
|
||||
|
|
|
@ -4,10 +4,10 @@ import AggTypesIndexProvider from 'ui/agg_types/index';
|
|||
import RegistryVisTypesProvider from 'ui/registry/vis_types';
|
||||
import FixturesStubbedLogstashIndexPatternProvider from 'fixtures/stubbed_logstash_index_pattern';
|
||||
module.exports = function AggParamWriterHelper(Private) {
|
||||
var Vis = Private(VisProvider);
|
||||
var aggTypes = Private(AggTypesIndexProvider);
|
||||
var visTypes = Private(RegistryVisTypesProvider);
|
||||
var stubbedLogstashIndexPattern = Private(FixturesStubbedLogstashIndexPatternProvider);
|
||||
let Vis = Private(VisProvider);
|
||||
let aggTypes = Private(AggTypesIndexProvider);
|
||||
let visTypes = Private(RegistryVisTypesProvider);
|
||||
let stubbedLogstashIndexPattern = Private(FixturesStubbedLogstashIndexPatternProvider);
|
||||
|
||||
/**
|
||||
* Helper object for writing aggParams. Specify an aggType and it will find a vis & schema, and
|
||||
|
@ -17,8 +17,8 @@ module.exports = function AggParamWriterHelper(Private) {
|
|||
* - Verify that the interval parameter of the histogram visualization casts its input to a number
|
||||
* ```js
|
||||
* it('casts to a number', function () {
|
||||
* var writer = new AggParamWriter({ aggType: 'histogram' });
|
||||
* var output = writer.write({ interval : '100/10' });
|
||||
* let writer = new AggParamWriter({ aggType: 'histogram' });
|
||||
* let output = writer.write({ interval : '100/10' });
|
||||
* expect(output.params.interval).to.be.a('number');
|
||||
* expect(output.params.interval).to.be(100);
|
||||
* });
|
||||
|
@ -29,7 +29,7 @@ module.exports = function AggParamWriterHelper(Private) {
|
|||
* @param {string} opts.aggType - the name of the aggType we want to test. ('histogram', 'filter', etc.)
|
||||
*/
|
||||
function AggParamWriter(opts) {
|
||||
var self = this;
|
||||
let self = this;
|
||||
|
||||
self.aggType = opts.aggType;
|
||||
if (_.isString(self.aggType)) {
|
||||
|
@ -47,7 +47,7 @@ module.exports = function AggParamWriterHelper(Private) {
|
|||
|
||||
// find a suitable vis type and schema
|
||||
_.find(visTypes, function (visType) {
|
||||
var schema = _.find(visType.schemas.all, function (schema) {
|
||||
let schema = _.find(visType.schemas.all, function (schema) {
|
||||
// type, type, type, type, type... :(
|
||||
return schema.group === self.aggType.type;
|
||||
});
|
||||
|
@ -69,7 +69,7 @@ module.exports = function AggParamWriterHelper(Private) {
|
|||
}
|
||||
|
||||
AggParamWriter.prototype.write = function (paramValues) {
|
||||
var self = this;
|
||||
let self = this;
|
||||
paramValues = _.clone(paramValues);
|
||||
|
||||
if (self.aggType.params.byName.field && !paramValues.field) {
|
||||
|
@ -90,7 +90,7 @@ module.exports = function AggParamWriterHelper(Private) {
|
|||
}]
|
||||
});
|
||||
|
||||
var aggConfig = _.find(self.vis.aggs, function (aggConfig) {
|
||||
let aggConfig = _.find(self.vis.aggs, function (aggConfig) {
|
||||
return aggConfig.type === self.aggType;
|
||||
});
|
||||
|
||||
|
|
|
@ -28,11 +28,11 @@ describe('AggParams class', function () {
|
|||
|
||||
describe('constructor args', function () {
|
||||
it('accepts an array of param defs', function () {
|
||||
var params = [
|
||||
let params = [
|
||||
{ name: 'one' },
|
||||
{ name: 'two' }
|
||||
];
|
||||
var aggParams = new AggParams(params);
|
||||
let aggParams = new AggParams(params);
|
||||
|
||||
expect(aggParams).to.have.length(params.length);
|
||||
expect(aggParams).to.be.an(Array);
|
||||
|
@ -42,43 +42,43 @@ describe('AggParams class', function () {
|
|||
|
||||
describe('AggParam creation', function () {
|
||||
it('Uses the FieldAggParam class for params with the name "field"', function () {
|
||||
var params = [
|
||||
let params = [
|
||||
{ name: 'field' }
|
||||
];
|
||||
var aggParams = new AggParams(params);
|
||||
let aggParams = new AggParams(params);
|
||||
|
||||
expect(aggParams).to.have.length(params.length);
|
||||
expect(aggParams[0]).to.be.a(FieldAggParam);
|
||||
});
|
||||
|
||||
it('Uses the OptionedAggParam class for params of type "optioned"', function () {
|
||||
var params = [
|
||||
let params = [
|
||||
{
|
||||
name: 'interval',
|
||||
type: 'optioned'
|
||||
}
|
||||
];
|
||||
var aggParams = new AggParams(params);
|
||||
let aggParams = new AggParams(params);
|
||||
|
||||
expect(aggParams).to.have.length(params.length);
|
||||
expect(aggParams[0]).to.be.a(OptionedAggParam);
|
||||
});
|
||||
|
||||
it('Uses the RegexAggParam class for params of type "regex"', function () {
|
||||
var params = [
|
||||
let params = [
|
||||
{
|
||||
name: 'exclude',
|
||||
type: 'regex'
|
||||
}
|
||||
];
|
||||
var aggParams = new AggParams(params);
|
||||
let aggParams = new AggParams(params);
|
||||
|
||||
expect(aggParams).to.have.length(params.length);
|
||||
expect(aggParams[0]).to.be.a(RegexAggParam);
|
||||
});
|
||||
|
||||
it('Always converts the params to a BaseAggParam', function () {
|
||||
var params = [
|
||||
let params = [
|
||||
{
|
||||
name: 'height',
|
||||
editor: '<blink>high</blink>'
|
||||
|
@ -92,7 +92,7 @@ describe('AggParams class', function () {
|
|||
editor: '<blink>small</blink>'
|
||||
}
|
||||
];
|
||||
var aggParams = new AggParams(params);
|
||||
let aggParams = new AggParams(params);
|
||||
|
||||
expect(BaseAggParam).to.have.property('callCount', params.length);
|
||||
expect(FieldAggParam).to.have.property('callCount', 0);
|
||||
|
|
|
@ -39,7 +39,7 @@ describe('AggType Class', function () {
|
|||
});
|
||||
|
||||
describe('application of config properties', function () {
|
||||
var copiedConfigProps = [
|
||||
let copiedConfigProps = [
|
||||
'name',
|
||||
'title',
|
||||
'makeLabel',
|
||||
|
@ -48,11 +48,11 @@ describe('AggType Class', function () {
|
|||
|
||||
describe('"' + copiedConfigProps.join('", "') + '"', function () {
|
||||
it('assigns the config value to itself', function () {
|
||||
var config = _.transform(copiedConfigProps, function (config, prop) {
|
||||
let config = _.transform(copiedConfigProps, function (config, prop) {
|
||||
config[prop] = {};
|
||||
}, {});
|
||||
|
||||
var aggType = new AggType(config);
|
||||
let aggType = new AggType(config);
|
||||
|
||||
copiedConfigProps.forEach(function (prop) {
|
||||
expect(aggType[prop]).to.be(config[prop]);
|
||||
|
@ -62,9 +62,9 @@ describe('AggType Class', function () {
|
|||
|
||||
describe('makeLabel', function () {
|
||||
it('makes a function when the makeLabel config is not specified', function () {
|
||||
var someGetter = function () {};
|
||||
let someGetter = function () {};
|
||||
|
||||
var aggType = new AggType({
|
||||
let aggType = new AggType({
|
||||
makeLabel: someGetter
|
||||
});
|
||||
|
||||
|
@ -81,9 +81,9 @@ describe('AggType Class', function () {
|
|||
|
||||
describe('getFormat', function () {
|
||||
it('returns the formatter for the aggConfig', function () {
|
||||
var aggType = new AggType({});
|
||||
let aggType = new AggType({});
|
||||
|
||||
var vis = new Vis(indexPattern, {
|
||||
let vis = new Vis(indexPattern, {
|
||||
type: 'histogram',
|
||||
aggs: [
|
||||
{
|
||||
|
@ -93,7 +93,7 @@ describe('AggType Class', function () {
|
|||
]
|
||||
});
|
||||
|
||||
var aggConfig = vis.aggs.byTypeName.date_histogram[0];
|
||||
let aggConfig = vis.aggs.byTypeName.date_histogram[0];
|
||||
|
||||
expect(aggType.getFormat(aggConfig)).to.be(fieldFormat.getDefaultInstance('date'));
|
||||
|
||||
|
@ -112,9 +112,9 @@ describe('AggType Class', function () {
|
|||
});
|
||||
|
||||
it('can be overridden via config', function () {
|
||||
var someGetter = function () {};
|
||||
let someGetter = function () {};
|
||||
|
||||
var aggType = new AggType({
|
||||
let aggType = new AggType({
|
||||
getFormat: someGetter
|
||||
});
|
||||
|
||||
|
@ -128,7 +128,7 @@ describe('AggType Class', function () {
|
|||
});
|
||||
|
||||
it('defaults to AggParams object with JSON param', function () {
|
||||
var aggType = new AggType({
|
||||
let aggType = new AggType({
|
||||
name: 'smart agg'
|
||||
});
|
||||
|
||||
|
@ -139,7 +139,7 @@ describe('AggType Class', function () {
|
|||
});
|
||||
|
||||
it('can disable customLabel', function () {
|
||||
var aggType = new AggType({
|
||||
let aggType = new AggType({
|
||||
name: 'smart agg',
|
||||
customLabels: false
|
||||
});
|
||||
|
@ -149,13 +149,13 @@ describe('AggType Class', function () {
|
|||
});
|
||||
|
||||
it('passes the params arg directly to the AggParams constructor', function () {
|
||||
var params = [
|
||||
let params = [
|
||||
{name: 'one'},
|
||||
{name: 'two'}
|
||||
];
|
||||
var paramLength = params.length + 2; // json and custom label are always appended
|
||||
let paramLength = params.length + 2; // json and custom label are always appended
|
||||
|
||||
var aggType = new AggType({
|
||||
let aggType = new AggType({
|
||||
name: 'bucketeer',
|
||||
params: params
|
||||
});
|
||||
|
@ -169,8 +169,8 @@ describe('AggType Class', function () {
|
|||
|
||||
describe('getResponseAggs', function () {
|
||||
it('copies the value', function () {
|
||||
var football = {};
|
||||
var aggType = new AggType({
|
||||
let football = {};
|
||||
let aggType = new AggType({
|
||||
getResponseAggs: football
|
||||
});
|
||||
|
||||
|
@ -178,7 +178,7 @@ describe('AggType Class', function () {
|
|||
});
|
||||
|
||||
it('defaults to _.noop', function () {
|
||||
var aggType = new AggType({});
|
||||
let aggType = new AggType({});
|
||||
|
||||
expect(aggType.getResponseAggs).to.be(_.noop);
|
||||
});
|
||||
|
|
|
@ -32,7 +32,7 @@ describe('bucketCountBetween util', function () {
|
|||
}));
|
||||
|
||||
it('returns a positive number when a is before b', function () {
|
||||
var vis = new Vis(indexPattern, {
|
||||
let vis = new Vis(indexPattern, {
|
||||
type: 'histogram',
|
||||
aggs: [
|
||||
{
|
||||
|
@ -46,14 +46,14 @@ describe('bucketCountBetween util', function () {
|
|||
]
|
||||
});
|
||||
|
||||
var a = vis.aggs.byTypeName.date_histogram[0];
|
||||
var b = vis.aggs.byTypeName.terms[0];
|
||||
var count = bucketCountBetween(a, b);
|
||||
let a = vis.aggs.byTypeName.date_histogram[0];
|
||||
let b = vis.aggs.byTypeName.terms[0];
|
||||
let count = bucketCountBetween(a, b);
|
||||
expect(isNegative(count)).to.be(false);
|
||||
});
|
||||
|
||||
it('returns a negative number when a is after b', function () {
|
||||
var vis = new Vis(indexPattern, {
|
||||
let vis = new Vis(indexPattern, {
|
||||
type: 'histogram',
|
||||
aggs: [
|
||||
{
|
||||
|
@ -67,14 +67,14 @@ describe('bucketCountBetween util', function () {
|
|||
]
|
||||
});
|
||||
|
||||
var a = vis.aggs.byTypeName.terms[0];
|
||||
var b = vis.aggs.byTypeName.date_histogram[0];
|
||||
var count = bucketCountBetween(a, b);
|
||||
let a = vis.aggs.byTypeName.terms[0];
|
||||
let b = vis.aggs.byTypeName.date_histogram[0];
|
||||
let count = bucketCountBetween(a, b);
|
||||
expect(isNegative(count)).to.be(true);
|
||||
});
|
||||
|
||||
it('returns 0 when there are no buckets between a and b', function () {
|
||||
var vis = new Vis(indexPattern, {
|
||||
let vis = new Vis(indexPattern, {
|
||||
type: 'histogram',
|
||||
aggs: [
|
||||
{
|
||||
|
@ -88,13 +88,13 @@ describe('bucketCountBetween util', function () {
|
|||
]
|
||||
});
|
||||
|
||||
var a = vis.aggs.byTypeName.date_histogram[0];
|
||||
var b = vis.aggs.byTypeName.terms[0];
|
||||
let a = vis.aggs.byTypeName.date_histogram[0];
|
||||
let b = vis.aggs.byTypeName.terms[0];
|
||||
expect(bucketCountBetween(a, b)).to.be(0);
|
||||
});
|
||||
|
||||
it('returns null when b is not in the aggs', function () {
|
||||
var vis = new Vis(indexPattern, {
|
||||
let vis = new Vis(indexPattern, {
|
||||
type: 'histogram',
|
||||
aggs: [
|
||||
{
|
||||
|
@ -104,8 +104,8 @@ describe('bucketCountBetween util', function () {
|
|||
]
|
||||
});
|
||||
|
||||
var a = vis.aggs.byTypeName.date_histogram[0];
|
||||
var b = new AggConfig(vis, {
|
||||
let a = vis.aggs.byTypeName.date_histogram[0];
|
||||
let b = new AggConfig(vis, {
|
||||
type: 'terms',
|
||||
schema: 'segment'
|
||||
});
|
||||
|
@ -114,7 +114,7 @@ describe('bucketCountBetween util', function () {
|
|||
});
|
||||
|
||||
it('returns null when a is not in the aggs', function () {
|
||||
var vis = new Vis(indexPattern, {
|
||||
let vis = new Vis(indexPattern, {
|
||||
type: 'histogram',
|
||||
aggs: [
|
||||
{
|
||||
|
@ -124,27 +124,27 @@ describe('bucketCountBetween util', function () {
|
|||
]
|
||||
});
|
||||
|
||||
var a = new AggConfig(vis, {
|
||||
let a = new AggConfig(vis, {
|
||||
type: 'terms',
|
||||
schema: 'segment'
|
||||
});
|
||||
var b = vis.aggs.byTypeName.date_histogram[0];
|
||||
let b = vis.aggs.byTypeName.date_histogram[0];
|
||||
|
||||
expect(bucketCountBetween(a, b)).to.be(null);
|
||||
});
|
||||
|
||||
it('returns null when a and b are not in the aggs', function () {
|
||||
var vis = new Vis(indexPattern, {
|
||||
let vis = new Vis(indexPattern, {
|
||||
type: 'histogram',
|
||||
aggs: []
|
||||
});
|
||||
|
||||
var a = new AggConfig(vis, {
|
||||
let a = new AggConfig(vis, {
|
||||
type: 'terms',
|
||||
schema: 'segment'
|
||||
});
|
||||
|
||||
var b = new AggConfig(vis, {
|
||||
let b = new AggConfig(vis, {
|
||||
type: 'date_histogram',
|
||||
schema: 'segment'
|
||||
});
|
||||
|
@ -154,14 +154,14 @@ describe('bucketCountBetween util', function () {
|
|||
|
||||
function countTest(pre, post) {
|
||||
return function () {
|
||||
var schemas = visTypes.byName.histogram.schemas.buckets;
|
||||
let schemas = visTypes.byName.histogram.schemas.buckets;
|
||||
|
||||
// slow for this test is actually somewhere around 1/2 a sec
|
||||
this.slow(500);
|
||||
|
||||
function randBucketAggForVis(vis) {
|
||||
var schema = _.sample(schemas);
|
||||
var aggType = _.sample(aggTypes.byType.buckets);
|
||||
let schema = _.sample(schemas);
|
||||
let aggType = _.sample(aggTypes.byType.buckets);
|
||||
|
||||
return new AggConfig(vis, {
|
||||
schema: schema,
|
||||
|
@ -170,20 +170,20 @@ describe('bucketCountBetween util', function () {
|
|||
}
|
||||
|
||||
_.times(50, function (n) {
|
||||
var vis = new Vis(indexPattern, {
|
||||
let vis = new Vis(indexPattern, {
|
||||
type: 'histogram',
|
||||
aggs: []
|
||||
});
|
||||
|
||||
var randBucketAgg = _.partial(randBucketAggForVis, vis);
|
||||
let randBucketAgg = _.partial(randBucketAggForVis, vis);
|
||||
|
||||
var a = randBucketAgg();
|
||||
var b = randBucketAgg();
|
||||
let a = randBucketAgg();
|
||||
let b = randBucketAgg();
|
||||
|
||||
// create n aggs between a and b
|
||||
var aggs = [];
|
||||
let aggs = [];
|
||||
aggs.fill = function (n) {
|
||||
for (var i = 0; i < n; i++) {
|
||||
for (let i = 0; i < n; i++) {
|
||||
aggs.push(randBucketAgg());
|
||||
}
|
||||
};
|
||||
|
|
|
@ -28,7 +28,7 @@ describe('Histogram Agg', function () {
|
|||
|
||||
beforeEach(ngMock.module('kibana'));
|
||||
beforeEach(ngMock.inject(function (Private) {
|
||||
var AggParamWriter = Private(AggParamWriterProvider);
|
||||
let AggParamWriter = Private(AggParamWriterProvider);
|
||||
paramWriter = new AggParamWriter({ aggType: 'histogram' });
|
||||
}));
|
||||
|
||||
|
@ -36,25 +36,25 @@ describe('Histogram Agg', function () {
|
|||
// reads aggConfig.params.interval, writes to dsl.interval
|
||||
|
||||
it('accepts a number', function () {
|
||||
var output = paramWriter.write({ interval: 100 });
|
||||
let output = paramWriter.write({ interval: 100 });
|
||||
expect(output.params).to.have.property('interval', 100);
|
||||
});
|
||||
|
||||
it('accepts a string', function () {
|
||||
var output = paramWriter.write({ interval: '10' });
|
||||
let output = paramWriter.write({ interval: '10' });
|
||||
expect(output.params).to.have.property('interval', 10);
|
||||
});
|
||||
|
||||
it('fails on non-numeric values', function () {
|
||||
// template validation prevents this from users, not devs
|
||||
var output = paramWriter.write({ interval: [] });
|
||||
let output = paramWriter.write({ interval: [] });
|
||||
expect(isNaN(output.params.interval)).to.be.ok();
|
||||
});
|
||||
});
|
||||
|
||||
describe('min_doc_count', function () {
|
||||
it('casts true values to 0', function () {
|
||||
var output = paramWriter.write({ min_doc_count: true });
|
||||
let output = paramWriter.write({ min_doc_count: true });
|
||||
expect(output.params).to.have.property('min_doc_count', 0);
|
||||
|
||||
output = paramWriter.write({ min_doc_count: 'yes' });
|
||||
|
@ -68,7 +68,7 @@ describe('Histogram Agg', function () {
|
|||
});
|
||||
|
||||
it('writes nothing for false values', function () {
|
||||
var output = paramWriter.write({ min_doc_count: '' });
|
||||
let output = paramWriter.write({ min_doc_count: '' });
|
||||
expect(output.params).to.not.have.property('min_doc_count');
|
||||
|
||||
output = paramWriter.write({ min_doc_count: null });
|
||||
|
@ -81,7 +81,7 @@ describe('Histogram Agg', function () {
|
|||
|
||||
describe('extended_bounds', function () {
|
||||
it('writes when only eb.min is set', function () {
|
||||
var output = paramWriter.write({
|
||||
let output = paramWriter.write({
|
||||
min_doc_count: true,
|
||||
extended_bounds: { min: 0 }
|
||||
});
|
||||
|
@ -90,7 +90,7 @@ describe('Histogram Agg', function () {
|
|||
});
|
||||
|
||||
it('writes when only eb.max is set', function () {
|
||||
var output = paramWriter.write({
|
||||
let output = paramWriter.write({
|
||||
min_doc_count: true,
|
||||
extended_bounds: { max: 0 }
|
||||
});
|
||||
|
@ -99,7 +99,7 @@ describe('Histogram Agg', function () {
|
|||
});
|
||||
|
||||
it('writes when both eb.min and eb.max are set', function () {
|
||||
var output = paramWriter.write({
|
||||
let output = paramWriter.write({
|
||||
min_doc_count: true,
|
||||
extended_bounds: { min: 99, max: 100 }
|
||||
});
|
||||
|
@ -108,7 +108,7 @@ describe('Histogram Agg', function () {
|
|||
});
|
||||
|
||||
it('does not write when nothing is set', function () {
|
||||
var output = paramWriter.write({
|
||||
let output = paramWriter.write({
|
||||
min_doc_count: true,
|
||||
extended_bounds: {}
|
||||
});
|
||||
|
@ -116,7 +116,7 @@ describe('Histogram Agg', function () {
|
|||
});
|
||||
|
||||
it('does not write when min_doc_count is false', function () {
|
||||
var output = paramWriter.write({
|
||||
let output = paramWriter.write({
|
||||
min_doc_count: false,
|
||||
extended_bounds: { min: 99, max: 100 }
|
||||
});
|
||||
|
|
|
@ -9,7 +9,7 @@ import FixturesStubbedLogstashIndexPatternProvider from 'fixtures/stubbed_logsta
|
|||
import RegistryFieldFormatsProvider from 'ui/registry/field_formats';
|
||||
describe('Range Agg', function () {
|
||||
|
||||
var buckets = values(resp.aggregations[1].buckets);
|
||||
let buckets = values(resp.aggregations[1].buckets);
|
||||
|
||||
let range;
|
||||
let Vis;
|
||||
|
@ -21,7 +21,7 @@ describe('Range Agg', function () {
|
|||
Vis = Private(VisProvider);
|
||||
indexPattern = Private(FixturesStubbedLogstashIndexPatternProvider);
|
||||
|
||||
var BytesFormat = Private(RegistryFieldFormatsProvider).byId.bytes;
|
||||
let BytesFormat = Private(RegistryFieldFormatsProvider).byId.bytes;
|
||||
|
||||
indexPattern.fieldFormatMap.bytes = new BytesFormat({
|
||||
pattern: '0,0.[000] b'
|
||||
|
@ -32,7 +32,7 @@ describe('Range Agg', function () {
|
|||
|
||||
describe('formating', function () {
|
||||
it('formats bucket keys properly', function () {
|
||||
var vis = new Vis(indexPattern, {
|
||||
let vis = new Vis(indexPattern, {
|
||||
type: 'histogram',
|
||||
aggs: [
|
||||
{
|
||||
|
@ -49,8 +49,8 @@ describe('Range Agg', function () {
|
|||
]
|
||||
});
|
||||
|
||||
var agg = vis.aggs.byTypeName.range[0];
|
||||
var format = function (val) {
|
||||
let agg = vis.aggs.byTypeName.range[0];
|
||||
let format = function (val) {
|
||||
return agg.fieldFormatter()(agg.getKey(val));
|
||||
};
|
||||
expect(format(buckets[0])).to.be('-∞ to 1 KB');
|
||||
|
|
|
@ -25,10 +25,10 @@ describe('AggConfig Filters', function () {
|
|||
|
||||
beforeEach(ngMock.module('kibana'));
|
||||
beforeEach(ngMock.inject(function (Private, $injector) {
|
||||
var Vis = Private(VisProvider);
|
||||
var indexPattern = Private(FixturesStubbedLogstashIndexPatternProvider);
|
||||
var createFilter = Private(AggTypesBucketsCreateFilterDateHistogramProvider);
|
||||
var TimeBuckets = Private(TimeBucketsProvider);
|
||||
let Vis = Private(VisProvider);
|
||||
let indexPattern = Private(FixturesStubbedLogstashIndexPatternProvider);
|
||||
let createFilter = Private(AggTypesBucketsCreateFilterDateHistogramProvider);
|
||||
let TimeBuckets = Private(TimeBucketsProvider);
|
||||
intervalOptions = Private(AggTypesBucketsIntervalOptionsProvider);
|
||||
|
||||
init = function (interval, duration) {
|
||||
|
@ -51,7 +51,7 @@ describe('AggConfig Filters', function () {
|
|||
bucketKey = _.sample(aggResp.aggregations['1'].buckets).key;
|
||||
bucketStart = moment(bucketKey);
|
||||
|
||||
var timePad = moment.duration(duration / 2);
|
||||
let timePad = moment.duration(duration / 2);
|
||||
agg.buckets.setBounds({
|
||||
min: bucketStart.clone().subtract(timePad),
|
||||
max: bucketStart.clone().add(timePad),
|
||||
|
@ -68,7 +68,7 @@ describe('AggConfig Filters', function () {
|
|||
expect(filter).to.have.property('range');
|
||||
expect(filter.range).to.have.property(field.name);
|
||||
|
||||
var fieldParams = filter.range[field.name];
|
||||
let fieldParams = filter.range[field.name];
|
||||
expect(fieldParams).to.have.property('gte');
|
||||
expect(fieldParams.gte).to.be.a('number');
|
||||
|
||||
|
@ -95,8 +95,8 @@ describe('AggConfig Filters', function () {
|
|||
|
||||
init(option.val, duration);
|
||||
|
||||
var interval = agg.buckets.getInterval();
|
||||
var params = filter.range[field.name];
|
||||
let interval = agg.buckets.getInterval();
|
||||
let params = filter.range[field.name];
|
||||
|
||||
expect(params.gte).to.be(+bucketStart);
|
||||
expect(params.lte).to.be(+bucketStart.clone().add(interval).subtract(1, 'ms'));
|
||||
|
|
|
@ -23,7 +23,7 @@ describe('AggConfig Filters', function () {
|
|||
}));
|
||||
|
||||
it('should return a range filter for date_range agg', function () {
|
||||
var vis = new Vis(indexPattern, {
|
||||
let vis = new Vis(indexPattern, {
|
||||
type: 'histogram',
|
||||
aggs: [
|
||||
{
|
||||
|
@ -38,8 +38,8 @@ describe('AggConfig Filters', function () {
|
|||
]
|
||||
});
|
||||
|
||||
var aggConfig = vis.aggs.byTypeName.date_range[0];
|
||||
var filter = createFilter(aggConfig, 'February 1st, 2015 to February 7th, 2015');
|
||||
let aggConfig = vis.aggs.byTypeName.date_range[0];
|
||||
let filter = createFilter(aggConfig, 'February 1st, 2015 to February 7th, 2015');
|
||||
expect(filter).to.have.property('range');
|
||||
expect(filter).to.have.property('meta');
|
||||
expect(filter.meta).to.have.property('index', indexPattern.id);
|
||||
|
|
|
@ -23,7 +23,7 @@ describe('AggConfig Filters', function () {
|
|||
}));
|
||||
|
||||
it('should return a filters filter', function () {
|
||||
var vis = new Vis(indexPattern, {
|
||||
let vis = new Vis(indexPattern, {
|
||||
type: 'histogram',
|
||||
aggs: [
|
||||
{
|
||||
|
@ -39,8 +39,8 @@ describe('AggConfig Filters', function () {
|
|||
]
|
||||
});
|
||||
|
||||
var aggConfig = vis.aggs.byTypeName.filters[0];
|
||||
var filter = createFilter(aggConfig, '_type:nginx');
|
||||
let aggConfig = vis.aggs.byTypeName.filters[0];
|
||||
let filter = createFilter(aggConfig, '_type:nginx');
|
||||
expect(_.omit(filter, 'meta')).to.eql(aggConfig.params.filters[1].input);
|
||||
expect(filter.meta).to.have.property('index', indexPattern.id);
|
||||
|
||||
|
|
|
@ -23,7 +23,7 @@ describe('AggConfig Filters', function () {
|
|||
}));
|
||||
|
||||
it('should return an range filter for histogram', function () {
|
||||
var vis = new Vis(indexPattern, {
|
||||
let vis = new Vis(indexPattern, {
|
||||
type: 'histogram',
|
||||
aggs: [
|
||||
{
|
||||
|
@ -34,8 +34,8 @@ describe('AggConfig Filters', function () {
|
|||
]
|
||||
});
|
||||
|
||||
var aggConfig = vis.aggs.byTypeName.histogram[0];
|
||||
var filter = createFilter(aggConfig, 2048);
|
||||
let aggConfig = vis.aggs.byTypeName.histogram[0];
|
||||
let filter = createFilter(aggConfig, 2048);
|
||||
expect(filter).to.have.property('meta');
|
||||
expect(filter.meta).to.have.property('index', indexPattern.id);
|
||||
expect(filter).to.have.property('range');
|
||||
|
|
|
@ -21,7 +21,7 @@ describe('AggConfig Filters', function () {
|
|||
}));
|
||||
|
||||
it('should return a range filter for ip_range agg', function () {
|
||||
var vis = new Vis(indexPattern, {
|
||||
let vis = new Vis(indexPattern, {
|
||||
type: 'histogram',
|
||||
aggs: [
|
||||
{
|
||||
|
@ -40,8 +40,8 @@ describe('AggConfig Filters', function () {
|
|||
]
|
||||
});
|
||||
|
||||
var aggConfig = vis.aggs.byTypeName.ip_range[0];
|
||||
var filter = createFilter(aggConfig, '0.0.0.0-1.1.1.1');
|
||||
let aggConfig = vis.aggs.byTypeName.ip_range[0];
|
||||
let filter = createFilter(aggConfig, '0.0.0.0-1.1.1.1');
|
||||
expect(filter).to.have.property('range');
|
||||
expect(filter).to.have.property('meta');
|
||||
expect(filter.meta).to.have.property('index', indexPattern.id);
|
||||
|
@ -51,7 +51,7 @@ describe('AggConfig Filters', function () {
|
|||
});
|
||||
|
||||
it('should return a range filter for ip_range agg using a CIDR mask', function () {
|
||||
var vis = new Vis(indexPattern, {
|
||||
let vis = new Vis(indexPattern, {
|
||||
type: 'histogram',
|
||||
aggs: [
|
||||
{
|
||||
|
@ -70,8 +70,8 @@ describe('AggConfig Filters', function () {
|
|||
]
|
||||
});
|
||||
|
||||
var aggConfig = vis.aggs.byTypeName.ip_range[0];
|
||||
var filter = createFilter(aggConfig, '67.129.65.201/27');
|
||||
let aggConfig = vis.aggs.byTypeName.ip_range[0];
|
||||
let filter = createFilter(aggConfig, '67.129.65.201/27');
|
||||
expect(filter).to.have.property('range');
|
||||
expect(filter).to.have.property('meta');
|
||||
expect(filter.meta).to.have.property('index', indexPattern.id);
|
||||
|
|
|
@ -21,7 +21,7 @@ describe('AggConfig Filters', function () {
|
|||
}));
|
||||
|
||||
it('should return a range filter for range agg', function () {
|
||||
var vis = new Vis(indexPattern, {
|
||||
let vis = new Vis(indexPattern, {
|
||||
type: 'histogram',
|
||||
aggs: [
|
||||
{
|
||||
|
@ -37,8 +37,8 @@ describe('AggConfig Filters', function () {
|
|||
]
|
||||
});
|
||||
|
||||
var aggConfig = vis.aggs.byTypeName.range[0];
|
||||
var filter = createFilter(aggConfig, { gte: 1024, lt: 2048.0 });
|
||||
let aggConfig = vis.aggs.byTypeName.range[0];
|
||||
let filter = createFilter(aggConfig, { gte: 1024, lt: 2048.0 });
|
||||
expect(filter).to.have.property('range');
|
||||
expect(filter).to.have.property('meta');
|
||||
expect(filter.meta).to.have.property('index', indexPattern.id);
|
||||
|
|
|
@ -21,12 +21,12 @@ describe('AggConfig Filters', function () {
|
|||
}));
|
||||
|
||||
it('should return a match filter for terms', function () {
|
||||
var vis = new Vis(indexPattern, {
|
||||
let vis = new Vis(indexPattern, {
|
||||
type: 'histogram',
|
||||
aggs: [ { type: 'terms', schema: 'segment', params: { field: '_type' } } ]
|
||||
});
|
||||
var aggConfig = vis.aggs.byTypeName.terms[0];
|
||||
var filter = createFilter(aggConfig, 'apache');
|
||||
let aggConfig = vis.aggs.byTypeName.terms[0];
|
||||
let filter = createFilter(aggConfig, 'apache');
|
||||
expect(filter).to.have.property('query');
|
||||
expect(filter.query).to.have.property('match');
|
||||
expect(filter.query.match).to.have.property('_type');
|
||||
|
|
|
@ -17,7 +17,7 @@ describe('editor', function () {
|
|||
beforeEach(ngMock.inject(function (Private, $injector, $compile) {
|
||||
indexPattern = Private(FixturesStubbedLogstashIndexPatternProvider);
|
||||
|
||||
var Vis = Private(VisProvider);
|
||||
let Vis = Private(VisProvider);
|
||||
|
||||
/**
|
||||
* Render the AggParams editor for the date histogram aggregation
|
||||
|
@ -38,8 +38,8 @@ describe('editor', function () {
|
|||
]
|
||||
});
|
||||
|
||||
var $el = $('<vis-editor-agg-params agg="agg" group-name="groupName"></vis-editor-agg-params>');
|
||||
var $parentScope = $injector.get('$rootScope').$new();
|
||||
let $el = $('<vis-editor-agg-params agg="agg" group-name="groupName"></vis-editor-agg-params>');
|
||||
let $parentScope = $injector.get('$rootScope').$new();
|
||||
|
||||
agg = $parentScope.agg = vis.aggs.bySchemaName.segment[0];
|
||||
$parentScope.groupName = 'buckets';
|
||||
|
@ -48,10 +48,10 @@ describe('editor', function () {
|
|||
$scope = $el.scope();
|
||||
$scope.$digest();
|
||||
|
||||
var $inputs = $('vis-agg-param-editor', $el);
|
||||
let $inputs = $('vis-agg-param-editor', $el);
|
||||
return _.transform($inputs.toArray(), function (inputs, e) {
|
||||
var $el = $(e);
|
||||
var $scope = $el.scope();
|
||||
let $el = $(e);
|
||||
let $scope = $el.scope();
|
||||
|
||||
inputs[$scope.aggParam.name] = {
|
||||
$el: $el,
|
||||
|
@ -107,7 +107,7 @@ describe('editor', function () {
|
|||
expect(params.interval.modelValue().val).to.be('auto');
|
||||
expect(params.field.modelValue().name).to.be(indexPattern.timeFieldName);
|
||||
|
||||
var field = _.find(indexPattern.fields, function (f) {
|
||||
let field = _.find(indexPattern.fields, function (f) {
|
||||
return f.type === 'date' && f.name !== indexPattern.timeFieldName;
|
||||
});
|
||||
|
||||
|
|
|
@ -18,9 +18,9 @@ describe('params', function () {
|
|||
|
||||
beforeEach(ngMock.module('kibana'));
|
||||
beforeEach(ngMock.inject(function (Private, $injector) {
|
||||
var AggParamWriter = Private(AggParamWriterProvider);
|
||||
var indexPattern = Private(FixturesStubbedLogstashIndexPatternProvider);
|
||||
var timefilter = $injector.get('timefilter');
|
||||
let AggParamWriter = Private(AggParamWriterProvider);
|
||||
let indexPattern = Private(FixturesStubbedLogstashIndexPatternProvider);
|
||||
let timefilter = $injector.get('timefilter');
|
||||
|
||||
timeField = indexPattern.timeFieldName;
|
||||
aggTypes = Private(AggTypesIndexProvider);
|
||||
|
@ -31,7 +31,7 @@ describe('params', function () {
|
|||
return paramWriter.write({ interval: interval, field: timeField });
|
||||
};
|
||||
|
||||
var now = moment();
|
||||
let now = moment();
|
||||
setTimeBounds = function (n, units) {
|
||||
timefilter.enabled = true;
|
||||
timefilter.getBounds = _.constant({
|
||||
|
@ -43,24 +43,24 @@ describe('params', function () {
|
|||
|
||||
describe('interval', function () {
|
||||
it('accepts a valid interval', function () {
|
||||
var output = writeInterval('d');
|
||||
let output = writeInterval('d');
|
||||
expect(output.params).to.have.property('interval', '1d');
|
||||
});
|
||||
|
||||
it('ignores invalid intervals', function () {
|
||||
var output = writeInterval('foo');
|
||||
let output = writeInterval('foo');
|
||||
expect(output.params).to.have.property('interval', '0ms');
|
||||
});
|
||||
|
||||
it('automatically picks an interval', function () {
|
||||
setTimeBounds(15, 'm');
|
||||
var output = writeInterval('auto');
|
||||
let output = writeInterval('auto');
|
||||
expect(output.params.interval).to.be('30s');
|
||||
});
|
||||
|
||||
it('scales up the interval if it will make too many buckets', function () {
|
||||
setTimeBounds(30, 'm');
|
||||
var output = writeInterval('s');
|
||||
let output = writeInterval('s');
|
||||
expect(output.params.interval).to.be('10s');
|
||||
expect(output.metricScaleText).to.be('second');
|
||||
expect(output.metricScale).to.be(0.1);
|
||||
|
@ -68,30 +68,30 @@ describe('params', function () {
|
|||
|
||||
it('does not scale down the interval', function () {
|
||||
setTimeBounds(1, 'm');
|
||||
var output = writeInterval('h');
|
||||
let output = writeInterval('h');
|
||||
expect(output.params.interval).to.be('1h');
|
||||
expect(output.metricScaleText).to.be(undefined);
|
||||
expect(output.metricScale).to.be(undefined);
|
||||
});
|
||||
|
||||
describe('only scales when all metrics are sum or count', function () {
|
||||
var tests = [
|
||||
let tests = [
|
||||
[ false, 'avg', 'count', 'sum' ],
|
||||
[ true, 'count', 'sum' ],
|
||||
[ false, 'count', 'cardinality' ]
|
||||
];
|
||||
|
||||
tests.forEach(function (test) {
|
||||
var should = test.shift();
|
||||
var typeNames = test.slice();
|
||||
let should = test.shift();
|
||||
let typeNames = test.slice();
|
||||
|
||||
it(typeNames.join(', ') + ' should ' + (should ? '' : 'not') + ' scale', function () {
|
||||
setTimeBounds(1, 'y');
|
||||
|
||||
var vis = paramWriter.vis;
|
||||
let vis = paramWriter.vis;
|
||||
vis.aggs.splice(0);
|
||||
|
||||
var histoConfig = new AggConfig(vis, {
|
||||
let histoConfig = new AggConfig(vis, {
|
||||
type: aggTypes.byName.date_histogram,
|
||||
schema: 'segment',
|
||||
params: { interval: 's', field: timeField }
|
||||
|
@ -106,7 +106,7 @@ describe('params', function () {
|
|||
}));
|
||||
});
|
||||
|
||||
var output = histoConfig.write();
|
||||
let output = histoConfig.write();
|
||||
expect(_.has(output, 'metricScale')).to.be(should);
|
||||
});
|
||||
});
|
||||
|
@ -115,9 +115,9 @@ describe('params', function () {
|
|||
|
||||
describe('extended_bounds', function () {
|
||||
it('should write a long value if a moment passed in', function () {
|
||||
var then = moment(0);
|
||||
var now = moment(500);
|
||||
var output = paramWriter.write({
|
||||
let then = moment(0);
|
||||
let now = moment(500);
|
||||
let output = paramWriter.write({
|
||||
extended_bounds: {
|
||||
min: then,
|
||||
max: now
|
||||
|
@ -133,9 +133,9 @@ describe('params', function () {
|
|||
});
|
||||
|
||||
it('should write a long if a long is passed', function () {
|
||||
var then = 0;
|
||||
var now = 500;
|
||||
var output = paramWriter.write({
|
||||
let then = 0;
|
||||
let now = 500;
|
||||
let output = paramWriter.write({
|
||||
extended_bounds: {
|
||||
min: then,
|
||||
max: now
|
||||
|
|
|
@ -13,15 +13,15 @@ describe('NumberList directive', function () {
|
|||
|
||||
function onlyValidValues() {
|
||||
return $el.find('[ng-model]').toArray().map(function (el) {
|
||||
var ngModel = $(el).controller('ngModel');
|
||||
let ngModel = $(el).controller('ngModel');
|
||||
return ngModel.$valid ? ngModel.$modelValue : undefined;
|
||||
});
|
||||
}
|
||||
|
||||
beforeEach(ngMock.module('kibana'));
|
||||
beforeEach(ngMock.inject(function ($injector) {
|
||||
var $compile = $injector.get('$compile');
|
||||
var $rootScope = $injector.get('$rootScope');
|
||||
let $compile = $injector.get('$compile');
|
||||
let $rootScope = $injector.get('$rootScope');
|
||||
|
||||
$scope = $rootScope.$new();
|
||||
$el = $('<kbn-number-list ng-model="vals">');
|
||||
|
@ -75,7 +75,7 @@ describe('NumberList directive', function () {
|
|||
it('shift-up increases by 0.1', function () {
|
||||
compile([4.8]);
|
||||
|
||||
var seq = [
|
||||
let seq = [
|
||||
{
|
||||
type: 'press',
|
||||
key: 'shift',
|
||||
|
@ -111,7 +111,7 @@ describe('NumberList directive', function () {
|
|||
it('shift-down decreases by 0.1', function () {
|
||||
compile([5.1]);
|
||||
|
||||
var seq = [
|
||||
let seq = [
|
||||
{
|
||||
type: 'press',
|
||||
key: 'shift',
|
||||
|
@ -135,12 +135,12 @@ describe('NumberList directive', function () {
|
|||
it('maintains valid number', function () {
|
||||
compile([9, 11, 13]);
|
||||
|
||||
var seq = [
|
||||
let seq = [
|
||||
'down', // 10 (11 - 1)
|
||||
'down' // 10 (limited by 9)
|
||||
];
|
||||
|
||||
var getEl = function () { return $el.find('input').eq(1); };
|
||||
let getEl = function () { return $el.find('input').eq(1); };
|
||||
|
||||
return simulateKeys(getEl, seq)
|
||||
.then(function () {
|
||||
|
|
|
@@ -21,15 +21,15 @@ describe('calculateInterval()', function () {
     calculateInterval = Private(AggTypesParamTypesCalculateIntervalProvider);
   }));

-  var testInterval = function (option, expected) {
-    var msg = 'should return ' + JSON.stringify(expected) + ' for ' + option;
+  let testInterval = function (option, expected) {
+    let msg = 'should return ' + JSON.stringify(expected) + ' for ' + option;
     it(msg, function () {
-      var vis = new Vis(indexPattern, {
+      let vis = new Vis(indexPattern, {
         type: 'histogram',
         aggs: [ { type: 'date_histogram', schema: 'segment', params: { field: '@timestamp', interval: option } } ]
       });
-      var aggConfig = vis.aggs.byTypeName.date_histogram[0];
-      var interval = calculateInterval(aggConfig);
+      let aggConfig = vis.aggs.byTypeName.date_histogram[0];
+      let interval = calculateInterval(aggConfig);
       _.each(expected, function (val, key) {
         expect(interval).to.have.property(key, val);
       });
@@ -17,7 +17,7 @@ describe('Field', function () {

   describe('constructor', function () {
     it('it is an instance of BaseAggParam', function () {
-      var aggParam = new FieldAggParam({
+      let aggParam = new FieldAggParam({
         name: 'field'
       });


@@ -17,7 +17,7 @@ describe('Optioned', function () {

   describe('constructor', function () {
     it('it is an instance of BaseAggParam', function () {
-      var aggParam = new OptionedAggParam({
+      let aggParam = new OptionedAggParam({
         name: 'some_param',
         type: 'optioned'
       });
@@ -6,7 +6,7 @@ import AggTypesParamTypesRawJsonProvider from 'ui/agg_types/param_types/raw_json


 module.exports = describe('JSON', function () {
-  var paramName = 'json_test';
+  let paramName = 'json_test';
   let BaseAggParam;
   let JsonAggParam;
   let aggParam;

@@ -15,7 +15,7 @@ module.exports = describe('JSON', function () {

   function initAggParam(config) {
     config = config || {};
-    var defaults = {
+    let defaults = {
       name: paramName,
       type: 'json'
     };

@@ -59,7 +59,7 @@ module.exports = describe('JSON', function () {
     });

     it('should append param when valid JSON', function () {
-      var jsonData = JSON.stringify({
+      let jsonData = JSON.stringify({
         new_param: 'should exist in output'
       });


@@ -75,7 +75,7 @@ module.exports = describe('JSON', function () {
     });

     it('should not overwrite existing params', function () {
-      var jsonData = JSON.stringify({
+      let jsonData = JSON.stringify({
         new_param: 'should exist in output',
         existing: 'should be used'
       });

@@ -88,7 +88,7 @@ module.exports = describe('JSON', function () {
     });

     it('should drop nulled params', function () {
-      var jsonData = JSON.stringify({
+      let jsonData = JSON.stringify({
         new_param: 'should exist in output',
         field: null
       });
@@ -23,7 +23,7 @@ describe('Regex', function () {

   describe('constructor', function () {
     it('should be an instance of BaseAggParam', function () {
-      var aggParam = new RegexAggParam({
+      let aggParam = new RegexAggParam({
         name: 'some_param',
         type: 'regex'
       });

@@ -36,11 +36,11 @@ describe('Regex', function () {
   describe('write results', function () {
     let aggParam;
     let aggConfig;
-    var output = { params: {} };
-    var paramName = 'exclude';
+    let output = { params: {} };
+    let paramName = 'exclude';

     beforeEach(function () {
-      var vis = new Vis(indexPattern, {
+      let vis = new Vis(indexPattern, {
         type: 'pie',
         aggs: [
           { type: 'terms', schema: 'split', params: { field: 'extension' }},

@@ -73,20 +73,6 @@ describe('Regex', function () {
       aggParam.write(aggConfig, output);
       expect(output.params).to.have.property(paramName);
       expect(output.params[paramName]).to.eql({ pattern: 'testing' });
-      expect(output.params[paramName]).not.to.have.property('flags');
     });
-
-    it('should include flags', function () {
-      aggConfig.params[paramName] = {
-        pattern: 'testing',
-        flags: [ 'TEST1', 'TEST2', 'TEST_RED', 'TEST_BLUE' ]
-      };
-
-      aggParam.write(aggConfig, output);
-      expect(output.params).to.have.property(paramName);
-      expect(output.params[paramName]).to.have.property('flags');
-      expect(typeof output.params[paramName].flags).to.be('string');
-      expect(output.params[paramName].flags).to.be('TEST1|TEST2|TEST_RED|TEST_BLUE');
-    });
   });
 });
@@ -5,7 +5,7 @@ import AggTypesParamTypesBaseProvider from 'ui/agg_types/param_types/base';
 import AggTypesParamTypesStringProvider from 'ui/agg_types/param_types/string';

 module.exports = describe('String', function () {
-  var paramName = 'json_test';
+  let paramName = 'json_test';
   let BaseAggParam;
   let StringAggParam;
   let aggParam;

@@ -14,7 +14,7 @@ module.exports = describe('String', function () {

   function initAggParam(config) {
     config = config || {};
-    var defaults = {
+    let defaults = {
       name: paramName,
       type: 'string'
     };

@@ -42,8 +42,8 @@ module.exports = describe('String', function () {

   describe('write', function () {
     it('should append param by name', function () {
-      var paramName = 'testing';
-      var params = {};
+      let paramName = 'testing';
+      let params = {};
       params[paramName] = 'some input';

       initAggParam({ name: paramName });

@@ -55,8 +55,8 @@ module.exports = describe('String', function () {
     });

     it('should not be in output with empty input', function () {
-      var paramName = 'more_testing';
-      var params = {};
+      let paramName = 'more_testing';
+      let params = {};
       params[paramName] = '';

       initAggParam({ name: paramName });
@@ -2,7 +2,7 @@ import _ from 'lodash';
 import sinon from 'auto-release-sinon';

 function ParamClassStub(parent, body) {
-  var stub = sinon.spy(body || function () {
+  let stub = sinon.spy(body || function () {
     stub.Super && stub.Super.call(this);
   });
   if (parent) _.class(stub).inherits(parent);

@@ -14,7 +14,7 @@ function ParamClassStub(parent, body) {
  * This method should be passed directly to ngMock.inject();
  *
  * ```js
- * var stubParamClasses = require('./utils/_stub_agg_params');
+ * let stubParamClasses = require('./utils/_stub_agg_params');
  * describe('something', function () {
  *   beforeEach(ngMock.inject(stubParamClasses));
  * })

@@ -24,7 +24,7 @@ function ParamClassStub(parent, body) {
  * @return {undefined}
  */
 module.exports = function stubParamClasses(Private) {
-  var BaseAggParam = Private.stub(
+  let BaseAggParam = Private.stub(
     require('ui/agg_types/param_types/base'),
     new ParamClassStub(null, function (config) {
       _.assign(this, config);
@@ -10,7 +10,7 @@ import AggTypesParamTypesBaseProvider from 'ui/agg_types/param_types/base';
 export default function AggParamsFactory(Private) {


-  var paramTypeMap = {
+  let paramTypeMap = {
     field: Private(AggTypesParamTypesFieldProvider),
     optioned: Private(AggTypesParamTypesOptionedProvider),
     regex: Private(AggTypesParamTypesRegexProvider),

@@ -37,8 +37,8 @@ export default function AggParamsFactory(Private) {
     AggParams.Super.call(this, {
       index: ['name'],
       initialSet: params.map(function (config) {
-        var type = config.name === 'field' ? config.name : config.type;
-        var Class = paramTypeMap[type] || paramTypeMap._default;
+        let type = config.name === 'field' ? config.name : config.type;
+        let Class = paramTypeMap[type] || paramTypeMap._default;
         return new Class(config);
       })
     });

@@ -59,7 +59,7 @@ export default function AggParamsFactory(Private) {
   * are dependent on the AggParam#write methods which should be studied for each AggType.
   */
  AggParams.prototype.write = function (aggConfig, locals) {
-    var output = { params: {} };
+    let output = { params: {} };
    locals = locals || {};

    this.forEach(function (param) {
@@ -2,8 +2,8 @@ import _ from 'lodash';
 import AggTypesAggParamsProvider from 'ui/agg_types/agg_params';
 import RegistryFieldFormatsProvider from 'ui/registry/field_formats';
 export default function AggTypeFactory(Private) {
-  var AggParams = Private(AggTypesAggParamsProvider);
-  var fieldFormats = Private(RegistryFieldFormatsProvider);
+  let AggParams = Private(AggTypesAggParamsProvider);
+  let fieldFormats = Private(RegistryFieldFormatsProvider);

  /**
   * Generic AggType Constructor

@@ -142,7 +142,7 @@ export default function AggTypeFactory(Private) {
   * @return {FieldFromat}
   */
  AggType.prototype.getFormat = function (agg) {
-    var field = agg.field();
+    let field = agg.field();
    return field ? field.format : fieldFormats.getDefaultInstance('string');
  };

@@ -1,7 +1,7 @@
 import _ from 'lodash';
 import AggTypesAggTypeProvider from 'ui/agg_types/agg_type';
 export default function BucketAggTypeProvider(Private) {
-  var AggType = Private(AggTypesAggTypeProvider);
+  let AggType = Private(AggTypesAggTypeProvider);

  _.class(BucketAggType).inherits(AggType);
  function BucketAggType(config) {
@@ -13,19 +13,19 @@ export default function BucketCountBetweenProvider() {
   * @return {null|number}
   */
  function bucketCountBetween(aggConfigA, aggConfigB) {
-    var aggs = aggConfigA.vis.aggs.getRequestAggs();
+    let aggs = aggConfigA.vis.aggs.getRequestAggs();

-    var aIndex = aggs.indexOf(aggConfigA);
-    var bIndex = aggs.indexOf(aggConfigB);
+    let aIndex = aggs.indexOf(aggConfigA);
+    let bIndex = aggs.indexOf(aggConfigB);

    if (aIndex === -1 || bIndex === -1) {
      return null;
    }

    // return a negative distance, if b is before a
-    var negative = (aIndex > bIndex);
+    let negative = (aIndex > bIndex);

-    var count = aggs
+    let count = aggs
    .slice(Math.min(aIndex, bIndex), Math.max(aIndex, bIndex))
    .reduce(function (count, cfg) {
      if (cfg === aggConfigA || cfg === aggConfigB || cfg.schema.group !== 'buckets') {
@@ -3,7 +3,7 @@ import 'ui/directives/input_whole_number';
 export default function IntervalOptionsService(Private) {

   // shorthand
-  var ms = function (type) { return moment.duration(1, type).asMilliseconds(); };
+  let ms = function (type) { return moment.duration(1, type).asMilliseconds(); };

   return [
     {
@@ -3,8 +3,8 @@ import buildRangeFilter from 'ui/filter_manager/lib/range';
 export default function createDateHistogramFilterProvider(Private) {

   return function (agg, key) {
-    var start = moment(key);
-    var interval = agg.buckets.getInterval();
+    let start = moment(key);
+    let interval = agg.buckets.getInterval();

     return buildRangeFilter(agg.params.field, {
       gte: start.valueOf(),
Some files were not shown because too many files have changed in this diff.