Merge remote-tracking branch 'origin/master' into feature/merge-code

Fuyao Zhao 2019-01-20 17:12:26 -08:00
commit 840a8a8672
477 changed files with 10773 additions and 6692 deletions

View file

@ -203,6 +203,34 @@ module.exports = {
},
},
/**
* Files that run in the browser with only node-level transpilation
*/
{
files: [
'test/functional/services/lib/leadfoot_element_wrapper/scroll_into_view_if_necessary.js',
],
rules: {
'prefer-object-spread/prefer-object-spread': 'off',
'no-var': 'off',
'prefer-const': 'off',
'prefer-destructuring': 'off',
'no-restricted-syntax': [
'error',
'ArrowFunctionExpression',
'AwaitExpression',
'ClassDeclaration',
'RestElement',
'SpreadElement',
'YieldExpression',
'VariableDeclaration[kind="const"]',
'VariableDeclaration[kind="let"]',
'VariableDeclarator[id.type="ArrayPattern"]',
'VariableDeclarator[id.type="ObjectPattern"]',
],
},
},
/**
* Files that run AFTER node version check
* and are not also transpiled with babel
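For reference, the new override above confines scroll_into_view_if_necessary.js to pre-ES6 syntax because it ships to the browser with only node-level transpilation. A minimal sketch of code that would pass those rules (a hypothetical snippet, not part of the commit): var-only declarations, no arrow functions, classes, spread, or destructuring.

// Hypothetical browser-only helper kept to ES5 so the override above accepts it.
function isOutOfView(rect, viewportHeight) {
  return rect.top < 0 || rect.bottom > viewportHeight;
}

var element = document.getElementById('target');
if (element) {
  var rect = element.getBoundingClientRect();
  // Only scroll when the element is actually outside the viewport.
  if (isOutOfView(rect, window.innerHeight)) {
    element.scrollIntoView();
  }
}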

.github/CODEOWNERS (vendored, 24 changes)
View file

@ -1,9 +1,31 @@
# GitHub CODEOWNERS definition
# See: https://help.github.com/articles/about-codeowners/
# Identify which groups will be pinged by changes to different parts of the codebase.
# For more info, see https://help.github.com/articles/about-codeowners/
# APM
/x-pack/plugins/apm/ @elastic/apm-ui
# Beats
/x-pack/plugins/beats_management/ @elastic/beats
# Canvas
/x-pack/plugins/canvas/ @elastic/kibana-canvas
# Security
/x-pack/plugins/security/ @elastic/kibana-security
/x-pack/plugins/spaces/ @elastic/kibana-security
# Design
**/*.scss @elastic/kibana-design
# Elasticsearch UI
/src/legacy/core_plugins/console/ @elastic/es-ui
/x-pack/plugins/console_extensions/ @elastic/es-ui
/x-pack/plugins/cross_cluster_replication/ @elastic/es-ui
/x-pack/plugins/index_lifecycle_management/ @elastic/es-ui
/x-pack/plugins/index_management/ @elastic/es-ui
/x-pack/plugins/license_management/ @elastic/es-ui
/x-pack/plugins/remote_clusters/ @elastic/es-ui
/x-pack/plugins/rollup/ @elastic/es-ui
/x-pack/plugins/searchprofiler/ @elastic/es-ui
/x-pack/plugins/watcher/ @elastic/es-ui

View file

@ -34,6 +34,7 @@
"xpack.security": "x-pack/plugins/security",
"xpack.spaces": "x-pack/plugins/spaces",
"xpack.upgradeAssistant": "x-pack/plugins/upgrade_assistant",
"xpack.uptime": "x-pack/plugins/uptime",
"xpack.watcher": "x-pack/plugins/watcher"
},
"exclude": [

View file

@ -34,6 +34,32 @@ THE SOFTWARE.
This product uses Noto fonts that are licensed under the SIL Open
Font License, Version 1.1.
---
Based on the scroll-into-view-if-necessary module from npm
https://github.com/stipsan/compute-scroll-into-view/blob/master/src/index.ts#L269-L340
MIT License
Copyright (c) 2018 Cody Olsen
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
---
Pretty handling of logarithmic axes.
Copyright (c) 2007-2014 IOLA and Ole Laursen.

File diff suppressed because it is too large.

Binary file not shown (added, 151 KiB).

Binary file not shown (added, 183 KiB).

View file

@ -38,3 +38,8 @@ include::management/advanced-options.asciidoc[]
include::management/managing-saved-objects.asciidoc[]
include::management/managing-beats.asciidoc[]
include::management/managing-remote-clusters.asciidoc[]

View file

@ -0,0 +1,26 @@
[[managing-remote-clusters]]
== Managing Remote Clusters
{kib} *Management* provides two user interfaces for working with data from remote
clusters.
*Remote Clusters* helps you manage remote clusters for use with
{ref}/modules-cross-cluster-search.html[cross cluster search] and
{xpack-ref}/xpack-ccr.html[cross cluster replication]. You can add and remove remote
clusters and check their connectivity.
Go to *Management > Elasticsearch > Remote Clusters* to get started.
[role="screenshot"]
image::images/add_remote_cluster.png[UI for adding a remote cluster]
*Cross Cluster Replication* includes tools to help you create and manage the remote
replication process. You can follow an index pattern on the remote cluster for
auto-discovery and then replicate new indices that match the auto-follow pattern
to the local cluster.
Go to *Management > Elasticsearch > Cross Cluster Replication* to get started.
[role="screenshot"]
image::images/auto_follow_pattern.png[UI for adding an auto-follow pattern]

View file

@ -95,7 +95,7 @@
},
"dependencies": {
"@elastic/datemath": "5.0.2",
"@elastic/eui": "6.3.1",
"@elastic/eui": "6.5.1",
"@elastic/filesaver": "1.1.2",
"@elastic/good": "8.1.1-kibana2",
"@elastic/numeral": "2.3.2",

View file

@ -418,16 +418,16 @@ describe('I18n engine', () => {
describe('translate', () => {
test('should throw error if id is not a non-empty string', () => {
expect(() => i18n.translate(undefined as any)).toThrowErrorMatchingSnapshot();
expect(() => i18n.translate(null as any)).toThrowErrorMatchingSnapshot();
expect(() => i18n.translate(true as any)).toThrowErrorMatchingSnapshot();
expect(() => i18n.translate(5 as any)).toThrowErrorMatchingSnapshot();
expect(() => i18n.translate({} as any)).toThrowErrorMatchingSnapshot();
expect(() => i18n.translate('')).toThrowErrorMatchingSnapshot();
expect(() => i18n.translate(undefined as any, {} as any)).toThrowErrorMatchingSnapshot();
expect(() => i18n.translate(null as any, {} as any)).toThrowErrorMatchingSnapshot();
expect(() => i18n.translate(true as any, {} as any)).toThrowErrorMatchingSnapshot();
expect(() => i18n.translate(5 as any, {} as any)).toThrowErrorMatchingSnapshot();
expect(() => i18n.translate({} as any, {} as any)).toThrowErrorMatchingSnapshot();
expect(() => i18n.translate('', {} as any)).toThrowErrorMatchingSnapshot();
});
test('should throw error if translation message and defaultMessage are not provided', () => {
expect(() => i18n.translate('foo')).toThrowErrorMatchingSnapshot();
expect(() => i18n.translate('foo', {} as any)).toThrowErrorMatchingSnapshot();
});
test('should return message as is if values are not provided', () => {
@ -438,7 +438,7 @@ describe('I18n engine', () => {
},
});
expect(i18n.translate('a.b.c')).toBe('foo');
expect(i18n.translate('a.b.c', {} as any)).toBe('foo');
});
test('should return default message as is if values are not provided', () => {
@ -467,10 +467,10 @@ describe('I18n engine', () => {
expect(
i18n.translate('a.b.c', {
values: { a: 1, b: 2, c: 3 },
})
} as any)
).toBe('foo 1, 2, 3 bar');
expect(i18n.translate('d.e.f', { values: { foo: 'bar' } })).toBe('bar');
expect(i18n.translate('d.e.f', { values: { foo: 'bar' } } as any)).toBe('bar');
});
test('should interpolate variables for default messages', () => {
@ -494,9 +494,13 @@ describe('I18n engine', () => {
},
});
expect(i18n.translate('a.b.c', { values: { numPhotos: 0 } })).toBe('You have no photos.');
expect(i18n.translate('a.b.c', { values: { numPhotos: 1 } })).toBe('You have one photo.');
expect(i18n.translate('a.b.c', { values: { numPhotos: 1000 } })).toBe(
expect(i18n.translate('a.b.c', { values: { numPhotos: 0 } } as any)).toBe(
'You have no photos.'
);
expect(i18n.translate('a.b.c', { values: { numPhotos: 1 } } as any)).toBe(
'You have one photo.'
);
expect(i18n.translate('a.b.c', { values: { numPhotos: 1000 } } as any)).toBe(
'You have 1,000 photos.'
);
});
@ -551,7 +555,9 @@ describe('I18n engine', () => {
});
i18n.setDefaultLocale('en');
expect(() => i18n.translate('a.b.c', { values: { foo: 0 } })).toThrowErrorMatchingSnapshot();
expect(() =>
i18n.translate('a.b.c', { values: { foo: 0 } } as any)
).toThrowErrorMatchingSnapshot();
expect(() =>
i18n.translate('d.e.f', {
@ -574,7 +580,7 @@ describe('I18n engine', () => {
});
i18n.setDefaultLocale('en');
expect(i18n.translate('a.b.c', { values: { result: 0.15 } })).toBe('Result: 15%');
expect(i18n.translate('a.b.c', { values: { result: 0.15 } } as any)).toBe('Result: 15%');
expect(
i18n.translate('d.e.f', {
@ -598,25 +604,25 @@ describe('I18n engine', () => {
expect(
i18n.translate('a.short', {
values: { start: new Date(2018, 5, 20) },
})
} as any)
).toBe('Sale begins 6/20/18');
expect(
i18n.translate('a.medium', {
values: { start: new Date(2018, 5, 20) },
})
} as any)
).toBe('Sale begins Jun 20, 2018');
expect(
i18n.translate('a.long', {
values: { start: new Date(2018, 5, 20) },
})
} as any)
).toBe('Sale begins June 20, 2018');
expect(
i18n.translate('a.full', {
values: { start: new Date(2018, 5, 20) },
})
} as any)
).toBe('Sale begins Wednesday, June 20, 2018');
});
@ -664,13 +670,13 @@ describe('I18n engine', () => {
expect(
i18n.translate('a.short', {
values: { expires: new Date(2018, 5, 20, 18, 40, 30, 50) },
})
} as any)
).toBe('Coupon expires at 6:40 PM');
expect(
i18n.translate('a.medium', {
values: { expires: new Date(2018, 5, 20, 18, 40, 30, 50) },
})
} as any)
).toBe('Coupon expires at 6:40:30 PM');
});
@ -706,7 +712,9 @@ describe('I18n engine', () => {
},
});
expect(i18n.translate('a.b.c', { values: { total: 1000 } })).toBe('Your total is $1,000.00');
expect(i18n.translate('a.b.c', { values: { total: 1000 } } as any)).toBe(
'Your total is $1,000.00'
);
i18n.setFormats({
number: {
@ -714,9 +722,13 @@ describe('I18n engine', () => {
},
});
expect(i18n.translate('a.b.c', { values: { total: 1000 } })).toBe('Your total is $1,000.00');
expect(i18n.translate('a.b.c', { values: { total: 1000 } } as any)).toBe(
'Your total is $1,000.00'
);
expect(i18n.translate('d.e.f', { values: { total: 1000 } })).toBe('Your total is €1,000.00');
expect(i18n.translate('d.e.f', { values: { total: 1000 } } as any)).toBe(
'Your total is €1,000.00'
);
});
test('should format default message with a custom format', () => {
@ -768,7 +780,9 @@ describe('I18n engine', () => {
});
i18n.setDefaultLocale('en');
expect(i18n.translate('a.b.c', { values: { total: 1000 } })).toBe('Your total is 1,000');
expect(i18n.translate('a.b.c', { values: { total: 1000 } } as any)).toBe(
'Your total is 1,000'
);
expect(
i18n.translate('d.e.f', {
@ -788,7 +802,7 @@ describe('I18n engine', () => {
i18n.setDefaultLocale('en');
expect(() =>
i18n.translate('a.b.c', { values: { total: 1 } })
i18n.translate('a.b.c', { values: { total: 1 } } as any)
).toThrowErrorMatchingSnapshot();
expect(() =>

View file

@ -165,8 +165,8 @@ export function getRegisteredLocales() {
}
interface TranslateArguments {
values?: { [key: string]: string | number | Date };
defaultMessage?: string;
values?: Record<string, string | number | boolean | Date | null | undefined>;
defaultMessage: string;
description?: string;
}
@ -177,13 +177,7 @@ interface TranslateArguments {
* @param [options.values] - values to pass into translation
* @param [options.defaultMessage] - will be used unless translation was successful
*/
export function translate(
id: string,
{ values = {}, defaultMessage = '' }: TranslateArguments = {
values: {},
defaultMessage: '',
}
) {
export function translate(id: string, { values = {}, defaultMessage }: TranslateArguments) {
const shouldUsePseudoLocale = isPseudoLocale(currentLocale);
if (!id || !isString(id)) {
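With TranslateArguments tightened above (defaultMessage is now required and values may carry booleans, null, or undefined), call sites look like the following sketch; the message id is a made-up example.

import { i18n } from '@kbn/i18n';

// defaultMessage can no longer be omitted; values accepts booleans and nulls as well.
const greeting = i18n.translate('example.plugin.greetingLabel', {
  defaultMessage: 'Hello, {name}!',
  values: { name: 'Kibana' },
});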

View file

@ -21,7 +21,7 @@ declare module 'intl-format-cache' {
import IntlMessageFormat from 'intl-messageformat';
interface Message {
format: (values: { [key: string]: string | number | Date }) => string;
format: (values: Record<string, string | number | boolean | Date | null | undefined>) => string;
}
function memoizeIntlConstructor(

View file

@ -17,8 +17,8 @@
* under the License.
*/
export { functionsRegistry } from './lib/functions_registry';
export { typesRegistry } from './lib/types_registry';
export { FunctionsRegistry } from './lib/functions_registry';
export { TypesRegistry } from './lib/types_registry';
export { createError } from './interpreter/create_error';
export { interpretProvider } from './interpreter/interpret';
export { serializeProvider } from './lib/serialize';

View file

@ -22,7 +22,6 @@ import { each, keys, last, mapValues, reduce, zipObject } from 'lodash';
import { getType } from '../lib/get_type';
import { fromExpression } from '../lib/ast';
import { getByAlias } from '../lib/get_by_alias';
import { typesRegistry } from '../lib/types_registry';
import { castProvider } from './cast';
import { createError } from './create_error';
@ -103,7 +102,7 @@ export function interpretProvider(config) {
}
// Validate the function output against the type definition's validate function
const type = typesRegistry.get(fnDef.type);
const type = handlers.types[fnDef.type];
if (type && type.validate) {
try {
type.validate(fnOutput);

View file

@ -20,10 +20,8 @@
import { Registry } from './registry';
import { Fn } from './fn';
class FunctionsRegistry extends Registry {
export class FunctionsRegistry extends Registry {
wrapper(obj) {
return new Fn(obj);
}
}
export const functionsRegistry = new FunctionsRegistry();

View file

@ -20,10 +20,8 @@
import { Registry } from './registry';
import { Type } from './type';
class TypesRegistry extends Registry {
export class TypesRegistry extends Registry {
wrapper(obj) {
return new Type(obj);
}
}
export const typesRegistry = new TypesRegistry();
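The two registry hunks above stop exporting shared singletons and export the classes instead, so each consumer now builds its own instances. A short sketch of the new pattern, consistent with the plugin-side changes later in this commit (the registered function is a hypothetical example):

import { FunctionsRegistry, TypesRegistry } from '@kbn/interpreter/common';

const functionsRegistry = new FunctionsRegistry();
const typesRegistry = new TypesRegistry();

// register() takes a factory that returns the definition, like the type factories shown elsewhere in this commit.
functionsRegistry.register(() => ({
  name: 'hello',
  help: 'Returns a greeting (illustrative only)',
  fn: () => 'hello',
}));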

View file

@ -17,7 +17,7 @@
* under the License.
*/
import { map, zipObject } from 'lodash';
import { map, pick, zipObject } from 'lodash';
export const datatable = () => ({
name: 'datatable',
@ -78,5 +78,32 @@ export const datatable = () => ({
},
};
},
pointseries: datatable => {
// datatable columns are an array that looks like [{ name: "one", type: "string" }, { name: "two", type: "string" }]
// rows look like [{ one: 1, two: 2}, { one: 3, two: 4}, ...]
const validFields = ['x', 'y', 'color', 'size', 'text'];
const columns = datatable.columns.filter(column => validFields.includes(column.name));
const rows = datatable.rows.map(row => pick(row, validFields));
return {
type: 'pointseries',
columns: columns.reduce((acc, column) => {
/* pointseries columns are an object that looks like this
* {
* x: { type: "string", expression: "x", role: "dimension" },
* y: { type: "string", expression: "y", role: "dimension" }
* }
*/
acc[column.name] = {
type: column.type,
expression: column.name,
role: 'dimension',
};
return acc;
}, {}),
rows,
};
},
},
});
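A worked illustration of the cast added above (a sketch that assumes the cast sits under the datatable type's `to` map, like its other casts):

const toPointseries = datatable().to.pointseries;

const result = toPointseries({
  type: 'datatable',
  columns: [{ name: 'x', type: 'number' }, { name: 'label', type: 'string' }],
  rows: [{ x: 1, label: 'a' }, { x: 2, label: 'b' }],
});

// Only the valid pointseries fields survive the filter/pick above:
// result.columns -> { x: { type: 'number', expression: 'x', role: 'dimension' } }
// result.rows    -> [{ x: 1 }, { x: 2 }]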

View file

@ -24,7 +24,7 @@ export const pointseries = () => ({
return {
type: 'pointseries',
rows: [],
columns: [],
columns: {},
};
},
},

View file

@ -20,18 +20,7 @@
import { i18n } from '@kbn/i18n';
import $script from 'scriptjs';
let resolvePromise = null;
let called = false;
let populatePromise = new Promise(_resolve => {
resolvePromise = _resolve;
});
export const getBrowserRegistries = () => {
return populatePromise;
};
const loadBrowserRegistries = (registries, basePath) => {
export const loadBrowserRegistries = (registries, basePath) => {
const remainingTypes = Object.keys(registries);
const populatedTypes = {};
@ -58,27 +47,3 @@ const loadBrowserRegistries = (registries, basePath) => {
loadType();
});
};
export const populateBrowserRegistries = (registries, basePath) => {
if (called) {
const oldPromise = populatePromise;
let newResolve;
populatePromise = new Promise(_resolve => {
newResolve = _resolve;
});
oldPromise.then(oldTypes => {
loadBrowserRegistries(registries, basePath).then(newTypes => {
newResolve({
...oldTypes,
...newTypes,
});
});
});
return populatePromise;
}
called = true;
loadBrowserRegistries(registries, basePath).then(registries => {
resolvePromise(registries);
});
return populatePromise;
};

View file

@ -17,6 +17,6 @@
* under the License.
*/
export { populateBrowserRegistries, getBrowserRegistries } from './browser_registries';
export { loadBrowserRegistries } from './browser_registries';
export { createSocket } from './socket';
export { initializeInterpreter, interpretAst, getInitializedFunctions } from './interpreter';
export { initializeInterpreter } from './interpreter';

View file

@ -19,18 +19,28 @@
import { socketInterpreterProvider } from '../common/interpreter/socket_interpret';
import { serializeProvider } from '../common/lib/serialize';
import { getSocket } from './socket';
import { typesRegistry } from '../common/lib/types_registry';
import { createHandlers } from './create_handlers';
import { functionsRegistry } from '../common/lib/functions_registry';
import { getBrowserRegistries } from './browser_registries';
let socket;
let resolve;
const functionList = new Promise(_resolve => (resolve = _resolve));
export async function initializeInterpreter(socket, typesRegistry, functionsRegistry) {
let resolve;
const functionList = new Promise(_resolve => (resolve = _resolve));
export async function initializeInterpreter() {
socket = getSocket();
const getInitializedFunctions = async () => {
return functionList;
};
const interpretAst = async (ast, context, handlers) => {
// Load plugins before attempting to get functions, otherwise this gets racy
const serverFunctionList = await functionList;
const interpretFn = await socketInterpreterProvider({
types: typesRegistry.toJS(),
handlers: { ...handlers, ...createHandlers(socket) },
functions: functionsRegistry.toJS(),
referableFunctions: serverFunctionList,
socket: socket,
});
return interpretFn(ast, context);
};
// Listen for interpreter runs
socket.on('run', ({ ast, context, id }) => {
@ -42,27 +52,20 @@ export async function initializeInterpreter() {
});
// Create the function list
socket.emit('getFunctionList');
socket.once('functionList', resolve);
return functionList;
let gotFunctionList = false;
socket.once('functionList', (fl) => {
gotFunctionList = true;
resolve(fl);
});
const interval = setInterval(() => {
if (gotFunctionList) {
clearInterval(interval);
return;
}
socket.emit('getFunctionList');
}, 1000);
return { getInitializedFunctions, interpretAst };
}
export async function getInitializedFunctions() {
return functionList;
}
// Use the above promise to seed the interpreter with the functions it can defer to
export async function interpretAst(ast, context, handlers) {
// Load plugins before attempting to get functions, otherwise this gets racy
return Promise.all([functionList, getBrowserRegistries()])
.then(([serverFunctionList]) => {
return socketInterpreterProvider({
types: typesRegistry.toJS(),
handlers: { ...handlers, ...createHandlers(socket) },
functions: functionsRegistry.toJS(),
referableFunctions: serverFunctionList,
socket: socket,
});
})
.then(interpretFn => interpretFn(ast, context));
}
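A hedged sketch of a caller using the reworked initializer above, which now receives its socket and registries from the outside and returns the interpreter API instead of relying on module-level state (ast, context, and handlers are placeholders):

// Inside an async function; socket, typesRegistry, and functionsRegistry are created by the caller.
const { interpretAst, getInitializedFunctions } = await initializeInterpreter(
  socket,
  typesRegistry,
  functionsRegistry
);

const serverFunctions = await getInitializedFunctions(); // resolves once 'functionList' arrives
const output = await interpretAst(ast, context, handlers);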

View file

@ -18,22 +18,13 @@
*/
import io from 'socket.io-client';
import { functionsRegistry } from '../common/lib/functions_registry';
import { getBrowserRegistries } from './browser_registries';
const SOCKET_CONNECTION_TIMEOUT = 5000; // timeout in ms
let socket;
export async function createSocket(basePath) {
if (socket != null) return socket;
export async function createSocket(basePath, functionsRegistry) {
return new Promise((resolve, rej) => {
const reject = p => {
socket = null; // reset the socket on errors
rej(p);
};
socket = io({
return new Promise((resolve, reject) => {
const socket = io({
path: `${basePath}/socket.io`,
transports: ['polling', 'websocket'],
transportOptions: {
@ -49,12 +40,11 @@ export async function createSocket(basePath) {
});
socket.on('getFunctionList', () => {
const pluginsLoaded = getBrowserRegistries();
pluginsLoaded.then(() => socket.emit('functionList', functionsRegistry.toJS()));
socket.emit('functionList', functionsRegistry.toJS());
});
socket.on('connect', () => {
resolve();
resolve(socket);
socket.off('connectionFailed', errorHandler);
socket.off('connect_error', errorHandler);
socket.off('connect_timeout', errorHandler);
@ -71,8 +61,3 @@ export async function createSocket(basePath) {
socket.on('connect_timeout', errorHandler);
});
}
export function getSocket() {
if (!socket) throw new Error('getSocket failed, socket has not been created');
return socket;
}

View file

@ -17,6 +17,6 @@
* under the License.
*/
export { populateServerRegistries, getServerRegistries } from './server_registries';
export { populateServerRegistries } from './server_registries';
export { getPluginPaths } from './get_plugin_paths';
export { pluginPaths } from './plugin_paths';

View file

@ -18,56 +18,39 @@
*/
import { i18n } from '@kbn/i18n';
import { typesRegistry } from '../common/lib/types_registry';
import { functionsRegistry as serverFunctions } from '../common/lib/functions_registry';
import { getPluginPaths } from './get_plugin_paths';
const registries = {
serverFunctions: serverFunctions,
commonFunctions: serverFunctions,
types: typesRegistry,
};
let resolve = null;
let called = false;
export const populateServerRegistries = registries => {
if (!registries) throw new Error('registries are required');
const populatePromise = new Promise(_resolve => {
resolve = _resolve;
});
return new Promise(resolve => {
const remainingTypes = Object.keys(registries);
const populatedTypes = {};
export const getServerRegistries = () => {
return populatePromise;
};
const loadType = () => {
const type = remainingTypes.pop();
getPluginPaths(type).then(paths => {
global.canvas = global.canvas || {};
global.canvas.register = d => registries[type].register(d);
global.canvas.i18n = i18n;
export const populateServerRegistries = types => {
if (called) {
return populatePromise;
}
called = true;
if (!types || !types.length) throw new Error('types is required');
paths.forEach(path => {
require(path); // eslint-disable-line import/no-dynamic-require
});
const remainingTypes = types;
const populatedTypes = {};
delete global.canvas;
const loadType = () => {
const type = remainingTypes.pop();
getPluginPaths(type).then(paths => {
global.canvas = global.canvas || {};
global.canvas.register = d => registries[type].register(d);
global.canvas.i18n = i18n;
paths.forEach(path => {
require(path); // eslint-disable-line import/no-dynamic-require
populatedTypes[type] = registries[type];
if (remainingTypes.length) {
loadType();
}
else {
resolve(populatedTypes);
}
});
};
delete global.canvas;
populatedTypes[type] = registries[type];
if (remainingTypes.length) loadType();
else resolve(populatedTypes);
});
};
if (remainingTypes.length) loadType();
return populatePromise;
if (remainingTypes.length) loadType();
});
};
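populateServerRegistries now takes the registries it should fill instead of memoizing module-level singletons. A sketch of the new call, mirroring the x-pack init and thread-worker changes later in the commit:

import { FunctionsRegistry, TypesRegistry } from '@kbn/interpreter/common';
import { populateServerRegistries } from '@kbn/interpreter/server';

const registries = {
  serverFunctions: new FunctionsRegistry(),
  types: new TypesRegistry(),
};

// Resolves with the populated registries once every plugin path has been required.
const populated = await populateServerRegistries(registries);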

View file

@ -36,6 +36,7 @@ module.exports = function ({ sourceMaps }, { watch }) {
entry: {
'types/all': resolve(PLUGIN_SOURCE_DIR, 'types/register.js'),
'functions/browser/all': resolve(PLUGIN_SOURCE_DIR, 'functions/browser/register.js'),
'functions/browser/common': resolve(PLUGIN_SOURCE_DIR, 'functions/common/register.js'),
},
// there were problems with the node and web targets since this code is actually

View file

@ -6,8 +6,8 @@ export default {
'yScale': null,
'series': [{
'label': 'ios',
'aggLabel': 'Count',
'aggId': '1',
'id': '1',
'yAxisFormatter': _.identity,
'values': [{
'x': '_all',
'y': 2820,
@ -15,8 +15,8 @@ export default {
}]
}, {
'label': 'win 7',
'aggLabel': 'Count',
'aggId': '1',
'yAxisFormatter': _.identity,
'values': [{
'x': '_all',
'y': 2319,
@ -24,8 +24,8 @@ export default {
}]
}, {
'label': 'win 8',
'aggLabel': 'Count',
'aggId': '1',
'id': '1',
'yAxisFormatter': _.identity,
'values': [{
'x': '_all',
'y': 1835,
@ -33,8 +33,8 @@ export default {
}]
}, {
'label': 'windows xp service pack 2 version 20123452',
'aggLabel': 'Count',
'aggId': '1',
'id': '1',
'yAxisFormatter': _.identity,
'values': [{
'x': '_all',
'y': 734,
@ -42,8 +42,8 @@ export default {
}]
}, {
'label': 'osx',
'aggLabel': 'Count',
'aggId': '1',
'id': '1',
'yAxisFormatter': _.identity,
'values': [{
'x': '_all',
'y': 1352,

View file

@ -181,4 +181,9 @@ export const schema = Joi.object().keys({
failureDebugging: Joi.object().keys({
htmlDirectory: Joi.string().default(defaultRelativeToConfigPath('failure_debug/html'))
}).default(),
// settings for the find service
layout: Joi.object().keys({
fixedHeaderHeight: Joi.number().default(50),
}).default(),
}).default();
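A small assumed example of how a functional test service could read the new setting; the FTR config-service call here is an assumption and not part of this diff:

// Hypothetical functional test service provider.
export function LayoutAwareProvider({ getService }) {
  const config = getService('config');
  // Falls back to the Joi default of 50 when the test config does not override it.
  const fixedHeaderHeight = config.get('layout.fixedHeaderHeight');
  return { fixedHeaderHeight };
}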

View file

@ -79,7 +79,7 @@ utils.expandLiteralStrings = function (data) {
utils.extractDeprecationMessages = function (warnings) {
// pattern for valid warning header
const re = /\d{3} [0-9a-zA-Z!#$%&'*+-.^_`|~]+ \"((?:\t| |!|[\x23-\x5b]|[\x5d-\x7e]|[\x80-\xff]|\\\\|\\")*)\"(?: \"[^"]*\")/;
const re = /\d{3} [0-9a-zA-Z!#$%&'*+-.^_`|~]+ \"((?:\t| |!|[\x23-\x5b]|[\x5d-\x7e]|[\x80-\xff]|\\\\|\\")*)\"(?: \"[^"]*\")?/;
// split on any comma that is followed by an even number of quotes
return _.map(utils.splitOnUnquotedCommaSpace(warnings), function (warning) {
const match = re.exec(warning);

View file

@ -55,15 +55,30 @@ describe('Utils class', () => {
expect(utils.extractDeprecationMessages(
'299 Elasticsearch-6.0.0-alpha1-SNAPSHOT-abcdef1 "this is a warning" "Mon, 27 Feb 2017 14:52:14 GMT"')).toEqual(
['#! Deprecation: this is a warning']);
expect(utils.extractDeprecationMessages(
'299 Elasticsearch-6.0.0-alpha1-SNAPSHOT-abcdef1 "this is a warning"')).toEqual(
['#! Deprecation: this is a warning']);
expect(utils.extractDeprecationMessages( //eslint-disable-next-line max-len
'299 Elasticsearch-6.0.0-alpha1-SNAPSHOT-abcdef1 "this is a warning" "Mon, 27 Feb 2017 14:52:14 GMT", 299 Elasticsearch-6.0.0-alpha1-SNAPSHOT-abcdef1 "this is a second warning" "Mon, 27 Feb 2017 14:52:14 GMT"')).toEqual(
['#! Deprecation: this is a warning', '#! Deprecation: this is a second warning']);
expect(utils.extractDeprecationMessages( //eslint-disable-next-line max-len
'299 Elasticsearch-6.0.0-alpha1-SNAPSHOT-abcdef1 "this is a warning", 299 Elasticsearch-6.0.0-alpha1-SNAPSHOT-abcdef1 "this is a second warning"')).toEqual(
['#! Deprecation: this is a warning', '#! Deprecation: this is a second warning']);
expect(utils.extractDeprecationMessages( //eslint-disable-next-line max-len
'299 Elasticsearch-6.0.0-alpha1-SNAPSHOT-abcdef1 "this is a warning, and it includes a comma" "Mon, 27 Feb 2017 14:52:14 GMT"')).toEqual(
['#! Deprecation: this is a warning, and it includes a comma']);
expect(utils.extractDeprecationMessages( //eslint-disable-next-line max-len
'299 Elasticsearch-6.0.0-alpha1-SNAPSHOT-abcdef1 "this is a warning, and it includes a comma"')).toEqual(
['#! Deprecation: this is a warning, and it includes a comma']);
expect(utils.extractDeprecationMessages( //eslint-disable-next-line max-len
'299 Elasticsearch-6.0.0-alpha1-SNAPSHOT-abcdef1 "this is a warning, and it includes an escaped backslash \\\\ and a pair of \\\"escaped quotes\\\"" "Mon, 27 Feb 2017 14:52:14 GMT"')).toEqual(
['#! Deprecation: this is a warning, and it includes an escaped backslash \\ and a pair of "escaped quotes"']);
expect(utils.extractDeprecationMessages( //eslint-disable-next-line max-len
'299 Elasticsearch-6.0.0-alpha1-SNAPSHOT-abcdef1 "this is a warning, and it includes an escaped backslash \\\\ and a pair of \\\"escaped quotes\\\""')).toEqual(
['#! Deprecation: this is a warning, and it includes an escaped backslash \\ and a pair of "escaped quotes"']);
});
test('unescape', function () {

View file

@ -25,11 +25,6 @@ export default function (kibana) {
id: 'interpreter',
require: ['kibana', 'elasticsearch'],
publicDir: resolve(__dirname, 'public'),
uiExports: {
hacks: [
'plugins/interpreter/load_browser_plugins.js',
],
},
init,
});
}

View file

@ -18,10 +18,16 @@
*/
import { routes } from './server/routes';
import { functionsRegistry } from '@kbn/interpreter/common';
import { FunctionsRegistry, TypesRegistry } from '@kbn/interpreter/common';
import { populateServerRegistries } from '@kbn/interpreter/server';
export default async function (server /*options*/) {
const registries = {
serverFunctions: new FunctionsRegistry(),
types: new TypesRegistry()
};
server.injectUiAppVars('canvas', () => {
const config = server.config();
const basePath = config.get('server.basePath');
@ -35,12 +41,14 @@ export default async function (server /*options*/) {
kbnIndex: config.get('kibana.index'),
esShardTimeout: config.get('elasticsearch.shardTimeout'),
esApiVersion: config.get('elasticsearch.apiVersion'),
serverFunctions: functionsRegistry.toArray(),
serverFunctions: registries.serverFunctions.toArray(),
basePath,
reportingBrowserType,
};
});
await populateServerRegistries(['serverFunctions', 'types']);
await populateServerRegistries(registries);
server.expose(registries);
routes(server);
}

View file

@ -31,14 +31,6 @@ export const metric = () => ({
defaultMessage: 'Metric visualization'
}),
args: {
bucket: {
types: ['string', 'null'],
default: null,
},
metric: {
types: ['string'],
default: '1',
},
visConfig: {
types: ['string', 'null'],
default: '"{}"',
@ -46,17 +38,6 @@ export const metric = () => ({
},
fn(context, args) {
const visConfigParams = JSON.parse(args.visConfig);
const metrics = args.metric.split(',');
metrics.forEach(metric => {
const metricColumn = context.columns.find((column, i) =>
column.id === metric || column.name === metric || i === parseInt(metric));
metricColumn.aggConfig.schema = 'metric';
});
if (args.bucket) {
const bucketColumn = context.columns.find((column, i) =>
column.id === args.bucket || column.name === args.bucket || i === parseInt(args.bucket));
bucketColumn.aggConfig.schema = 'segment';
}
return {
type: 'render',

View file

@ -17,7 +17,6 @@
* under the License.
*/
import { VisTypesRegistryProvider } from 'ui/registry/vis_types';
import { VislibSlicesResponseHandlerProvider } from 'ui/vis/response_handlers/vislib';
import chrome from 'ui/chrome';
import { i18n } from '@kbn/i18n';
@ -34,10 +33,6 @@ export const kibanaPie = () => ({
defaultMessage: 'Pie visualization'
}),
args: {
schemas: {
types: ['string'],
default: '"{}"',
},
visConfig: {
types: ['string', 'null'],
default: '"{}"',
@ -47,24 +42,9 @@ export const kibanaPie = () => ({
const $injector = await chrome.dangerouslyGetActiveInjector();
const Private = $injector.get('Private');
const responseHandler = Private(VislibSlicesResponseHandlerProvider).handler;
const visTypes = Private(VisTypesRegistryProvider);
const visConfigParams = JSON.parse(args.visConfig);
const visType = visTypes.byName.pie;
const schemas = JSON.parse(args.schemas);
if (context.columns) {
context.columns.forEach(column => {
column.aggConfig.aggConfigs.schemas = visType.schemas.all;
});
Object.keys(schemas).forEach(key => {
schemas[key].forEach(i => {
context.columns[i].aggConfig.schema = key;
});
});
}
const convertedData = await responseHandler(context);
const convertedData = await responseHandler(context, visConfigParams.dimensions);
return {
type: 'render',

View file

@ -31,14 +31,6 @@ export const regionmap = () => ({
defaultMessage: 'Regionmap visualization'
}),
args: {
bucket: {
types: ['string'],
default: '0',
},
metric: {
types: ['string'],
default: '1',
},
visConfig: {
types: ['string', 'null'],
default: '"{}"',
@ -46,15 +38,6 @@ export const regionmap = () => ({
},
fn(context, args) {
const visConfigParams = JSON.parse(args.visConfig);
const metricColumn = context.columns.find((column, i) =>
column.id === args.metric || column.name === args.metric || i === parseInt(args.metric)
);
const bucketColumn = context.columns.find((column, i) =>
column.id === args.bucket || column.name === args.bucket || i === parseInt(args.bucket)
);
metricColumn.aggConfig.schema = 'metric';
bucketColumn.aggConfig.schema = 'segment';
return {
type: 'render',

View file

@ -35,19 +35,6 @@ export const kibanaTable = () => ({
defaultMessage: 'Table visualization'
}),
args: {
bucket: {
types: ['string'],
},
splitRow: {
types: ['string'],
},
splitColumn: {
types: ['string'],
},
metric: {
types: ['string'],
default: '1',
},
visConfig: {
types: ['string', 'null'],
default: '"{}"',
@ -55,35 +42,8 @@ export const kibanaTable = () => ({
},
async fn(context, args) {
const visConfigParams = JSON.parse(args.visConfig);
args.metric.split(',').forEach(metric => {
const metricColumn = context.columns.find((column, i) =>
column.id === metric || column.name === metric || i === parseInt(metric));
metricColumn.aggConfig.schema = 'metric';
});
if (args.bucket) {
args.bucket.split(',').forEach(bucket => {
const bucketColumn = context.columns.find((column, i) =>
column.id === bucket || column.name === bucket || i === parseInt(bucket));
bucketColumn.aggConfig.schema = 'bucket';
});
}
if (args.splitColumn) {
args.splitColumn.split(',').forEach(split => {
const splitColumn = context.columns.find((column, i) =>
column.id === split || column.name === split || i === parseInt(split));
splitColumn.aggConfig.schema = 'split';
});
}
if (args.splitRow) {
args.splitRow.split(',').forEach(split => {
const splitColumn = context.columns.find((column, i) =>
column.id === split || column.name === split || i === parseInt(split));
splitColumn.aggConfig.schema = 'split';
splitColumn.aggConfig.params.row = true;
});
}
const convertedData = await responseHandler(context);
const convertedData = await responseHandler(context, visConfigParams.dimensions);
return {
type: 'render',

View file

@ -31,14 +31,6 @@ export const tagcloud = () => ({
defaultMessage: 'Tagcloud visualization'
}),
args: {
bucket: {
types: ['string'],
default: '0',
},
metric: {
types: ['string'],
default: '1',
},
visConfig: {
types: ['string', 'null'],
default: '"{}"',
@ -46,15 +38,6 @@ export const tagcloud = () => ({
},
fn(context, args) {
const visConfigParams = JSON.parse(args.visConfig);
const metricColumn = context.columns.find((column, i) =>
column.id === args.metric || column.name === args.metric || i === parseInt(args.metric)
);
const bucketColumn = context.columns.find((column, i) =>
column.id === args.bucket || column.name === args.bucket || i === parseInt(args.bucket)
);
metricColumn.aggConfig.schema = 'metric';
bucketColumn.aggConfig.schema = 'segment';
return {
type: 'render',

View file

@ -17,11 +17,9 @@
* under the License.
*/
import { makeGeoJsonResponseHandler } from 'plugins/tile_map/coordinatemap_response_handler';
import { convertToGeoJson } from 'ui/vis/map/convert_to_geojson';
import { i18n } from '@kbn/i18n';
const responseHandler = makeGeoJsonResponseHandler();
export const tilemap = () => ({
name: 'tilemap',
type: 'render',
@ -34,14 +32,6 @@ export const tilemap = () => ({
defaultMessage: 'Tilemap visualization'
}),
args: {
bucket: {
types: ['string'],
default: '0',
},
metric: {
types: ['string'],
default: '1',
},
visConfig: {
types: ['string', 'null'],
default: '"{}"',
@ -49,17 +39,12 @@ export const tilemap = () => ({
},
fn(context, args) {
const visConfigParams = JSON.parse(args.visConfig);
const metricColumn = context.columns.find((column, i) =>
column.id === args.metric || column.name === args.metric || i === parseInt(args.metric)
);
const bucketColumn = context.columns.find((column, i) =>
column.id === args.bucket || column.name === args.bucket || i === parseInt(args.bucket)
);
metricColumn.aggConfig.schema = 'metric';
bucketColumn.aggConfig.schema = 'segment';
const convertedData = responseHandler(context);
const convertedData = convertToGeoJson(context, {
geohash: visConfigParams.geohash,
metric: visConfigParams.metric,
geocentroid: visConfigParams.geocentroid,
});
return {
type: 'render',

View file

@ -18,7 +18,6 @@
*/
import { i18n } from '@kbn/i18n';
import { VisTypesRegistryProvider } from 'ui/registry/vis_types';
import { VislibSeriesResponseHandlerProvider } from 'ui/vis/response_handlers/vislib';
import chrome from 'ui/chrome';
@ -34,14 +33,6 @@ export const vislib = () => ({
defaultMessage: 'Vislib visualization'
}),
args: {
type: {
types: ['string'],
default: 'metric',
},
schemas: {
types: ['string'],
default: '"{}"',
},
visConfig: {
types: ['string', 'null'],
default: '"{}"',
@ -51,29 +42,9 @@ export const vislib = () => ({
const $injector = await chrome.dangerouslyGetActiveInjector();
const Private = $injector.get('Private');
const responseHandler = Private(VislibSeriesResponseHandlerProvider).handler;
const visTypes = Private(VisTypesRegistryProvider);
const visConfigParams = JSON.parse(args.visConfig);
const schemas = JSON.parse(args.schemas);
const visType = visTypes.byName[args.type || 'histogram'];
if (context.columns) {
// assign schemas to aggConfigs
context.columns.forEach(column => {
column.aggConfig.aggConfigs.schemas = visType.schemas.all;
});
Object.keys(schemas).forEach(key => {
schemas[key].forEach(i => {
const schema = key.split('_');
context.columns[i].aggConfig.schema = schema[0];
if (schema[1] === 'row') {
context.columns[i].aggConfig.params.row = true;
}
});
});
}
const convertedData = await responseHandler(context);
const convertedData = await responseHandler(context, visConfigParams.dimensions);
return {
type: 'render',

View file

@ -17,8 +17,7 @@
* under the License.
*/
import { convertToGeoJson } from 'ui/vis/map/convert_to_geojson';
export function makeGeoJsonResponseHandler() {
return convertToGeoJson;
}
import { FunctionsRegistry } from '@kbn/interpreter/common';
export const functionsRegistry = new FunctionsRegistry();

View file

@ -17,10 +17,12 @@
* under the License.
*/
import { initializeInterpreter, loadBrowserRegistries, createSocket } from '@kbn/interpreter/public';
import chrome from 'ui/chrome';
import { populateBrowserRegistries, createSocket, initializeInterpreter } from '@kbn/interpreter/public';
import { typesRegistry, functionsRegistry } from '@kbn/interpreter/common';
import { functions } from './functions';
import { functionsRegistry } from './functions_registry';
import { typesRegistry } from './types_registry';
const basePath = chrome.getBasePath();
@ -35,7 +37,31 @@ function addFunction(fnDef) {
functions.forEach(addFunction);
createSocket(basePath).then(async () => {
await populateBrowserRegistries(types, basePath);
await initializeInterpreter();
});
let _resolve;
let _interpreterPromise;
const initialize = async () => {
await loadBrowserRegistries(types, basePath);
const socket = await createSocket(basePath, functionsRegistry);
initializeInterpreter(socket, typesRegistry, functionsRegistry).then(interpreter => {
_resolve({ interpreter, socket });
});
};
export const getInterpreter = async () => {
if (!_interpreterPromise) {
_interpreterPromise = new Promise(resolve => _resolve = resolve);
initialize();
}
return await _interpreterPromise;
};
export const interpretAst = async (...params) => {
const { interpreter } = await getInterpreter();
return await interpreter.interpretAst(...params);
};
export const updateInterpreterFunctions = async () => {
const { socket } = await getInterpreter();
socket.emit('updateFunctionList');
};
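Other plugin code can then treat the interpreter as lazily initialized, for example (a sketch; the surrounding async context and import of the helpers above are omitted):

// First call triggers getInterpreter() and the one-time socket/registry setup.
const result = await interpretAst(ast, context, handlers);

// After new browser-side functions are registered, tell the server to refresh its cached list.
await updateInterpreterFunctions();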

View file

@ -17,4 +17,7 @@
* under the License.
*/
export { PersistedLog } from './persisted_log';
import { TypesRegistry } from '@kbn/interpreter/common';
export const typesRegistry = new TypesRegistry();

View file

@ -22,36 +22,46 @@ import uuid from 'uuid/v4';
export const browser = ({ socket, serialize, deserialize }) => {
// Note that we need to be careful about how many times routeExpressionProvider is called, because of the socket.once below.
// It's too bad we can't get a list of browser plugins on the server
const getClientFunctions = new Promise(resolve => {
socket.emit('getFunctionList');
socket.once('functionList', resolve);
let getFunctionsPromise;
socket.on('updateFunctionList', () => {
getFunctionsPromise = undefined;
});
return getClientFunctions.then(functions => {
return {
interpret: (ast, context) => {
return new Promise((resolve, reject) => {
const id = uuid();
const listener = resp => {
if (resp.type === 'msgError') {
const { value } = resp;
// cast error strings back into error instances
const err = value instanceof Error ? value : new Error(value);
if (value.stack) err.stack = value.stack;
// Rejects with a legit error. Check! Environments should always reject with an error when something bad happens
reject(err);
} else {
resolve(deserialize(resp.value));
}
};
const getFunctions = async () => {
if (!getFunctionsPromise) {
getFunctionsPromise = new Promise(resolve => {
socket.once('functionList', resolve);
socket.emit('getFunctionList');
});
}
// {type: msgSuccess or msgError, value: foo}. Doesn't matter if it's success or error, we do the same thing for now
socket.once(`resp:${id}`, listener);
return Object.keys(await getFunctionsPromise);
};
socket.emit('run', { ast, context: serialize(context), id });
});
},
getFunctions: () => Object.keys(functions),
};
});
return {
interpret: (ast, context) => {
return new Promise(async (resolve, reject) => {
await getFunctions();
const id = uuid();
const listener = resp => {
if (resp.type === 'msgError') {
const { value } = resp;
// cast error strings back into error instances
const err = value instanceof Error ? value : new Error(value);
if (value.stack) err.stack = value.stack;
// Rejects with a legit error. Check! Environments should always reject with an error when something bad happens
reject(err);
} else {
resolve(deserialize(resp.value));
}
};
// {type: msgSuccess or msgError, value: foo}. Doesn't matter if it's success or error, we do the same thing for now
socket.once(`resp:${id}`, listener);
socket.emit('run', { ast, context: serialize(context), id });
});
}, getFunctions
};
};

View file

@ -23,8 +23,8 @@ export const routeExpressionProvider = environments => {
async function routeExpression(ast, context = null) {
// List of environments in order of preference
return Promise.all(environments).then(environments => {
const environmentFunctions = environments.map(env => env.getFunctions());
return Promise.all(environments).then(async environments => {
const environmentFunctions = await Promise.all(environments.map(env => env.getFunctions()));
// Grab name of the first function in the chain
const fnName = ast.chain[0].function.toLowerCase();

View file

@ -17,12 +17,11 @@
* under the License.
*/
import { getServerRegistries } from '@kbn/interpreter/server';
import { interpretProvider } from '@kbn/interpreter/common';
import { createHandlers } from '../create_handlers';
export const server = async ({ onFunctionNotFound, server, request }) => {
const { serverFunctions, types } = await getServerRegistries(['serverFunctions', 'types']);
const { serverFunctions, types } = server.plugins.interpreter;
return {
interpret: (ast, context) => {

View file

@ -19,10 +19,15 @@
import uuid from 'uuid/v4';
import { populateServerRegistries } from '@kbn/interpreter/server';
import { interpretProvider, serializeProvider } from '@kbn/interpreter/common';
import { interpretProvider, serializeProvider, FunctionsRegistry, TypesRegistry } from '@kbn/interpreter/common';
// We actually DO need populateServerRegistries here since this is a different node process
const pluginsReady = populateServerRegistries(['commonFunctions', 'types']);
const registries = {
commonFunctions: new FunctionsRegistry(),
types: new TypesRegistry(),
};
const pluginsReady = populateServerRegistries(registries);
const heap = {};
process.on('message', msg => {

View file

@ -18,8 +18,7 @@
*/
import socket from 'socket.io';
import { serializeProvider, typesRegistry } from '@kbn/interpreter/common';
import { getServerRegistries } from '@kbn/interpreter/server';
import { serializeProvider } from '@kbn/interpreter/common';
import { routeExpressionProvider } from '../lib/route_expression/index';
import { browser } from '../lib/route_expression/browser';
import { thread } from '../lib/route_expression/thread/index';
@ -54,7 +53,7 @@ export function socketApi(server) {
const request = await getModifiedRequest(server, socket);
if (!request) return; // do nothing without the request object
const types = typesRegistry.toJS();
const types = server.plugins.interpreter.types.toJS();
const { serialize, deserialize } = serializeProvider(types);
// I'd love to find a way to generalize all of these, but they each need a different set of things
@ -70,9 +69,7 @@ export function socketApi(server) {
}
socket.on('getFunctionList', () => {
getServerRegistries().then(({ serverFunctions }) =>
socket.emit('functionList', serverFunctions.toJS())
);
socket.emit('functionList', server.plugins.interpreter.serverFunctions.toJS());
});
socket.on('run', async ({ ast, context, id }) => {

View file

@ -51,7 +51,8 @@ export default function (kibana) {
return Joi.object({
enabled: Joi.boolean().default(true),
defaultAppId: Joi.string().default('home'),
index: Joi.string().default('.kibana')
index: Joi.string().default('.kibana'),
disableWelcomeScreen: Joi.boolean().default(false),
}).default();
},

View file

@ -66,6 +66,7 @@ export function injectVars(server) {
return {
kbnDefaultAppId: serverConfig.get('kibana.defaultAppId'),
disableWelcomeScreen: serverConfig.get('kibana.disableWelcomeScreen'),
regionmapsConfig: regionmap,
mapConfig: mapConfig,
tilemapsConfig: {

View file

@ -548,11 +548,7 @@ exports[`after fetch renders warning when listingLimit is exceeded 1`] = `
/>
</EuiLink>,
"listingLimitText": <strong>
<FormattedMessage
defaultMessage="listingLimit"
id="kbn.dashboard.listing.listingLimitExceededListingLimitTitle"
values={Object {}}
/>
listingLimit
</strong>,
"listingLimitValue": 1,
"totalDashboards": 2,

View file

@ -257,10 +257,7 @@ class DashboardListingUi extends React.Component {
listingLimitValue: this.props.listingLimit,
listingLimitText: (
<strong>
<FormattedMessage
id="kbn.dashboard.listing.listingLimitExceededListingLimitTitle"
defaultMessage="listingLimit"
/>
listingLimit
</strong>
),
advancedSettingsLink: (

View file

@ -65,6 +65,7 @@ import { tabifyAggResponse } from 'ui/agg_response/tabify';
import { showSaveModal } from 'ui/saved_objects/show_saved_object_save_modal';
import { SavedObjectSaveModal } from 'ui/saved_objects/components/saved_object_save_modal';
import { getRootBreadcrumbs, getSavedSearchBreadcrumbs } from '../breadcrumbs';
import { buildVislibDimensions } from 'ui/visualize/loader/pipeline_helpers/build_pipeline';
const app = uiModules.get('apps/discover', [
'kibana/notify',
@ -760,7 +761,7 @@ function discoverController(
const tabifiedData = tabifyAggResponse($scope.vis.aggs, merged);
$scope.searchSource.rawResponse = merged;
Promise
.resolve(responseHandler(tabifiedData))
.resolve(responseHandler(tabifiedData, buildVislibDimensions($scope.vis, $scope.timeRange)))
.then(resp => {
visualizeHandler.render({ value: resp });
});

View file

@ -23,6 +23,7 @@ import { Synopsis } from './synopsis';
import { AddData } from './add_data';
import { RecentlyAccessed, recentlyAccessedShape } from './recently_accessed';
import { FormattedMessage } from '@kbn/i18n/react';
import chrome from 'ui/chrome';
import {
EuiButton,
@ -46,7 +47,7 @@ export class Home extends Component {
constructor(props) {
super(props);
const isWelcomeEnabled = props.localStorage.getItem(KEY_ENABLE_WELCOME) !== 'false';
const isWelcomeEnabled = !(chrome.getInjected('disableWelcomeScreen') || props.localStorage.getItem(KEY_ENABLE_WELCOME) === 'false');
this.state = {
// If welcome is enabled, we wait for loading to complete

View file

@ -116,6 +116,7 @@ function VisEditor(
$route,
AppState,
$window,
$injector,
kbnUrl,
redirectWhenMissing,
Private,
@ -427,6 +428,7 @@ function VisEditor(
kbnUrl.change(dashboardParsedUrl.appPath);
} else if (savedVis.id === $route.current.params.id) {
docTitle.change(savedVis.lastSavedTitle);
chrome.breadcrumbs.set($injector.invoke(getEditBreadcrumbs));
} else {
kbnUrl.change(`${VisualizeConstants.EDIT_PATH}/{{id}}`, { id: savedVis.id });
}

View file

@ -54,10 +54,6 @@ uiRoutes
resolve: {
createNewVis: () => true,
},
})
// Old path, will be removed in 7.0
.when('/visualize/step/1', {
redirectTo: VisualizeConstants.WIZARD_STEP_1_PAGE_PATH,
});
FeatureCatalogueRegistryProvider.register(i18n => {

View file

@ -21,8 +21,6 @@ import moment from 'moment-timezone';
import numeralLanguages from '@elastic/numeral/languages';
import { i18n } from '@kbn/i18n';
import { IS_KIBANA_RELEASE } from '../../../utils';
export function getUiSettingDefaults() {
const weekdays = moment.weekdays().slice();
const [defaultWeekday] = weekdays;
@ -77,7 +75,7 @@ export function getUiSettingDefaults() {
name: i18n.translate('kbn.advancedSettings.k7designTitle', {
defaultMessage: 'Use the new K7 UI design',
}),
value: IS_KIBANA_RELEASE,
value: true,
description: i18n.translate('kbn.advancedSettings.k7designText', {
defaultMessage:
'When set, Kibana will use the new K7 design targeted for release in 7.0. At this time, not all features are implemented.',

View file

@ -46,6 +46,11 @@ describe('metric vis', () => {
aggs: [{ id: '1', type: 'top_hits', schema: 'metric', params: { field: 'ip' } }],
});
vis.params.metric.metrics = [{ accessor: 0, format: { id: 'url', params: {
urlTemplate: 'http://ip.info?address={{value}}',
labelTemplate: 'ip[{{value}}]'
} } }];
const el = document.createElement('div');
const Controller = metricVisType.visualization;
const controller = new Controller(el, vis);
@ -62,7 +67,7 @@ describe('metric vis', () => {
const ip = '235.195.237.208';
render({
columns: [{ id: 'col-0', title: 'ip', aggConfig: vis.aggs[0] }],
columns: [{ id: 'col-0', name: 'ip' }],
rows: [{ 'col-0': ip }]
});

View file

@ -29,22 +29,13 @@ describe('metric vis controller', function () {
colorsRange: [
{ from: 0, to: 1000 }
],
style: {}
style: {},
bucket: null,
metrics: [{ accessor: 0 }]
}
}
};
const formatter = function (value) {
return value.toFixed(3);
};
const aggConfig = {
fieldFormatter: () => {
return formatter;
},
type: {}
};
let metricController;
beforeEach(() => {
@ -53,28 +44,29 @@ describe('metric vis controller', function () {
it('should set the metric label and value', function () {
const metrics = metricController._processTableGroups({
columns: [{ id: 'col-0', title: 'Count', aggConfig: { ...aggConfig, makeLabel: () => 'Count' } }],
columns: [{ id: 'col-0', name: 'Count' }],
rows: [{ 'col-0': 4301021 }]
});
expect(metrics.length).to.be(1);
expect(metrics[0].label).to.be('Count');
expect(metrics[0].value).to.be('4301021.000');
expect(metrics[0].value).to.be(4301021);
});
it('should support multi-value metrics', function () {
vis.params.metric.metrics.push({ accessor: 1 });
const metrics = metricController._processTableGroups({
columns: [
{ id: 'col-0', aggConfig: { ...aggConfig, makeLabel: () => '1st percentile of bytes' } },
{ id: 'col-1', aggConfig: { ...aggConfig, makeLabel: () => '99th percentile of bytes' } }
{ id: 'col-0', name: '1st percentile of bytes' },
{ id: 'col-1', name: '99th percentile of bytes' }
],
rows: [{ 'col-0': 182, 'col-1': 445842.4634666484 }]
});
expect(metrics.length).to.be(2);
expect(metrics[0].label).to.be('1st percentile of bytes');
expect(metrics[0].value).to.be('182.000');
expect(metrics[0].value).to.be(182);
expect(metrics[1].label).to.be('99th percentile of bytes');
expect(metrics[1].value).to.be('445842.463');
expect(metrics[1].value).to.be(445842.4634666484);
});
});

View file

@ -20,6 +20,7 @@
import _ from 'lodash';
import React, { Component } from 'react';
import { getHeatmapColors } from 'ui/vislib/components/color/heatmap_color';
import { getFormat } from 'ui/visualize/loader/pipeline_helpers/utilities';
import { isColorDark } from '@elastic/eui';
import { MetricVisValue } from './components/metric_vis_value';
@ -84,10 +85,10 @@ export class MetricVisComponent extends Component {
return isColorDark(parseInt(color[1]), parseInt(color[2]), parseInt(color[3]));
}
_getFormattedValue(fieldFormatter, value) {
_getFormattedValue = (fieldFormatter, value, format = 'text') => {
if (_.isNaN(value)) return '-';
return fieldFormatter(value);
}
return fieldFormatter.convert(value, format);
};
_processTableGroups(table) {
const config = this.props.vis.params.metric;
@ -98,22 +99,18 @@ export class MetricVisComponent extends Component {
const labels = this._getLabels();
const metrics = [];
let bucketAgg;
let bucketColumnId;
let rowHeaderIndex;
let bucketFormatter;
table.columns.forEach((column, columnIndex) => {
const aggConfig = column.aggConfig;
if (aggConfig && aggConfig.type.type === 'buckets') {
bucketAgg = aggConfig;
// Store the current index, so we later know in which position in the
// row array, the bucket agg key will be, so we can create filters on it.
rowHeaderIndex = columnIndex;
bucketColumnId = column.id;
return;
}
if (config.bucket) {
bucketColumnId = table.columns[config.bucket.accessor].id;
bucketFormatter = getFormat(config.bucket.format);
}
config.metrics.forEach(metric => {
const columnIndex = metric.accessor;
const column = table.columns[columnIndex];
const formatter = getFormat(metric.format);
table.rows.forEach((row, rowIndex) => {
let title = column.name;
@ -123,16 +120,13 @@ export class MetricVisComponent extends Component {
if (isPercentageMode) {
const percentage = Math.round(100 * (value - min) / (max - min));
value = `${percentage}%`;
} else {
value = this._getFormattedValue(formatter, value, 'html');
}
if (aggConfig) {
if (!isPercentageMode) value = this._getFormattedValue(aggConfig.fieldFormatter('html'), value);
if (bucketAgg) {
const bucketValue = bucketAgg.fieldFormatter('text')(row[bucketColumnId]);
title = `${bucketValue} - ${aggConfig.makeLabel()}`;
} else {
title = aggConfig.makeLabel();
}
if (bucketColumnId) {
const bucketValue = this._getFormattedValue(bucketFormatter, row[bucketColumnId]);
title = `${bucketValue} - ${title}`;
}
const shouldColor = config.colorsRange.length > 1;
@ -143,10 +137,7 @@ export class MetricVisComponent extends Component {
color: shouldColor && config.style.labelColor ? color : null,
bgColor: shouldColor && config.style.bgColor ? color : null,
lightText: shouldColor && config.style.bgColor && this._needsLightText(color),
filterKey: bucketColumnId !== undefined ? row[bucketColumnId] : null,
rowIndex: rowIndex,
columnIndex: rowHeaderIndex,
bucketAgg: bucketAgg,
});
});
});
@ -155,11 +146,12 @@ export class MetricVisComponent extends Component {
}
_filterBucket = (metric) => {
if (!metric.filterKey || !metric.bucketAgg) {
const config = this.props.vis.params.metric;
if (!config.bucket) {
return;
}
const table = this.props.visData;
this.props.vis.API.events.filter({ table, column: metric.columnIndex, row: metric.rowIndex });
this.props.vis.API.events.filter({ table, column: config.bucket.accessor, row: metric.rowIndex });
};
_renderMetric = (metric, index) => {

View file

@ -129,6 +129,13 @@ describe('RegionMapsVisualizationTests', function () {
type: 'region_map'
});
vis.params.bucket = {
accessor: 0,
};
vis.params.metric = {
accessor: 1,
};
vis.params.selectedJoinField = { 'name': 'iso2', 'description': 'Two letter abbreviation' };
vis.params.selectedLayer = {
'attribution': '<p><a href="http://www.naturalearthdata.com/about/terms-of-use">Made with NaturalEarth</a> | <a href="https://www.elastic.co/elastic-maps-service">Elastic Maps Service</a></p>&#10;',

View file

@ -241,7 +241,7 @@ CORS configuration of the server permits requests from the Kibana application on
return this._layerName;
}
setTooltipFormatter(tooltipFormatter, metricsAgg, fieldName) {
setTooltipFormatter(tooltipFormatter, fieldFormatter, fieldName, metricLabel) {
this._tooltipFormatter = (geojsonFeature) => {
if (!this._metrics) {
return '';
@ -249,7 +249,7 @@ CORS configuration of the server permits requests from the Kibana application on
const match = this._metrics.find((bucket) => {
return compareLexicographically(bucket.term, geojsonFeature.properties[this._joinField]) === 0;
});
return tooltipFormatter(metricsAgg, match, fieldName);
return tooltipFormatter(match, fieldFormatter, fieldName, metricLabel);
};
}
@ -268,8 +268,8 @@ CORS configuration of the server permits requests from the Kibana application on
clonedLayer.setColorRamp(this._colorRamp);
clonedLayer.setLineWeight(this._lineWeight);
clonedLayer.setTooltipFormatter(this._tooltipFormatter);
if (this._metrics && this._metricsAgg) {
clonedLayer.setMetrics(this._metrics, this._metricsAgg);
if (this._metrics) {
clonedLayer.setMetrics(this._metrics, this._valueFormatter, this._metricTitle);
}
return clonedLayer;
}
@ -289,11 +289,10 @@ CORS configuration of the server permits requests from the Kibana application on
return this._whenDataLoaded;
}
setMetrics(metrics, metricsAgg) {
setMetrics(metrics, fieldFormatter, metricTitle) {
this._metrics = metrics.slice();
this._metricsAgg = metricsAgg;
this._valueFormatter = this._metricsAgg.fieldFormatter();
this._valueFormatter = fieldFormatter;
this._metricTitle = metricTitle;
this._metrics.sort((a, b) => compareLexicographically(a.term, b.term));
this._invalidateJoin();
@ -346,11 +345,11 @@ CORS configuration of the server permits requests from the Kibana application on
appendLegendContents(jqueryDiv) {
if (!this._legendColors || !this._legendQuantizer || !this._metricsAgg) {
if (!this._legendColors || !this._legendQuantizer) {
return;
}
const titleText = this._metricsAgg.makeLabel();
const titleText = this._metricTitle;
const $title = $('<div>').addClass('visMapLegend__title').text(titleText);
jqueryDiv.append($title);
@ -358,7 +357,9 @@ CORS configuration of the server permits requests from the Kibana application on
const labelText = this._legendQuantizer
.invertExtent(color)
.map(this._valueFormatter)
.map(val => {
return this._valueFormatter.convert(val);
})
.join('  ');
const label = $('<div>');
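A hedged sketch (not part of this change) of why the legend mapping above now calls convert(): the value formatter handed to setMetrics() is a FieldFormat instance obtained via getFormat() rather than the plain function previously returned by aggConfig.fieldFormatter(). The format descriptor and sample value below are illustrative.
import { getFormat } from 'ui/visualize/loader/pipeline_helpers/utilities';
// Assumed descriptor; real descriptors come from vis.params.metric.format.
const valueFormatter = getFormat({ id: 'number' });
const legendTick = valueFormatter.convert(1234.5);             // default content type
const legendTickText = valueFormatter.convert(1234.5, 'text'); // explicit 'text', as in the tooltip code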

View file

@ -18,17 +18,17 @@
*/
import 'plugins/kbn_vislib_vis_types/controls/vislib_basic_options';
import _ from 'lodash';
import { BaseMapsVisualizationProvider } from '../../tile_map/public/base_maps_visualization';
import ChoroplethLayer from './choropleth_layer';
import { truncatedColorMaps } from 'ui/vislib/components/color/truncated_colormaps';
import AggResponsePointSeriesTooltipFormatterProvider from './tooltip_formatter';
import { getFormat } from 'ui/visualize/loader/pipeline_helpers/utilities';
import { TileMapTooltipFormatter } from './tooltip_formatter';
import 'ui/vis/map/service_settings';
import { toastNotifications } from 'ui/notify';
export function RegionMapsVisualizationProvider(Private, config, i18n) {
const tooltipFormatter = Private(AggResponsePointSeriesTooltipFormatterProvider);
const tooltipFormatter = Private(TileMapTooltipFormatter);
const BaseMapsVisualization = Private(BaseMapsVisualizationProvider);
return class RegionMapsVisualization extends BaseMapsVisualization {
@ -48,15 +48,15 @@ export function RegionMapsVisualizationProvider(Private, config, i18n) {
async _updateData(table) {
this._chartData = table;
const termColumn = this._vis.params.bucket ? table.columns[this._vis.params.bucket.accessor] : null;
const valueColumn = table.columns[this._vis.params.metric.accessor];
let results;
if (!table || !table.rows.length || table.columns.length !== 2) {
if (!this._hasColumns() || !table.rows.length) {
results = [];
} else {
const termColumn = table.columns[0].id;
const valueColumn = table.columns[1].id;
results = table.rows.map(row => {
const term = row[termColumn];
const value = row[valueColumn];
const term = row[termColumn.id];
const value = row[valueColumn.id];
return { term: term, value: value };
});
}
@ -75,9 +75,11 @@ export function RegionMapsVisualizationProvider(Private, config, i18n) {
this._vis.params.showAllShapes,
results
);
const metricsAgg = _.first(this._vis.getAggConfig().bySchemaName.metric);
this._choroplethLayer.setMetrics(results, metricsAgg);
this._setTooltipFormatter();
const metricFieldFormatter = getFormat(this._vis.params.metric.format);
this._choroplethLayer.setMetrics(results, metricFieldFormatter, valueColumn.name);
this._choroplethLayer.setTooltipFormatter(tooltipFormatter, metricFieldFormatter, termColumn.name, valueColumn.name);
this._kibanaMap.useUiStateFromVisualization(this._vis);
}
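A hedged sketch of the dimension params the region map visualization now reads instead of agg configs; the shape is inferred from the test fixtures elsewhere in this change, and the metric format id is illustrative.
// bucket and metric point at table columns by accessor index.
vis.params.bucket = { accessor: 0 };                            // term column (optional)
vis.params.metric = { accessor: 1, format: { id: 'number' } };  // value column
// getFormat(vis.params.metric.format) then yields the FieldFormat used for tooltips
// and the legend, while valueColumn.name supplies the metric label.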
@ -100,10 +102,13 @@ export function RegionMapsVisualizationProvider(Private, config, i18n) {
visParams.selectedLayer.attribution,
this._vis.params.showAllShapes
);
const metricFieldFormatter = getFormat(this._vis.params.metric.format);
this._choroplethLayer.setJoinField(visParams.selectedJoinField.name);
this._choroplethLayer.setColorRamp(truncatedColorMaps[visParams.colorSchema].value);
this._choroplethLayer.setLineWeight(visParams.outlineWeight);
this._setTooltipFormatter();
this._choroplethLayer.setTooltipFormatter(tooltipFormatter, metricFieldFormatter, this._metricLabel);
}
@ -147,14 +152,6 @@ export function RegionMapsVisualizationProvider(Private, config, i18n) {
}
this._choroplethLayer.on('select', (event) => {
if (!this._isAggReady()) {
//even though we have maps data available and have added the choropleth layer to the map
//the aggregation may not be available yet
return;
}
const rowIndex = this._chartData.rows.findIndex(row => row[0] === event);
this._vis.API.events.filter({ table: this._chartData, column: 0, row: rowIndex, value: event });
});
@ -185,19 +182,8 @@ export function RegionMapsVisualizationProvider(Private, config, i18n) {
}
_isAggReady() {
return this._vis.getAggConfig().bySchemaName.segment && this._vis.getAggConfig().bySchemaName.segment[0];
}
_setTooltipFormatter() {
const metricsAgg = _.first(this._vis.getAggConfig().bySchemaName.metric);
if (this._isAggReady()) {
const fieldName = this._vis.getAggConfig().bySchemaName.segment[0].makeLabel();
this._choroplethLayer.setTooltipFormatter(tooltipFormatter, metricsAgg, fieldName);
} else {
this._choroplethLayer.setTooltipFormatter(tooltipFormatter, metricsAgg, null);
}
_hasColumns() {
return this._chartData && this._chartData.columns.length === 2;
}
};

View file

@ -18,13 +18,13 @@
*/
import $ from 'jquery';
export default function TileMapTooltipFormatter($compile, $rootScope) {
export const TileMapTooltipFormatter = ($compile, $rootScope) => {
const $tooltipScope = $rootScope.$new();
const $el = $('<div>').html(require('./tooltip.html'));
$compile($el)($tooltipScope);
return function tooltipFormatter(metricAgg, metric, fieldName) {
return function tooltipFormatter(metric, fieldFormatter, fieldName, metricName) {
if (!metric) {
return '';
@ -34,18 +34,18 @@ export default function TileMapTooltipFormatter($compile, $rootScope) {
if (fieldName && metric) {
$tooltipScope.details.push({
label: fieldName,
value: metric.term
value: metric.term,
});
}
if (metric) {
$tooltipScope.details.push({
label: metricAgg.makeLabel(),
value: metricAgg.fieldFormatter()(metric.value)
label: metricName,
value: fieldFormatter ? fieldFormatter.convert(metric.value, 'text') : metric.value,
});
}
$tooltipScope.$apply();
return $el.html();
};
}
};
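A hedged usage sketch of the reworked formatter signature; Private and metricFieldFormatter are assumed to be in scope, and the sample term, value, and labels are illustrative (borrowed from test data in this change).
const tooltipFormatter = Private(TileMapTooltipFormatter);
const html = tooltipFormatter(
  { term: 'CN', value: 9299 },    // bucket matched for the hovered geojson feature
  metricFieldFormatter,           // FieldFormat from getFormat(); may be null
  'geo.src: Descending',          // fieldName: the term column title
  'Average bytes'                 // metricName: the value column title
);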

View file

@ -1,154 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import expect from 'expect.js';
import sinon from 'sinon';
import ngMock from 'ng_mock';
import { AggConfig } from '../../../../../ui/public/vis/agg_config';
import AggConfigResult from '../../../../../ui/public/vis/agg_config_result';
import { VisProvider } from '../../../../../ui/public/vis';
import FixturesStubbedLogstashIndexPatternProvider from 'fixtures/stubbed_logstash_index_pattern';
import { splitRowsOnColumn, splitTable, legacyTableResponseHandler } from '../legacy_response_handler';
const rows = [
{ 'col-0-2': 'A', 'col-1-3': 100, 'col-2-1': 'Jim' },
{ 'col-0-2': 'A', 'col-1-3': 0, 'col-2-1': 'Dwight' },
{ 'col-0-2': 'B', 'col-1-3': 24, 'col-2-1': 'Angela' },
{ 'col-0-2': 'C', 'col-1-3': 1, 'col-2-1': 'Angela' },
{ 'col-0-2': 'C', 'col-1-3': 7, 'col-2-1': 'Angela' },
{ 'col-0-2': 'C', 'col-1-3': -30, 'col-2-1': 'Jim' },
];
describe('Table Vis Legacy Response Handler', () => {
let Vis;
let indexPattern;
let columns;
let mockAggConfig;
let mockSplitAggConfig;
beforeEach(ngMock.module('kibana'));
beforeEach(ngMock.inject(function (Private) {
Vis = Private(VisProvider);
indexPattern = Private(FixturesStubbedLogstashIndexPatternProvider);
const vis = new Vis(indexPattern, { type: 'table', aggs: [] });
mockAggConfig = new AggConfig(vis.aggs, { type: 'terms', schema: 'metric' });
mockSplitAggConfig = new AggConfig(vis.aggs, { type: 'terms', schema: 'split' });
sinon.stub(mockSplitAggConfig, 'fieldFormatter').returns(val => val);
sinon.stub(mockSplitAggConfig, 'makeLabel').returns('some label');
columns = [
{ id: 'col-0-2', name: 'Team', aggConfig: mockSplitAggConfig },
{ id: 'col-1-3', name: 'Score', aggConfig: mockAggConfig },
{ id: 'col-2-1', name: 'Leader', aggConfig: mockAggConfig },
];
}));
describe('#splitRowsOnColumn', () => {
it('should be a function', () => {
expect(typeof splitRowsOnColumn).to.be('function');
});
it('.results should return an array with each unique value for the column id', () => {
const expected = ['A', 'B', 'C'];
const actual = splitRowsOnColumn(rows, 'col-0-2');
expect(actual.results).to.eql(expected);
});
it('.results should preserve types in case a result is not a string', () => {
const expected = [0, 1, 7, 24, 100, -30];
const actual = splitRowsOnColumn(rows, 'col-1-3');
expect(actual.results).to.eql(expected);
actual.results.forEach(result => expect(typeof result).to.eql('number'));
});
it('.rowsGroupedByResult should return an object with rows grouped by value for the column id', () => {
const expected = {
A: [
{ 'col-1-3': 100, 'col-2-1': 'Jim' },
{ 'col-1-3': 0, 'col-2-1': 'Dwight' },
],
B: [
{ 'col-1-3': 24, 'col-2-1': 'Angela' },
],
C: [
{ 'col-1-3': 1, 'col-2-1': 'Angela' },
{ 'col-1-3': 7, 'col-2-1': 'Angela' },
{ 'col-1-3': -30, 'col-2-1': 'Jim' },
],
};
const actual = splitRowsOnColumn(rows, 'col-0-2');
expect(actual.rowsGroupedByResult).to.eql(expected);
});
});
describe('#splitTable', () => {
it('should be a function', () => {
expect(typeof splitTable).to.be('function');
});
it('should return an array of objects with the expected keys', () => {
const expected = ['$parent', 'aggConfig', 'title', 'key', 'tables'];
const actual = splitTable(columns, rows, null);
expect(Object.keys(actual[0])).to.eql(expected);
});
it('should return a reference to the parent AggConfigResult', () => {
const actual = splitTable(columns, rows, null);
expect(actual[0].$parent).to.be.a(AggConfigResult);
});
it('should return the correct split values', () => {
const expected = ['A', 'B', 'C'];
const actual = splitTable(columns, rows, null);
expect(actual.map(i => i.key)).to.eql(expected);
});
it('should return the correct titles', () => {
const expected = ['A: some label', 'B: some label', 'C: some label'];
const actual = splitTable(columns, rows, null);
expect(actual.map(i => i.title)).to.eql(expected);
});
it('should return nested split tables with the correct number of entries', () => {
const expected = [2, 1, 3];
const actual = splitTable(columns, rows, null);
expect(actual.map(i => i.tables[0].rows.length)).to.eql(expected);
});
it('should return nested split tables with rows of the correct type', () => {
const actual = splitTable(columns, rows, null);
expect(actual[0].tables[0].rows[0][0]).to.be.a(AggConfigResult);
});
});
describe('#legacyTableResponseHandler', () => {
it('should be a function', () => {
expect(typeof legacyTableResponseHandler).to.be('function');
});
it('should return the correct number of tables', async () => {
const actual = await legacyTableResponseHandler({ columns, rows });
expect(actual.tables).to.have.length(3);
});
});
});

View file

@ -73,6 +73,15 @@ describe('Table Vis Controller', function () {
);
}
const dimensions = {
buckets: [{
accessor: 0,
}], metrics: [{
accessor: 1,
format: { id: 'range' },
}]
};
// basically a parameterized beforeEach
function initController(vis) {
vis.aggs.forEach(function (agg, i) { agg.id = 'agg_' + (i + 1); });
@ -110,7 +119,7 @@ describe('Table Vis Controller', function () {
expect(!$scope.tableGroups).to.be.ok();
expect(!$scope.hasSomeRows).to.be.ok();
attachEsResponseToScope(await tableAggResponse(tabifiedResponse));
attachEsResponseToScope(await tableAggResponse(tabifiedResponse, dimensions));
expect($scope.hasSomeRows).to.be(true);
expect($scope.tableGroups).to.have.property('tables');
@ -123,7 +132,7 @@ describe('Table Vis Controller', function () {
const vis = new OneRangeVis();
initController(vis);
attachEsResponseToScope(await tableAggResponse(tabifiedResponse));
attachEsResponseToScope(await tableAggResponse(tabifiedResponse, dimensions));
removeEsResponseFromScope();
expect(!$scope.hasSomeRows).to.be.ok();
@ -138,7 +147,7 @@ describe('Table Vis Controller', function () {
const vis = new OneRangeVis({ sort: sortObj });
initController(vis);
attachEsResponseToScope(await tableAggResponse(tabifiedResponse));
attachEsResponseToScope(await tableAggResponse(tabifiedResponse, dimensions));
expect($scope.sort.columnIndex).to.equal(sortObj.columnIndex);
expect($scope.sort.direction).to.equal(sortObj.direction);
@ -150,7 +159,7 @@ describe('Table Vis Controller', function () {
tabifiedResponse.rows = [];
attachEsResponseToScope(await tableAggResponse(tabifiedResponse));
attachEsResponseToScope(await tableAggResponse(tabifiedResponse, dimensions));
expect($scope.hasSomeRows).to.be(false);
expect(!$scope.tableGroups).to.be.ok();

View file

@ -18,7 +18,6 @@
*/
import './_table_vis_controller';
import './_legacy_response_handler';
describe('Table Vis', function () {
});

View file

@ -1,105 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { get, findLastIndex } from 'lodash';
import AggConfigResult from 'ui/vis/agg_config_result';
/**
* Takes an array of tabified rows and splits them by column value:
*
* const rows = [
* { col-1: 'foo', col-2: 'X' },
* { col-1: 'bar', col-2: 50 },
* { col-1: 'baz', col-2: 'X' },
* ];
* const splitRows = splitRowsOnColumn(rows, 'col-2');
* splitRows.results; // ['X', 50];
* splitRows.rowsGroupedByResult; // { X: [{ col-1: 'foo' }, { col-1: 'baz' }], 50: [{ col-1: 'bar' }] }
*/
export function splitRowsOnColumn(rows, columnId) {
const resultsMap = {}; // Used to preserve types, since object keys are always converted to strings.
return {
rowsGroupedByResult: rows.reduce((acc, row) => {
const { [columnId]: splitValue, ...rest } = row;
resultsMap[splitValue] = splitValue;
acc[splitValue] = [...(acc[splitValue] || []), rest];
return acc;
}, {}),
results: Object.values(resultsMap),
};
}
export function splitTable(columns, rows, $parent) {
const splitColumn = columns.find(column => get(column, 'aggConfig.schema.name') === 'split');
if (!splitColumn) {
return [{
$parent,
columns: columns.map(column => ({ title: column.name, ...column })),
rows: rows.map((row, rowIndex) => {
return columns.map(column => {
const aggConfigResult = new AggConfigResult(column.aggConfig, $parent, row[column.id], row[column.id]);
aggConfigResult.rawData = {
table: { columns, rows },
column: columns.findIndex(c => c.id === column.id),
row: rowIndex,
};
return aggConfigResult;
});
})
}];
}
const splitColumnIndex = columns.findIndex(column => column.id === splitColumn.id);
const splitRows = splitRowsOnColumn(rows, splitColumn.id);
// Check if there are buckets after the first metric.
const firstMetricsColumnIndex = columns.findIndex(column => get(column, 'aggConfig.type.type') === 'metrics');
const lastBucketsColumnIndex = findLastIndex(columns, column => get(column, 'aggConfig.type.type') === 'buckets');
const metricsAtAllLevels = firstMetricsColumnIndex < lastBucketsColumnIndex;
// Calculate metrics:bucket ratio.
const numberOfMetrics = columns.filter(column => get(column, 'aggConfig.type.type') === 'metrics').length;
const numberOfBuckets = columns.filter(column => get(column, 'aggConfig.type.type') === 'buckets').length;
const metricsPerBucket = numberOfMetrics / numberOfBuckets;
const filteredColumns = columns
.filter((column, i) => {
const isSplitColumn = i === splitColumnIndex;
const isSplitMetric = metricsAtAllLevels && i > splitColumnIndex && i <= splitColumnIndex + metricsPerBucket;
return !isSplitColumn && !isSplitMetric;
})
.map(column => ({ title: column.name, ...column }));
return splitRows.results.map(splitValue => {
const $newParent = new AggConfigResult(splitColumn.aggConfig, $parent, splitValue, splitValue);
return {
$parent: $newParent,
aggConfig: splitColumn.aggConfig,
title: `${splitColumn.aggConfig.fieldFormatter()(splitValue)}: ${splitColumn.aggConfig.makeLabel()}`,
key: splitValue,
// Recurse with filtered data to continue the search for additional split columns.
tables: splitTable(filteredColumns, splitRows.rowsGroupedByResult[splitValue], $newParent),
};
});
}
export async function legacyTableResponseHandler(table) {
return { tables: splitTable(table.columns, table.rows, null) };
}

View file

@ -9,6 +9,8 @@
<div ng-if="tableGroups" class="table-vis-container" data-test-subj="tableVis">
<kbn-agg-table-group
filter="vis.API.events.filter"
dimensions="dimensions"
group="tableGroups"
export-title="visState.title"
per-page="visState.params.perPage"

View file

@ -26,7 +26,7 @@ import { VisFactoryProvider } from 'ui/vis/vis_factory';
import { Schemas } from 'ui/vis/editors/default/schemas';
import tableVisTemplate from './table_vis.html';
import { VisTypesRegistryProvider } from 'ui/registry/vis_types';
import { legacyTableResponseHandler } from './legacy_response_handler';
import { VisFiltersProvider } from 'ui/vis/vis_filters';
// we need to load the css ourselves
@ -42,6 +42,7 @@ VisTypesRegistryProvider.register(TableVisTypeProvider);
// define the TableVisType
function TableVisTypeProvider(Private) {
const VisFactory = Private(VisFactoryProvider);
const visFilters = Private(VisFiltersProvider);
// define the TableVisController which is used in the template
// by angular's ng-controller directive
@ -105,9 +106,10 @@ function TableVisTypeProvider(Private) {
}
])
},
responseHandler: legacyTableResponseHandler,
responseHandlerConfig: {
asAggConfigResults: true
events: {
filterBucket: {
defaultAction: visFilters.filter,
},
},
hierarchicalData: function (vis) {
return Boolean(vis.params.showPartialRows || vis.params.showMetricsAtAllLevels);

View file

@ -56,6 +56,7 @@ module.controller('KbnTableVisController', function ($scope) {
$scope.hasSomeRows = hasSomeRows;
if (hasSomeRows) {
$scope.dimensions = $scope.vis.params.dimensions;
$scope.tableGroups = tableGroups;
}
$scope.renderComplete();

View file

@ -22,6 +22,7 @@ import * as Rx from 'rxjs';
import { take } from 'rxjs/operators';
import { render, unmountComponentAtNode } from 'react-dom';
import React from 'react';
import { getFormat } from 'ui/visualize/loader/pipeline_helpers/utilities';
import { I18nProvider } from '@kbn/i18n/react';
import { Label } from './label';
@ -40,11 +41,10 @@ export class TagCloudVisualization {
this._containerNode.appendChild(cloudContainer);
this._vis = vis;
this._bucketAgg = null;
this._truncated = false;
this._tagCloud = new TagCloud(cloudContainer);
this._tagCloud.on('select', (event) => {
if (!this._bucketAgg) {
if (!this._vis.params.bucket) {
return;
}
this._vis.API.events.filter({
@ -84,16 +84,16 @@ export class TagCloudVisualization {
await this._renderComplete$.pipe(take(1)).toPromise();
const hasAggDefined = this._vis.aggs[0] && this._vis.aggs[1];
if (!hasAggDefined) {
if (data.columns.length !== 2) {
this._feedbackMessage.current.setState({
shouldShowTruncate: false,
shouldShowIncomplete: false
});
return;
}
this._label.current.setState({
label: `${this._vis.aggs[0].makeLabel()} - ${this._vis.aggs[1].makeLabel()}`,
label: `${data.columns[0].name} - ${data.columns[1].name}`,
shouldShowLabel: this._vis.params.showLabel
});
this._feedbackMessage.current.setState({
@ -107,7 +107,6 @@ export class TagCloudVisualization {
this._tagCloud.destroy();
unmountComponentAtNode(this._feedbackNode);
unmountComponentAtNode(this._labelNode);
}
_updateData(data) {
@ -116,21 +115,15 @@ export class TagCloudVisualization {
return;
}
const segmentAggs = this._vis.aggs.bySchemaName.segment;
if (segmentAggs && segmentAggs.length > 0) {
this._bucketAgg = segmentAggs[0];
} else {
this._bucketAgg = null;
}
const bucketFormatter = this._vis.params.bucket ? getFormat(this._vis.params.bucket.format) : null;
const hasTags = data.columns.length === 2;
const tagColumn = hasTags ? data.columns[0].id : -1;
const metricColumn = data.columns[hasTags ? 1 : 0].id;
const tags = data.rows.map((row, rowIndex) => {
const tag = row[tagColumn] || 'all';
const tag = row[tagColumn] === undefined ? 'all' : row[tagColumn];
const metric = row[metricColumn];
return {
displayText: this._bucketAgg ? this._bucketAgg.fieldFormatter()(tag) : tag,
displayText: bucketFormatter ? bucketFormatter.convert(tag, 'text') : tag,
rawText: tag,
value: metric,
meta: {

View file

@ -26,7 +26,6 @@ import { CoordinateMapsVisualizationProvider } from './coordinate_maps_visualiza
import { Schemas } from 'ui/vis/editors/default/schemas';
import { VisTypesRegistryProvider } from 'ui/registry/vis_types';
import { Status } from 'ui/vis/update_status';
import { makeGeoJsonResponseHandler } from './coordinatemap_response_handler';
import { truncatedColorMaps } from 'ui/vislib/components/color/truncated_colormaps';
VisTypesRegistryProvider.register(function TileMapVisType(Private, getAppState, courier, config) {
@ -58,7 +57,6 @@ VisTypesRegistryProvider.register(function TileMapVisType(Private, getAppState,
}
},
requiresUpdateStatus: [Status.AGGS, Status.PARAMS, Status.RESIZE, Status.UI_STATE],
responseHandler: makeGeoJsonResponseHandler(),
requiresPartialRows: true,
visualization: CoordinateMapsVisualization,
editorConfig: {

View file

@ -32,7 +32,7 @@ function unflatten(data) {
let prop = '';
let m;
while (m = regex.exec(p)) {
cur = cur[prop] || (cur[prop] = (m[2] ? [] : {}));
cur = (cur.hasOwnProperty(prop) && cur[prop]) || (cur[prop] = (m[2] ? [] : {}));
prop = m[2] || m[1];
}
cur[prop] = data[p];
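A hedged note on the guard added above: without hasOwnProperty, a flattened key segment that collides with an Object.prototype member resolves to the inherited value. The key name below is illustrative.
// Old: cur = cur[prop] || (cur[prop] = ...);
// For prop === 'constructor', ({}).constructor is Object via the prototype chain,
// which is truthy, so cur would become that inherited function instead of a fresh
// object and the following writes would land on the wrong target.
// New: (cur.hasOwnProperty(prop) && cur[prop]) only reuses own properties, so
// unflatten({ 'constructor.foo': 1 }) builds { constructor: { foo: 1 } } as intended.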

View file

@ -19,6 +19,7 @@
import good from '@elastic/good';
import loggingConfiguration from './configuration';
import { logWithMetadata } from './log_with_metadata';
export async function setupLogging(server, config) {
return await server.register({
@ -28,5 +29,6 @@ export async function setupLogging(server, config) {
}
export async function loggingMixin(kbnServer, server, config) {
logWithMetadata.decorateServer(server);
return await setupLogging(server, config);
}

View file

@ -26,6 +26,7 @@ import stringify from 'json-stringify-safe';
import querystring from 'querystring';
import applyFiltersToKeys from './apply_filters_to_keys';
import { inspect } from 'util';
import { logWithMetadata } from './log_with_metadata';
function serializeError(err = {}) {
return {
@ -158,6 +159,9 @@ export default class TransformObjStream extends Stream.Transform {
const message = get(event, 'error.message');
data.message = message || 'Unknown error object (no message)';
}
else if (logWithMetadata.isLogEvent(event.data)) {
_.assign(data, logWithMetadata.getLogEventData(event.data));
}
else if (_.isPlainObject(event.data) && event.data.tmpl) {
_.assign(data, event.data);
data.tmpl = undefined;

View file

@ -0,0 +1,45 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { isPlainObject } from 'lodash';
const symbol = Symbol('log message with metadata');
export const logWithMetadata = {
isLogEvent(eventData) {
return Boolean(isPlainObject(eventData) && eventData[symbol]);
},
getLogEventData(eventData) {
const { message, metadata } = eventData[symbol];
return {
...metadata,
message
};
},
decorateServer(server) {
server.decorate('server', 'logWithMetadata', (tags, message, metadata = {}) => {
server.log(tags, {
[symbol]: {
message,
metadata,
},
});
});
},
};
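A hedged usage sketch of the new decoration; the tag list and metadata fields below are illustrative, not part of this change.
// After loggingMixin runs, any code with a server reference can call:
server.logWithMetadata(['info', 'my_plugin'], 'warm-up finished', {
  durationMs: 1230,
  cacheEntries: 42,
});
// Internally this is server.log(tags, { [symbol]: { message, metadata } });
// TransformObjStream recognizes the payload via logWithMetadata.isLogEvent()
// and spreads the metadata onto the JSON log record next to the message.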

View file

@ -23,6 +23,7 @@ import { registerStatusPage, registerStatusApi, registerStatsApi } from './route
import { getOpsStatsCollector } from './collectors';
import Oppsy from 'oppsy';
import { cloneDeep } from 'lodash';
import { getOSInfo } from './lib/get_os_info';
export function statusMixin(kbnServer, server, config) {
kbnServer.status = new ServerStatus(kbnServer.server);
@ -53,4 +54,7 @@ export function statusMixin(kbnServer, server, config) {
registerStatusPage(kbnServer, server, config);
registerStatusApi(kbnServer, server, config);
registerStatsApi(kbnServer, server, config);
// expose shared functionality
server.decorate('server', 'getOSInfo', getOSInfo);
}

View file

@ -1,7 +1,20 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import os from 'os';

View file

@ -1,7 +1,20 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
jest.mock('os', () => ({

View file

@ -22,6 +22,7 @@ import v8 from 'v8';
import { get, isObject, merge } from 'lodash';
import { keysToSnakeCaseShallow } from '../../../utils/case_conversion';
import { getAllStats as cGroupStats } from './cgroup';
import { getOSInfo } from './get_os_info';
const requestDefaults = {
disconnects: 0,
@ -55,7 +56,7 @@ export class Metrics {
async capture(hapiEvent) {
const timestamp = new Date().toISOString();
const event = this.captureEvent(hapiEvent);
const event = await this.captureEvent(hapiEvent);
const cgroup = await this.captureCGroupsIfAvailable();
const metrics = {
@ -66,7 +67,7 @@ export class Metrics {
return merge(metrics, event, cgroup);
}
captureEvent(hapiEvent) {
async captureEvent(hapiEvent) {
const heapStats = v8.getHeapStatistics();
const port = this.config.get('server.port');
const avgInMillis = get(hapiEvent, ['responseTimes', port, 'avg']); // sadly, it's possible for this to be NaN
@ -98,7 +99,8 @@ export class Metrics {
free_in_bytes: os.freemem(),
used_in_bytes: get(hapiEvent, 'osmem.total') - get(hapiEvent, 'osmem.free')
},
uptime_in_millis: os.uptime() * 1000
uptime_in_millis: os.uptime() * 1000,
...(await getOSInfo())
},
response_times: {
avg_in_millis: isNaN(avgInMillis) ? undefined : avgInMillis, // convert NaN to undefined

View file

@ -24,7 +24,9 @@ jest.mock('fs', () => ({
jest.mock('os', () => ({
freemem: jest.fn(),
totalmem: jest.fn(),
uptime: jest.fn()
uptime: jest.fn(),
platform: jest.fn(),
release: jest.fn()
}));
jest.mock('process', () => ({
@ -81,7 +83,7 @@ describe('Metrics', function () {
});
describe('captureEvent', () => {
it('parses the hapi event', () => {
it('parses the hapi event', async () => {
sinon.stub(os, 'uptime').returns(12000);
sinon.stub(process, 'uptime').returns(5000);
@ -105,7 +107,7 @@ describe('Metrics', function () {
'host': 'blahblah.local'
};
expect(metrics.captureEvent(hapiEvent)).toMatchObject({
expect(await metrics.captureEvent(hapiEvent)).toMatchObject({
'concurrent_connections': 0,
'os': {
'load': {
@ -140,7 +142,7 @@ describe('Metrics', function () {
});
});
it('parses event with missing fields / NaN for responseTimes.avg', () => {
it('parses event with missing fields / NaN for responseTimes.avg', async () => {
const hapiEvent = {
requests: {
'5603': { total: 22, disconnects: 0, statusCodes: { '200': 22 } },
@ -149,7 +151,7 @@ describe('Metrics', function () {
host: 'blahblah.local',
};
expect(metrics.captureEvent(hapiEvent)).toMatchObject({
expect(await metrics.captureEvent(hapiEvent)).toMatchObject({
process: { memory: { heap: {} }, pid: 8675309, uptime_in_millis: 5000000 },
os: {
load: {},

View file

@ -1,269 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import _ from 'lodash';
import fixtures from 'fixtures/fake_hierarchical_data';
import expect from 'expect.js';
import ngMock from 'ng_mock';
import { VisProvider } from '../../../vis';
import FixturesStubbedLogstashIndexPatternProvider from 'fixtures/stubbed_logstash_index_pattern';
import { VislibSlicesResponseHandlerProvider } from '../../../vis/response_handlers/vislib';
import { tabifyAggResponse } from '../../tabify';
describe('buildHierarchicalData', function () {
let Vis;
let indexPattern;
let responseHandler;
beforeEach(ngMock.module('kibana'));
beforeEach(ngMock.inject(function (Private) {
Vis = Private(VisProvider);
indexPattern = Private(FixturesStubbedLogstashIndexPatternProvider);
responseHandler = Private(VislibSlicesResponseHandlerProvider).handler;
}));
const buildHierarchicalData = async (aggs, response) => {
const vis = new Vis(indexPattern, { type: 'histogram', aggs: aggs });
vis.isHierarchical = () => true;
const data = tabifyAggResponse(vis.aggs, response, { metricsAtAllLevels: true });
return await responseHandler(data);
};
describe('metric only', function () {
let results;
beforeEach(async function () {
const aggs = [{
id: 'agg_1',
schema: 'metric',
type: 'avg',
params: {
field: 'bytes',
}
}];
results = await buildHierarchicalData(aggs, fixtures.metricOnly);
});
it('should set the slices with one child to a consistent label', function () {
const checkLabel = 'Average bytes';
expect(results).to.have.property('slices');
expect(results.slices).to.have.property('children');
expect(results.slices.children).to.have.length(1);
expect(results.slices.children[0]).to.have.property('name', checkLabel);
expect(results.slices.children[0]).to.have.property('size', 412032);
expect(results).to.have.property('names');
expect(results.names).to.eql([checkLabel]);
expect(results).to.have.property('raw');
expect(results.raw).to.have.property('rows');
expect(results.raw.rows).to.have.length(1);
});
});
describe('rows and columns', function () {
let results;
it('should set the rows', async function () {
const aggs = [{
id: 'agg_2',
type: 'terms',
schema: 'split',
params: {
field: 'extension',
}
}, {
id: 'agg_3',
type: 'terms',
schema: 'group',
params: {
field: 'geo.src',
}
}];
results = await buildHierarchicalData(aggs, fixtures.threeTermBuckets);
expect(results).to.have.property('rows');
});
it('should set the columns', async function () {
const aggs = [{
id: 'agg_2',
type: 'terms',
schema: 'split',
params: {
row: false,
field: 'extension',
}
}, {
id: 'agg_3',
type: 'terms',
schema: 'group',
params: {
field: 'geo.src',
}
}];
results = await buildHierarchicalData(aggs, fixtures.threeTermBuckets);
expect(results).to.have.property('columns');
});
});
describe('threeTermBuckets', function () {
let results;
beforeEach(async function () {
const aggs = [{
id: 'agg_1',
type: 'avg',
schema: 'metric',
params: {
field: 'bytes',
}
}, {
id: 'agg_2',
type: 'terms',
schema: 'split',
params: {
field: 'extension',
}
}, {
id: 'agg_3',
type: 'terms',
schema: 'group',
params: {
field: 'geo.src',
}
}, {
id: 'agg_4',
type: 'terms',
schema: 'group',
params: {
field: 'machine.os',
}
}];
results = await buildHierarchicalData(aggs, fixtures.threeTermBuckets);
});
it('should set the hits attribute for the results', function () {
expect(results).to.have.property('rows');
_.each(results.rows, function (item) {
expect(item).to.have.property('names');
expect(item).to.have.property('slices');
expect(item.slices).to.have.property('children');
});
});
it('should set the parent of the first item in the split', function () {
expect(results).to.have.property('rows');
expect(results.rows).to.have.length(3);
expect(results.rows[0]).to.have.property('slices');
expect(results.rows[0].slices).to.have.property('children');
expect(results.rows[0].slices.children).to.have.length(2);
expect(results.rows[0].slices.children[0]).to.have.property('aggConfigResult');
expect(results.rows[0].slices.children[0].aggConfigResult.$parent.$parent).to.have.property('key', 'png');
});
});
describe('oneHistogramBucket', function () {
let results;
beforeEach(async function () {
const aggs = [{
id: 'agg_2',
type: 'histogram',
schema: 'group',
params: {
field: 'bytes',
interval: 8192
}
}];
results = await buildHierarchicalData(aggs, fixtures.oneHistogramBucket);
});
it('should set the hits attribute for the results', function () {
expect(results).to.have.property('slices');
expect(results.slices).to.property('children');
expect(results).to.have.property('names');
expect(results.names).to.have.length(6);
expect(results).to.have.property('raw');
});
});
describe('oneRangeBucket', function () {
let results;
beforeEach(async function () {
const aggs = [{
id: 'agg_2',
type: 'range',
schema: 'group',
params: {
field: 'bytes',
}
}];
results = await buildHierarchicalData(aggs, fixtures.oneRangeBucket);
});
it('should set the hits attribute for the results', function () {
expect(results).to.have.property('slices');
expect(results.slices).to.property('children');
expect(results).to.have.property('names');
expect(results.names).to.have.length(2);
expect(results).to.have.property('raw');
});
});
describe('oneFilterBucket', function () {
let results;
beforeEach(async function () {
const aggs = [{
id: 'agg_2',
type: 'filters',
schema: 'group',
params: {
field: 'geo.src',
filters: [
{
label: 'type:apache',
input: { query: 'type:apache' },
},
{
label: 'type:nginx',
input: { query: 'type:nginx' },
},
],
}
}];
results = await buildHierarchicalData(aggs, fixtures.oneFilterBucket);
});
it('should set the hits attribute for the results', function () {
expect(results).to.have.property('slices');
expect(results).to.have.property('names');
expect(results.names).to.have.length(2);
expect(results).to.have.property('raw');
});
});
});

View file

@ -0,0 +1,87 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { toArray } from 'lodash';
import { getFormat } from 'ui/visualize/loader/pipeline_helpers/utilities';
export function convertTableProvider(tooltipFormatter) {
return function (table, { metric, buckets = [] }) {
let slices;
const names = {};
const metricColumn = table.columns[metric.accessor];
const metricFieldFormatter = getFormat(metric.format);
if (!buckets.length) {
slices = [{
name: metricColumn.name,
size: table.rows[0][metricColumn.id],
aggConfig: metricColumn.aggConfig
}];
names[metricColumn.name] = metricColumn.name;
} else {
slices = [];
table.rows.forEach((row, rowIndex) => {
let parent;
let dataLevel = slices;
buckets.forEach(bucket => {
const bucketColumn = table.columns[bucket.accessor];
const bucketValueColumn = table.columns[bucket.accessor + 1];
const bucketFormatter = getFormat(bucket.format);
const name = bucketFormatter.convert(row[bucketColumn.id]);
const size = row[bucketValueColumn.id];
names[name] = name;
let slice = dataLevel.find(slice => slice.name === name);
if (!slice) {
slice = {
name,
size,
parent,
children: [],
aggConfig: bucketColumn.aggConfig,
rawData: {
table,
row: rowIndex,
column: bucket.accessor,
value: row[bucketColumn.id],
},
};
dataLevel.push(slice);
}
parent = slice;
dataLevel = slice.children;
});
});
}
return {
hits: table.rows.length,
raw: table,
names: toArray(names),
tooltipFormatter: tooltipFormatter(metricFieldFormatter),
slices: {
children: [
...slices
]
}
};
};
}
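A hedged sketch of how the provider above is consumed; the dimension values are illustrative, and the wiring mirrors BuildHierarchicalDataProvider further down in this change.
const convertTable = convertTableProvider(tooltipFormatterProvider); // e.g. the hierarchical tooltip formatter
const chartData = convertTable(table, {
  metric: { accessor: 1 },
  buckets: [{ accessor: 0 }],
});
// chartData.slices.children is the nested slice tree, chartData.names holds the
// formatted bucket labels, and chartData.tooltipFormatter is bound to the metric's FieldFormat.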

View file

@ -28,15 +28,13 @@ export function HierarchicalTooltipFormatterProvider($rootScope, $compile, $sce)
$compile($tooltip)($tooltipScope);
return function (columns) {
return function (metricFieldFormatter) {
return function (event) {
const datum = event.datum;
// Collect the current leaf and parents into an array of values
$tooltipScope.rows = collectBranch(datum);
const metricCol = $tooltipScope.metricCol = columns.find(column => column.aggConfig.type.type === 'metrics');
// Map those values to what the tooltipSource.rows format.
_.forEachRight($tooltipScope.rows, function (row) {
row.spacer = $sce.trustAsHtml(_.repeat('&nbsp;', row.depth));
@ -46,7 +44,7 @@ export function HierarchicalTooltipFormatterProvider($rootScope, $compile, $sce)
percent = row.item.percentOfGroup;
}
row.metric = metricCol.aggConfig.fieldFormatter()(row.metric);
row.metric = metricFieldFormatter ? metricFieldFormatter.convert(row.metric) : row.metric;
if (percent != null) {
row.metric += ' (' + numeral(percent).format('0.[00]%') + ')';

View file

@ -17,54 +17,10 @@
* under the License.
*/
import { toArray } from 'lodash';
import { HierarchicalTooltipFormatterProvider } from './_hierarchical_tooltip_formatter';
import { convertTableProvider } from './_convert_table';
export function BuildHierarchicalDataProvider(Private) {
const tooltipFormatter = Private(HierarchicalTooltipFormatterProvider);
return function (table) {
let slices;
const names = {};
if (table.columns.length === 1) {
slices = [{ name: table.columns[0].title, size: table.rows[0][0].value }];
names[table.columns[0].title] = table.columns[0].title;
} else {
let parent;
slices = [];
table.rows.forEach(row => {
let dataLevel = slices;
// we always have one bucket column and one metric column (for every level)
for (let columnIndex = 0; columnIndex < table.columns.length; columnIndex += 2) {
const { aggConfig } = table.columns[columnIndex];
const fieldFormatter = aggConfig.fieldFormatter('text');
const bucketColumn = row[columnIndex];
const metricColumn = row[columnIndex + 1];
const name = fieldFormatter(bucketColumn.value);
const size = metricColumn.value;
names[name] = name;
let slice = dataLevel.find(slice => slice.name === name);
if (!slice) {
slice = { name, size, parent, aggConfig, aggConfigResult: metricColumn, children: [] };
dataLevel.push(slice);
}
parent = slice;
dataLevel = slice.children;
}
});
}
return {
hits: table.rows.length,
raw: table,
names: toArray(names),
tooltipFormatter: tooltipFormatter(table.columns),
slices: {
children: [
...slices
]
}
};
};
return convertTableProvider(tooltipFormatter);
}

View file

@ -0,0 +1,271 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { convertTableProvider } from './_convert_table';
import { LegacyResponseHandlerProvider as legacyResponseHandlerProvider } from '../../vis/response_handlers/legacy';
jest.mock('../../registry/field_formats', () => ({
fieldFormats: {
byId: {
'1': jest.fn(),
agg_1: jest.fn(),
}
}
}));
jest.mock('../../chrome', () => ({
getUiSettingsClient: jest.fn()
}));
describe('buildHierarchicalData convertTable', () => {
const mockToolTipFormatter = () => ({});
const convertTable = convertTableProvider(mockToolTipFormatter);
const responseHandler = legacyResponseHandlerProvider().handler;
describe('metric only', () => {
let dimensions;
let table;
beforeEach(async () => {
const tabifyResponse = {
columns: [
{ id: 'col-0-agg_1', name: 'Average bytes' },
],
rows: [
{ 'col-0-agg_1': 412032 },
],
};
dimensions = {
metric: { accessor: 0 },
};
const tableGroup = await responseHandler(tabifyResponse, dimensions);
table = tableGroup.tables[0];
});
it('should set the slices with one child to a consistent label', () => {
const results = convertTable(table, dimensions);
const checkLabel = 'Average bytes';
expect(results).toHaveProperty('names');
expect(results.names).toEqual([checkLabel]);
expect(results).toHaveProperty('raw');
expect(results.raw).toHaveProperty('rows');
expect(results.raw.rows).toHaveLength(1);
expect(results).toHaveProperty('slices');
expect(results.slices).toHaveProperty('children');
expect(results.slices.children).toHaveLength(1);
expect(results.slices.children[0]).toHaveProperty('name', checkLabel);
expect(results.slices.children[0]).toHaveProperty('size', 412032);
});
});
describe('threeTermBuckets', () => {
let dimensions;
let tables;
beforeEach(async () => {
const tabifyResponse = {
columns: [
{ 'id': 'col-0-agg_2', 'name': 'extension: Descending' },
{ 'id': 'col-1-agg_1', 'name': 'Average bytes' },
{ 'id': 'col-2-agg_3', 'name': 'geo.src: Descending' },
{ 'id': 'col-3-agg_1', 'name': 'Average bytes' },
{ 'id': 'col-4-agg_4', 'name': 'machine.os: Descending' },
{ 'id': 'col-5-agg_1', 'name': 'Average bytes' }
],
rows: [
/* eslint-disable max-len */
{ 'col-0-agg_2': 'png', 'col-2-agg_3': 'IT', 'col-4-agg_4': 'win', 'col-1-agg_1': 412032, 'col-3-agg_1': 9299, 'col-5-agg_1': 0 },
{ 'col-0-agg_2': 'png', 'col-2-agg_3': 'IT', 'col-4-agg_4': 'mac', 'col-1-agg_1': 412032, 'col-3-agg_1': 9299, 'col-5-agg_1': 9299 },
{ 'col-0-agg_2': 'png', 'col-2-agg_3': 'US', 'col-4-agg_4': 'linux', 'col-1-agg_1': 412032, 'col-3-agg_1': 8293, 'col-5-agg_1': 3992 },
{ 'col-0-agg_2': 'png', 'col-2-agg_3': 'US', 'col-4-agg_4': 'mac', 'col-1-agg_1': 412032, 'col-3-agg_1': 8293, 'col-5-agg_1': 3029 },
{ 'col-0-agg_2': 'css', 'col-2-agg_3': 'MX', 'col-4-agg_4': 'win', 'col-1-agg_1': 412032, 'col-3-agg_1': 9299, 'col-5-agg_1': 4992 },
{ 'col-0-agg_2': 'css', 'col-2-agg_3': 'MX', 'col-4-agg_4': 'mac', 'col-1-agg_1': 412032, 'col-3-agg_1': 9299, 'col-5-agg_1': 5892 },
{ 'col-0-agg_2': 'css', 'col-2-agg_3': 'US', 'col-4-agg_4': 'linux', 'col-1-agg_1': 412032, 'col-3-agg_1': 8293, 'col-5-agg_1': 3992 },
{ 'col-0-agg_2': 'css', 'col-2-agg_3': 'US', 'col-4-agg_4': 'mac', 'col-1-agg_1': 412032, 'col-3-agg_1': 8293, 'col-5-agg_1': 3029 },
{ 'col-0-agg_2': 'html', 'col-2-agg_3': 'CN', 'col-4-agg_4': 'win', 'col-1-agg_1': 412032, 'col-3-agg_1': 9299, 'col-5-agg_1': 4992 },
{ 'col-0-agg_2': 'html', 'col-2-agg_3': 'CN', 'col-4-agg_4': 'mac', 'col-1-agg_1': 412032, 'col-3-agg_1': 9299, 'col-5-agg_1': 5892 },
{ 'col-0-agg_2': 'html', 'col-2-agg_3': 'FR', 'col-4-agg_4': 'win', 'col-1-agg_1': 412032, 'col-3-agg_1': 8293, 'col-5-agg_1': 3992 },
{ 'col-0-agg_2': 'html', 'col-2-agg_3': 'FR', 'col-4-agg_4': 'mac', 'col-1-agg_1': 412032, 'col-3-agg_1': 8293, 'col-5-agg_1': 3029 }
/* eslint-enable max-len */
]
};
dimensions = {
splitRow: [{ accessor: 0 }],
metric: { accessor: 5 },
buckets: [
{ accessor: 2 },
{ accessor: 4 },
]
};
const tableGroup = await responseHandler(tabifyResponse, dimensions);
tables = tableGroup.tables;
});
it('should set the correct hits attribute for each of the results', () => {
tables.forEach(t => {
const results = convertTable(t.tables[0], dimensions);
expect(results).toHaveProperty('hits');
expect(results.hits).toBe(4);
});
});
it('should set the correct names for each of the results', () => {
const results0 = convertTable(tables[0].tables[0], dimensions);
expect(results0).toHaveProperty('names');
expect(results0.names).toHaveLength(5);
const results1 = convertTable(tables[1].tables[0], dimensions);
expect(results1).toHaveProperty('names');
expect(results1.names).toHaveLength(5);
const results2 = convertTable(tables[2].tables[0], dimensions);
expect(results2).toHaveProperty('names');
expect(results2.names).toHaveLength(4);
});
it('should set the parent of the first item in the split', () => {
const results0 = convertTable(tables[0].tables[0], dimensions);
expect(results0).toHaveProperty('slices');
expect(results0.slices).toHaveProperty('children');
expect(results0.slices.children).toHaveLength(2);
expect(results0.slices.children[0].rawData.table.$parent).toHaveProperty('key', 'png');
const results1 = convertTable(tables[1].tables[0], dimensions);
expect(results1).toHaveProperty('slices');
expect(results1.slices).toHaveProperty('children');
expect(results1.slices.children).toHaveLength(2);
expect(results1.slices.children[0].rawData.table.$parent).toHaveProperty('key', 'css');
const results2 = convertTable(tables[2].tables[0], dimensions);
expect(results2).toHaveProperty('slices');
expect(results2.slices).toHaveProperty('children');
expect(results2.slices.children).toHaveLength(2);
expect(results2.slices.children[0].rawData.table.$parent).toHaveProperty('key', 'html');
});
});
describe('oneHistogramBucket', () => {
let dimensions;
let table;
beforeEach(async () => {
const tabifyResponse = {
columns: [
{ 'id': 'col-0-agg_2', 'name': 'bytes' },
{ 'id': 'col-1-1', 'name': 'Count' }
],
rows: [
{ 'col-0-agg_2': 1411862400000, 'col-1-1': 8247 },
{ 'col-0-agg_2': 1411948800000, 'col-1-1': 8184 },
{ 'col-0-agg_2': 1412035200000, 'col-1-1': 8269 },
{ 'col-0-agg_2': 1412121600000, 'col-1-1': 8141 },
{ 'col-0-agg_2': 1412208000000, 'col-1-1': 8148 },
{ 'col-0-agg_2': 1412294400000, 'col-1-1': 8219 }
]
};
dimensions = {
metric: { accessor: 1 },
buckets: [
{ accessor: 0, params: { field: 'bytes', interval: 8192 } },
]
};
const tableGroup = await responseHandler(tabifyResponse, dimensions);
table = tableGroup.tables[0];
});
it('should set the hits attribute for the results', () => {
const results = convertTable(table, dimensions);
expect(results).toHaveProperty('raw');
expect(results).toHaveProperty('slices');
expect(results.slices).toHaveProperty('children');
expect(results).toHaveProperty('names');
expect(results.names).toHaveLength(6);
});
});
describe('oneRangeBucket', () => {
let dimensions;
let table;
beforeEach(async () => {
const tabifyResponse = {
columns: [
{ 'id': 'col-0-agg_2', 'name': 'bytes ranges' },
{ 'id': 'col-1-1', 'name': 'Count' }
],
rows: [
{ 'col-0-agg_2': { 'gte': 0, 'lt': 1000 }, 'col-1-1': 606 },
{ 'col-0-agg_2': { 'gte': 1000, 'lt': 2000 }, 'col-1-1': 298 }
]
};
dimensions = {
metric: { accessor: 1 },
buckets: [
{ accessor: 0, format: { id: 'range', params: { id: 'agg_2' } } },
]
};
const tableGroup = await responseHandler(tabifyResponse, dimensions);
table = tableGroup.tables[0];
});
it('should set the hits attribute for the results', () => {
const results = convertTable(table, dimensions);
expect(results).toHaveProperty('raw');
expect(results).toHaveProperty('slices');
expect(results.slices).toHaveProperty('children');
expect(results).toHaveProperty('names');
expect(results.names).toHaveLength(2);
});
});
describe('oneFilterBucket', () => {
let dimensions;
let table;
beforeEach(async () => {
const tabifyResponse = {
columns: [
{ 'id': 'col-0-agg_2', 'name': 'filters' },
{ 'id': 'col-1-1', 'name': 'Count' }
],
rows: [
{ 'col-0-agg_2': 'type:apache', 'col-1-1': 4844 },
{ 'col-0-agg_2': 'type:nginx', 'col-1-1': 1161 }
]
};
dimensions = {
metric: { accessor: 1 },
buckets: [{
accessor: 0,
}],
};
const tableGroup = await responseHandler(tabifyResponse, dimensions);
table = tableGroup.tables[0];
});
it('should set the hits attribute for the results', () => {
const results = convertTable(table, dimensions);
expect(results).toHaveProperty('raw');
expect(results).toHaveProperty('slices');
expect(results).toHaveProperty('names');
expect(results.names).toHaveLength(2);
});
});
});

View file

@ -18,7 +18,6 @@
*/
import expect from 'expect.js';
import { AggType } from '../../../agg_types/agg_type';
import { makeFakeXAspect } from '../_fake_x_aspect';
describe('makeFakeXAspect', function () {
@ -27,18 +26,10 @@ describe('makeFakeXAspect', function () {
const aspect = makeFakeXAspect();
expect(aspect)
.to.have.property('i', -1)
.and.have.property('aggConfig');
expect(aspect.aggConfig)
.to.have.property('fieldFormatter')
.and.to.have.property('type');
expect(aspect.aggConfig.type)
.to.be.an(AggType)
.and.to.have.property('name', 'all')
.and.to.have.property('title', 'All docs')
.and.to.have.property('hasNoDsl', true);
.to.have.property('accessor', -1)
.and.have.property('title', 'All docs')
.and.have.property('fieldFormatter')
.and.have.property('params');
});
});

View file

@ -17,136 +17,58 @@
* under the License.
*/
import _ from 'lodash';
import moment from 'moment';
import expect from 'expect.js';
import ngMock from 'ng_mock';
import { VisProvider } from '../../../vis';
import { getAspects } from '../_get_aspects';
import FixturesStubbedLogstashIndexPatternProvider from 'fixtures/stubbed_logstash_index_pattern';
describe('getAspects', function () {
let Vis;
let indexPattern;
beforeEach(ngMock.module('kibana'));
beforeEach(ngMock.inject(function (Private) {
Vis = Private(VisProvider);
indexPattern = Private(FixturesStubbedLogstashIndexPatternProvider);
}));
let vis;
let table;
const date = _.memoize(function (n) {
return moment().startOf('day').add(n, 'hour').valueOf();
});
function filterByIndex(map) {
return function (arr) {
return arr.filter(function (val, i) {
return map[i];
});
};
}
let dimensions;
function validate(aspect, i) {
expect(i).to.be.a('number');
expect(aspect)
.to.be.an('object')
.and.have.property('i', i)
.and.have.property('aggConfig', vis.aggs[i]);
.and.have.property('accessor', i);
}
function init(group, x, y) {
// map args to indices that should be removed
const filter = filterByIndex([
x > 0,
x > 1,
group > 0,
group > 1,
y > 0,
y > 1
]);
vis = new Vis(indexPattern, {
type: 'histogram',
aggs: [
{ type: 'date_histogram', schema: 'segment', params: { field: '@timestamp' } },
{ type: 'date_histogram', schema: 'segment', params: { field: 'utc_time' } },
{ type: 'terms', schema: 'group', params: { field: 'extension' } },
{ type: 'terms', schema: 'group', params: { field: 'geo.src' } },
{ type: 'count', schema: 'metric' },
{ type: 'avg', schema: 'metric', params: { field: 'bytes' } }
]
});
table = {
columns: filter([
{ aggConfig: vis.aggs[0] }, // date
{ aggConfig: vis.aggs[1] }, // date
{ aggConfig: vis.aggs[2] }, // extension
{ aggConfig: vis.aggs[3] }, // extension
{ aggConfig: vis.aggs[4] }, // count
{ aggConfig: vis.aggs[5] } // avg
]),
rows: [
[ date(0), date(6), 'html', 'CN', 50, 50 ],
[ date(0), date(7), 'css', 'CN', 100, 25 ],
[ date(1), date(8), 'html', 'CN', 60, 50 ],
[ date(1), date(9), 'css', 'CN', 120, 25 ],
[ date(2), date(10), 'html', 'CN', 70, 50 ],
[ date(2), date(11), 'css', 'CN', 140, 25 ],
[ date(3), date(12), 'html', 'CN', 80, 50 ],
[ date(3), date(13), 'css', 'CN', 160, 25 ]
].map(filter)
columns: [
{ id: '0', title: 'date' }, // date
{ id: '1', title: 'date utc_time' }, // date
{ id: '2', title: 'ext' }, // extension
{ id: '3', title: 'geo.src' }, // extension
{ id: '4', title: 'count' }, // count
{ id: '5', title: 'avg bytes' } // avg
],
rows: []
};
const aggs = vis.aggs.splice(0, vis.aggs.length);
filter(aggs).forEach(function (filter) {
vis.aggs.push(filter);
});
dimensions = {
x: { accessor: x },
y: { accessor: y },
series: { accessor: group },
};
}
it('produces an aspect object for each of the aspect types found in the columns', function () {
init(1, 1, 1);
init(1, 0, 2);
const aspects = getAspects(table);
validate(aspects.x, 0);
validate(aspects.series, 1);
validate(aspects.y, 2);
});
it('uses arrays only when there are more than one aspect of a specific type', function () {
init(0, 1, 2);
const aspects = getAspects(table);
validate(aspects.x, 0);
expect(aspects.series == null).to.be(true);
expect(aspects.y).to.be.an('array').and.have.length(2);
expect(aspects.y[0], 1);
expect(aspects.y[1], 2);
});
it('throws an error if there are multiple x aspects', function () {
init(0, 2, 1);
expect(function () {
getAspects(table);
}).to.throwError(TypeError);
const aspects = getAspects(table, dimensions);
validate(aspects.x[0], '0');
validate(aspects.series[0], '1');
validate(aspects.y[0], '2');
});
it('creates a fake x aspect if the column does not exist', function () {
init(0, 0, 1);
init(0, null, 1);
const aspects = getAspects(table);
const aspects = getAspects(table, dimensions);
expect(aspects.x)
expect(aspects.x[0])
.to.be.an('object')
.and.have.property('i', -1)
.and.have.property('aggConfig')
.and.have.property('accessor', -1)
.and.have.property('title');
});

View file

@ -23,8 +23,15 @@ import { getPoint } from '../_get_point';
describe('getPoint', function () {
const truthFormatted = { fieldFormatter: _.constant(_.constant(true)) };
const identFormatted = { fieldFormatter: _.constant(_.identity) };
const truthFormatted = _.constant(true);
const table = {
columns: [{ id: '0' }, { id: '1' }, { id: '3' }],
rows: [
{ '0': 1, '1': 2, '2': 3 },
{ '0': 4, '1': 'NaN', '2': 6 }
]
};
describe('Without series aspect', function () {
let seriesAspect;
@ -34,27 +41,27 @@ describe('getPoint', function () {
beforeEach(function () {
seriesAspect = null;
xAspect = { i: 0 };
yAspect = { i: 1, title: 'Y', aggConfig: {} };
xAspect = { accessor: 0 };
yAspect = { accessor: 1, title: 'Y' };
yScale = 5;
});
it('properly unwraps and scales values', function () {
const row = [ { value: 1 }, { value: 2 }, { value: 3 } ];
const zAspect = { i: 2 };
const point = getPoint(xAspect, seriesAspect, yScale, row, yAspect, zAspect);
const row = table.rows[0];
const zAspect = { accessor: 2 };
const point = getPoint(table, xAspect, seriesAspect, yScale, row, 0, yAspect, zAspect);
expect(point)
.to.have.property('x', 1)
.and.have.property('y', 10)
.and.have.property('z', 3)
.and.have.property('series', yAspect.title)
.and.have.property('aggConfigResult', row[1]);
.and.have.property('series', yAspect.title);
});
it('ignores points with a y value of NaN', function () {
const row = [ { value: 1 }, { value: 'NaN' }];
const point = getPoint(xAspect, seriesAspect, yScale, row, yAspect);
const row = table.rows[1];
const point = getPoint(table, xAspect, seriesAspect, yScale, row, 1, yAspect);
expect(point).to.be(void 0);
});
});
@ -66,39 +73,30 @@ describe('getPoint', function () {
let yScale;
beforeEach(function () {
row = [ { value: 1 }, { value: 2 }, { value: 3 }];
xAspect = { i: 0 };
yAspect = { i: 2 };
row = table.rows[0];
xAspect = { accessor: 0 };
yAspect = { accessor: 2 };
yScale = null;
});
it('properly unwraps and scales values', function () {
const seriesAspect = { i: 1, aggConfig: identFormatted };
const point = getPoint(xAspect, seriesAspect, yScale, row, yAspect);
const seriesAspect = [{ accessor: 1, fieldFormatter: _.identity }];
const point = getPoint(table, xAspect, seriesAspect, yScale, row, 0, yAspect);
expect(point)
.to.have.property('x', 1)
.and.have.property('series', '2')
.and.have.property('y', 3)
.and.have.property('aggConfigResult', row[2]);
.and.have.property('y', 3);
});
it('properly formats series values', function () {
const seriesAspect = { i: 1, aggConfig: truthFormatted };
const point = getPoint(xAspect, seriesAspect, yScale, row, yAspect);
const seriesAspect = [{ accessor: 1, fieldFormatter: truthFormatted }];
const point = getPoint(table, xAspect, seriesAspect, yScale, row, 0, yAspect);
expect(point)
.to.have.property('x', 1)
.and.have.property('series', 'true')
.and.have.property('y', 3)
.and.have.property('aggConfigResult', row[2]);
});
it ('adds the aggConfig to the points', function () {
const seriesAspect = { i: 1, aggConfig: truthFormatted };
const point = getPoint(xAspect, seriesAspect, yScale, row, yAspect);
expect(point).to.have.property('aggConfig', truthFormatted);
.and.have.property('y', 3);
});
});
});

View file

@ -21,33 +21,32 @@ import _ from 'lodash';
import expect from 'expect.js';
import { getSeries } from '../_get_series';
describe('getSeries', function () {
const agg = { fieldFormatter: _.constant(_.identity) };
function wrapRows(row) {
return row.map(function (v) {
return { value: v };
});
}
describe('getSeries', function () {
it('produces a single series with points for each row', function () {
const rows = [
[1, 2, 3],
[1, 2, 3],
[1, 2, 3],
[1, 2, 3],
[1, 2, 3]
].map(wrapRows);
const table = {
columns: [{ id: '0' }, { id: '1' }, { id: '3' }],
rows: [
{ '0': 1, '1': 2, '2': 3 },
{ '0': 1, '1': 2, '2': 3 },
{ '0': 1, '1': 2, '2': 3 },
{ '0': 1, '1': 2, '2': 3 },
{ '0': 1, '1': 2, '2': 3 },
]
};
const chart = {
aspects: {
x: { i: 0 },
y: { i: 1, title: 'y', aggConfig: { id: 'id' } },
z: { i: 2 }
x: [{ accessor: 0 }],
y: [{ accessor: 1, title: 'y' }],
z: { accessor: 2 }
}
};
const series = getSeries(rows, chart);
const series = getSeries(table, chart);
expect(series)
.to.be.an('array')
@ -72,25 +71,29 @@ describe('getSeries', function () {
});
it('produces multiple series if there are multiple y aspects', function () {
const rows = [
[1, 2, 3],
[1, 2, 3],
[1, 2, 3],
[1, 2, 3],
[1, 2, 3]
].map(wrapRows);
const table = {
columns: [{ id: '0' }, { id: '1' }, { id: '3' }],
rows: [
{ '0': 1, '1': 2, '2': 3 },
{ '0': 1, '1': 2, '2': 3 },
{ '0': 1, '1': 2, '2': 3 },
{ '0': 1, '1': 2, '2': 3 },
{ '0': 1, '1': 2, '2': 3 },
]
};
const chart = {
aspects: {
x: { i: 0 },
x: [{ accessor: 0 }],
y: [
{ i: 1, title: '0', aggConfig: { id: 1 } },
{ i: 2, title: '1', aggConfig: { id: 2 } },
{ accessor: 1, title: '0' },
{ accessor: 2, title: '1' },
]
}
};
const series = getSeries(rows, chart);
const series = getSeries(table, chart);
expect(series)
.to.be.an('array')
@ -115,26 +118,28 @@ describe('getSeries', function () {
});
it('produces multiple series if there is a series aspect', function () {
const rows = [
['0', 3],
['1', 3],
['1', 'NaN'],
['0', 3],
['0', 'NaN'],
['1', 3],
['0', 3],
['1', 3]
].map(wrapRows);
const table = {
columns: [{ id: '0' }, { id: '1' }, { id: '3' }],
rows: [
{ '0': 0, '1': 2, '2': 3 },
{ '0': 1, '1': 2, '2': 3 },
{ '0': 0, '1': 2, '2': 3 },
{ '0': 1, '1': 2, '2': 3 },
{ '0': 0, '1': 2, '2': 3 },
{ '0': 1, '1': 2, '2': 3 },
]
};
const chart = {
aspects: {
x: { i: -1 },
series: { i: 0, aggConfig: agg },
y: { i: 1, title: '0', aggConfig: agg }
x: [{ accessor: -1 }],
series: [{ accessor: 0, fieldFormatter: _.identity }],
y: [{ accessor: 1, title: '0' }]
}
};
const series = getSeries(rows, chart);
const series = getSeries(table, chart);
expect(series)
.to.be.an('array')
@ -152,34 +157,37 @@ describe('getSeries', function () {
siri.values.forEach(function (point) {
expect(point)
.to.have.property('x', '_all')
.and.property('y', 3);
.to.have.property('y', 2);
});
});
});
it('produces multiple series if there is a series aspect and multiple y aspects', function () {
const rows = [
['0', 3, 4],
['1', 3, 4],
['0', 3, 4],
['1', 3, 4],
['0', 3, 4],
['1', 3, 4]
].map(wrapRows);
const table = {
columns: [{ id: '0' }, { id: '1' }, { id: '3' }],
rows: [
{ '0': 0, '1': 3, '2': 4 },
{ '0': 1, '1': 3, '2': 4 },
{ '0': 0, '1': 3, '2': 4 },
{ '0': 1, '1': 3, '2': 4 },
{ '0': 0, '1': 3, '2': 4 },
{ '0': 1, '1': 3, '2': 4 },
]
};
const chart = {
aspects: {
x: { i: -1 },
series: { i: 0, aggConfig: agg },
x: [{ accessor: -1 }],
series: [{ accessor: 0, fieldFormatter: _.identity }],
y: [
{ i: 1, title: '0', aggConfig: { id: 1 } },
{ i: 2, title: '1', aggConfig: { id: 2 } }
{ accessor: 1, title: '0' },
{ accessor: 2, title: '1' }
]
}
};
const series = getSeries(rows, chart);
const series = getSeries(table, chart);
expect(series)
.to.be.an('array')
@ -202,34 +210,36 @@ describe('getSeries', function () {
siri.values.forEach(function (point) {
expect(point)
.to.have.property('x', '_all')
.and.property('y', y);
.to.have.property('y', y);
});
}
});
it('produces a series list in the same order as its corresponding metric column', function () {
const rows = [
['0', 3, 4],
['1', 3, 4],
['0', 3, 4],
['1', 3, 4],
['0', 3, 4],
['1', 3, 4]
].map(wrapRows);
const table = {
columns: [{ id: '0' }, { id: '1' }, { id: '3' }],
rows: [
{ '0': 0, '1': 2, '2': 3 },
{ '0': 1, '1': 2, '2': 3 },
{ '0': 0, '1': 2, '2': 3 },
{ '0': 1, '1': 2, '2': 3 },
{ '0': 0, '1': 2, '2': 3 },
]
};
const chart = {
aspects: {
x: { i: -1 },
series: { i: 0, aggConfig: agg },
x: [{ accessor: -1 }],
series: [{ accessor: 0, fieldFormatter: _.identity }],
y: [
{ i: 1, title: '0', aggConfig: { id: 1 } },
{ i: 2, title: '1', aggConfig: { id: 2 } }
{ accessor: 1, title: '0' },
{ accessor: 2, title: '1' }
]
}
};
const series = getSeries(rows, chart);
const series = getSeries(table, chart);
expect(series[0]).to.have.property('label', '0: 0');
expect(series[1]).to.have.property('label', '0: 1');
expect(series[2]).to.have.property('label', '1: 0');
@ -242,7 +252,7 @@ describe('getSeries', function () {
y.i = i;
});
const series2 = getSeries(rows, chart);
const series2 = getSeries(table, chart);
expect(series2[0]).to.have.property('label', '0: 1');
expect(series2[1]).to.have.property('label', '0: 0');
expect(series2[2]).to.have.property('label', '1: 1');

View file

@ -23,23 +23,13 @@ import { initXAxis } from '../_init_x_axis';
describe('initXAxis', function () {
const field = {};
const indexPattern = {};
const baseChart = {
aspects: {
x: {
aggConfig: {
fieldFormatter: _.constant({}),
write: _.constant({ params: {} }),
aggConfigs: {},
getIndexPattern: () => {
return indexPattern;
},
type: {}
},
title: 'label'
}
x: [{
fieldFormatter: _.constant({}),
title: 'label',
params: {}
}]
}
};
@ -48,45 +38,28 @@ describe('initXAxis', function () {
initXAxis(chart);
expect(chart)
.to.have.property('xAxisLabel', 'label')
.and.have.property('xAxisFormatter', chart.aspects.x.aggConfig.fieldFormatter());
.and.have.property('xAxisFormatter', chart.aspects.x[0].fieldFormatter);
});
it('makes the chart ordered if the agg is ordered', function () {
const chart = _.cloneDeep(baseChart);
chart.aspects.x.aggConfig.type.ordered = true;
chart.aspects.x.aggConfig.params = {
field: field
};
chart.aspects.x.aggConfig.aggConfigs.indexPattern = indexPattern;
chart.aspects.x[0].params.date = true;
initXAxis(chart);
expect(chart)
.to.have.property('xAxisLabel', 'label')
.and.have.property('xAxisFormatter', chart.aspects.x.aggConfig.fieldFormatter())
.and.have.property('indexPattern', indexPattern)
.and.have.property('xAxisField', field)
.and.have.property('xAxisFormatter', chart.aspects.x[0].fieldFormatter)
.and.have.property('ordered');
expect(chart.ordered)
.to.be.an('object')
.and.not.have.property('interval');
});
it('reads the interval param from the x agg', function () {
const chart = _.cloneDeep(baseChart);
chart.aspects.x.aggConfig.type.ordered = true;
chart.aspects.x.aggConfig.write = _.constant({ params: { interval: 10 } });
chart.aspects.x.aggConfig.params = {
field: field
};
chart.aspects.x.aggConfig.aggConfigs.indexPattern = indexPattern;
chart.aspects.x[0].params.date = true;
chart.aspects.x[0].params.interval = 10;
initXAxis(chart);
expect(chart)
.to.have.property('xAxisLabel', 'label')
.and.have.property('xAxisFormatter', chart.aspects.x.aggConfig.fieldFormatter())
.and.have.property('indexPattern', indexPattern)
.and.have.property('xAxisField', field)
.and.have.property('xAxisFormatter', chart.aspects.x[0].fieldFormatter)
.and.have.property('ordered');
expect(chart.ordered)

View file

@ -23,36 +23,26 @@ import { initYAxis } from '../_init_y_axis';
describe('initYAxis', function () {
function agg() {
return {
fieldFormatter: _.constant({}),
write: _.constant({ params: {} }),
type: {}
};
}
const baseChart = {
aspects: {
y: [
{ aggConfig: agg(), title: 'y1' },
{ aggConfig: agg(), title: 'y2' },
{ title: 'y1', fieldFormatter: v => v },
{ title: 'y2', fieldFormatter: v => v },
],
x: {
aggConfig: agg(),
x: [{
title: 'x'
}
}]
}
};
describe('with a single y aspect', function () {
const singleYBaseChart = _.cloneDeep(baseChart);
singleYBaseChart.aspects.y = singleYBaseChart.aspects.y[0];
singleYBaseChart.aspects.y = [singleYBaseChart.aspects.y[0]];
it('sets the yAxisFormatter to the field formats convert fn', function () {
const chart = _.cloneDeep(singleYBaseChart);
initYAxis(chart);
expect(chart).to.have.property('yAxisFormatter', chart.aspects.y.aggConfig.fieldFormatter());
expect(chart).to.have.property('yAxisFormatter');
});
it('sets the yAxisLabel', function () {
@ -69,8 +59,8 @@ describe('initYAxis', function () {
expect(chart).to.have.property('yAxisFormatter');
expect(chart.yAxisFormatter)
.to.be(chart.aspects.y[0].aggConfig.fieldFormatter())
.and.not.be(chart.aspects.y[1].aggConfig.fieldFormatter());
.to.be(chart.aspects.y[0].fieldFormatter)
.and.not.be(chart.aspects.y[1].fieldFormatter);
});
it('does not set the yAxisLabel, since it does not make sense to put multiple labels on the same axis', function () {

View file

@ -18,12 +18,8 @@
*/
import _ from 'lodash';
import moment from 'moment';
import AggConfigResult from '../../../vis/agg_config_result';
import expect from 'expect.js';
import ngMock from 'ng_mock';
import { VisProvider } from '../../../vis';
import FixturesStubbedLogstashIndexPatternProvider from 'fixtures/stubbed_logstash_index_pattern';
import { AggResponsePointSeriesProvider } from '../point_series';
describe('pointSeriesChartDataFromTable', function () {
@ -31,26 +27,25 @@ describe('pointSeriesChartDataFromTable', function () {
let pointSeriesChartDataFromTable;
let indexPattern;
let Vis;
beforeEach(ngMock.module('kibana'));
beforeEach(ngMock.inject(function (Private) {
Vis = Private(VisProvider);
indexPattern = Private(FixturesStubbedLogstashIndexPatternProvider);
pointSeriesChartDataFromTable = Private(AggResponsePointSeriesProvider);
}));
it('handles a table with just a count', function () {
const vis = new Vis(indexPattern, { type: 'histogram' });
const agg = vis.aggs[0];
const result = new AggConfigResult(vis.aggs[0], void 0, 100, 100);
const table = { rows: [] };
table.columns = [ { aggConfig: agg } ];
table.rows.push([ result ]);
const chartData = pointSeriesChartDataFromTable(table);
const table = {
columns: [{ id: '0' }],
rows: [
{ '0': 100 }
],
};
const chartData = pointSeriesChartDataFromTable(table, {
y: [{
accessor: 0,
params: {},
}]
});
expect(chartData).to.be.an('object');
expect(chartData.series).to.be.an('array');
@ -59,214 +54,87 @@ describe('pointSeriesChartDataFromTable', function () {
expect(series.values).to.have.length(1);
expect(series.values[0])
.to.have.property('x', '_all')
.and.have.property('y', 100)
.and.have.property('aggConfigResult', result);
.and.have.property('y', 100);
});
it('handles a table with x and y column', function () {
const vis = new Vis(indexPattern, {
type: 'histogram',
aggs: [
{ type: 'count', schema: 'metric' },
{ type: 'date_histogram', params: { field: '@timestamp', interval: 'hour' }, schema: 'segment' }
]
});
const y = {
agg: vis.aggs[0],
col: { aggConfig: vis.aggs[0], title: vis.aggs[0].makeLabel() },
at: function (i) { return 100 * i; }
const table = {
columns: [{ id: '0', name: 'x' }, { id: '1', name: 'Count' }],
rows: [
{ '0': 1, '1': 200 },
{ '0': 2, '1': 200 },
{ '0': 3, '1': 200 },
],
};
const x = {
agg: vis.aggs[1],
col: { aggConfig: vis.aggs[1] },
at: function (i) { return moment().startOf('day').add(i, 'day').valueOf(); }
const dimensions = {
x: [{ accessor: 0, params: {} }],
y: [{ accessor: 1, params: {} }],
};
const rowCount = 3;
const table = { rows: [] };
table.columns = [ x.col, y.col ];
_.times(rowCount, function (i) {
const date = new AggConfigResult(x.agg, void 0, x.at(i));
table.rows.push([date, new AggConfigResult(y.agg, date, y.at(i))]);
});
const chartData = pointSeriesChartDataFromTable(table);
const chartData = pointSeriesChartDataFromTable(table, dimensions);
expect(chartData).to.be.an('object');
expect(chartData.series).to.be.an('array');
expect(chartData.series).to.have.length(1);
const series = chartData.series[0];
expect(series).to.have.property('label', y.col.title);
expect(series.values).to.have.length(rowCount);
series.values.forEach(function (point, i) {
expect(point)
.to.have.property('x', x.at(i))
.and.property('y', y.at(i))
.and.property('aggConfigResult');
expect(point.aggConfigResult)
.to.be.an(AggConfigResult)
.and.property('value', y.at(i))
.and.property('$parent');
expect(point.aggConfigResult.$parent)
.to.have.property('value', x.at(i))
.and.property('$parent', undefined);
});
expect(series).to.have.property('label', 'Count');
expect(series.values).to.have.length(3);
});
it('handles a table with an x and two y aspects', function () {
const vis = new Vis(indexPattern, {
type: 'histogram',
aggs: [
{ type: 'avg', schema: 'metric', params: { field: 'bytes' } },
{ type: 'date_histogram', params: { field: '@timestamp', interval: 'hour' }, schema: 'segment' },
{ type: 'max', schema: 'metric', params: { field: 'bytes' } }
]
});
const avg = {
agg: vis.aggs[0],
col: { title: 'average', aggConfig: vis.aggs[0] },
at: function (i) { return 75.444 * (i + 1); }
const table = {
columns: [{ id: '0' }, { id: '1', name: 'Count-0' }, { id: '2', name: 'Count-1' }],
rows: [
{ '0': 1, '1': 200, '2': 300 },
{ '0': 2, '1': 200, '2': 300 },
{ '0': 3, '1': 200, '2': 300 },
],
};
const date = {
agg: vis.aggs[1],
col: { title: 'date', aggConfig: vis.aggs[1] },
at: function (i) { return moment().startOf('day').add(i, 'day').valueOf(); }
const dimensions = {
x: [{ accessor: 0, params: {} }],
y: [{ accessor: 1, params: {} }, { accessor: 2, params: {} }],
};
const max = {
agg: vis.aggs[2],
col: { title: 'maximum', aggConfig: vis.aggs[2] },
at: function (i) { return 100 * (i + 1); }
};
const rowCount = 3;
const table = { rows: [] };
table.columns = [ date.col, avg.col, max.col ];
_.times(rowCount, function (i) {
const dateResult = new AggConfigResult(date.agg, void 0, date.at(i));
const avgResult = new AggConfigResult(avg.agg, dateResult, avg.at(i));
const maxResult = new AggConfigResult(max.agg, dateResult, max.at(i));
table.rows.push([dateResult, avgResult, maxResult]);
});
const chartData = pointSeriesChartDataFromTable(table);
const chartData = pointSeriesChartDataFromTable(table, dimensions);
expect(chartData).to.be.an('object');
expect(chartData.series).to.be.an('array');
expect(chartData.series).to.have.length(2);
chartData.series.forEach(function (siri, i) {
const metric = i === 0 ? avg : max;
expect(siri).to.have.property('label', metric.col.label);
expect(siri.values).to.have.length(rowCount);
siri.values.forEach(function (point) {
expect(point).to.have.property('x');
expect(point.x).to.be.a('number');
expect(point).to.have.property('y');
expect(point.y).to.be.a('number');
expect(point).to.have.property('series', siri.label);
expect(point).to.have.property('aggConfigResult');
expect(point.aggConfigResult)
.to.be.a(AggConfigResult)
.and.have.property('aggConfig', metric.agg)
.and.have.property('value', point.y)
.and.to.have.property('$parent');
expect(point.aggConfigResult.$parent)
.to.be.an(AggConfigResult)
.and.have.property('aggConfig', date.agg);
});
expect(siri).to.have.property('label', `Count-${i}`);
expect(siri.values).to.have.length(3);
});
});
it('handles a table with an x, a series, and two y aspects', function () {
const vis = new Vis(indexPattern, {
type: 'histogram',
aggs: [
{ type: 'terms', schema: 'group', params: { field: 'extension' } },
{ type: 'avg', schema: 'metric', params: { field: 'bytes' } },
{ type: 'date_histogram', params: { field: '@timestamp', interval: 'hour' }, schema: 'segment' },
{ type: 'max', schema: 'metric', params: { field: 'bytes' } }
]
});
const extensions = ['php', 'jpg', 'gif', 'css'];
const term = {
agg: vis.aggs[0],
col: { title: 'extensions', aggConfig: vis.aggs[0] },
at: function (i) { return extensions[i % extensions.length]; }
const table = {
columns: [
{ id: '0', name: 'x' },
{ id: '1', name: 'series', fieldFormatter: _.identity },
{ id: '2', name: 'y1' },
{ id: '3', name: 'y2' }],
rows: [
{ '0': 1, '1': 0, '2': 300, '3': 400 },
{ '0': 1, '1': 1, '2': 300, '3': 400 },
{ '0': 2, '1': 0, '2': 300, '3': 400 },
{ '0': 2, '1': 1, '2': 300, '3': 400 },
],
};
const avg = {
agg: vis.aggs[1],
col: { title: 'average', aggConfig: vis.aggs[1] },
at: function (i) { return 75.444 * (i + 1); }
const dimensions = {
x: [{ accessor: 0, params: {} }],
series: [{ accessor: 1, params: {} }],
y: [{ accessor: 2, params: {} }, { accessor: 3, params: {} }],
};
const date = {
agg: vis.aggs[2],
col: { title: 'date', aggConfig: vis.aggs[2] },
at: function (i) { return moment().startOf('day').add(i, 'day').valueOf(); }
};
const max = {
agg: vis.aggs[3],
col: { title: 'maximum', aggConfig: vis.aggs[3] },
at: function (i) { return 100 * (i + 1); }
};
const metricCount = 2;
const rowsPerSegment = 2;
const rowCount = extensions.length * rowsPerSegment;
const table = { rows: [] };
table.columns = [ date.col, term.col, avg.col, max.col ];
_.times(rowCount, function (i) {
const dateResult = new AggConfigResult(date.agg, void 0, date.at(i));
const termResult = new AggConfigResult(term.agg, dateResult, term.at(i));
const avgResult = new AggConfigResult(avg.agg, termResult, avg.at(i));
const maxResult = new AggConfigResult(max.agg, termResult, max.at(i));
table.rows.push([dateResult, termResult, avgResult, maxResult]);
});
const chartData = pointSeriesChartDataFromTable(table);
const chartData = pointSeriesChartDataFromTable(table, dimensions);
expect(chartData).to.be.an('object');
expect(chartData.series).to.be.an('array');
// one series for each extension, and then one for each metric inside
expect(chartData.series).to.have.length(extensions.length * metricCount);
expect(chartData.series).to.have.length(4);
chartData.series.forEach(function (siri) {
// figure out the metric used to create this series
const metricAgg = siri.values[0].aggConfigResult.aggConfig;
const metric = avg.agg === metricAgg ? avg : max;
expect(siri.values).to.have.length(rowsPerSegment);
siri.values.forEach(function (point) {
expect(point).to.have.property('x');
expect(point.x).to.be.a('number');
expect(point).to.have.property('y');
expect(point.y).to.be.a('number');
expect(point).to.have.property('series');
expect(_.contains(extensions, point.series)).to.be.ok();
expect(point).to.have.property('aggConfigResult');
expect(point.aggConfigResult)
.to.be.a(AggConfigResult)
.and.have.property('aggConfig', metric.agg)
.and.have.property('value', point.y)
.and.to.have.property('$parent');
expect(point.aggConfigResult.$parent)
.to.be.an(AggConfigResult)
.and.have.property('aggConfig', term.agg);
});
expect(siri.values).to.have.length(2);
});
});
});

View file

@ -19,7 +19,6 @@
import moment from 'moment';
import _ from 'lodash';
import sinon from 'sinon';
import expect from 'expect.js';
import { orderedDateAxis } from '../_ordered_date_axis';
@ -32,17 +31,14 @@ describe('orderedDateAxis', function () {
}
},
chart: {
ordered: {},
aspects: {
x: {
aggConfig: {
fieldIsTimeField: _.constant(true),
buckets: {
getScaledDateFormat: _.constant('hh:mm:ss'),
getInterval: _.constant(moment.duration(15, 'm')),
getBounds: _.constant({ min: moment().subtract(15, 'm'), max: moment() })
}
x: [{
params: {
format: 'hh:mm:ss',
bounds: { min: moment().subtract(15, 'm').valueOf(), max: moment().valueOf() }
}
}
}]
}
}
};
@ -78,23 +74,16 @@ describe('orderedDateAxis', function () {
.to.have.property('date', true);
});
it('relies on agg.buckets for the interval', function () {
const args = _.cloneDeep(baseArgs);
const spy = sinon.spy(args.chart.aspects.x.aggConfig.buckets, 'getInterval');
orderedDateAxis(args.chart);
expect(spy).to.have.property('callCount', 1);
});
it('sets the min/max when the buckets are bounded', function () {
const args = _.cloneDeep(baseArgs);
orderedDateAxis(args.chart);
expect(moment.isMoment(args.chart.ordered.min)).to.be(true);
expect(moment.isMoment(args.chart.ordered.max)).to.be(true);
expect(args.chart.ordered).to.have.property('min');
expect(args.chart.ordered).to.have.property('max');
});
it('does not set the min/max when the buckets are unbounded', function () {
const args = _.cloneDeep(baseArgs);
args.chart.aspects.x.aggConfig.buckets.getBounds = _.constant();
args.chart.aspects.x[0].params.bounds = null;
orderedDateAxis(args.chart);
expect(args.chart.ordered).to.not.have.property('min');
expect(args.chart.ordered).to.not.have.property('max');

View file

@ -32,31 +32,21 @@ describe('tooltipFormatter', function () {
tooltipFormatter = Private(PointSeriesTooltipFormatter);
}));
function agg(name) {
return {
fieldFormatter: _.constant(function (v) { return '(' + v + ')'; }),
makeLabel: _.constant(name)
};
}
function cell($row, i) {
return $row.eq(i).text().trim();
}
const baseEvent = {
data: {
xAxisLabel: 'inner',
xAxisFormatter: _.identity,
yAxisLabel: 'middle',
yAxisFormatter: _.identity,
zAxisLabel: 'top',
zAxisFormatter: _.identity,
},
datum: {
aggConfigResult: {
aggConfig: agg('inner'),
value: 3,
$parent: {
aggConfig: agg('middle'),
value: 2,
$parent: {
aggConfig: agg('top'),
value: 1
}
}
},
x: 3, y: 2, z: 1,
extraMetrics: []
}
};
@ -69,14 +59,14 @@ describe('tooltipFormatter', function () {
const $row1 = $rows.eq(0).find('td');
expect(cell($row1, 0)).to.be('inner');
expect(cell($row1, 1)).to.be('(3)');
expect(cell($row1, 1)).to.be('3');
const $row2 = $rows.eq(1).find('td');
expect(cell($row2, 0)).to.be('middle');
expect(cell($row2, 1)).to.be('(2)');
expect(cell($row2, 1)).to.be('2');
const $row3 = $rows.eq(2).find('td');
expect(cell($row3, 0)).to.be('top');
expect(cell($row3, 1)).to.be('(1)');
expect(cell($row3, 1)).to.be('1');
});
});

View file

@ -17,7 +17,7 @@
* under the License.
*/
export function addToSiri(series, point, id, label, agg) {
export function addToSiri(series, point, id, label, formatter) {
id = id == null ? '' : id + '';
if (series.has(id)) {
@ -26,10 +26,10 @@ export function addToSiri(series, point, id, label, agg) {
}
series.set(id, {
id: id.split('-').pop(),
label: label == null ? id : label,
aggLabel: agg.type ? agg.type.makeLabel(agg) : label,
aggId: agg.parentId ? agg.parentId : agg.id,
count: 0,
values: [point]
values: [point],
yAxisFormatter: formatter,
});
}
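For readers of the addToSiri hunk above, a minimal usage sketch of the new signature; the Map, point, id, and formatter values below are invented for illustration and only mirror the shape of the entry the function builds:
const series = new Map();
const point = { x: 1, y: 30, series: 'css' };
const id = 'css-2';                          // '<series label>-<y accessor>'
const label = 'css';
const yFormatter = val => `${val}`;          // stand-in for the y aspect's fieldFormatter
if (series.has(id)) {
  series.get(id).values.push(point);
} else {
  series.set(id, {
    id: id.split('-').pop(),                 // keeps only the accessor part: '2'
    label: label == null ? id : label,
    count: 0,
    values: [point],
    yAxisFormatter: yFormatter,
  });
}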

View file

@ -17,28 +17,16 @@
* under the License.
*/
import { AggType } from '../../agg_types/agg_type';
import { i18n } from '@kbn/i18n';
const allAgg = new AggType({
name: 'all',
title: i18n.translate('common.ui.aggResponse.allDocsTitle', {
defaultMessage: 'All docs'
}),
ordered: false,
hasNoDsl: true
});
export function makeFakeXAspect() {
const fake = {
makeLabel: () => 'all',
fieldFormatter: () => '',
type: allAgg
};
return {
i: -1,
aggConfig: fake,
title: fake.makeLabel(),
accessor: -1,
title: i18n.translate('common.ui.aggResponse.allDocsTitle', {
defaultMessage: 'All docs'
}),
params: {},
fieldFormatter: () => '',
};
}

View file

@ -17,33 +17,9 @@
* under the License.
*/
import _ from 'lodash';
import { getFormat } from 'ui/visualize/loader/pipeline_helpers/utilities';
import { makeFakeXAspect } from './_fake_x_aspect';
const map = {
segment: 'x',
metric: 'y',
radius: 'z',
width: 'width',
group: 'series'
};
function columnToAspect(aspects, col, i) {
const schema = col.aggConfig.schema.name;
const name = map[schema];
if (!name) throw new TypeError('unknown schema name "' + schema + '"');
const aspect = {
i: i,
title: col.title,
aggConfig: col.aggConfig
};
if (!aspects[name]) aspects[name] = [];
aspects[name].push(aspect);
}
/**
* Identify and group the columns based on the aspect of the pointSeries
* they represent.
@ -52,22 +28,34 @@ function columnToAspect(aspects, col, i) {
* @return {object} - an object with a key for each aspect (see map). The values
* may be undefined, a single aspect, or an array of aspects.
*/
export function getAspects(table) {
const aspects = _(table.columns)
// write each column into the aspects under it's group
.transform(columnToAspect, {})
// unwrap groups that only have one value, and validate groups that have more
.transform(function (aspects, group, name) {
if ((name !== 'y' && name !== 'series') && group.length > 1) {
throw new TypeError('Only multiple metrics and series are supported in point series');
export function getAspects(table, dimensions) {
const aspects = {};
Object.keys(dimensions).forEach(name => {
const dimension = Array.isArray(dimensions[name]) ? dimensions[name] : [dimensions[name]];
dimension.forEach(d => {
if (!d) {
return;
}
aspects[name] = group.length > 1 ? group : group[0];
})
.value();
const column = table.columns[d.accessor];
if (!column) {
return;
}
const formatter = getFormat(d.format);
if (!aspects[name]) {
aspects[name] = [];
}
aspects[name].push({
accessor: column.id,
column: d.accessor,
title: column.name,
fieldFormatter: val => formatter.convert(val, 'text'),
params: d.params,
});
});
});
if (!aspects.x) {
aspects.x = makeFakeXAspect();
aspects.x = [makeFakeXAspect()];
}
return aspects;
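As a rough, self-contained sketch of the dimension-to-aspect mapping this hunk introduces (the sample table and dimensions are invented, and a plain String stand-in replaces the getFormat-based formatter):
const table = {
  columns: [{ id: '0', name: 'timestamp per hour' }, { id: '1', name: 'Count' }],
  rows: [{ '0': 1546300800000, '1': 42 }],
};
const dimensions = {
  x: [{ accessor: 0, params: { date: true } }],
  y: [{ accessor: 1, params: {} }],
};
const aspects = {};
Object.keys(dimensions).forEach(name => {
  dimensions[name].forEach(d => {
    const column = table.columns[d.accessor];
    if (!column) return;
    if (!aspects[name]) aspects[name] = [];
    aspects[name].push({
      accessor: column.id,                   // rows are keyed by column id
      column: d.accessor,                    // original column index
      title: column.name,
      fieldFormatter: val => String(val),    // stand-in for getFormat(d.format).convert
      params: d.params,
    });
  });
});
// aspects.x[0].accessor === '0'; aspects.y[0].title === 'Count'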

View file

@ -19,21 +19,49 @@
import _ from 'lodash';
function unwrap(aggConfigResult, def) {
return aggConfigResult ? aggConfigResult.value : def;
}
export function getPoint(x, series, yScale, row, y, z) {
const zRow = z && row[z.i];
const xRow = row[x.i];
export function getPoint(table, x, series, yScale, row, rowIndex, y, z) {
const zRow = z && row[z.accessor];
const xRow = x.accessor === -1 ? '_all' : row[x.accessor];
const yRow = row[y.accessor];
const point = {
x: unwrap(xRow, '_all'),
y: unwrap(row[y.i]),
z: zRow && unwrap(zRow),
aggConfigResult: row[y.i],
x: xRow,
y: yRow,
z: zRow,
extraMetrics: _.compact([zRow]),
yScale: yScale
yScale: yScale,
seriesRaw: series && {
table,
column: series[0].column,
row: rowIndex,
value: row[series[0].accessor],
},
xRaw: {
table,
column: x.column,
row: rowIndex,
value: xRow,
},
yRaw: {
table,
column: y.column,
row: rowIndex,
value: yRow,
},
zRaw: z && {
table,
column: z.column,
row: rowIndex,
value: zRow,
},
tableRaw: table.$parent && {
table: table.$parent.table,
column: table.$parent.column,
row: table.$parent.row,
value: table.$parent.key,
title: table.$parent.name,
},
parent: series ? series[0] : null,
};
if (point.y === 'NaN') {
@ -44,12 +72,10 @@ export function getPoint(x, series, yScale, row, y, z) {
if (series) {
const seriesArray = series.length ? series : [ series ];
point.aggConfig = seriesArray[0].aggConfig;
point.series = seriesArray.map(s => s.aggConfig.fieldFormatter()(unwrap(row[s.i]))).join(' - ');
point.series = seriesArray.map(s => s.fieldFormatter(row[s.accessor])).join(' - ');
} else if (y) {
// If the data is not split up with a series aspect, then
// each point's "series" becomes the y-agg that produced it
point.aggConfig = y.aggConfig;
point.series = y.title;
}
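To make the new accessor-based point shape concrete, a small illustrative example of the object getPoint now assembles for one row (values, titles, and column ids are made up; only the fields visible in the hunk are sketched):
const table = {
  columns: [{ id: '0' }, { id: '1' }, { id: '2' }],
  rows: [{ '0': 1, '1': 'css', '2': 30 }],
};
const x = { accessor: '0', column: 0 };
const seriesAspect = [{ accessor: '1', column: 1, fieldFormatter: val => String(val) }];
const y = { accessor: '2', column: 2, title: 'Average bytes' };
const rowIndex = 0;
const row = table.rows[rowIndex];
const point = {
  x: row[x.accessor],                                                             // 1
  y: row[y.accessor],                                                             // 30
  series: seriesAspect.map(s => s.fieldFormatter(row[s.accessor])).join(' - '),   // 'css'
  xRaw: { table, column: x.column, row: rowIndex, value: row[x.accessor] },
  yRaw: { table, column: y.column, row: rowIndex, value: row[y.accessor] },
  seriesRaw: { table, column: seriesAspect[0].column, row: rowIndex, value: row[seriesAspect[0].accessor] },
};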

View file

@ -21,28 +21,29 @@ import _ from 'lodash';
import { getPoint } from './_get_point';
import { addToSiri } from './_add_to_siri';
export function getSeries(rows, chart) {
export function getSeries(table, chart) {
const aspects = chart.aspects;
const multiY = Array.isArray(aspects.y);
const multiY = Array.isArray(aspects.y) && aspects.y.length > 1;
const yScale = chart.yScale;
const partGetPoint = _.partial(getPoint, aspects.x, aspects.series, yScale);
const partGetPoint = _.partial(getPoint, table, aspects.x[0], aspects.series, yScale);
let series = _(rows)
.transform(function (series, row) {
let series = _(table.rows)
.transform(function (series, row, rowIndex) {
if (!multiY) {
const point = partGetPoint(row, aspects.y, aspects.z);
if (point) addToSiri(series, point, point.series, point.series, aspects.y.aggConfig);
const point = partGetPoint(row, rowIndex, aspects.y[0], aspects.z);
const id = `${point.series}-${aspects.y[0].accessor}`;
if (point) addToSiri(series, point, id, point.series, aspects.y[0].fieldFormatter);
return;
}
aspects.y.forEach(function (y) {
const point = partGetPoint(row, y, aspects.z);
const point = partGetPoint(row, rowIndex, y, aspects.z);
if (!point) return;
// use the point's y-axis as its series by default,
// but augment that with series aspect if it's actually
// available
let seriesId = y.aggConfig.id;
let seriesId = y.accessor;
let seriesLabel = y.title;
if (aspects.series) {
@ -51,7 +52,7 @@ export function getSeries(rows, chart) {
seriesLabel = prefix + seriesLabel;
}
addToSiri(series, point, seriesId, seriesLabel, y.aggConfig);
addToSiri(series, point, seriesId, seriesLabel, y.fieldFormatter);
});
}, new Map())
@ -64,9 +65,8 @@ export function getSeries(rows, chart) {
let y;
if (firstVal) {
const agg = firstVal.aggConfigResult.aggConfig;
y = _.find(aspects.y, function (y) {
return y.aggConfig === agg;
return y.accessor === firstVal.accessor;
});
}
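As an illustrative example of the new chart input getSeries consumes (aspect arrays keyed by accessor rather than aggConfig; the titles, ids, and expected labels below are invented):
const chart = {
  aspects: {
    x: [{ accessor: '0', column: 0 }],
    series: [{ accessor: '1', column: 1, fieldFormatter: val => String(val) }],
    y: [
      { accessor: '2', column: 2, title: 'Count', fieldFormatter: val => String(val) },
      { accessor: '3', column: 3, title: 'Average', fieldFormatter: val => String(val) },
    ],
  },
};
// With rows whose series column holds 'css' and 'html', the grouping above is
// expected to yield one entry per (series value, metric) pair, labelled
// 'css: Count', 'css: Average', 'html: Count', 'html: Average', each carrying
// the matching y fieldFormatter as its yAxisFormatter.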

Some files were not shown because too many files have changed in this diff.