Mirror of https://github.com/elastic/kibana.git
Hierarchial Data Builder
- Added hierarchialData attribute to VisType
- Created buildHierarchialData() method
- Moved pie tooltipFormatter to _hierarchial_tooltip_formatter.js
- Added initial testing structure for buildHierarchialData()
- Added tests
- Closes #1682
Parent: 9e0e973ca0
Commit: 6778d813a8
19 changed files with 930 additions and 7 deletions
@@ -14,10 +14,11 @@ define(function (require) {
      this.icon = opts.icon;
      this.vislibParams = opts.vislibParams || {};
      this.responseConverter = opts.responseConverter || HistogramConverter;
      this.hierarchialData = opts.hierarchialData || false;
      this.listeners = opts.listeners || {};
      this.schemas = opts.schemas || new VisTypeSchemas();
    }

    return VisType;
  };
});
@@ -14,6 +14,7 @@ define(function (require) {
        addLegend: true
      },
      responseConverter: PieConverter,
      hierarchialData: true,
      schemas: new Schemas([
        {
          group: 'metrics',
@@ -21,7 +22,6 @@ define(function (require) {
          title: 'Slice Size',
          min: 1,
          max: 1,
          aggFilter: 'count',
          defaults: [
            { schema: 'metric', type: 'count' }
          ]
@@ -46,4 +46,4 @@ define(function (require) {
      ])
    });
  };
});
src/kibana/components/visualize/_array_to_linked_list.js (new file, 13 lines)
@@ -0,0 +1,13 @@
define(function (require) {
  var _ = require('lodash');
  return function (buckets) {
    var previous;
    _.each(buckets, function (bucket) {
      if (previous) {
        previous._next = bucket;
      }
      previous = bucket;
    });
    return buckets;
  };
});
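For orientation, a minimal usage sketch of this helper (hypothetical bucket objects; assumes the module above has been loaded as arrayToLinkedList):

// Hypothetical bucket aggConfigs, in schema order.
var buckets = [{ id: 'agg_2' }, { id: 'agg_3' }, { id: 'agg_4' }];
arrayToLinkedList(buckets);
// The array is mutated in place and returned: each entry now points at its
// successor, so buckets[0]._next === buckets[1] and buckets[1]._next === buckets[2];
// the last entry gets no _next property.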
src/kibana/components/visualize/_build_hierarchial_data.js (new file, 74 lines)
@@ -0,0 +1,74 @@
define(function (require) {
  return function buildHierarchialDataProvider(Private) {
    var _ = require('lodash');
    var buildSplit = require('components/visualize/_build_split');
    var extractBuckets = require('components/visualize/_extract_buckets');
    var createRawData = require('components/visualize/_create_raw_data');
    var arrayToLinkedList = require('components/visualize/_array_to_linked_list');
    var tooltipFormatter = Private(require('components/visualize/_hierarchial_tooltip_formatter'));

    return function (vis, resp) {
      // Create a reference to the buckets
      var buckets = vis.aggs.bySchemaGroup.buckets;

      // Find the metric so it's easier to reference.
      // TODO: Change this to support multiple metrics.
      var metric = vis.aggs.bySchemaGroup.metrics[0];

      // Link each agg to the next agg. This will be used
      // to identify the next bucket aggregation.
      buckets = arrayToLinkedList(buckets);

      // Create the raw data to be used in the spy panel
      var raw = createRawData(vis, resp);

      // If buckets is falsy then we should just return the aggs
      if (!buckets) {
        var value = resp.aggregations
          && resp.aggregations[metric.id]
          && resp.aggregations[metric.id].value
          || resp.hits.total;
        return {
          hits: resp.hits.total,
          raw: raw,
          names: ['_all'],
          tooltipFormatter: tooltipFormatter(raw.columns),
          slices: {
            children: [
              { name: '_all', size: value }
            ]
          }
        };
      }

      var firstAgg = buckets[0];
      var aggData = resp.aggregations[firstAgg.id];

      // If the firstAgg is a split then we need to map
      // the split aggregations into rows.
      if (firstAgg.schema.name === 'split') {
        var rows = _.map(extractBuckets(aggData), function (bucket) {
          var agg = firstAgg._next;
          var split = buildSplit(agg, metric, bucket[agg.id]);
          // Since splits display labels we need to set it.
          split.label = bucket.key + ': ' + firstAgg.params.field.name;
          split.tooltipFormatter = tooltipFormatter(raw.columns);
          return split;
        });
        return { hits: resp.hits.total, rows: rows, raw: raw };
      // otherwise we can start at the first bucket.
      } else {
        return (function () {
          var split = buildSplit(firstAgg, metric, aggData);
          split.hits = resp.hits.total;
          split.raw = raw;
          split.tooltipFormatter = tooltipFormatter(raw.columns);
          return split;
        })();
      }

    };
  };
});
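As a rough sketch of the shapes this provider produces (inferred from the code above; the sample values come from the fixtures added later in this commit and are illustrative only):

// Shape returned when the vis has no bucket aggs (metric only):
var metricOnlyResult = {
  hits: 1000,
  raw: { /* columns + rows from createRawData */ },
  names: ['_all'],
  tooltipFormatter: function (event) { /* renders the pie tooltip */ },
  slices: { children: [{ name: '_all', size: 412032 }] }
};

// Shape returned when the first bucket agg is a 'split': one row per split bucket,
// each row being buildSplit(...) output plus a label ('png: extension') and a tooltipFormatter.
var splitResult = {
  hits: 1000,
  rows: [ /* buildSplit results */ ],
  raw: { /* createRawData(vis, resp) */ }
};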
src/kibana/components/visualize/_build_split.js (new file, 15 lines)
@@ -0,0 +1,15 @@
define(function (require) {
  var transformer = require('components/visualize/_transform_aggregation');
  var collectKeys = require('components/visualize/_collect_keys');
  return function (agg, metric, aggData) {
    // Create the split structure
    var split = { label: '', slices: { children: [] } };

    // Transform the aggData into splits
    split.slices.children = transformer(agg, metric, aggData);

    // Collect all the keys
    split.names = collectKeys(split.slices.children);
    return split;
  };
});
src/kibana/components/visualize/_collect_keys.js (new file, 12 lines)
@@ -0,0 +1,12 @@
define(function (require) {
  var _ = require('lodash');
  return function collectKeys(children) {
    var nextChildren = _.pluck(children, 'children');
    var keys = _.pluck(children, 'name');
    return _(nextChildren)
      .map(collectKeys)
      .flatten()
      .union(keys)
      .value();
  };
});
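A quick sketch of the traversal (hypothetical input; assumes the module above is loaded as collectKeys and lodash is available):

var children = [
  { name: 'png', children: [{ name: 'IT' }, { name: 'US' }] },
  { name: 'css', children: [{ name: 'US' }] }
];
// Child names are collected depth-first, flattened, then unioned with this
// level's names, so duplicates collapse:
collectKeys(children); // => ['IT', 'US', 'png', 'css']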
src/kibana/components/visualize/_create_raw_data.js (new file, 82 lines)
@@ -0,0 +1,82 @@
define(function (require) {
  var _ = require('lodash');
  var extractBuckets = require('components/visualize/_extract_buckets');
  return function (vis, resp) {

    // Create the initial results structure
    var results = { rows: [] };

    // Create a reference to the buckets and metrics
    var metrics = vis.aggs.bySchemaGroup.metrics;
    var buckets = vis.aggs.bySchemaGroup.buckets;
    var aggs = [];
    if (buckets) aggs.push(buckets);
    if (metrics) aggs.push(metrics);

    // Create the columns
    results.columns = _(aggs).flatten().map(function (agg) {
      return {
        categoryName: agg.schema.name,
        id: agg.id,
        aggConfig: agg,
        aggType: agg.type,
        field: agg.params.field,
        label: agg.type.makeLabel(agg)
      };
    }).value();

    // if there are no buckets then we need to just set the value and return
    if (!buckets) {
      var value = resp.aggregations
        && resp.aggregations[metrics[0].id]
        && resp.aggregations[metrics[0].id].value
        || resp.hits.total;
      results.rows.push([value]);
      return results;
    }

    /**
     * Walk the buckets and create records for each leaf
     * @param {aggConfig} agg The aggConfig for the current level
     * @param {object} data The aggregation object
     * @param {array} [record] The record that will eventually get pushed to the rows
     * @returns {void}
     */
    function walkBuckets(agg, data, record) {
      if (!_.isArray(record)) {
        record = [];
      }

      // iterate through all the buckets
      _.each(extractBuckets(data[agg.id]), function (bucket) {

        var _record = _.flatten([record, bucket.key]);

        // If there is another agg to call we need to check to see if it has
        // buckets. If it does then we need to keep on walking the tree.
        // This is where the recursion happens.
        if (agg._next) {
          var nextBucket = bucket[agg._next.id];
          if (nextBucket && nextBucket.buckets) {
            walkBuckets(agg._next, bucket, _record);
          }
        }
        // if there are no more aggs to walk then we need to write each metric
        // to the record and push the record to the rows.
        else {
          _.each(metrics, function (metric) {
            var value = bucket[metric.id] && bucket[metric.id].value || bucket.doc_count;
            _record.push(value);
          });
          results.rows.push(_record);
        }
      });
    }

    // Start walking the buckets at the beginning of the aggregations object.
    walkBuckets(buckets[0], resp.aggregations);

    return results;
  };
});
src/kibana/components/visualize/_extract_buckets.js (new file, 15 lines)
@@ -0,0 +1,15 @@
define(function (require) {
  var _ = require('lodash');
  return function (bucket) {
    if (bucket && _.isPlainObject(bucket.buckets)) {
      return _.map(bucket.buckets, function (value, key) {
        var item = _.cloneDeep(value);
        item.key = key;
        return item;
      });

    } else {
      return bucket && bucket.buckets || [];
    }
  };
});
src/kibana/components/visualize/_hierarchial_tooltip_formatter.js (new file, 53 lines)
@@ -0,0 +1,53 @@
define(function (require) {
  return function HierarchialTooltipFormaterProvider($rootScope, $compile, $sce) {
    var _ = require('lodash');
    var $ = require('jquery');
    var $tooltip = $(require('text!components/vis_types/tooltips/pie.html'));
    var $tooltipScope = $rootScope.$new();
    $compile($tooltip)($tooltipScope);

    return function (columns) {
      return function (event) {
        var datum = event.point;
        var parent;
        var sum;

        // the sum of values at all levels/generations is the same, but levels
        // are separated by their parents so the root is the simplest to find
        for (parent = datum; parent; parent = parent.parent) {
          sum = parent.value;
        }

        var rows = $tooltipScope.rows = [];
        for (parent = datum; parent.parent; parent = parent.parent) {
          var i = parent.depth - 1;
          var col = columns[i];

          // field/agg details
          var group = (col.field && col.field.name) || col.label || ('level ' + datum.depth);

          // field value that defines the bucket
          var bucket = parent.name;
          if (col.field) bucket = col.field.format.convert(bucket);

          // metric for the bucket
          var val = parent.value;

          rows.unshift({
            spacer: $sce.trustAsHtml(_.repeat(' ', i)),
            field: group,
            bucket: bucket,
            metric: val + ' (' + Math.round((parent.value / sum) * 100) + '%)'
          });
        }

        $tooltipScope.metricCol = _.find(columns, { categoryName: 'metric' });

        $tooltipScope.$apply();
        return $tooltip[0].outerHTML;
      };

    };

  };
});
src/kibana/components/visualize/_transform_aggregation.js (new file, 29 lines)
@@ -0,0 +1,29 @@
define(function (require) {
  var _ = require('lodash');
  var extractBuckets = require('components/visualize/_extract_buckets');
  return function transformAggregation(agg, metric, aggData) {
    return _.map(extractBuckets(aggData), function (bucket) {
      // Pick the appropriate value, if the metric doesn't exist then we just
      // use the count.
      var value = bucket[metric.id] && bucket[metric.id].value || bucket.doc_count;

      // Create the new branch record
      var branch = {
        name: bucket.key,
        size: value,
        aggConfig: agg
      };

      // If the next bucket exists and it has children then we need to
      // transform it as well. This is where the recursion happens.
      if (agg._next) {
        var nextBucket = bucket[agg._next.id];
        if (nextBucket && nextBucket.buckets) {
          branch.children = transformAggregation(agg._next, metric, nextBucket);
        }
      }

      return branch;
    });
  };
});
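For reference, a small sketch of the branch tree this produces for a two-level terms response (hypothetical agg/metric/bucket objects, mirroring the spec fixture further down):

var agg = { id: 'agg_2', _next: { id: 'agg_3' } };
var metric = { id: 'agg_1' };
var aggData = {
  buckets: [
    { key: 'foo', doc_count: 2, agg_3: { buckets: [{ key: 'win', doc_count: 1 }] } }
  ]
};
// transformAggregation(agg, metric, aggData) yields:
// [{ name: 'foo', size: 2, aggConfig: agg,
//    children: [{ name: 'win', size: 1, aggConfig: agg._next }] }]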
@@ -9,6 +9,7 @@ define(function (require) {
    var _ = require('lodash');
    var visTypes = Private(require('components/vis_types/index'));
    var buildChartData = Private(require('components/visualize/_build_chart_data'));
    var buildHierarchialData = Private(require('components/visualize/_build_hierarchial_data'));

    var notify = new Notifier({
      location: 'Visualize'
@@ -116,8 +117,15 @@ define(function (require) {
    }));

    $scope.$watch('esResp', prereq(function (resp, prevResp) {
      var fn;
      if (!resp) return;
      $scope.chartData = buildChartData($scope.vis, resp);
      if ($scope.vis.type.hierarchialData) {
        fn = buildHierarchialData;
      } else {
        fn = buildChartData;
      }
      var chartData = fn($scope.vis, resp);
      $scope.chartData = chartData;
    }));

    $scope.$watch('chartData', function (chartData) {
@@ -141,4 +149,4 @@ define(function (require) {
      }
    };
  });
});
src/server/routes/plugins.rb (new file, 0 lines)
test/unit/fixtures/fake_hierarchial_data.js (new file, 228 lines)
@@ -0,0 +1,228 @@
define(function (require) {
  var data = { };

  data.metricOnly = {
    hits: { total: 1000, hits: [], max_score: 0 },
    aggregations: {
      agg_1: { value: 412032 },
    }
  };

  data.threeTermBuckets = {
    hits: { total: 1000, hits: [], max_score: 0 },
    aggregations: {
      agg_2: {
        buckets: [
          {
            key: 'png',
            doc_count: 50,
            agg_1: { value: 412032 },
            agg_3: {
              buckets: [
                {
                  key: 'IT',
                  doc_count: 10,
                  agg_1: { value: 9299 },
                  agg_4: {
                    buckets: [
                      { key: 'win', doc_count: 4, agg_1: { value: 4992 } },
                      { key: 'mac', doc_count: 6, agg_1: { value: 5892 } }
                    ]
                  }
                },
                {
                  key: 'US',
                  doc_count: 20,
                  agg_1: { value: 8293 },
                  agg_4: {
                    buckets: [
                      { key: 'linux', doc_count: 12, agg_1: { value: 3992 } },
                      { key: 'mac', doc_count: 8, agg_1: { value: 3029 } }
                    ]
                  }
                }
              ]
            }
          },
          {
            key: 'css',
            doc_count: 20,
            agg_1: { value: 412032 },
            agg_3: {
              buckets: [
                {
                  key: 'MX',
                  doc_count: 7,
                  agg_1: { value: 9299 },
                  agg_4: {
                    buckets: [
                      { key: 'win', doc_count: 3, agg_1: { value: 4992 } },
                      { key: 'mac', doc_count: 4, agg_1: { value: 5892 } }
                    ]
                  }
                },
                {
                  key: 'US',
                  doc_count: 13,
                  agg_1: { value: 8293 },
                  agg_4: {
                    buckets: [
                      { key: 'linux', doc_count: 12, agg_1: { value: 3992 } },
                      { key: 'mac', doc_count: 1, agg_1: { value: 3029 } }
                    ]
                  }
                }
              ]
            }
          },
          {
            key: 'html',
            doc_count: 90,
            agg_1: { value: 412032 },
            agg_3: {
              buckets: [
                {
                  key: 'CN',
                  doc_count: 85,
                  agg_1: { value: 9299 },
                  agg_4: {
                    buckets: [
                      { key: 'win', doc_count: 46, agg_1: { value: 4992 } },
                      { key: 'mac', doc_count: 39, agg_1: { value: 5892 } }
                    ]
                  }
                },
                {
                  key: 'FR',
                  doc_count: 15,
                  agg_1: { value: 8293 },
                  agg_4: {
                    buckets: [
                      { key: 'win', doc_count: 3, agg_1: { value: 3992 } },
                      { key: 'mac', doc_count: 12, agg_1: { value: 3029 } }
                    ]
                  }
                }
              ]
            }
          }
        ]
      }
    }
  };

  data.oneRangeBucket = {
    'took': 35,
    'timed_out': false,
    '_shards': {
      'total': 1,
      'successful': 1,
      'failed': 0
    },
    'hits': {
      'total': 6039,
      'max_score': 0,
      'hits': []
    },
    'aggregations': {
      'agg_2': {
        'buckets': {
          '0.0-1000.0': {
            'from': 0,
            'from_as_string': '0.0',
            'to': 1000,
            'to_as_string': '1000.0',
            'doc_count': 606
          },
          '1000.0-2000.0': {
            'from': 1000,
            'from_as_string': '1000.0',
            'to': 2000,
            'to_as_string': '2000.0',
            'doc_count': 298
          }
        }
      }
    }
  };

  data.oneFilterBucket = {
    'took': 11,
    'timed_out': false,
    '_shards': {
      'total': 1,
      'successful': 1,
      'failed': 0
    },
    'hits': {
      'total': 6005,
      'max_score': 0,
      'hits': []
    },
    'aggregations': {
      'agg_2': {
        'buckets': {
          '_type:apache': {
            'doc_count': 4844
          },
          '_type:nginx': {
            'doc_count': 1161
          }
        }
      }
    }
  };

  data.oneHistogramBucket = {
    'took': 37,
    'timed_out': false,
    '_shards': {
      'total': 6,
      'successful': 6,
      'failed': 0
    },
    'hits': {
      'total': 49208,
      'max_score': 0,
      'hits': []
    },
    'aggregations': {
      'agg_2': {
        'buckets': [
          {
            'key_as_string': '2014-09-28T00:00:00.000Z',
            'key': 1411862400000,
            'doc_count': 8247
          },
          {
            'key_as_string': '2014-09-29T00:00:00.000Z',
            'key': 1411948800000,
            'doc_count': 8184
          },
          {
            'key_as_string': '2014-09-30T00:00:00.000Z',
            'key': 1412035200000,
            'doc_count': 8269
          },
          {
            'key_as_string': '2014-10-01T00:00:00.000Z',
            'key': 1412121600000,
            'doc_count': 8141
          },
          {
            'key_as_string': '2014-10-02T00:00:00.000Z',
            'key': 1412208000000,
            'doc_count': 8148
          },
          {
            'key_as_string': '2014-10-03T00:00:00.000Z',
            'key': 1412294400000,
            'doc_count': 8219
          }
        ]
      }
    }
  };

  return data;
});
@@ -12,8 +12,10 @@ define(function (require) {
      { type: 'geo_point', indexed: true, analyzed: true, count: 0, name: 'point' },
      { type: 'geo_shape', indexed: true, analyzed: true, count: 0, name: 'area' },
      { type: 'string', indexed: true, analyzed: true, count: 0, name: 'extension' },
      { type: 'string', indexed: true, analyzed: true, count: 0, name: 'machine.os' },
      { type: 'string', indexed: true, analyzed: true, count: 0, name: 'geo.src' },
      { type: 'string', indexed: true, analyzed: true, count: 0, name: '_type' },
      { type: 'conflict', indexed: false, analyzed: false, count: 0, name: 'custom_user_field' }
    ]);
  };
});
@@ -141,7 +141,11 @@
    'specs/components/agg_types/param_types/index',
    'specs/components/vis/index',
    'specs/components/reflow_watcher',
    'specs/components/clipboard'
    'specs/components/clipboard',
    'specs/visualize/_build_hierarchial_data',
    'specs/visualize/_extract_buckets',
    'specs/visualize/_transform_aggregation',
    'specs/visualize/_create_raw_data'
  ], function (kibana, sinon) {
    kibana.load(function () {
      var xhr = sinon.useFakeXMLHttpRequest();
test/unit/specs/visualize/_build_hierarchial_data.js (new file, 190 lines)
@@ -0,0 +1,190 @@
define(function (require) {
  var _ = require('lodash');
  var fixtures = require('fixtures/fake_hierarchial_data');

  var AggConfigs;
  var Vis;
  var indexPattern;
  var buildHierarchialData;

  describe('buildHierarchialData()', function () {

    beforeEach(module('kibana'));
    beforeEach(inject(function (Private) {
      Vis = Private(require('components/vis/vis'));
      AggConfigs = Private(require('components/vis/_agg_configs'));
      indexPattern = Private(require('fixtures/stubbed_logstash_index_pattern'));
      buildHierarchialData = Private(require('components/visualize/_build_hierarchial_data'));
    }));

    describe('metric only', function () {
      var vis, results;

      beforeEach(function () {
        var id = 1;
        vis = new Vis(indexPattern, {
          type: 'pie',
          aggs: [
            { type: 'avg', schema: 'metric', params: { field: 'bytes' } },
          ]
        });
        vis.aggs[0].id = 'agg_1';
        results = buildHierarchialData(vis, fixtures.metricOnly);

      });

      it('should set the slices with one child with the name _all', function () {
        expect(results).to.have.property('slices');
        expect(results.slices).to.have.property('children');
        expect(results.slices.children).to.have.length(1);
        expect(results.slices.children[0]).to.have.property('name', '_all');
        expect(results.slices.children[0]).to.have.property('size', 412032);
        expect(results).to.have.property('names');
        expect(results.names).to.eql(['_all']);
        expect(results).to.have.property('raw');
        expect(results.raw).to.have.property('rows');
        expect(results.raw.rows).to.have.length(1);
        expect(results.raw.rows).to.eql([[412032]]);
      });

    });

    describe('threeTermBuckets', function () {
      var vis, results;

      beforeEach(function () {
        var id = 1;
        vis = new Vis(indexPattern, {
          type: 'pie',
          aggs: [
            { type: 'avg', schema: 'metric', params: { field: 'bytes' } },
            { type: 'terms', schema: 'split', params: { field: 'extension' }},
            { type: 'terms', schema: 'segment', params: { field: 'machine.os' }},
            { type: 'terms', schema: 'segment', params: { field: 'geo.src' }}
          ]
        });
        // We need to set the aggs to a known value.
        _.each(vis.aggs, function (agg) { agg.id = 'agg_' + id++; });
        results = buildHierarchialData(vis, fixtures.threeTermBuckets);
      });

      it('should set the hits attribute for the results', function () {
        expect(results).to.have.property('rows');
        _.each(results.rows, function (item) {
          expect(item).to.have.property('names');
          expect(item).to.have.property('slices');
          expect(item.slices).to.have.property('children');
        });
        expect(results).to.have.property('raw');
      });

    });

    describe('oneHistogramBucket', function () {
      var vis, results;

      beforeEach(function () {
        var id = 1;
        vis = new Vis(indexPattern, {
          type: 'pie',
          aggs: [
            {
              type: 'count',
              schema: 'metric'
            },
            { type: 'histogram', schema: 'segment', params: { field: 'bytes', interval: 8192 }}
          ]
        });
        // We need to set the aggs to a known value.
        _.each(vis.aggs, function (agg) { agg.id = 'agg_' + id++; });
        results = buildHierarchialData(vis, fixtures.oneHistogramBucket);
      });

      it('should set the hits attribute for the results', function () {
        expect(results).to.have.property('slices');
        expect(results.slices).to.property('children');
        expect(results).to.have.property('names');
        expect(results.names).to.have.length(6);
        expect(results).to.have.property('raw');
      });

    });

    describe('oneRangeBucket', function () {
      var vis, results;

      beforeEach(function () {
        var id = 1;
        vis = new Vis(indexPattern, {
          type: 'pie',
          aggs: [
            {
              type: 'count',
              schema: 'metric'
            },
            {
              type: 'range',
              schema: 'segment',
              params: {
                field: 'bytes',
                ranges: [
                  { from: 0, to: 1000 },
                  { from: 1000, to: 2000 }
                ]
              }
            }
          ]
        });
        // We need to set the aggs to a known value.
        _.each(vis.aggs, function (agg) { agg.id = 'agg_' + id++; });
        results = buildHierarchialData(vis, fixtures.oneRangeBucket);
      });

      it('should set the hits attribute for the results', function () {
        expect(results).to.have.property('slices');
        expect(results.slices).to.property('children');
        expect(results).to.have.property('names');
        expect(results.names).to.have.length(2);
        expect(results).to.have.property('raw');
      });

    });

    describe('oneFilterBucket', function () {
      var vis, results;

      beforeEach(function () {
        var id = 1;
        vis = new Vis(indexPattern, {
          type: 'pie',
          aggs: [
            { type: 'count', schema: 'metric' },
            { type: 'filters', schema: 'segment', params: {
              filters: [
                { input: { query: { query_string: { query: '_type:apache' } } } },
                { input: { query: { query_string: { query: '_type:nginx' } } } }
              ]
            }
            }
          ]
        });
        // We need to set the aggs to a known value.
        _.each(vis.aggs, function (agg) { agg.id = 'agg_' + id++; });
        results = buildHierarchialData(vis, fixtures.oneFilterBucket);
      });

      it('should set the hits attribute for the results', function () {
        expect(results).to.have.property('slices');
        expect(results).to.have.property('names');
        expect(results.names).to.have.length(2);
        expect(results).to.have.property('raw');
      });

    });

  });
});
test/unit/specs/visualize/_create_raw_data.js (new file, 74 lines)
@@ -0,0 +1,74 @@
define(function (require) {
  var _ = require('lodash');
  var fixtures = require('fixtures/fake_hierarchial_data');
  var createRawData = require('components/visualize/_create_raw_data');
  var arrayToLinkedList = require('components/visualize/_array_to_linked_list');

  var AggConfigs;
  var Vis;
  var indexPattern;

  describe('buildHierarchialData()', function () {
    describe('createRawData()', function () {
      var vis, results;

      beforeEach(module('kibana'));

      beforeEach(inject(function (Private) {
        Vis = Private(require('components/vis/vis'));
        AggConfigs = Private(require('components/vis/_agg_configs'));
        indexPattern = Private(require('fixtures/stubbed_logstash_index_pattern'));
      }));

      beforeEach(function () {
        var id = 1;
        vis = new Vis(indexPattern, {
          type: 'pie',
          aggs: [
            { type: 'avg', schema: 'metric', params: { field: 'bytes' } },
            { type: 'terms', schema: 'split', params: { field: 'extension' }},
            { type: 'terms', schema: 'segment', params: { field: 'machine.os' }},
            { type: 'terms', schema: 'segment', params: { field: 'geo.src' }}
          ]
        });
        var buckets = arrayToLinkedList(vis.aggs.bySchemaGroup.buckets);
        // We need to set the aggs to a known value.
        _.each(vis.aggs, function (agg) { agg.id = 'agg_' + id++; });
        results = createRawData(vis, fixtures.threeTermBuckets);
      });

      it('should have columns set', function () {
        expect(results).to.have.property('columns');
        expect(results.columns).to.have.length(4);
        _.each(results.columns, function (column) {
          expect(column).to.have.property('aggConfig');
          var agg = column.aggConfig;
          expect(column).to.have.property('categoryName', agg.schema.name);
          expect(column).to.have.property('id', agg.id);
          expect(column).to.have.property('aggType', agg.type);
          expect(column).to.have.property('field', agg.params.field);
          expect(column).to.have.property('label', agg.type.makeLabel(agg));
        });
      });

      it('should have rows set', function () {
        expect(results).to.have.property('rows');
        expect(results.rows).to.eql([
          ['png', 'IT', 'win', 4992],
          ['png', 'IT', 'mac', 5892],
          ['png', 'US', 'linux', 3992],
          ['png', 'US', 'mac', 3029],
          ['css', 'MX', 'win', 4992],
          ['css', 'MX', 'mac', 5892],
          ['css', 'US', 'linux', 3992],
          ['css', 'US', 'mac', 3029],
          ['html', 'CN', 'win', 4992],
          ['html', 'CN', 'mac', 5892],
          ['html', 'FR', 'win', 3992],
          ['html', 'FR', 'mac', 3029]
        ]);
      });

    });
  });
});
test/unit/specs/visualize/_extract_buckets.js (new file, 45 lines)
@@ -0,0 +1,45 @@
define(function (require) {
  var extractBuckets = require('components/visualize/_extract_buckets');

  describe('buildHierarchialData()', function () {
    describe('extractBuckets()', function () {

      it('should normalize a bucket object into an array', function () {

        var bucket = {
          buckets: {
            foo: { doc_count: 1 },
            bar: { doc_count: 2 }
          }
        };

        var buckets = extractBuckets(bucket);
        expect(buckets).to.be.an(Array);
        expect(buckets).to.have.length(2);
        expect(buckets[0]).to.have.property('key', 'foo');
        expect(buckets[0]).to.have.property('doc_count', 1);
        expect(buckets[1]).to.have.property('key', 'bar');
        expect(buckets[1]).to.have.property('doc_count', 2);
      });

      it('should return an empty array for undefined buckets', function () {
        var buckets = extractBuckets();
        expect(buckets).to.be.an(Array);
        expect(buckets).to.have.length(0);
      });

      it('should return the bucket array', function () {
        var bucket = {
          buckets: [
            { key: 'foo', doc_count: 1 },
            { key: 'bar', doc_count: 2 }
          ]
        };
        var buckets = extractBuckets(bucket);
        expect(buckets).to.be.an(Array);
        expect(buckets).to.be(bucket.buckets);
      });

    });
  });
});
test/unit/specs/visualize/_transform_aggregation.js (new file, 78 lines)
@@ -0,0 +1,78 @@
define(function (require) {
  describe('buildHierarchialData()', function () {
    describe('transformAggregation()', function () {

      var tranform = require('components/visualize/_transform_aggregation');
      var fixture = {};
      fixture.agg = { id: 'agg_2', name: 'test', _next: { id: 'agg_3', name: 'example' } };
      fixture.metric = { id: 'agg_1' };
      fixture.aggData = {
        buckets: [
          { key: 'foo', doc_count: 2, agg_3: { buckets: [ { key: 'win', doc_count: 1 }, { key: 'mac', doc_count: 1 }]}},
          { key: 'bar', doc_count: 4, agg_3: { buckets: [ { key: 'win', doc_count: 2 }, { key: 'mac', doc_count: 2 }]}}
        ]
      };

      it('should return an array of objects with the doc_count as the size if the metric does not exist', function () {
        var agg = { id: 'agg_2', name: 'test' };
        var aggData = {
          buckets: [
            { key: 'foo', doc_count: 1 },
            { key: 'bar', doc_count: 2 }
          ]
        };

        var children = tranform(agg, fixture.metric, aggData);
        expect(children).to.be.an(Array);
        expect(children).to.have.length(2);
        expect(children[0]).to.have.property('size', 1);
        expect(children[1]).to.have.property('size', 2);
      });

      it('should return an array of objects with the metric agg value as the size', function () {
        var agg = { id: 'agg_2', name: 'test' };
        var aggData = {
          buckets: [
            { key: 'foo', doc_count: 1, agg_1: { value: 3 } },
            { key: 'bar', doc_count: 2, agg_1: { value: 4 } }
          ]
        };

        var children = tranform(agg, fixture.metric, aggData);
        expect(children).to.be.an(Array);
        expect(children).to.have.length(2);
        expect(children[0]).to.have.property('size', 3);
        expect(children[1]).to.have.property('size', 4);
      });

      it('should create two levels of metrics', function () {
        var children = tranform(fixture.agg, fixture.metric, fixture.aggData);
        expect(children).to.be.an(Array);
        expect(children).to.have.length(2);
        expect(children[0]).to.have.property('children');
        expect(children[1]).to.have.property('children');
        expect(children[0]).to.have.property('aggConfig', fixture.agg);
        expect(children[1]).to.have.property('aggConfig', fixture.agg);
        expect(children[0].children).to.have.length(2);
        expect(children[1].children).to.have.length(2);
        expect(children[0]).to.have.property('name', 'foo');
        expect(children[0]).to.have.property('size', 2);
        expect(children[1]).to.have.property('name', 'bar');
        expect(children[1]).to.have.property('size', 4);
        expect(children[0].children[0]).to.have.property('size', 1);
        expect(children[0].children[0]).to.have.property('aggConfig', fixture.agg.agg_3);
        expect(children[0].children[0]).to.have.property('name', 'win');
        expect(children[0].children[1]).to.have.property('size', 1);
        expect(children[0].children[1]).to.have.property('aggConfig', fixture.agg.agg_3);
        expect(children[0].children[1]).to.have.property('name', 'mac');
        expect(children[1].children[0]).to.have.property('size', 2);
        expect(children[1].children[0]).to.have.property('aggConfig', fixture.agg.agg_3);
        expect(children[1].children[0]).to.have.property('name', 'win');
        expect(children[1].children[1]).to.have.property('size', 2);
        expect(children[1].children[1]).to.have.property('aggConfig', fixture.agg.agg_3);
        expect(children[1].children[1]).to.have.property('name', 'mac');
      });

    });
  });
});