Mirror of https://github.com/elastic/kibana.git (synced 2025-04-23 17:28:26 -04:00)
[aggResponse/tabify] added tests
commit 665cbc013c
parent c4e83b7e65
8 changed files with 767 additions and 36 deletions
@@ -68,9 +68,7 @@ define(function (require) {
         throw new Error('attempted to split when splitting is disabled');
       }

-      _.pull(self.columns, _.find(self.columns, function (col) {
-        return col.aggConfig === agg;
-      }));
+      self._removeAggFromColumns(agg);

       buckets.forEach(function (bucket, key) {
         // find the existing split that we should extend
@@ -87,6 +85,28 @@ define(function (require) {
       });
     };

+    TabbedAggResponseWriter.prototype._removeAggFromColumns = function (agg) {
+      var i = _.findIndex(this.columns, function (col) {
+        return col.aggConfig === agg;
+      });
+
+      // we must have already removed this column
+      if (i === -1) return;
+
+      this.columns.splice(i, 1);
+
+      if (!this.vis.isHierarchical()) return;
+
+      // hierarchical vis creates additional columns for each bucket
+      // we will remove those too
+      var mCol = this.columns.splice(i, 1).pop();
+      var mI = _.findIndex(this.aggStack, function (agg) {
+        return agg === mCol.aggConfig;
+      });
+
+      if (mI > -1) this.aggStack.splice(mI, 1);
+    };
+
     /**
      * Push a value into the row, then run a block. Once the block is
      * complete the value is pulled from the stack.
test/unit/specs/components/agg_response/tabify/_buckets.js (new file, 59 lines)
@@ -0,0 +1,59 @@
define(function (require) {
  return ['Buckets wrapper', function () {
    var Buckets;

    beforeEach(module('kibana'));
    beforeEach(inject(function (Private, $injector) {
      Buckets = Private(require('components/agg_response/tabify/_buckets'));
    }));


    function test(aggResp, count, keys) {
      it('reads the length', function () {
        var buckets = new Buckets(aggResp);
        expect(buckets).to.have.length(count);
      });

      it('iterates properly, passing in the key', function () {
        var buckets = new Buckets(aggResp);
        var keysSent = [];
        buckets.forEach(function (bucket, key) {
          keysSent.push(key);
        });

        expect(keysSent).to.have.length(count);
        expect(keysSent).to.eql(keys);
      });
    }

    describe('with object style buckets', function () {
      var aggResp = {
        buckets: {
          '0-100': {},
          '100-200': {},
          '200-300': {}
        }
      };

      var count = 3;
      var keys = ['0-100', '100-200', '200-300'];

      test(aggResp, count, keys);
    });

    describe('with array style buckets', function () {
      var aggResp = {
        buckets: [
          { key: '0-100', value: {} },
          { key: '100-200', value: {} },
          { key: '200-300', value: {} }
        ]
      };

      var count = 3;
      var keys = ['0-100', '100-200', '200-300'];

      test(aggResp, count, keys);
    });
  }];
});
test/unit/specs/components/agg_response/tabify/_get_columns.js (new file, 124 lines)
@@ -0,0 +1,124 @@
define(function (require) {
  return ['get columns', function () {
    var getColumns;
    var Vis;
    var indexPattern;

    beforeEach(module('kibana'));
    beforeEach(inject(function (Private, $injector) {
      getColumns = Private(require('components/agg_response/tabify/_get_columns'));
      Vis = Private(require('components/vis/vis'));
      indexPattern = Private(require('fixtures/stubbed_logstash_index_pattern'));
    }));

    it('should inject a count metric if no aggs exist', function () {
      var vis = new Vis(indexPattern, {
        type: 'pie'
      });
      while (vis.aggs.length) vis.aggs.pop();
      var columns = getColumns(vis);

      expect(columns).to.have.length(1);
      expect(columns[0]).to.have.property('aggConfig');
      expect(columns[0].aggConfig.type).to.have.property('name', 'count');
    });

    it('should inject a count metric if only buckets exist', function () {
      var vis = new Vis(indexPattern, {
        type: 'pie',
        aggs: [
          { type: 'date_histogram', schema: 'segment', params: { field: '@timestamp' } }
        ]
      });

      var columns = getColumns(vis);

      expect(columns).to.have.length(2);
      expect(columns[1]).to.have.property('aggConfig');
      expect(columns[1].aggConfig.type).to.have.property('name', 'count');
    });

    it('should inject the metric after each bucket if the vis is hierarchical', function () {
      var vis = new Vis(indexPattern, {
        type: 'pie',
        aggs: [
          { type: 'date_histogram', schema: 'segment', params: { field: '@timestamp' } },
          { type: 'date_histogram', schema: 'segment', params: { field: '@timestamp' } },
          { type: 'date_histogram', schema: 'segment', params: { field: '@timestamp' } },
          { type: 'date_histogram', schema: 'segment', params: { field: '@timestamp' } }
        ]
      });

      var columns = getColumns(vis);

      expect(columns).to.have.length(8);
      columns.forEach(function (column, i) {
        expect(column).to.have.property('aggConfig');
        expect(column.aggConfig.type).to.have.property('name', i % 2 ? 'count' : 'date_histogram');
      });
    });

    it('should inject the multiple metrics after each bucket if the vis is hierarchical', function () {
      var vis = new Vis(indexPattern, {
        type: 'pie',
        aggs: [
          { type: 'date_histogram', schema: 'segment', params: { field: '@timestamp' } },
          { type: 'avg', schema: 'metric', params: { field: 'bytes' } },
          { type: 'date_histogram', schema: 'segment', params: { field: '@timestamp' } },
          { type: 'date_histogram', schema: 'segment', params: { field: '@timestamp' } },
          { type: 'sum', schema: 'metric', params: { field: 'bytes' } },
          { type: 'date_histogram', schema: 'segment', params: { field: '@timestamp' } }
        ]
      });

      var columns = getColumns(vis);

      function checkColumns(column, i) {
        expect(column).to.have.property('aggConfig');
        switch (i) {
          case 0:
            expect(column.aggConfig.type).to.have.property('name', 'date_histogram');
            break;
          case 1:
            expect(column.aggConfig.type).to.have.property('name', 'avg');
            break;
          case 2:
            expect(column.aggConfig.type).to.have.property('name', 'sum');
            break;
        }
      }

      expect(columns).to.have.length(12);
      for (var i = 0; i < columns.length; i += 3) {
        var counts = { buckets: 0, metrics: 0 };
        columns.slice(i, i + 3).forEach(checkColumns);
      }
    });

    it('should put all metrics at the end of the columns if the vis is not hierarchical', function () {
      var vis = new Vis(indexPattern, {
        type: 'histogram',
        aggs: [
          { type: 'date_histogram', schema: 'segment', params: { field: '@timestamp' } },
          { type: 'avg', schema: 'metric', params: { field: 'bytes' } },
          { type: 'date_histogram', schema: 'segment', params: { field: '@timestamp' } },
          { type: 'date_histogram', schema: 'segment', params: { field: '@timestamp' } },
          { type: 'sum', schema: 'metric', params: { field: 'bytes' } },
          { type: 'date_histogram', schema: 'segment', params: { field: '@timestamp' } }
        ]
      });

      var columns = getColumns(vis);
      expect(columns).to.have.length(6);

      // sum should be last
      expect(columns.pop().aggConfig.type).to.have.property('name', 'sum');
      // avg should be before that
      expect(columns.pop().aggConfig.type).to.have.property('name', 'avg');
      // the rest are date_histograms
      while (columns.length) {
        expect(columns.pop().aggConfig.type).to.have.property('name', 'date_histogram');
      }
    });
  }];
});
test/unit/specs/components/agg_response/tabify/_integration.js (new file, 114 lines)
@@ -0,0 +1,114 @@
define(function (require) {
  return ['tabifyAggResponse Simple Integration', function () {
    var _ = require('lodash');
    var fixtures = require('fixtures/fake_hierarchical_data');

    var Vis;
    var Buckets;
    var indexPattern;
    var tabifyAggResponse;

    beforeEach(module('kibana'));
    beforeEach(inject(function (Private, $injector) {
      tabifyAggResponse = Private(require('components/agg_response/tabify/tabify_agg_response'));
      Vis = Private(require('components/vis/vis'));
      indexPattern = Private(require('fixtures/stubbed_logstash_index_pattern'));
    }));

    function normalizeIds(vis) {
      vis.aggs.forEach(function (agg, i) {
        agg.id = 'agg_' + (i + 1);
      });
    }

    it('transforms a simple response properly', function () {
      var vis = new Vis(indexPattern, {
        type: 'histogram',
        aggs: []
      });
      normalizeIds(vis);

      var resp = tabifyAggResponse(vis, fixtures.metricOnly, { canSplit: false });

      expect(resp).to.not.have.property('tables');
      expect(resp).to.have.property('rows').and.property('columns');
      expect(resp.rows).to.have.length(1);
      expect(resp.columns).to.have.length(1);

      expect(resp.rows[0]).to.eql([1000]);
      expect(resp.columns[0]).to.have.property('aggConfig', vis.aggs[0]);
    });

    it('transforms a complex response properly', function () {
      var vis = new Vis(indexPattern, {
        type: 'pie',
        aggs: [
          { type: 'avg', schema: 'metric', params: { field: 'bytes' } },
          { type: 'terms', schema: 'split', params: { field: 'extension' } },
          { type: 'terms', schema: 'segment', params: { field: 'geo.src' } },
          { type: 'terms', schema: 'segment', params: { field: 'machine.os' } }
        ]
      });
      normalizeIds(vis);

      var avg = vis.aggs[0];
      var ext = vis.aggs[1];
      var src = vis.aggs[2];
      var os = vis.aggs[3];
      var esResp = _.cloneDeep(fixtures.threeTermBuckets);
      // remove the buckets for css in MX
      esResp.aggregations.agg_2.buckets[1].agg_3.buckets[0].agg_4.buckets = [];
      var resp = tabifyAggResponse(vis, esResp);

      function verifyExtensionSplit(tableGroup, key) {
        expect(tableGroup).to.have.property('tables');
        expect(tableGroup).to.have.property('aggConfig', ext);
        expect(tableGroup).to.have.property('key', key);
        expect(tableGroup.tables).to.have.length(1);

        tableGroup.tables.forEach(function (table) {
          verifyTable(table, key);
        });
      }

      function verifyTable(table, splitKey) {
        expect(table.columns).to.have.length(4);
        expect(table.columns[0]).to.have.property('aggConfig', src);
        expect(table.columns[1]).to.have.property('aggConfig', avg);
        expect(table.columns[2]).to.have.property('aggConfig', os);
        expect(table.columns[3]).to.have.property('aggConfig', avg);

        table.rows.forEach(function (row) {
          expect(row).to.have.length(4);

          // two character country code
          expect(row[0]).to.be.a('string');
          expect(row[0]).to.have.length(2);

          // average bytes
          expect(row[1]).to.be.a('number');
          expect(row[1] === 0 || row[1] > 1000).to.be.ok();

          if (splitKey === 'css' && row[0] === 'MX') {
            // removed these buckets, we should get empty values
            expect(row[2]).to.be('');
            expect(row[3]).to.be('');
          } else {
            // os
            expect(row[2]).to.match(/^(win|mac|linux)$/);

            // average bytes
            expect(row[3]).to.be.a('number');
            expect(row[3] === 0 || row[3] > 1000).to.be.ok();
          }
        });
      }

      expect(resp).to.have.property('tables');
      expect(resp.tables).to.have.length(3);
      verifyExtensionSplit(resp.tables[0], 'png');
      verifyExtensionSplit(resp.tables[1], 'css');
      verifyExtensionSplit(resp.tables[2], 'html');
    });
  }];
});
@@ -0,0 +1,312 @@
define(function (require) {
  return ['ResponseWriter class', function () {
    var _ = require('lodash');
    var Vis;
    var Table;
    var Buckets;
    var TableGroup;
    var indexPattern;
    var ResponseWriter;

    beforeEach(module('kibana'));
    beforeEach(inject(function (Private, $injector) {
      ResponseWriter = Private(require('components/agg_response/tabify/_response_writer'));
      TableGroup = Private(require('components/agg_response/tabify/_table_group'));
      Buckets = Private(require('components/agg_response/tabify/_buckets'));
      Table = Private(require('components/agg_response/tabify/_table'));
      Vis = Private(require('components/vis/vis'));
      indexPattern = Private(require('fixtures/stubbed_logstash_index_pattern'));
    }));

    describe('Constructor', function () {
      it('gets the columns for the vis', function () {
        var aggs = [
          { type: 'date_histogram', schema: 'segment', params: { field: '@timestamp' } },
          { type: 'terms', schema: 'segment', params: { field: 'extension' } },
          { type: 'avg', schema: 'metric', params: { field: '@timestamp' } }
        ];

        var flatVis = new Vis(indexPattern, { type: 'histogram', aggs: aggs });
        var hierVis = new Vis(indexPattern, { type: 'pie', aggs: aggs });

        var writer = new ResponseWriter(flatVis);
        expect(writer.columns).to.be.an('array');
        expect(writer.columns).to.have.length(3);

        writer = new ResponseWriter(hierVis);
        expect(writer.columns).to.be.an('array');
        expect(writer.columns).to.have.length(4);
      });

      it('collects the aggConfigs from each column in aggStack', function () {
        var vis = new Vis(indexPattern, {
          type: 'histogram',
          aggs: [
            { type: 'date_histogram', schema: 'segment', params: { field: '@timestamp' } },
            { type: 'terms', schema: 'segment', params: { field: 'extension' } },
            { type: 'avg', schema: 'metric', params: { field: '@timestamp' } }
          ]
        });

        var writer = new ResponseWriter(vis);
        expect(writer.aggStack).to.be.an('array');
        expect(writer.aggStack).to.have.length(3);
        expect(writer.aggStack[0].type.name).to.be('date_histogram');
        expect(writer.aggStack[1].type.name).to.be('terms');
        expect(writer.aggStack[2].type.name).to.be('avg');
      });

      it('sets canSplit=true by default', function () {
        var vis = new Vis(indexPattern, { type: 'histogram', aggs: [] });
        var writer = new ResponseWriter(vis);
        expect(writer).to.have.property('canSplit', true);
      });

      it('sets canSplit=false when config says to', function () {
        var vis = new Vis(indexPattern, { type: 'histogram', aggs: [] });
        var writer = new ResponseWriter(vis, { canSplit: false });
        expect(writer).to.have.property('canSplit', false);
      });

      it('starts off with a root TableGroup', function () {
        var vis = new Vis(indexPattern, { type: 'histogram', aggs: [] });

        var writer = new ResponseWriter(vis);
        expect(writer.root).to.be.a(TableGroup);
        expect(writer.splitStack).to.be.an('array');
        expect(writer.splitStack).to.have.length(1);
        expect(writer.splitStack[0]).to.be(writer.root);
      });
    });

    describe('#response()', function () {
      it('returns the root TableGroup if splitting', function () {
        var vis = new Vis(indexPattern, { type: 'histogram', aggs: [] });
        var writer = new ResponseWriter(vis);
        expect(writer.response()).to.be(writer.root);
      });

      it('returns the first table if not splitting', function () {
        var vis = new Vis(indexPattern, { type: 'histogram', aggs: [] });
        var writer = new ResponseWriter(vis, { canSplit: false });
        var table = writer._table();
        expect(writer.response()).to.be(table);
      });

      it('adds columns to all of the tables', function () {
        var vis = new Vis(indexPattern, {
          type: 'histogram',
          aggs: [
            { type: 'terms', params: { field: '_type' }, schema: 'split' },
            { type: 'count', schema: 'metric' }
          ]
        });
        var buckets = new Buckets({ buckets: [ { key: 'nginx' }, { key: 'apache' } ] });
        var writer = new ResponseWriter(vis);
        var tables = [];

        writer.split(vis.aggs[0], buckets, function () {
          writer.cell(100, function () {
            tables.push(writer.row());
          });
        });

        tables.forEach(function (table) {
          expect(table.columns == null).to.be(true);
        });

        var resp = writer.response();
        expect(resp).to.be.a(TableGroup);
        expect(resp.tables).to.have.length(2);

        var nginx = resp.tables.shift();
        expect(nginx).to.have.property('aggConfig', vis.aggs[0]);
        expect(nginx).to.have.property('key', 'nginx');
        expect(nginx.tables).to.have.length(1);
        nginx.tables.forEach(function (table) {
          expect(_.contains(tables, table)).to.be(true);
        });

        var apache = resp.tables.shift();
        expect(apache).to.have.property('aggConfig', vis.aggs[0]);
        expect(apache).to.have.property('key', 'apache');
        expect(apache.tables).to.have.length(1);
        apache.tables.forEach(function (table) {
          expect(_.contains(tables, table)).to.be(true);
        });

        tables.forEach(function (table) {
          expect(table.columns).to.be.an('array');
          expect(table.columns).to.have.length(1);
          expect(table.columns[0].aggConfig.type.name).to.be('count');
        });
      });
    });

    describe('#split()', function () {
      it('creates a table group, pushes that group onto the splitStack, calls the block, and removes the group from the stack',
      function () {
        var vis = new Vis(indexPattern, { type: 'histogram', aggs: [] });
        var writer = new ResponseWriter(vis);

        var table = writer._table();
        writer.cell(1, function () {
          writer.cell(2, function () {
            writer.cell(3, function () {
              writer.row();
            });
          });
        });

        expect(table.rows).to.have.length(1);
        expect(table.rows[0]).to.eql([1, 2, 3]);
      });

      it('will break if the user has specified that splitting is to be disabled', function () {
        var vis = new Vis(indexPattern, {
          type: 'histogram',
          aggs: [
            { type: 'terms', schema: 'split', params: { field: '_type' } },
            { type: 'count', schema: 'metric' }
          ]
        });
        var agg = vis.aggs.bySchemaName.split[0];
        var buckets = new Buckets({ buckets: [ { key: 'apache' } ]});
        var writer = new ResponseWriter(vis, { canSplit: false });

        expect(function () {
          writer.split(agg, buckets, _.noop);
        }).to.throwException(/splitting is disabled/);
      });
    });

    describe('#cell()', function () {
      it('logs a cell in the ResponseWriter\'s row buffer, calls the block arg, then removes the value from the buffer',
      function () {
        var vis = new Vis(indexPattern, { type: 'histogram', aggs: [] });
        var writer = new ResponseWriter(vis);

        expect(writer.rowBuffer).to.have.length(0);
        writer.cell(500, function () {
          expect(writer.rowBuffer).to.have.length(1);
          expect(writer.rowBuffer[0]).to.be(500);
        });
        expect(writer.rowBuffer).to.have.length(0);
      });
    });

    describe('#row()', function () {
      it('writes the ResponseWriter\'s internal rowBuffer into a table', function () {
        var vis = new Vis(indexPattern, { type: 'histogram', aggs: [] });
        var writer = new ResponseWriter(vis);

        var table = writer._table();
        writer.cell(1, function () {
          writer.cell(2, function () {
            writer.cell(3, function () {
              writer.row();
            });
          });
        });

        expect(table.rows).to.have.length(1);
        expect(table.rows[0]).to.eql([1, 2, 3]);
      });

      it('always writes to the table group at the top of the split stack', function () {
        var vis = new Vis(indexPattern, {
          type: 'histogram',
          aggs: [
            { type: 'terms', schema: 'split', params: { field: '_type' } },
            { type: 'terms', schema: 'split', params: { field: 'extension' } },
            { type: 'terms', schema: 'split', params: { field: 'machine.os' } },
            { type: 'count', schema: 'metric' }
          ]
        });
        var splits = vis.aggs.bySchemaName.split;

        var type = splits[0];
        var typeBuckets = new Buckets({ buckets: [ { key: 'nginx' }, { key: 'apache' } ] });

        var ext = splits[1];
        var extBuckets = new Buckets({ buckets: [ { key: 'jpg' }, { key: 'png' } ] });

        var os = splits[2];
        var osBuckets = new Buckets({ buckets: [ { key: 'windows' }, { key: 'mac' } ] });

        var writer = new ResponseWriter(vis);
        writer.split(type, typeBuckets, function () {
          writer.split(ext, extBuckets, function () {
            writer.split(os, osBuckets, function (bucket, key) {
              writer.cell(key === 'windows' ? 1 : 2, function () {
                writer.row();
              });
            });
          });
        });

        var resp = writer.response();
        var sum = 0;
        var tables = 0;
        (function recurse(t) {
          if (t.tables) {
            // table group
            t.tables.forEach(function (tt) {
              recurse(tt);
            });
          } else {
            tables += 1;
            // table
            t.rows.forEach(function (row) {
              row.forEach(function (cell) {
                sum += cell;
              });
            });
          }
        }(resp));

        expect(tables).to.be(8);
        expect(sum).to.be(12);
      });

      it('writes partial rows for hierarchical vis', function () {
        var vis = new Vis(indexPattern, {
          type: 'pie',
          aggs: [
            { type: 'terms', schema: 'segment', params: { field: '_type' }},
            { type: 'count', schema: 'metric' }
          ]
        });

        var writer = new ResponseWriter(vis);
        var table = writer._table();
        writer.cell('apache', function () {
          writer.row();
        });

        expect(table.rows).to.have.length(1);
        expect(table.rows[0]).to.eql(['apache', '']);
      });

      it('skips partial rows for non-hierarchical vis', function () {
        var vis = new Vis(indexPattern, {
          type: 'histogram',
          aggs: [
            { type: 'terms', schema: 'segment', params: { field: '_type' }},
            { type: 'count', schema: 'metric' }
          ]
        });

        var writer = new ResponseWriter(vis);
        var table = writer._table();
        writer.cell('apache', function () {
          writer.row();
        });

        expect(table.rows).to.have.length(0);
      });
    });

  }];
});
test/unit/specs/components/agg_response/tabify/_table.js (new file, 110 lines)
@@ -0,0 +1,110 @@
define(function (require) {
  return ['Table class', function () {
    var Table;

    beforeEach(module('kibana'));
    beforeEach(inject(function (Private, $injector) {
      Table = Private(require('components/agg_response/tabify/_table'));
    }));

    it('exposes rows array, but not the columns', function () {
      var table = new Table();
      expect(table.rows).to.be.an('array');
      expect(table.columns == null).to.be.ok();
    });

    describe('#aggConfig', function () {
      it('accepts a column from the table and returns its agg config', function () {
        var table = new Table();
        var football = {};
        var column = {
          aggConfig: football
        };

        expect(table.aggConfig(column)).to.be(football);
      });

      it('throws a TypeError if the column is malformed', function () {
        expect(function () {
          var notAColumn = {};
          (new Table()).aggConfig(notAColumn);
        }).to.throwException(TypeError);
      });
    });

    describe('#title', function () {
      it('returns nothing if the table is not part of a table group', function () {
        var table = new Table();
        expect(table.title()).to.be('');
      });

      it('returns the title of the TableGroup if the table is part of one', function () {
        var table = new Table();
        table.$parent = {
          title: 'TableGroup Title',
          tables: [table]
        };

        expect(table.title()).to.be('TableGroup Title');
      });
    });

    describe('#field', function () {
      it('accepts a column from the table and returns its field', function () {
        var table = new Table();
        var football = {};
        var column = {
          aggConfig: {
            params: {
              field: football
            }
          }
        };

        expect(table.field(column)).to.be(football);
      });

      it('returns nothing if the column does not have a field', function () {
        var table = new Table();
        var column = {
          aggConfig: {
            params: {}
          }
        };

        expect(table.field(column)).to.not.be.ok();
      });
    });

    describe('#fieldFormatter', function () {
      it('accepts a column from the table and returns its field\'s format converter', function () {
        var table = new Table();
        var football = {};
        var column = {
          aggConfig: {
            params: {
              field: {
                format: {
                  convert: football
                }
              }
            }
          }
        };

        expect(table.fieldFormatter(column)).to.be(football);
      });

      it('returns nothing if the column does not have a field', function () {
        var table = new Table();
        var column = {
          aggConfig: {
            params: {}
          }
        };

        expect(table.field(column)).to.not.be.ok();
      });
    });
  }];
});
@@ -0,0 +1,18 @@
define(function (require) {
  return ['Table Group class', function () {
    var TableGroup;

    beforeEach(module('kibana'));
    beforeEach(inject(function (Private, $injector) {
      TableGroup = Private(require('components/agg_response/tabify/_table_group'));
    }));

    it('exposes tables array and empty aggConfig, key and title', function () {
      var tableGroup = new TableGroup();
      expect(tableGroup.tables).to.be.an('array');
      expect(tableGroup.aggConfig).to.be(null);
      expect(tableGroup.key).to.be(null);
      expect(tableGroup.title).to.be(null);
    });
  }];
});
@@ -1,37 +1,11 @@
 define(function (require) {
-  var _ = require('lodash');
-
   describe('Tabify Agg Response', function () {
-    describe('result of a hierarchical response', function () {
-      var aggId = _.partial(_.uniqueId, '_agg_fixture');
-      var bucketKey = _.partial(_.uniqueId, '_bucket_key');
-      var docCount = _.partial(_.random, 0, 1000);
-
-      var tabifyAggResponse;
-      var indexPattern;
-      var Vis;
-
-      beforeEach(module('kibana'));
-      beforeEach(inject(function (Private) {
-        tabifyAggResponse = Private(require('components/agg_response/tabify/tabify_agg_response'));
-        indexPattern = Private(require('fixtures/stubbed_logstash_index_pattern'));
-        Vis = Private(require('components/vis/vis'));
-      }));
-
-      describe('with no aggs', function () {
-      });
-
-      describe('with one bucket and no metric', function () {
-      });
-
-      describe('with one bucket and one metric', function () {
-      });
-
-      describe('with three buckets and one metric', function () {
-      });
-
-      describe('with three buckets and three metrics', function () {
-      });
-    });
+    run(require('specs/components/agg_response/tabify/_get_columns'));
+    run(require('specs/components/agg_response/tabify/_buckets'));
+    run(require('specs/components/agg_response/tabify/_table'));
+    run(require('specs/components/agg_response/tabify/_table_group'));
+    run(require('specs/components/agg_response/tabify/_response_writer'));
+    run(require('specs/components/agg_response/tabify/_integration'));
+    function run(module) { describe(module[0], module[1]); }
   });
 });