mirror of
https://github.com/elastic/kibana.git
synced 2025-04-24 01:38:56 -04:00
fixed conflicts
This commit is contained in:
commit
217ab428b0
43 changed files with 2284 additions and 275 deletions
2
TODOS.md
2
TODOS.md
|
@ -10,8 +10,6 @@
|
|||
- a legit way to update the index pattern
|
||||
- **[src/kibana/apps/settings/sections/indices/_create.js](https://github.com/elasticsearch/kibana4/blob/master/src/kibana/apps/settings/sections/indices/_create.js)**
|
||||
- we should probably display a message of some kind
|
||||
- **[src/kibana/components/agg_types/buckets/terms.js](https://github.com/elasticsearch/kibana4/blob/master/src/kibana/components/agg_types/buckets/terms.js)**
|
||||
- We need more than just _count here.
|
||||
- **[src/kibana/components/index_patterns/_mapper.js](https://github.com/elasticsearch/kibana4/blob/master/src/kibana/components/index_patterns/_mapper.js)**
|
||||
- Change index to be the resolved in some way, last three months, last hour, last year, whatever
|
||||
- **[src/kibana/components/visualize/visualize.js](https://github.com/elasticsearch/kibana4/blob/master/src/kibana/components/visualize/visualize.js)**
|
||||
|
|
|
@ -44,7 +44,8 @@
|
|||
"FileSaver": "*",
|
||||
"elasticsearch": "*",
|
||||
"bluebird": "~2.1.3",
|
||||
"lesshat": "~3.0.2"
|
||||
"lesshat": "~3.0.2",
|
||||
"Faker": "~1.1.0"
|
||||
},
|
||||
"devDependencies": {}
|
||||
}
|
||||
|
|
|
@ -1,23 +1,24 @@
|
|||
define(function (require) {
|
||||
return function DiscoverSegmentedFetch(es, Private, Promise, Notifier) {
|
||||
var activeReq = null;
|
||||
var notifyEvent;
|
||||
var searchPromise;
|
||||
var getStateFromRequest = Private(require('components/courier/fetch/strategy/search')).getSourceStateFromRequest;
|
||||
var _ = require('lodash');
|
||||
var moment = require('moment');
|
||||
|
||||
var segmentedFetch = {};
|
||||
var searchStrategy = Private(require('components/courier/fetch/strategy/search'));
|
||||
var eventName = 'segmented fetch';
|
||||
|
||||
var notify = new Notifier({
|
||||
location: 'Segmented Fetch'
|
||||
});
|
||||
|
||||
segmentedFetch.abort = function () {
|
||||
activeReq = null;
|
||||
searchPromise.abort();
|
||||
clearNotifyEvent();
|
||||
};
|
||||
// var segmentedFetch = {};
|
||||
function segmentedFetch(searchSource) {
|
||||
this.searchSource = searchSource;
|
||||
this.queue = [];
|
||||
this.completedQueue = [];
|
||||
this.requestHandlers = {};
|
||||
this.activeRequest = null;
|
||||
this.notifyEvent = null;
|
||||
this.lastRequestPromise = Promise.resolve();
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch search results, but segment by index name.
|
||||
|
@ -29,39 +30,114 @@ define(function (require) {
|
|||
* in decening order, this should be set to descending so that the data comes in its
|
||||
* proper order, otherwize indices will be fetched ascending
|
||||
*
|
||||
* // all callbacks can return a promise to delay furthur processing
|
||||
* // all callbacks can return a promise to delay further processing
|
||||
* @param {function} opts.first - a function that will be called for the first segment
|
||||
* @param {function} opts.each - a function that will be called for each segment
|
||||
* @param {function} opts.eachMerged - a function that will be called with the merged result on each segment
|
||||
* @param {function} opts.status - a function that will be called for each segment and given the process status
|
||||
*
|
||||
* @return {Promise}
|
||||
*/
|
||||
segmentedFetch.fetch = function (opts) {
|
||||
segmentedFetch.prototype.fetch = function (opts) {
|
||||
var self = this;
|
||||
var req;
|
||||
opts = opts || {};
|
||||
var searchSource = opts.searchSource;
|
||||
var direction = opts.direction;
|
||||
var limitSize = false;
|
||||
var remainingSize = false;
|
||||
|
||||
notifyEvent = notify.event('segmented fetch');
|
||||
self._stopRequest();
|
||||
|
||||
if (opts.totalSize) {
|
||||
limitSize = true;
|
||||
remainingSize = opts.totalSize;
|
||||
return (self.lastRequestPromise = self.lastRequestPromise.then(function () {
|
||||
// keep an internal record of the attached handlers
|
||||
self._setRequestHandlers(opts);
|
||||
|
||||
return Promise.try(function () {
|
||||
return self._extractQueue(opts.direction);
|
||||
})
|
||||
.then(function () {
|
||||
req = self._createRequest();
|
||||
return req;
|
||||
})
|
||||
.then(function (req) {
|
||||
return self._startRequest(req);
|
||||
})
|
||||
.then(function () {
|
||||
return self._executeRequest(req, opts);
|
||||
})
|
||||
.then(function () {
|
||||
return self._stopRequest();
|
||||
});
|
||||
}));
|
||||
};
|
||||
|
||||
segmentedFetch.prototype.abort = function () {
|
||||
this._stopRequest();
|
||||
return this.lastRequestPromise;
|
||||
};
|
||||
|
||||
segmentedFetch.prototype._startRequest = function (req) {
|
||||
var self = this;
|
||||
self.requestStats = {
|
||||
took: 0,
|
||||
hits: {
|
||||
hits: [],
|
||||
total: 0,
|
||||
max_score: 0
|
||||
}
|
||||
};
|
||||
|
||||
self._setRequest(req);
|
||||
self.notifyEvent = notify.event(eventName);
|
||||
};
|
||||
|
||||
segmentedFetch.prototype._stopRequest = function () {
|
||||
var self = this;
|
||||
|
||||
self._setRequest();
|
||||
self._clearNotification();
|
||||
if (self.searchPromise && 'abort' in self.searchPromise) {
|
||||
self.searchPromise.abort();
|
||||
}
|
||||
};
|
||||
|
||||
var req = searchSource._createRequest();
|
||||
req.moment = moment();
|
||||
req.source.activeFetchCount += 1;
|
||||
segmentedFetch.prototype._setRequest = function (req) {
|
||||
req = req || null;
|
||||
this.activeRequest = req;
|
||||
};
|
||||
|
||||
// track the req out of scope so that while we are itterating we can
|
||||
// ensure we are still relevant
|
||||
activeReq = req;
|
||||
segmentedFetch.prototype._clearNotification = function () {
|
||||
var self = this;
|
||||
if (_.isFunction(self.notifyEvent)) {
|
||||
self.notifyEvent();
|
||||
}
|
||||
};
|
||||
|
||||
var queue = searchSource.get('index').toIndexList();
|
||||
var total = queue.length;
|
||||
var active = null;
|
||||
var complete = [];
|
||||
segmentedFetch.prototype._setRequestHandlers = function (handlers) {
|
||||
this.requestHandlers = {
|
||||
first: handlers.first,
|
||||
each: handlers.each,
|
||||
eachMerged: handlers.eachMerged,
|
||||
status: handlers.status,
|
||||
};
|
||||
};
|
||||
|
||||
segmentedFetch.prototype._statusReport = function (active) {
|
||||
var self = this;
|
||||
|
||||
if (!self.requestHandlers.status) return;
|
||||
|
||||
var status = {
|
||||
total: self.queue.length,
|
||||
complete: self.completedQueue.length,
|
||||
remaining: self.queue.length,
|
||||
active: active
|
||||
};
|
||||
self.requestHandlers.status(status);
|
||||
|
||||
return status;
|
||||
};
|
||||
|
||||
segmentedFetch.prototype._extractQueue = function (direction) {
|
||||
var self = this;
|
||||
var queue = self.searchSource.get('index').toIndexList();
|
||||
|
||||
if (!_.isArray(queue)) {
|
||||
queue = [queue];
|
||||
|
@ -71,109 +147,156 @@ define(function (require) {
|
|||
queue = queue.reverse();
|
||||
}
|
||||
|
||||
var i = -1;
|
||||
var merged = {
|
||||
took: 0,
|
||||
hits: {
|
||||
hits: [],
|
||||
total: 0,
|
||||
max_score: 0
|
||||
}
|
||||
};
|
||||
|
||||
function reportStatus() {
|
||||
if (!opts.status) return;
|
||||
opts.status({
|
||||
total: total,
|
||||
complete: complete.length,
|
||||
remaining: queue.length,
|
||||
active: active
|
||||
});
|
||||
}
|
||||
|
||||
reportStatus();
|
||||
getStateFromRequest(req)
|
||||
.then(function (state) {
|
||||
return (function recurse() {
|
||||
var index = queue.shift();
|
||||
active = index;
|
||||
|
||||
reportStatus();
|
||||
|
||||
if (limitSize) {
|
||||
state.body.size = remainingSize;
|
||||
}
|
||||
req.state = state;
|
||||
|
||||
return execSearch(index, state)
|
||||
.then(function (resp) {
|
||||
// abort if fetch is called twice quickly
|
||||
if (req !== activeReq) return;
|
||||
|
||||
// a response was swallowed intentionally. Try the next one
|
||||
if (!resp) {
|
||||
if (queue.length) return recurse();
|
||||
else return done();
|
||||
}
|
||||
|
||||
// increment i after we are sure that we have a valid response
|
||||
// so that we always call opts.first()
|
||||
i++;
|
||||
|
||||
var start; // promise that starts the chain
|
||||
if (i === 0 && _.isFunction(opts.first)) {
|
||||
start = Promise.try(opts.first, [resp, req]);
|
||||
} else {
|
||||
start = Promise.resolve();
|
||||
}
|
||||
|
||||
if (limitSize) {
|
||||
remainingSize -= resp.hits.hits.length;
|
||||
}
|
||||
|
||||
return start.then(function () {
|
||||
var prom = each(merged, resp);
|
||||
return prom;
|
||||
})
|
||||
.then(function () {
|
||||
if (_.isFunction(opts.each)) return opts.each(resp, req);
|
||||
})
|
||||
.then(function () {
|
||||
var mergedCopy = _.omit(merged, '_bucketIndex');
|
||||
req.resp = mergedCopy;
|
||||
|
||||
if (_.isFunction(opts.eachMerged)) {
|
||||
// resolve with a "shallow clone" that omits the _aggIndex
|
||||
// which helps with watchers and protects the index
|
||||
return opts.eachMerged(mergedCopy, req);
|
||||
}
|
||||
})
|
||||
.then(function () {
|
||||
complete.push(index);
|
||||
if (queue.length) return recurse();
|
||||
return done();
|
||||
});
|
||||
});
|
||||
}());
|
||||
})
|
||||
.then(req.defer.resolve, req.defer.reject);
|
||||
|
||||
function done() {
|
||||
clearNotifyEvent();
|
||||
req.complete = true;
|
||||
req.ms = req.moment.diff() * -1;
|
||||
req.source.activeFetchCount -= 1;
|
||||
return (i + 1);
|
||||
}
|
||||
|
||||
return req.defer.promise;
|
||||
return self.queue = queue;
|
||||
};
|
||||
|
||||
function each(merged, resp) {
|
||||
merged.took += resp.took;
|
||||
merged.hits.total = Math.max(merged.hits.total, resp.hits.total);
|
||||
merged.hits.max_score = Math.max(merged.hits.max_score, resp.hits.max_score);
|
||||
[].push.apply(merged.hits.hits, resp.hits.hits);
|
||||
segmentedFetch.prototype._createRequest = function () {
|
||||
var self = this;
|
||||
var req = self.searchSource._createRequest();
|
||||
req.moment = moment();
|
||||
req.source.activeFetchCount += 1;
|
||||
return req;
|
||||
};
|
||||
|
||||
segmentedFetch.prototype._executeSearch = function (index, state) {
|
||||
var resolve, reject;
|
||||
|
||||
this.searchPromise = new Promise(function () {
|
||||
resolve = arguments[0];
|
||||
reject = arguments[1];
|
||||
});
|
||||
|
||||
var clientPromise = es.search({
|
||||
index: index,
|
||||
type: state.type,
|
||||
ignoreUnavailable: true,
|
||||
body: state.body
|
||||
});
|
||||
|
||||
this.searchPromise.abort = function () {
|
||||
clientPromise.abort();
|
||||
resolve(false);
|
||||
};
|
||||
|
||||
clientPromise.then(resolve)
|
||||
.catch(function (err) {
|
||||
// don't throw ClusterBlockException errors
|
||||
if (err.status === 403 && err.message.match(/ClusterBlockException.+index closed/)) {
|
||||
resolve(false);
|
||||
} else {
|
||||
reject(err);
|
||||
}
|
||||
});
|
||||
|
||||
return this.searchPromise;
|
||||
};
|
||||
|
||||
segmentedFetch.prototype._executeRequest = function (req, opts) {
|
||||
var self = this;
|
||||
var complete = [];
|
||||
var remainingSize = false;
|
||||
|
||||
if (opts.totalSize) {
|
||||
remainingSize = opts.totalSize;
|
||||
}
|
||||
|
||||
// initial status report
|
||||
self._statusReport(null);
|
||||
|
||||
return searchStrategy.getSourceStateFromRequest(req)
|
||||
.then(function (state) {
|
||||
var loopCount = -1;
|
||||
return self._processQueue(req, state, remainingSize, loopCount);
|
||||
})
|
||||
.then(function (count) {
|
||||
return req.defer.resolve(count);
|
||||
})
|
||||
.catch(function (err) {
|
||||
req.defer.reject(err);
|
||||
return err;
|
||||
});
|
||||
};
|
||||
|
||||
segmentedFetch.prototype._processQueue = function (req, state, remainingSize, loopCount) {
|
||||
var self = this;
|
||||
var index = self.queue.shift();
|
||||
|
||||
// abort if request changed (fetch is called twice quickly)
|
||||
if (req !== self.activeRequest) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (remainingSize !== false) {
|
||||
state.body.size = remainingSize;
|
||||
}
|
||||
|
||||
req.state = state;
|
||||
|
||||
// update the status on every iteration
|
||||
self._statusReport(index);
|
||||
|
||||
return self._executeSearch(index, state)
|
||||
.then(function (resp) {
|
||||
// a response was swallowed intentionally. Try the next one
|
||||
if (!resp) {
|
||||
if (self.queue.length) return self._processQueue(req, state, remainingSize, loopCount);
|
||||
else return self._processQueueComplete(req, loopCount);
|
||||
}
|
||||
|
||||
// increment loopCount after we are sure that we have a valid response
|
||||
// so that we always call self.requestHandlers.first()
|
||||
loopCount++;
|
||||
|
||||
var start; // promise that starts the chain
|
||||
if (loopCount === 0 && _.isFunction(self.requestHandlers.first)) {
|
||||
start = Promise.try(self.requestHandlers.first, [resp, req]);
|
||||
} else {
|
||||
start = Promise.resolve();
|
||||
}
|
||||
|
||||
if (remainingSize !== false) {
|
||||
remainingSize -= resp.hits.hits.length;
|
||||
}
|
||||
|
||||
return start.then(function () {
|
||||
var prom = mergeRequestStats(self.requestStats, resp);
|
||||
return prom;
|
||||
})
|
||||
.then(function () {
|
||||
if (_.isFunction(self.requestHandlers.each)) {
|
||||
return self.requestHandlers.each(resp, req);
|
||||
}
|
||||
})
|
||||
.then(function () {
|
||||
var mergedCopy = _.omit(self.requestStats, '_bucketIndex');
|
||||
req.resp = mergedCopy;
|
||||
|
||||
if (_.isFunction(self.requestHandlers.eachMerged)) {
|
||||
// resolve with a "shallow clone" that omits the _aggIndex
|
||||
// which helps with watchers and protects the index
|
||||
return self.requestHandlers.eachMerged(mergedCopy, req);
|
||||
}
|
||||
})
|
||||
.then(function () {
|
||||
self.completedQueue.push(index);
|
||||
if (self.queue.length) return self._processQueue(req, state, remainingSize, loopCount);
|
||||
return self._processQueueComplete(req, loopCount);
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
segmentedFetch.prototype._processQueueComplete = function (req, loopCount) {
|
||||
req.complete = true;
|
||||
req.ms = req.moment.diff() * -1;
|
||||
req.source.activeFetchCount -= 1;
|
||||
return (loopCount + 1);
|
||||
};
|
||||
|
||||
function mergeRequestStats(requestStats, resp) {
|
||||
requestStats.took += resp.took;
|
||||
requestStats.hits.total = Math.max(requestStats.hits.total, resp.hits.total);
|
||||
requestStats.hits.max_score = Math.max(requestStats.hits.max_score, resp.hits.max_score);
|
||||
[].push.apply(requestStats.hits.hits, resp.hits.hits);
|
||||
|
||||
if (!resp.aggregations) return;
|
||||
|
||||
|
@ -181,53 +304,29 @@ define(function (require) {
|
|||
return key.substr(0, 4) === 'agg_';
|
||||
});
|
||||
|
||||
if (!aggKey) throw new Error('aggKey not found in response: ' + Object.keys(resp.aggregations));
|
||||
|
||||
// start merging aggregations
|
||||
if (!merged.aggregations) {
|
||||
merged.aggregations = {};
|
||||
merged.aggregations[aggKey] = {
|
||||
if (!requestStats.aggregations) {
|
||||
requestStats.aggregations = {};
|
||||
requestStats.aggregations[aggKey] = {
|
||||
buckets: []
|
||||
};
|
||||
merged._bucketIndex = {};
|
||||
requestStats._bucketIndex = {};
|
||||
}
|
||||
|
||||
resp.aggregations[aggKey].buckets.forEach(function (bucket) {
|
||||
var mbucket = merged._bucketIndex[bucket.key];
|
||||
var mbucket = requestStats._bucketIndex[bucket.key];
|
||||
if (mbucket) {
|
||||
mbucket.doc_count += bucket.doc_count;
|
||||
return;
|
||||
}
|
||||
|
||||
mbucket = merged._bucketIndex[bucket.key] = bucket;
|
||||
merged.aggregations[aggKey].buckets.push(mbucket);
|
||||
mbucket = requestStats._bucketIndex[bucket.key] = bucket;
|
||||
requestStats.aggregations[aggKey].buckets.push(mbucket);
|
||||
});
|
||||
}
|
||||
|
||||
function execSearch(index, state) {
|
||||
searchPromise = es.search({
|
||||
index: index,
|
||||
type: state.type,
|
||||
ignoreUnavailable: true,
|
||||
body: state.body
|
||||
});
|
||||
|
||||
// don't throw ClusterBlockException errors
|
||||
searchPromise.catch(function (err) {
|
||||
if (err.status === 403 && err.message.match(/ClusterBlockException.+index closed/)) {
|
||||
return false;
|
||||
} else {
|
||||
throw err;
|
||||
}
|
||||
});
|
||||
|
||||
return searchPromise;
|
||||
}
|
||||
|
||||
function clearNotifyEvent() {
|
||||
if (_.isFunction(notifyEvent)) {
|
||||
notifyEvent();
|
||||
}
|
||||
}
|
||||
|
||||
return segmentedFetch;
|
||||
};
|
||||
});
|
|
@ -45,12 +45,12 @@ define(function (require) {
|
|||
}
|
||||
});
|
||||
|
||||
|
||||
app.controller('discover', function ($scope, config, courier, $route, $window, savedSearches, savedVisualizations,
|
||||
Notifier, $location, globalState, appStateFactory, timefilter, Promise, Private) {
|
||||
|
||||
var Vis = Private(require('components/vis/vis'));
|
||||
var segmentedFetch = $scope.segmentedFetch = Private(require('apps/discover/_segmented_fetch'));
|
||||
var SegmentedFetch = Private(require('apps/discover/_segmented_fetch'));
|
||||
|
||||
var HitSortFn = Private(require('apps/discover/_hit_sort_fn'));
|
||||
|
||||
var notify = new Notifier({
|
||||
|
@ -61,16 +61,17 @@ define(function (require) {
|
|||
var savedSearch = $route.current.locals.savedSearch;
|
||||
$scope.$on('$destroy', savedSearch.destroy);
|
||||
|
||||
// abort any seqmented query requests when leaving discover
|
||||
$scope.$on('$routeChangeStart', function () {
|
||||
segmentedFetch.abort();
|
||||
});
|
||||
|
||||
// list of indexPattern id's
|
||||
var indexPatternList = $route.current.locals.indexList;
|
||||
|
||||
// the actual courier.SearchSource
|
||||
$scope.searchSource = savedSearch.searchSource;
|
||||
var segmentedFetch = $scope.segmentedFetch = new SegmentedFetch($scope.searchSource);
|
||||
|
||||
// abort any seqmented query requests when leaving discover
|
||||
$scope.$on('$routeChangeStart', function () {
|
||||
segmentedFetch.abort();
|
||||
});
|
||||
|
||||
// Manage state & url state
|
||||
var initialQuery = $scope.searchSource.get('query');
|
||||
|
@ -238,7 +239,11 @@ define(function (require) {
|
|||
if (!init.complete) return;
|
||||
|
||||
$scope.updateTime();
|
||||
if (_.isEmpty($state.columns)) refreshColumns();
|
||||
|
||||
if (_.isEmpty($state.columns)) {
|
||||
refreshColumns();
|
||||
}
|
||||
|
||||
$scope.updateDataSource()
|
||||
.then(setupVisualization)
|
||||
.then(function () {
|
||||
|
@ -270,7 +275,6 @@ define(function (require) {
|
|||
}
|
||||
|
||||
return segmentedFetch.fetch({
|
||||
searchSource: $scope.searchSource,
|
||||
totalSize: sortBy === 'non-time' ? false : totalSize,
|
||||
direction: sortBy === 'time' ? sort[1] : 'desc',
|
||||
status: function (status) {
|
||||
|
@ -630,7 +634,7 @@ define(function (require) {
|
|||
});
|
||||
|
||||
$scope.searchSource.aggs(function () {
|
||||
return $scope.vis.aggs.toDSL();
|
||||
return $scope.vis.aggs.toDsl();
|
||||
});
|
||||
|
||||
// stash this promise so that other calls to setupVisualization will have to wait
|
||||
|
|
|
@ -4,7 +4,8 @@
|
|||
<div class="typeahead" kbn-typeahead="discover">
|
||||
<div class="input-group"
|
||||
ng-class="discoverSearch.$invalid ? 'has-error' : ''">
|
||||
<input query-input="searchSource" input-focus
|
||||
<input query-input="searchSource"
|
||||
input-focus
|
||||
kbn-typeahead-input
|
||||
ng-model="state.query"
|
||||
placeholder="Search..."
|
||||
|
|
|
@ -89,7 +89,7 @@ define(function (require) {
|
|||
}
|
||||
|
||||
self.searchSource.aggs(function () {
|
||||
return self.vis.aggs.toDSL();
|
||||
return self.vis.aggs.toDsl();
|
||||
});
|
||||
|
||||
return self;
|
||||
|
|
|
@ -10,9 +10,9 @@ Collection of `AggType` definition objects. See the [Vis component](../vis) for
|
|||
|
||||
### Included
|
||||
|
||||
- [`AggType`](_agg_type.js) class
|
||||
- `AggParam` classes
|
||||
- [`AggType`](_agg_type.js)
|
||||
- `AggParam`
|
||||
- [`BaseAggParam`](param_types/base.js)
|
||||
- [`FieldAggParam`](param_types/field.js)
|
||||
- [`OptionedAggParam`](param_types/optioned.js)
|
||||
- [`AggParams`](_agg_params.js) class
|
||||
- [`AggParams`](_agg_params.js)
|
|
@ -7,11 +7,31 @@ define(function (require) {
|
|||
var FieldAggParam = Private(require('components/agg_types/param_types/field'));
|
||||
var OptionedAggParam = Private(require('components/agg_types/param_types/optioned'));
|
||||
|
||||
/**
|
||||
* Wraps a list of {{#crossLink "AggParam"}}{{/crossLink}} objects; owned by an {{#crossLink "AggType"}}{{/crossLink}}
|
||||
*
|
||||
* used to create:
|
||||
* - `OptionedAggParam` – When the config has an array of `options: []`
|
||||
* - `FieldAggParam` – When the config has `name: "field"`
|
||||
* - `BaseAggParam` – All other params
|
||||
*
|
||||
* @class AggParams
|
||||
* @constructor
|
||||
* @extends Registry
|
||||
* @param {object[]} params - array of params that get new-ed up as AggParam objects as descibed above
|
||||
*/
|
||||
_(AggParams).inherits(Registry);
|
||||
function AggParams(params) {
|
||||
if (_.isPlainObject(params)) {
|
||||
// convert the names: details format into details[].name
|
||||
params = _.map(params, function (param, name) {
|
||||
param.name = name;
|
||||
return param;
|
||||
});
|
||||
}
|
||||
|
||||
AggParams.Super.call(this, {
|
||||
index: ['name'],
|
||||
group: ['required'],
|
||||
initialSet: params.map(function (param) {
|
||||
if (param.name === 'field') {
|
||||
return new FieldAggParam(param);
|
||||
|
@ -26,6 +46,20 @@ define(function (require) {
|
|||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads an aggConfigs
|
||||
*
|
||||
* @method write
|
||||
* @param {AggConfig} aggConfig
|
||||
* the AggConfig object who's type owns these aggParams and contains the param values for our param defs
|
||||
* @param {object} [locals]
|
||||
* an array of locals that will be available to the write function (can be used to enhance
|
||||
* the quality of things like date_histogram's "auto" interval)
|
||||
* @return {object} output
|
||||
* output of the write calls, reduced into a single object. A `params: {}` property is exposed on the
|
||||
* output object which is used to create the agg dsl for the search request. All other properties
|
||||
* are dependent on the AggParam#write methods which should be studied for each AggType.
|
||||
*/
|
||||
AggParams.prototype.write = function (aggConfig, locals) {
|
||||
var output = { params: {} };
|
||||
locals = locals || {};
|
||||
|
|
|
@ -3,23 +3,76 @@ define(function (require) {
|
|||
var _ = require('lodash');
|
||||
var AggParams = Private(require('components/agg_types/_agg_params'));
|
||||
|
||||
/**
|
||||
* Generic AggType Constructor
|
||||
*
|
||||
* Used to create the values exposed by the agg_types module.
|
||||
*
|
||||
* @class AggType
|
||||
* @private
|
||||
* @param {object} config - used to set the properties of the AggType
|
||||
*/
|
||||
function AggType(config) {
|
||||
|
||||
/**
|
||||
* the unique, unchanging, name that elasticsearch has assigned this aggType
|
||||
*
|
||||
* @property name
|
||||
* @type {string}
|
||||
*/
|
||||
this.name = config.name;
|
||||
|
||||
/**
|
||||
* the user friendly name that will be shown in the ui for this aggType
|
||||
*
|
||||
* @property title
|
||||
* @type {string}
|
||||
*/
|
||||
this.title = config.title;
|
||||
|
||||
/**
|
||||
* a function that will be called when this aggType is assigned to
|
||||
* an aggConfig, and that aggConfig is being rendered (in a form, chart, etc.).
|
||||
*
|
||||
* @method makeLabel
|
||||
* @param {AggConfig} aggConfig - an agg config of this type
|
||||
* @returns {string} - label that can be used in the ui to descripe the aggConfig
|
||||
*/
|
||||
this.makeLabel = config.makeLabel || _.constant(this.name);
|
||||
|
||||
/**
|
||||
* Describes if this aggType creates data that is ordered, and if that ordered data
|
||||
* is some sort of time series.
|
||||
*
|
||||
* If the aggType does not create ordered data, set this to something "falsey".
|
||||
*
|
||||
* If this does create orderedData, then the value should be an object.
|
||||
*
|
||||
* If the orderdata is some sort of time series, `this.ordered` should be an object
|
||||
* with the property `date: true`
|
||||
*
|
||||
* @property ordered
|
||||
* @type {object|undefined}
|
||||
*/
|
||||
this.ordered = config.ordered;
|
||||
|
||||
/**
|
||||
* Flag that prevents this aggregation from being included in the dsl. This is only
|
||||
* used by the count aggregation (currently) since it doesn't really exist and it's output
|
||||
* is available on every bucket.
|
||||
*
|
||||
* @type {Boolean}
|
||||
*/
|
||||
this.hasNoDsl = !!config.hasNoDsl;
|
||||
|
||||
/**
|
||||
* An instance of {{#crossLink "AggParams"}}{{/crossLink}}.
|
||||
*
|
||||
* @property params
|
||||
* @type {AggParams}
|
||||
*/
|
||||
var params = this.params = config.params || [];
|
||||
|
||||
if (!(params instanceof AggParams)) {
|
||||
if (_.isPlainObject(params)) {
|
||||
// convert the names: details format into details[].name
|
||||
params = _.map(params, function (param, name) {
|
||||
param.name = name;
|
||||
return param;
|
||||
});
|
||||
}
|
||||
|
||||
params = this.params = new AggParams(params);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,44 @@
|
|||
define(function (require) {
|
||||
return function BucketCountBetweenUtil() {
|
||||
|
||||
/**
|
||||
* Count the number of bucket aggs between two agg config objects owned
|
||||
* by the same vis.
|
||||
*
|
||||
* If one of the two aggs was not found in the agg list, returns null.
|
||||
* If a was found after b, the count will be negative
|
||||
* If a was found first, the count will be positive.
|
||||
*
|
||||
* @param {AggConfig} aggConfigA - the aggConfig that is expected first
|
||||
* @param {AggConfig} aggConfigB - the aggConfig that is expected second
|
||||
* @return {null|number}
|
||||
*/
|
||||
function bucketCountBetween(aggConfigA, aggConfigB) {
|
||||
var aggs = aggConfigA.vis.aggs.getSorted();
|
||||
|
||||
var aIndex = aggs.indexOf(aggConfigA);
|
||||
var bIndex = aggs.indexOf(aggConfigB);
|
||||
|
||||
if (aIndex === -1 || bIndex === -1) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// return a negative distance, if b is before a
|
||||
var negative = (aIndex > bIndex);
|
||||
|
||||
var count = aggs
|
||||
.slice(aIndex, bIndex - aIndex - 1)
|
||||
.reduce(function (count, cfg) {
|
||||
if (cfg.schema.group === 'buckets') {
|
||||
return count + 1;
|
||||
} else {
|
||||
return count;
|
||||
}
|
||||
}, 0);
|
||||
|
||||
return (negative ? -1 : 1) * count;
|
||||
}
|
||||
|
||||
return bucketCountBetween;
|
||||
};
|
||||
});
|
|
@ -2,6 +2,7 @@ define(function (require) {
|
|||
return function TermsAggDefinition(Private) {
|
||||
var _ = require('lodash');
|
||||
var AggType = Private(require('components/agg_types/_agg_type'));
|
||||
var bucketCountBetween = Private(require('components/agg_types/buckets/_bucket_count_between'));
|
||||
|
||||
return new AggType({
|
||||
name: 'terms',
|
||||
|
@ -28,10 +29,30 @@ define(function (require) {
|
|||
editor: require('text!components/agg_types/controls/order_and_size.html'),
|
||||
default: 'desc',
|
||||
write: function (aggConfig, output) {
|
||||
// TODO: We need more than just _count here.
|
||||
output.params.order = {
|
||||
_count: aggConfig.params.order.val
|
||||
};
|
||||
var sort = output.params.order = {};
|
||||
var order = aggConfig.params.order.val;
|
||||
|
||||
var metricAggConfig = _.first(aggConfig.vis.aggs.bySchemaGroup.metrics);
|
||||
|
||||
if (metricAggConfig.type.name === 'count') {
|
||||
sort._count = order;
|
||||
return;
|
||||
}
|
||||
|
||||
sort[metricAggConfig.id] = order;
|
||||
|
||||
/**
|
||||
* In order to sort by a metric agg, the metric need to be an immediate
|
||||
* decendant, this checks if that is the case.
|
||||
*
|
||||
* @type {boolean}
|
||||
*/
|
||||
var metricIsOwned = bucketCountBetween(aggConfig, metricAggConfig) === 0;
|
||||
|
||||
if (!metricIsOwned) {
|
||||
output.subAggs = output.subAggs || [];
|
||||
output.subAggs.push(metricAggConfig);
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
|
|
|
@ -20,8 +20,26 @@ define(function (require) {
|
|||
});
|
||||
});
|
||||
|
||||
|
||||
/**
|
||||
* Registry of Aggregation Types.
|
||||
*
|
||||
* These types form two groups, metric and buckets.
|
||||
*
|
||||
* @module agg_types
|
||||
* @type {Registry}
|
||||
*/
|
||||
return new Registry({
|
||||
|
||||
/**
|
||||
* @type {Array}
|
||||
*/
|
||||
index: ['name'],
|
||||
|
||||
/**
|
||||
* [group description]
|
||||
* @type {Array}
|
||||
*/
|
||||
group: ['type'],
|
||||
initialSet: aggs.metrics.concat(aggs.buckets)
|
||||
});
|
||||
|
|
|
@ -6,6 +6,7 @@ define(function (require) {
|
|||
{
|
||||
name: 'count',
|
||||
title: 'Count',
|
||||
hasNoDsl: true,
|
||||
makeLabel: function (aggConfig) {
|
||||
return 'Count of documents';
|
||||
}
|
||||
|
|
|
@ -60,6 +60,38 @@ define(function (require) {
|
|||
});
|
||||
};
|
||||
|
||||
AggConfig.prototype.write = function () {
|
||||
return this.type.params.write(this);
|
||||
};
|
||||
|
||||
/**
|
||||
* Convert this aggConfig to it's dsl syntax.
|
||||
*
|
||||
* Adds params and adhoc subaggs to a pojo, then returns it
|
||||
*
|
||||
* @param {AggConfig} aggConfig - the config object to convert
|
||||
* @return {void|Object} - if the config has a dsl representation, it is
|
||||
* returned, else undefined is returned
|
||||
*/
|
||||
AggConfig.prototype.toDsl = function () {
|
||||
if (this.type.hasNoDsl) return;
|
||||
|
||||
var output = this.write();
|
||||
|
||||
var configDsl = {};
|
||||
configDsl[this.type.name] = output.params;
|
||||
|
||||
// if the config requires subAggs, write them to the dsl as well
|
||||
if (output.subAggs) {
|
||||
var subDslLvl = configDsl.aggs || (configDsl.aggs = {});
|
||||
output.subAggs.forEach(function nestAdhocSubAggs(subAggConfig) {
|
||||
subDslLvl[subAggConfig.id] = subAggConfig.toDsl();
|
||||
});
|
||||
}
|
||||
|
||||
return configDsl;
|
||||
};
|
||||
|
||||
AggConfig.prototype.toJSON = function () {
|
||||
var self = this;
|
||||
var params = self.params;
|
||||
|
|
|
@ -7,32 +7,41 @@ define(function (require) {
|
|||
_(AggConfigs).inherits(Registry);
|
||||
function AggConfigs(vis, configStates) {
|
||||
this.vis = vis;
|
||||
|
||||
AggConfigs.Super.call(this, {
|
||||
index: ['id'],
|
||||
group: ['schema.group'],
|
||||
group: ['schema.group', 'type.name'],
|
||||
initialSet: (configStates || []).map(function (aggConfigState) {
|
||||
if (aggConfigState instanceof AggConfig) return aggConfigState;
|
||||
return new AggConfig(vis, aggConfigState);
|
||||
})
|
||||
});
|
||||
}
|
||||
|
||||
AggConfigs.prototype.toDSL = function () {
|
||||
var dsl = {};
|
||||
var current = dsl;
|
||||
AggConfigs.prototype.toDsl = function () {
|
||||
var dslTopLvl = {};
|
||||
var dslLvlCursor;
|
||||
|
||||
this.getSorted().forEach(function (agg) {
|
||||
if (agg.type.name === 'count') return;
|
||||
this.getSorted()
|
||||
.filter(function (config) {
|
||||
return !config.type.hasNoDsl;
|
||||
})
|
||||
.forEach(function nestEachConfig(config, i, list) {
|
||||
var prevConfig = list[i - 1];
|
||||
var prevDsl = prevConfig && dslLvlCursor && dslLvlCursor[prevConfig.id];
|
||||
|
||||
current.aggs = {};
|
||||
// advance the cursor
|
||||
if (prevDsl && prevConfig.schema.group !== 'metrics') {
|
||||
dslLvlCursor = prevDsl.aggs || (prevDsl.aggs = {});
|
||||
}
|
||||
|
||||
var aggDsl = {};
|
||||
var output = agg.type.params.write(agg);
|
||||
aggDsl[agg.type.name] = output.params;
|
||||
current = current.aggs[agg.id] = aggDsl;
|
||||
// start at the top level
|
||||
if (!dslLvlCursor) dslLvlCursor = dslTopLvl;
|
||||
|
||||
dslLvlCursor[config.id] = config.toDsl();
|
||||
});
|
||||
|
||||
// set the dsl to the searchSource
|
||||
return dsl.aggs || {};
|
||||
return dslTopLvl;
|
||||
};
|
||||
|
||||
AggConfigs.prototype.getSorted = function () {
|
||||
|
|
|
@ -256,12 +256,12 @@ path, line, .axis line, .axis path {
|
|||
margin: 10px 0 0 6px;
|
||||
}
|
||||
|
||||
.error-wrapper {
|
||||
.error {
|
||||
flex: 1 1 100%;
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.error-wrapper p {
|
||||
.error p {
|
||||
margin-top: 15%;
|
||||
font-size: 18px;
|
||||
text-wrap: wrap;
|
||||
|
|
|
@ -4,6 +4,7 @@ define(function (require) {
|
|||
var _ = require('lodash');
|
||||
|
||||
var ErrorHandler = Private(require('components/vislib/lib/_error_handler'));
|
||||
var Tooltip = Private(require('components/vislib/lib/tooltip'));
|
||||
|
||||
/*
|
||||
* Append chart titles to the visualization
|
||||
|
@ -16,44 +17,57 @@ define(function (require) {
|
|||
}
|
||||
|
||||
this.el = el;
|
||||
this.tooltip = new Tooltip(el, function (d) {
|
||||
return d.label;
|
||||
});
|
||||
}
|
||||
|
||||
_(ChartTitle.prototype).extend(ErrorHandler.prototype);
|
||||
|
||||
// Render chart titles
|
||||
ChartTitle.prototype.render = function () {
|
||||
d3.select(this.el).selectAll('.chart-title').call(this.draw());
|
||||
d3.select(this.el).selectAll('.chart-title').call(this.truncate());
|
||||
return d3.select(this.el).selectAll('.chart-title').call(this.draw());
|
||||
};
|
||||
|
||||
// Return a function that truncates chart title text
|
||||
// Need to refactor this function, so that it is called inside the draw method
|
||||
ChartTitle.prototype.truncate = function () {
|
||||
ChartTitle.prototype.truncate = function (size) {
|
||||
var self = this;
|
||||
|
||||
return function (selection) {
|
||||
selection.each(function () {
|
||||
var div = d3.select(this);
|
||||
var dataType = this.parentNode.__data__.rows ? 'rows' : 'columns';
|
||||
var text = d3.select(this);
|
||||
var n = text[0].length;
|
||||
var maxWidth = size / n * 0.9;
|
||||
var length = this.getComputedTextLength();
|
||||
var str;
|
||||
var avg;
|
||||
var end;
|
||||
|
||||
var text = div.select('text');
|
||||
var textLength = text.node().getComputedTextLength();
|
||||
var maxWidth = dataType === 'rows' ? $(this).height() : $(this).width();
|
||||
var subtractionPercentage = maxWidth * 0.05;
|
||||
var str = text.text();
|
||||
|
||||
// if length of text is longer than the chart div, truncate
|
||||
maxWidth = maxWidth - subtractionPercentage;
|
||||
if (textLength > maxWidth) {
|
||||
var avg = textLength / str.length;
|
||||
var end = Math.floor(maxWidth / avg);
|
||||
if (length > maxWidth) {
|
||||
str = text.text();
|
||||
avg = length / str.length;
|
||||
end = Math.floor(maxWidth / avg) - 5;
|
||||
|
||||
str = str.substr(0, end) + '...';
|
||||
|
||||
// mouseover and mouseout
|
||||
self.addMouseEvents(text);
|
||||
|
||||
return text.text(str);
|
||||
}
|
||||
|
||||
text.text(str);
|
||||
return text.text();
|
||||
});
|
||||
};
|
||||
};
|
||||
|
||||
// Add mouseover and mouseout events on truncated chart titles
|
||||
ChartTitle.prototype.addMouseEvents = function (target) {
|
||||
if (this.tooltip) {
|
||||
return target.call(this.tooltip.render());
|
||||
}
|
||||
};
|
||||
|
||||
// Return a callback function that appends chart titles to the visualization
|
||||
ChartTitle.prototype.draw = function () {
|
||||
var self = this;
|
||||
|
@ -64,6 +78,7 @@ define(function (require) {
|
|||
var dataType = this.parentNode.__data__.rows ? 'rows' : 'columns';
|
||||
var width = $(this).width();
|
||||
var height = $(this).height();
|
||||
var size = dataType === 'rows' ? height : width;
|
||||
|
||||
// Check if width or height are 0 or NaN
|
||||
self.validateWidthandHeight(width, height);
|
||||
|
@ -80,7 +95,7 @@ define(function (require) {
|
|||
.attr('transform', function () {
|
||||
if (dataType === 'rows') {
|
||||
// if `rows`, rotate the chart titles
|
||||
return 'translate(11,' + height / 2 + ')rotate(270)';
|
||||
return 'translate(11,' + height / 2.2 + ')rotate(270)';
|
||||
}
|
||||
return 'translate(' + width / 2 + ',11)';
|
||||
})
|
||||
|
@ -88,6 +103,9 @@ define(function (require) {
|
|||
.text(function (d) {
|
||||
return d.label;
|
||||
});
|
||||
|
||||
// truncate long chart titles
|
||||
div.selectAll('text').call(self.truncate(size));
|
||||
});
|
||||
};
|
||||
};
|
||||
|
|
|
@ -33,13 +33,14 @@ define(function (require) {
|
|||
'margin' : { top: 10, right: 3, bottom: 5, left: 3 }
|
||||
});
|
||||
|
||||
// Visualizaation constructors
|
||||
// Visualization constructors
|
||||
|
||||
// Add the visualization layout
|
||||
this.layout = new Layout(this.el, this.data.injectZeros(), this._attr.type);
|
||||
|
||||
// Only add legend if addLegend attribute set
|
||||
if (this._attr.addLegend) {
|
||||
this.legend = new Legend(this.el, this.data.getLabels(), this.data.getColorFunc(), this._attr);
|
||||
this.legend = new Legend(this.vis, this.el, this.data.getLabels(), this.data.getColorFunc(), this._attr);
|
||||
}
|
||||
|
||||
// only add tooltip if addTooltip attribute set
|
||||
|
@ -90,7 +91,7 @@ define(function (require) {
|
|||
|
||||
// Render objects in the render array
|
||||
_.forEach(this.renderArray, function (property) {
|
||||
if (typeof property.render === 'function') {
|
||||
if (property && typeof property.render === 'function') {
|
||||
property.render();
|
||||
}
|
||||
});
|
||||
|
@ -105,7 +106,7 @@ define(function (require) {
|
|||
// Bind events to the chart
|
||||
d3.rebind(chart, chart._attr.dispatch, 'on');
|
||||
|
||||
// Bubbles the events up to the Vis Class and Events Class
|
||||
// Bubble events up to the Vis Class and Events Class
|
||||
chart.on('click', function (e) {
|
||||
self.vis.emit('click', e);
|
||||
});
|
||||
|
@ -118,7 +119,9 @@ define(function (require) {
|
|||
self.vis.emit('brush', e);
|
||||
});
|
||||
|
||||
// Save reference to charts
|
||||
charts.push(chart);
|
||||
|
||||
// Render charts to screen
|
||||
chart.render();
|
||||
});
|
||||
|
@ -129,14 +132,16 @@ define(function (require) {
|
|||
return d3.select(el).selectAll('*').remove();
|
||||
};
|
||||
|
||||
// Displays an error message to the screen
|
||||
// Display an error message on the screen
|
||||
Handler.prototype.error = function (message) {
|
||||
this.removeAll(this.el);
|
||||
|
||||
// Return an error wrapper DOM element
|
||||
return d3.select(this.el)
|
||||
.append('div')
|
||||
.attr('class', 'error-wrapper')
|
||||
// class name needs `chart` in it for the polling checkSize function
|
||||
// to continuously call render on resize
|
||||
.attr('class', 'chart error')
|
||||
.append('p')
|
||||
.text(message);
|
||||
};
|
||||
|
|
|
@ -13,7 +13,12 @@ define(function (require) {
|
|||
* color => color function to assign colors to labels
|
||||
* _attr => visualization attributes
|
||||
*/
|
||||
function Legend(el, labels, color, _attr) {
|
||||
function Legend(vis, el, labels, color, _attr) {
|
||||
if (!(this instanceof Legend)) {
|
||||
return new Legend(vis, el, labels, color, _attr);
|
||||
}
|
||||
|
||||
this.vis = vis;
|
||||
this.el = el;
|
||||
this.labels = labels;
|
||||
this.color = color;
|
||||
|
@ -87,19 +92,22 @@ define(function (require) {
|
|||
var self = this;
|
||||
|
||||
// toggle
|
||||
headerIcon.on('click', function (d) {
|
||||
headerIcon.on('click', function () {
|
||||
if (self._attr.isOpen) {
|
||||
// close legend
|
||||
visEl.select('ul.legend-ul')
|
||||
.classed('hidden', true);
|
||||
self._attr.isOpen = false;
|
||||
|
||||
// need to add reference to resize function on toggle
|
||||
self.vis.resize();
|
||||
} else {
|
||||
// open legend
|
||||
visEl.select('ul.legend-ul')
|
||||
.classed('hidden', false);
|
||||
self._attr.isOpen = true;
|
||||
|
||||
|
||||
// need to add reference to resize function on toggle
|
||||
self.vis.resize();
|
||||
}
|
||||
});
|
||||
|
||||
|
|
|
@ -23,7 +23,10 @@ define(function (require) {
|
|||
this.xValues = args.xValues;
|
||||
this.ordered = args.ordered;
|
||||
this.xAxisFormatter = args.xAxisFormatter;
|
||||
this._attr = args._attr;
|
||||
this._attr = _.defaults(args._attr || {}, {
|
||||
isDiscover: false,
|
||||
isRotated: true
|
||||
});
|
||||
}
|
||||
|
||||
_(XAxis.prototype).extend(ErrorHandler.prototype);
|
||||
|
@ -58,14 +61,13 @@ define(function (require) {
|
|||
|
||||
// Returns a time domain
|
||||
XAxis.prototype.getTimeDomain = function (scale, xValues, ordered) {
|
||||
// Should think about replacing and not hard coding
|
||||
var spacingPercentage = 0.25;
|
||||
var maxXValue = d3.max(xValues);
|
||||
var timeInterval = ordered.interval;
|
||||
// Take the min of the xValues or the ordered object
|
||||
// Take the min of the xValues or the min date sent on the ordered object
|
||||
var minDate = Math.min(d3.min(xValues), ordered.min);
|
||||
// Take the max of the xValues or the max date that is sent
|
||||
var maxDate = +maxXValue <= ordered.max ? ordered.max : +maxXValue + timeInterval;
|
||||
// Take the max of the xValues or the max date that sent on the ordered object
|
||||
var maxDate = +maxXValue <= ordered.max ?
|
||||
this.calculateMaxDate(ordered.max, +maxXValue, timeInterval) : +maxXValue + timeInterval;
|
||||
|
||||
// Add the domain to the scale
|
||||
scale.domain([minDate, maxDate]);
|
||||
|
@ -73,6 +75,40 @@ define(function (require) {
|
|||
return scale;
|
||||
};
|
||||
|
||||
// Returns an accurate maxDate
|
||||
XAxis.prototype.calculateMaxDate = function (orderedDate, maxXValue, interval) {
|
||||
/*
|
||||
* Elasticsearch returns bucketed data.
|
||||
*
|
||||
* Buckets have a beginning (the start time), an end (the end time),
|
||||
* and an interval, the width of the bar minus padding.
|
||||
*
|
||||
* We need to create an x axis that ends at the end (or end time) of the
|
||||
* last bucket.
|
||||
*
|
||||
* The time stamp values from the maxXValue represent the beginning
|
||||
* of each bucket. We cannot guarantee that the values passed from
|
||||
* the ordered.max field represents the end of a bucket.
|
||||
*
|
||||
* So, if we were to render either as the cutoff date, then the last bar
|
||||
* on the far right side of the axis may be partially cut off.
|
||||
* Therefore, we need to calculate the end time of the last bucket.
|
||||
*/
|
||||
|
||||
// Difference between the ordered.max value and the max x value
|
||||
var diff = orderedDate - maxXValue;
|
||||
|
||||
// if diff is smaller than the interval, but not zero, add the missing
|
||||
// percentage of the interval back to the ordered.max date
|
||||
if (diff !== 0 && diff < interval) {
|
||||
// calculates the appropriate end time
|
||||
return +orderedDate + ((1 - diff / interval) * interval);
|
||||
}
|
||||
|
||||
// if diff is > than the interval or equals 0 return the ordered.max value
|
||||
return orderedDate;
|
||||
};
|
||||
|
||||
// Return a nominal(d3 ordinal) domain
|
||||
XAxis.prototype.getOrdinalDomain = function (scale, xValues) {
|
||||
return scale.domain(xValues);
|
||||
|
@ -130,8 +166,6 @@ define(function (require) {
|
|||
if ($('.discover-timechart').length) {
|
||||
self._attr.isDiscover = true;
|
||||
self._attr.isRotated = false;
|
||||
} else {
|
||||
self._attr.isDiscover = false;
|
||||
}
|
||||
|
||||
return function (selection) {
|
||||
|
|
|
@ -44,8 +44,7 @@ define(function (require) {
|
|||
'is too small for this chart.') {
|
||||
this.handler.error(error.message);
|
||||
} else {
|
||||
console.log(error);
|
||||
//throw(error);
|
||||
throw (error);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -135,15 +135,15 @@ define(function (require) {
|
|||
barWidth = xScale(data.ordered.min + data.ordered.interval) - xScale(data.ordered.min);
|
||||
barSpacing = barWidth * 0.25;
|
||||
|
||||
if (barWidth <= 1) {
|
||||
throw new Error('This container is too small for this chart.');
|
||||
}
|
||||
// if (barWidth <= 1) {
|
||||
// throw new Error('The height and/or width of this container is too small for this chart.');
|
||||
// }
|
||||
return barWidth - barSpacing;
|
||||
}
|
||||
|
||||
if (xScale.rangeBand() <= 1) {
|
||||
throw new Error('This container is too small for this chart.');
|
||||
}
|
||||
// if (xScale.rangeBand() <= 1) {
|
||||
// throw new Error('The height and/or width of this container is too small for this chart.');
|
||||
// }
|
||||
return xScale.rangeBand();
|
||||
})
|
||||
.attr('y', function (d) {
|
||||
|
|
|
@ -26,7 +26,7 @@ define(function (require) {
|
|||
},
|
||||
_.merge(
|
||||
aggConfig.schema.params.write(aggConfig),
|
||||
aggConfig.type.params.write(aggConfig)
|
||||
aggConfig.write()
|
||||
)
|
||||
);
|
||||
return chartDataConfig;
|
||||
|
|
|
@ -11,26 +11,27 @@ require.config({
|
|||
lodash: 'utils/_mixins',
|
||||
|
||||
// bower_components
|
||||
angular: '../bower_components/angular/angular',
|
||||
'angular-route': '../bower_components/angular-route/angular-route',
|
||||
'angular-bootstrap': '../bower_components/angular-bootstrap/ui-bootstrap-tpls',
|
||||
'angular-bindonce': '../bower_components/angular-bindonce/bindonce',
|
||||
'angular-ui-ace': '../bower_components/angular-ui-ace/ui-ace',
|
||||
'angular-bootstrap': '../bower_components/angular-bootstrap/ui-bootstrap-tpls',
|
||||
'angular-elastic': '../bower_components/angular-elastic/elastic',
|
||||
'angular-route': '../bower_components/angular-route/angular-route',
|
||||
'angular-ui-ace': '../bower_components/angular-ui-ace/ui-ace',
|
||||
ace: '../bower_components/ace-builds/src-noconflict/ace',
|
||||
angular: '../bower_components/angular/angular',
|
||||
async: '../bower_components/async/lib/async',
|
||||
bower_components: '../bower_components',
|
||||
css: '../bower_components/require-css/css',
|
||||
d3: '../bower_components/d3/d3',
|
||||
text: '../bower_components/requirejs-text/text',
|
||||
elasticsearch: '../bower_components/elasticsearch/elasticsearch.angular',
|
||||
faker: '../bower_components/Faker/faker',
|
||||
file_saver: '../bower_components/FileSaver/FileSaver',
|
||||
gridster: '../bower_components/gridster/dist/jquery.gridster',
|
||||
inflection: '../bower_components/inflection/lib/inflection',
|
||||
jquery: '../bower_components/jquery/dist/jquery',
|
||||
jsonpath: '../bower_components/jsonpath/lib/jsonpath',
|
||||
lodash_src: '../bower_components/lodash/dist/lodash',
|
||||
moment: '../bower_components/moment/moment',
|
||||
gridster: '../bower_components/gridster/dist/jquery.gridster',
|
||||
jsonpath: '../bower_components/jsonpath/lib/jsonpath',
|
||||
inflection: '../bower_components/inflection/lib/inflection',
|
||||
file_saver: '../bower_components/FileSaver/FileSaver',
|
||||
bower_components: '../bower_components'
|
||||
text: '../bower_components/requirejs-text/text'
|
||||
},
|
||||
shim: {
|
||||
angular: {
|
||||
|
|
|
@ -25,15 +25,19 @@ define(function (require) {
|
|||
// one cache per instance of the Private service
|
||||
var cache = {};
|
||||
|
||||
function Private(fn) {
|
||||
function identify(fn) {
|
||||
if (typeof fn !== 'function') {
|
||||
throw new TypeError('Expected private module "' + fn + '" to be a function');
|
||||
}
|
||||
|
||||
var id = fn.$$id;
|
||||
if (id && cache[id]) return cache[id];
|
||||
if (fn.$$id) return fn.$$id;
|
||||
else return (fn.$$id = nextId());
|
||||
}
|
||||
|
||||
if (!id) id = fn.$$id = nextId();
|
||||
function Private(fn) {
|
||||
var id = identify(fn);
|
||||
|
||||
if (cache[id]) return cache[id];
|
||||
else if (~privPath.indexOf(id)) {
|
||||
throw new Error(
|
||||
'Circluar refrence to "' + name(fn) + '"' +
|
||||
|
@ -54,6 +58,11 @@ define(function (require) {
|
|||
return instance;
|
||||
}
|
||||
|
||||
Private.stub = function (fn, val) {
|
||||
cache[identify(fn)] = val;
|
||||
return val;
|
||||
};
|
||||
|
||||
return Private;
|
||||
});
|
||||
});
|
||||
|
|
|
@ -14,6 +14,9 @@ define(function (require) {
|
|||
* Generic extension of Array class, which will index (and reindex) the
|
||||
* objects it contains based on their properties.
|
||||
*
|
||||
* @class Registry
|
||||
* @module utils
|
||||
* @constructor
|
||||
* @param {object} [config] - describes the properties of this registry object
|
||||
* @param {string[]} [config.index] - a list of props/paths that should be used to index the docs.
|
||||
* @param {string[]} [config.group] - a list of keys/paths to group docs by.
|
||||
|
|
17
test/unit/fixtures/stubbed_logstash_index_pattern.js
Normal file
17
test/unit/fixtures/stubbed_logstash_index_pattern.js
Normal file
|
@ -0,0 +1,17 @@
|
|||
define(function (require) {
|
||||
return function stubbedLogstashIndexPatternService(Private) {
|
||||
var StubIndexPattern = Private(require('test_utils/stub_index_pattern'));
|
||||
return new StubIndexPattern('logstash-*', 'time', [
|
||||
{ type: 'number', name: 'bytes' },
|
||||
{ type: 'boolean', name: 'ssl' },
|
||||
{ type: 'date', name: '@timestamp' },
|
||||
{ type: 'ip', name: 'ip' },
|
||||
{ type: 'attachment', name: 'request_body' },
|
||||
{ type: 'string', name: 'extension' },
|
||||
{ type: 'geo_point', name: 'point' },
|
||||
{ type: 'geo_shape', name: 'area' },
|
||||
{ type: 'string', name: 'extension' },
|
||||
{ type: 'conflict', name: 'custom_user_field' }
|
||||
]);
|
||||
};
|
||||
});
|
|
@ -61,6 +61,7 @@
|
|||
'kibana',
|
||||
'sinon/sinon',
|
||||
'specs/apps/discover/hit_sort_fn',
|
||||
'specs/apps/discover/segmented_fetch',
|
||||
'specs/directives/confirm-click',
|
||||
'specs/directives/timepicker',
|
||||
'specs/directives/truncate',
|
||||
|
@ -106,7 +107,9 @@
|
|||
'specs/factories/events',
|
||||
'specs/index_patterns/_flatten_search_response',
|
||||
'specs/utils/registry/index',
|
||||
'specs/directives/filter_bar'
|
||||
'specs/directives/filter_bar',
|
||||
'specs/components/agg_types/index',
|
||||
'specs/components/vis/index'
|
||||
], function (kibana, sinon) {
|
||||
kibana.load(function () {
|
||||
var xhr = sinon.useFakeXMLHttpRequest();
|
||||
|
|
479
test/unit/specs/apps/discover/segmented_fetch.js
Normal file
479
test/unit/specs/apps/discover/segmented_fetch.js
Normal file
|
@ -0,0 +1,479 @@
|
|||
define(function (require) {
|
||||
var sinon = require('test_utils/auto_release_sinon');
|
||||
var faker = require('faker');
|
||||
var Promise = require('bluebird');
|
||||
var _ = require('lodash');
|
||||
|
||||
var SegmentedFetch;
|
||||
var segmentedFetch;
|
||||
var searchStrategy;
|
||||
var searchSource;
|
||||
var mockSearchSource;
|
||||
var searchSourceStubs;
|
||||
var es;
|
||||
var notify;
|
||||
|
||||
function init() {
|
||||
module('kibana', function ($provide) {
|
||||
// mock notifier
|
||||
$provide.factory('Notifier', function () {
|
||||
function NotifierMock(opts) {
|
||||
this.opts = opts;
|
||||
}
|
||||
|
||||
|
||||
var stopEventSpy = sinon.spy();
|
||||
NotifierMock.prototype.event = sinon.stub().returns(stopEventSpy);
|
||||
|
||||
return NotifierMock;
|
||||
});
|
||||
|
||||
// mock es client
|
||||
$provide.factory('es', function () {
|
||||
return {};
|
||||
});
|
||||
});
|
||||
|
||||
inject(function ($injector, Private) {
|
||||
es = $injector.get('es');
|
||||
var Notifier = $injector.get('Notifier');
|
||||
notify = new Notifier();
|
||||
|
||||
SegmentedFetch = Private(require('apps/discover/_segmented_fetch'));
|
||||
|
||||
// mock the searchSource
|
||||
searchSourceStubs = {
|
||||
get: sinon.stub(),
|
||||
toIndexList: sinon.stub().returns([]),
|
||||
createRequest: sinon.spy(function () {
|
||||
return {
|
||||
defer: Promise.defer(),
|
||||
source: {
|
||||
activeFetchCount: 0
|
||||
}
|
||||
};
|
||||
})
|
||||
};
|
||||
|
||||
mockSearchSource = {
|
||||
get: searchSourceStubs.get.returns({
|
||||
toIndexList: searchSourceStubs.toIndexList.returns([])
|
||||
}),
|
||||
_createRequest: searchSourceStubs.createRequest
|
||||
};
|
||||
|
||||
// create segmentedFetch instance with mocked searchSource
|
||||
segmentedFetch = new SegmentedFetch(mockSearchSource);
|
||||
|
||||
// stub the searchStrategy
|
||||
searchStrategy = Private(require('components/courier/fetch/strategy/search'));
|
||||
sinon.stub(searchStrategy, 'getSourceStateFromRequest');
|
||||
});
|
||||
}
|
||||
|
||||
describe('segmented fetch', function () {
|
||||
require('test_utils/no_digest_promises').activateForSuite();
|
||||
|
||||
beforeEach(init);
|
||||
|
||||
describe('_executeSearch', function () {
|
||||
it('should attach abort method to searchPromise', function () {
|
||||
es.search = function () { return Promise.resolve(); };
|
||||
segmentedFetch._executeSearch('test-index', {body: '', type: ''});
|
||||
|
||||
expect(segmentedFetch.searchPromise).to.have.property('abort');
|
||||
});
|
||||
|
||||
it('should abort client promise', function () {
|
||||
var clientAbortSpy = sinon.spy();
|
||||
es.search = function () {
|
||||
function MockClass() {
|
||||
}
|
||||
|
||||
// mock the search client promise
|
||||
MockClass.prototype.then = function () {
|
||||
return this;
|
||||
};
|
||||
MockClass.prototype.catch = function () {
|
||||
return this;
|
||||
};
|
||||
MockClass.prototype.abort = clientAbortSpy;
|
||||
|
||||
return new MockClass();
|
||||
};
|
||||
|
||||
segmentedFetch._executeSearch(1, {body: '', type: ''});
|
||||
segmentedFetch.abort();
|
||||
|
||||
|
||||
return segmentedFetch.searchPromise.then(function (resolve) {
|
||||
expect(clientAbortSpy.callCount).to.be(1);
|
||||
expect(resolve).to.be(false);
|
||||
});
|
||||
});
|
||||
|
||||
it('should resolve on ClusterBlockException', function () {
|
||||
es.search = Promise.method(function () {
|
||||
throw {
|
||||
status: 403,
|
||||
message: 'ClusterBlockException mock error test, index closed'
|
||||
};
|
||||
});
|
||||
|
||||
segmentedFetch._executeSearch('test-index', {body: '', type: ''});
|
||||
|
||||
return segmentedFetch.searchPromise.then(function (resolve) {
|
||||
expect(resolve).to.be(false);
|
||||
});
|
||||
});
|
||||
|
||||
it('should reject on es client errors', function () {
|
||||
es.search = Promise.method(function () {
|
||||
throw new Error('es client error of some kind');
|
||||
});
|
||||
|
||||
segmentedFetch._executeSearch('test-index', {body: '', type: ''});
|
||||
|
||||
return segmentedFetch.searchPromise.catch(function (err) {
|
||||
expect(err.message).to.be('es client error of some kind');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('_processQueue', function () {
|
||||
var queueSpy;
|
||||
var completeSpy;
|
||||
var queue = [];
|
||||
|
||||
// mock es client response trackers
|
||||
var totalTime;
|
||||
var totalHits;
|
||||
var maxHits;
|
||||
var maxScore;
|
||||
var aggregationKeys;
|
||||
|
||||
var getESResponse = function (index, state) {
|
||||
var took = _.random(20, 60);
|
||||
var score = _.random(20, 90) / 100;
|
||||
var hits = faker.Lorem.sentence().split(' ');
|
||||
var aggKey = 'key' + _.random(1, 100);
|
||||
totalTime += took;
|
||||
totalHits += hits.length;
|
||||
maxHits = Math.max(maxHits, hits.length);
|
||||
maxScore = Math.max(maxScore, score);
|
||||
aggregationKeys = _.union(aggregationKeys, [aggKey]);
|
||||
|
||||
return Promise.resolve({
|
||||
took: took,
|
||||
hits: {
|
||||
hits: hits,
|
||||
total: maxHits,
|
||||
max_score: score
|
||||
},
|
||||
aggregations: {
|
||||
'agg_test': {
|
||||
buckets: [{
|
||||
doc_count: hits.length,
|
||||
key: aggKey
|
||||
}]
|
||||
}
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
beforeEach(function () {
|
||||
totalTime = 0;
|
||||
totalHits = 0;
|
||||
maxHits = 0;
|
||||
maxScore = 0;
|
||||
aggregationKeys = [];
|
||||
|
||||
queueSpy = sinon.spy(SegmentedFetch.prototype, '_processQueue');
|
||||
completeSpy = sinon.spy(SegmentedFetch.prototype, '_processQueueComplete');
|
||||
|
||||
for (var i = 0; i < _.random(3, 6); i++) {
|
||||
queue.push('test-' + i);
|
||||
}
|
||||
|
||||
sinon.stub(SegmentedFetch.prototype, '_extractQueue', function () {
|
||||
this.queue = queue.slice(0);
|
||||
});
|
||||
|
||||
searchStrategy.getSourceStateFromRequest.returns(Promise.resolve({
|
||||
body: {
|
||||
size: 10
|
||||
}
|
||||
}));
|
||||
});
|
||||
|
||||
it('should merge stats and complete', function () {
|
||||
|
||||
sinon.stub(SegmentedFetch.prototype, '_executeSearch', getESResponse);
|
||||
|
||||
function eachHandler(resp, req) {
|
||||
// check results from mergeRequestStats
|
||||
expect(segmentedFetch.requestStats).to.have.property('aggregations');
|
||||
expect(segmentedFetch.requestStats.aggregations['agg_test'].buckets.length).to.be(aggregationKeys.length);
|
||||
expect(segmentedFetch.requestStats.took).to.be(totalTime);
|
||||
expect(segmentedFetch.requestStats.hits.hits.length).to.be(totalHits);
|
||||
expect(segmentedFetch.requestStats.hits.total).to.be(maxHits);
|
||||
expect(segmentedFetch.requestStats.hits.max_score).to.be(maxScore);
|
||||
|
||||
// check aggregation stats
|
||||
aggregationKeys.forEach(function (key) {
|
||||
expect(segmentedFetch.requestStats._bucketIndex).to.have.property(key);
|
||||
});
|
||||
}
|
||||
|
||||
return segmentedFetch.fetch({ each: eachHandler }).then(function () {
|
||||
expect(completeSpy.callCount).to.be(1);
|
||||
expect(queueSpy.callCount).to.be(queue.length);
|
||||
});
|
||||
});
|
||||
|
||||
it('should complete on falsey response', function () {
|
||||
sinon.stub(SegmentedFetch.prototype, '_executeSearch', function (index, state) {
|
||||
return Promise.resolve(false);
|
||||
});
|
||||
|
||||
return segmentedFetch.fetch().then(function () {
|
||||
expect(completeSpy.callCount).to.be(1);
|
||||
expect(queueSpy.callCount).to.be(queue.length);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('fetch', function () {
|
||||
it('should return a promise', function () {
|
||||
sinon.stub(SegmentedFetch.prototype, '_executeRequest', Promise.resolve);
|
||||
|
||||
var fetch = segmentedFetch.fetch();
|
||||
expect('then' in fetch).to.be(true);
|
||||
return fetch;
|
||||
});
|
||||
|
||||
it('should stop the request', function () {
|
||||
var stopSpy = sinon.spy(SegmentedFetch.prototype, '_stopRequest');
|
||||
sinon.stub(SegmentedFetch.prototype, '_executeRequest', Promise.resolve);
|
||||
|
||||
return segmentedFetch.fetch().then(function () {
|
||||
// always called on fetch, called again at resolution
|
||||
expect(stopSpy.callCount).to.be(2);
|
||||
});
|
||||
});
|
||||
|
||||
it('should stop multiple requests', function () {
|
||||
var stopSpy = sinon.spy(SegmentedFetch.prototype, '_stopRequest');
|
||||
sinon.stub(SegmentedFetch.prototype, '_executeRequest').returns(Promise.delay(5));
|
||||
|
||||
segmentedFetch.fetch();
|
||||
|
||||
return Promise.delay(1).then(function () {
|
||||
return segmentedFetch.fetch().then(function () {
|
||||
// 1 for fetch
|
||||
// 1 for second fetch
|
||||
// 1 for stopping the first request early
|
||||
// 1 for resolving the second request
|
||||
expect(stopSpy.callCount).to.be(4);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('should wait before starting new requests', function () {
|
||||
var startSpy = sinon.spy(SegmentedFetch.prototype, '_startRequest');
|
||||
var stopSpy = sinon.spy(SegmentedFetch.prototype, '_stopRequest');
|
||||
var fetchCount = _.random(3, 6);
|
||||
var resolveCount = 0;
|
||||
var resolvedPromises = [];
|
||||
|
||||
sinon.stub(SegmentedFetch.prototype, '_executeRequest', function () {
|
||||
// keep resolving faster as we move along
|
||||
return Promise.delay(fetchCount - resolveCount);
|
||||
});
|
||||
|
||||
_.times(fetchCount, function (idx) {
|
||||
resolvedPromises.push(segmentedFetch.fetch().then(function () {
|
||||
var resolveOrder = idx + 1;
|
||||
++resolveCount;
|
||||
|
||||
expect(resolveCount).to.be(resolveOrder);
|
||||
expect(startSpy.callCount).to.be(resolveOrder);
|
||||
// called once for every fetch, and again for each resolution
|
||||
expect(stopSpy.callCount).to.be(fetchCount + resolveOrder);
|
||||
}));
|
||||
});
|
||||
|
||||
return Promise.all(resolvedPromises);
|
||||
});
|
||||
|
||||
it('should perform actions on searchSource', function () {
|
||||
sinon.stub(SegmentedFetch.prototype, '_executeRequest', Promise.resolve);
|
||||
|
||||
return segmentedFetch.fetch().then(function () {
|
||||
// read the searchSource queue
|
||||
expect(searchSourceStubs.get.callCount).to.be(1);
|
||||
expect(searchSourceStubs.toIndexList.callCount).to.be(1);
|
||||
// create the searchSource request
|
||||
expect(searchSourceStubs.createRequest.callCount).to.be(1);
|
||||
});
|
||||
});
|
||||
|
||||
it('should create a notification event', function () {
|
||||
sinon.stub(SegmentedFetch.prototype, '_executeRequest', Promise.resolve);
|
||||
|
||||
return segmentedFetch.fetch().then(function () {
|
||||
expect(notify.event.callCount).to.be(1);
|
||||
});
|
||||
});
|
||||
|
||||
it('should report initial status', function () {
|
||||
var statusStub = sinon.stub();
|
||||
sinon.stub(SegmentedFetch.prototype, '_processQueue', function () {
|
||||
return new Promise(function (res) { return res(); });
|
||||
});
|
||||
searchStrategy.getSourceStateFromRequest.returns(Promise.resolve());
|
||||
|
||||
return segmentedFetch.fetch({
|
||||
status: statusStub
|
||||
}).then(function () {
|
||||
expect(statusStub.callCount).to.be(1);
|
||||
|
||||
var status = statusStub.getCall(0).args[0];
|
||||
expect(status.active).to.be(null);
|
||||
expect(status.total).to.be(searchSourceStubs.toIndexList.length);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('abort', function () {
|
||||
it('should return a promise', function () {
|
||||
var abort = segmentedFetch.abort();
|
||||
expect('then' in abort).to.be(true);
|
||||
return abort;
|
||||
});
|
||||
|
||||
it('should abort the existing fetch', function () {
|
||||
var loopCount = 3;
|
||||
var queue = [];
|
||||
for (var i = 0; i <= loopCount; i++) {
|
||||
queue.push('queue-index-' + i);
|
||||
}
|
||||
|
||||
sinon.stub(SegmentedFetch.prototype, '_extractQueue', function () {
|
||||
this.queue = queue;
|
||||
});
|
||||
|
||||
sinon.stub(SegmentedFetch.prototype, '_executeSearch', function () {
|
||||
return new Promise(function (resolve) {
|
||||
resolve({
|
||||
took: 10,
|
||||
hits: {
|
||||
total: 10,
|
||||
max_score: 1,
|
||||
hits: []
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
searchStrategy.getSourceStateFromRequest.returns(Promise.resolve({
|
||||
body: {
|
||||
size: 10
|
||||
}
|
||||
}));
|
||||
|
||||
var eachHandler = sinon.spy(function () {
|
||||
if (eachHandler.callCount === loopCount) {
|
||||
segmentedFetch.abort();
|
||||
}
|
||||
});
|
||||
|
||||
return segmentedFetch.fetch({ each: eachHandler }).then(function () {
|
||||
expect(eachHandler.callCount).to.be(loopCount);
|
||||
});
|
||||
});
|
||||
|
||||
it('should abort the searchPromise', function () {
|
||||
var searchPromiseAbortStub = sinon.spy();
|
||||
|
||||
sinon.stub(SegmentedFetch.prototype, '_extractQueue', function () {
|
||||
this.queue = ['one', 'two', 'three'];
|
||||
});
|
||||
|
||||
sinon.stub(SegmentedFetch.prototype, '_executeSearch', function () {
|
||||
this.searchPromise = { abort: searchPromiseAbortStub };
|
||||
return Promise.resolve();
|
||||
});
|
||||
|
||||
sinon.stub(SegmentedFetch.prototype, '_executeRequest', function () {
|
||||
var self = this;
|
||||
return self._executeSearch()
|
||||
.then(function () {
|
||||
if (typeof self.requestHandlers.each === 'function') {
|
||||
return self.requestHandlers.each();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
searchStrategy.getSourceStateFromRequest.returns(Promise.resolve({
|
||||
body: {
|
||||
size: 10
|
||||
}
|
||||
}));
|
||||
|
||||
var eachHandler = sinon.spy(function () {
|
||||
segmentedFetch.abort();
|
||||
});
|
||||
|
||||
return segmentedFetch.fetch({ each: eachHandler }).then(function () {
|
||||
expect(eachHandler.callCount).to.be(1);
|
||||
// 1 for fetch, 1 for actual abort call
|
||||
expect(searchPromiseAbortStub.callCount).to.be(2);
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should clear the notification', function () {
|
||||
segmentedFetch.notifyEvent = sinon.spy();
|
||||
|
||||
sinon.stub(SegmentedFetch.prototype, 'fetch', function (opts) {
|
||||
var SegmentedFetchSelf = this;
|
||||
var fakeRequest = {};
|
||||
|
||||
return Promise.try(function () {
|
||||
return SegmentedFetchSelf._startRequest();
|
||||
})
|
||||
.then(function () {
|
||||
SegmentedFetchSelf._setRequest(fakeRequest);
|
||||
})
|
||||
.then(function () {
|
||||
// dumb mock or the fetch lifecycle
|
||||
// loop, running each
|
||||
while (SegmentedFetchSelf.activeRequest !== null) {
|
||||
if (typeof opts.each === 'function') {
|
||||
opts.each();
|
||||
}
|
||||
}
|
||||
|
||||
// return when activeRequest is null
|
||||
return;
|
||||
})
|
||||
.then(function () {
|
||||
SegmentedFetchSelf._stopRequest();
|
||||
});
|
||||
});
|
||||
|
||||
var eachHandler = sinon.spy(function () {
|
||||
// will set activeRequest to null
|
||||
segmentedFetch.abort();
|
||||
});
|
||||
|
||||
return segmentedFetch.fetch({ each: eachHandler }).then(function () {
|
||||
expect(eachHandler.callCount).to.be(1);
|
||||
// 1 for stop from fetch, 1 from abort
|
||||
expect(segmentedFetch.notifyEvent.callCount).to.be(2);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
99
test/unit/specs/components/agg_types/_agg_params.js
Normal file
99
test/unit/specs/components/agg_types/_agg_params.js
Normal file
|
@ -0,0 +1,99 @@
|
|||
define(function (require) {
|
||||
return ['AggParams class', function () {
|
||||
var _ = require('lodash');
|
||||
|
||||
var AggParams;
|
||||
var BaseAggParam;
|
||||
var FieldAggParam;
|
||||
var OptionedAggParam;
|
||||
|
||||
beforeEach(module('kibana'));
|
||||
// stub out the param classes before we get the AggParams
|
||||
beforeEach(inject(require('specs/components/agg_types/utils/stub_agg_params')));
|
||||
// fetch out deps
|
||||
beforeEach(inject(function (Private) {
|
||||
AggParams = Private(require('components/agg_types/_agg_params'));
|
||||
BaseAggParam = Private(require('components/agg_types/param_types/base'));
|
||||
FieldAggParam = Private(require('components/agg_types/param_types/field'));
|
||||
OptionedAggParam = Private(require('components/agg_types/param_types/optioned'));
|
||||
}));
|
||||
|
||||
describe('constructor args', function () {
|
||||
it('accepts an object of params defs', function () {
|
||||
var aggParams = new AggParams({
|
||||
one: {},
|
||||
two: {}
|
||||
});
|
||||
|
||||
expect(aggParams).to.have.length(2);
|
||||
expect(aggParams).to.be.an(Array);
|
||||
expect(aggParams.byName).to.have.keys(['one', 'two']);
|
||||
});
|
||||
|
||||
it('accepts an array of param defs', function () {
|
||||
var aggParams = new AggParams([
|
||||
{ name: 'one' },
|
||||
{ name: 'two' }
|
||||
]);
|
||||
|
||||
expect(aggParams).to.have.length(2);
|
||||
expect(aggParams).to.be.an(Array);
|
||||
expect(aggParams.byName).to.have.keys(['one', 'two']);
|
||||
});
|
||||
});
|
||||
|
||||
describe('AggParam creation', function () {
|
||||
it('Uses the FieldAggParam class for params with the name "field"', function () {
|
||||
var aggParams = new AggParams([
|
||||
{ name: 'field' }
|
||||
]);
|
||||
|
||||
expect(aggParams).to.have.length(1);
|
||||
expect(aggParams[0]).to.be.a(FieldAggParam);
|
||||
expect(aggParams[0]).to.be.a(BaseAggParam);
|
||||
});
|
||||
|
||||
it('Uses the OptionedAggParam class for params with defined options', function () {
|
||||
var aggParams = new AggParams([
|
||||
{
|
||||
name: 'interval',
|
||||
options: [
|
||||
{ display: 'Automatic', val: 'auto' },
|
||||
{ display: '2 Hours', val: '2h' }
|
||||
]
|
||||
}
|
||||
]);
|
||||
|
||||
expect(aggParams).to.have.length(1);
|
||||
expect(aggParams[0]).to.be.a(OptionedAggParam);
|
||||
expect(aggParams[0]).to.be.a(BaseAggParam);
|
||||
});
|
||||
|
||||
it('Always converts the params to a BaseAggParam', function () {
|
||||
var aggParams = new AggParams([
|
||||
{
|
||||
name: 'height',
|
||||
editor: '<blink>high</blink>'
|
||||
},
|
||||
{
|
||||
name: 'weight',
|
||||
editor: '<blink>big</blink>'
|
||||
},
|
||||
{
|
||||
name: 'waist',
|
||||
editor: '<blink>small</blink>'
|
||||
}
|
||||
]);
|
||||
|
||||
expect(BaseAggParam).to.have.property('callCount', 3);
|
||||
expect(FieldAggParam).to.have.property('callCount', 0);
|
||||
expect(OptionedAggParam).to.have.property('callCount', 0);
|
||||
|
||||
expect(aggParams).to.have.length(3);
|
||||
aggParams.forEach(function (aggParam) {
|
||||
expect(aggParam).to.be.a(BaseAggParam);
|
||||
});
|
||||
});
|
||||
});
|
||||
}];
|
||||
});
|
100
test/unit/specs/components/agg_types/_agg_type.js
Normal file
100
test/unit/specs/components/agg_types/_agg_type.js
Normal file
|
@ -0,0 +1,100 @@
|
|||
define(function (require) {
|
||||
return ['AggType Class', function () {
|
||||
var _ = require('lodash');
|
||||
var sinon = require('test_utils/auto_release_sinon');
|
||||
var AggType;
|
||||
var AggParams;
|
||||
|
||||
require('services/private');
|
||||
|
||||
beforeEach(module('kibana'));
|
||||
beforeEach(inject(function (Private) {
|
||||
var AggParamsPM = require('components/agg_types/_agg_params');
|
||||
AggParams = sinon.spy(Private(AggParamsPM));
|
||||
Private.stub(AggParamsPM, AggParams);
|
||||
|
||||
AggType = Private(require('components/agg_types/_agg_type'));
|
||||
}));
|
||||
|
||||
describe('constructor', function () {
|
||||
|
||||
it('requires a config object as it\'s first param', function () {
|
||||
expect(function () {
|
||||
new AggType(null);
|
||||
}).to.throwError();
|
||||
});
|
||||
|
||||
describe('application of config properties', function () {
|
||||
var copiedConfigProps = [
|
||||
'name',
|
||||
'title',
|
||||
'makeLabel',
|
||||
'ordered'
|
||||
];
|
||||
|
||||
describe('"' + copiedConfigProps.join('", "') + '"', function () {
|
||||
it('assigns the config value to itself', function () {
|
||||
var config = _.transform(copiedConfigProps, function (config, prop) {
|
||||
config[prop] = {};
|
||||
}, {});
|
||||
|
||||
var aggType = new AggType(config);
|
||||
|
||||
copiedConfigProps.forEach(function (prop) {
|
||||
expect(aggType[prop]).to.be(config[prop]);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('makeLabel', function () {
|
||||
it('makes a function when the makeLabel config is not specified', function () {
|
||||
var someGetter = function () {};
|
||||
|
||||
var aggType = new AggType({
|
||||
makeLabel: someGetter
|
||||
});
|
||||
|
||||
expect(aggType.makeLabel).to.be(someGetter);
|
||||
|
||||
aggType = new AggType({
|
||||
name: 'pizza'
|
||||
});
|
||||
|
||||
expect(aggType.makeLabel).to.be.a('function');
|
||||
expect(aggType.makeLabel()).to.be('pizza');
|
||||
});
|
||||
});
|
||||
|
||||
describe('params', function () {
|
||||
it('defaults to an empty AggParams object', function () {
|
||||
var aggType = new AggType({
|
||||
name: 'smart agg'
|
||||
});
|
||||
|
||||
expect(aggType.params).to.be.an(AggParams);
|
||||
expect(aggType.params.length).to.be(0);
|
||||
});
|
||||
|
||||
it('passes the params arg directly to the AggParams constructor', function () {
|
||||
var params = [
|
||||
{name: 'one'},
|
||||
{name: 'two'}
|
||||
];
|
||||
|
||||
var aggType = new AggType({
|
||||
name: 'bucketeer',
|
||||
params: params
|
||||
});
|
||||
|
||||
expect(aggType.params).to.be.an(AggParams);
|
||||
expect(aggType.params.length).to.be(2);
|
||||
expect(AggParams.callCount).to.be(1);
|
||||
expect(AggParams.firstCall.args[0]).to.be(params);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
}];
|
||||
});
|
193
test/unit/specs/components/agg_types/_bucket_count_between.js
Normal file
193
test/unit/specs/components/agg_types/_bucket_count_between.js
Normal file
|
@ -0,0 +1,193 @@
|
|||
define(function (require) {
|
||||
return ['bucketCountBetween util', function () {
|
||||
var _ = require('lodash');
|
||||
var indexPattern;
|
||||
var Vis;
|
||||
var visTypes;
|
||||
var aggTypes;
|
||||
var AggConfig;
|
||||
var bucketCountBetween;
|
||||
|
||||
// http://cwestblog.com/2014/02/25/javascript-testing-for-negative-zero/
|
||||
// works for -0 and +0
|
||||
function isNegative(n) {
|
||||
return ((n = +n) || 1 / n) < 0;
|
||||
}
|
||||
|
||||
beforeEach(module('kibana'));
|
||||
beforeEach(inject(function (Private) {
|
||||
indexPattern = Private(require('fixtures/stubbed_logstash_index_pattern'));
|
||||
Vis = Private(require('components/vis/vis'));
|
||||
visTypes = Private(require('components/vis_types/index'));
|
||||
aggTypes = Private(require('components/agg_types/index'));
|
||||
AggConfig = Private(require('components/vis/_agg_config'));
|
||||
bucketCountBetween = Private(require('components/agg_types/buckets/_bucket_count_between'));
|
||||
}));
|
||||
|
||||
it('returns a positive number when a is before b', function () {
|
||||
var vis = new Vis(indexPattern, {
|
||||
type: 'histogram',
|
||||
aggs: [
|
||||
{
|
||||
type: 'date_histogram',
|
||||
schema: 'segment'
|
||||
},
|
||||
{
|
||||
type: 'terms',
|
||||
schema: 'segment'
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
var a = vis.aggs.byTypeName.date_histogram[0];
|
||||
var b = vis.aggs.byTypeName.terms[0];
|
||||
var count = bucketCountBetween(a, b);
|
||||
expect(isNegative(count)).to.be(false);
|
||||
});
|
||||
|
||||
it('returns a negative number when a is after b', function () {
|
||||
var vis = new Vis(indexPattern, {
|
||||
type: 'histogram',
|
||||
aggs: [
|
||||
{
|
||||
type: 'date_histogram',
|
||||
schema: 'segment'
|
||||
},
|
||||
{
|
||||
type: 'terms',
|
||||
schema: 'segment'
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
var a = vis.aggs.byTypeName.terms[0];
|
||||
var b = vis.aggs.byTypeName.date_histogram[0];
|
||||
var count = bucketCountBetween(a, b);
|
||||
expect(isNegative(count)).to.be(true);
|
||||
});
|
||||
|
||||
it('returns 0 when there are no buckets between a and b', function () {
|
||||
var vis = new Vis(indexPattern, {
|
||||
type: 'histogram',
|
||||
aggs: [
|
||||
{
|
||||
type: 'date_histogram',
|
||||
schema: 'segment'
|
||||
},
|
||||
{
|
||||
type: 'terms',
|
||||
schema: 'segment'
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
var a = vis.aggs.byTypeName.date_histogram[0];
|
||||
var b = vis.aggs.byTypeName.terms[0];
|
||||
expect(bucketCountBetween(a, b)).to.be(0);
|
||||
});
|
||||
|
||||
it('returns null when b is not in the aggs', function () {
|
||||
var vis = new Vis(indexPattern, {
|
||||
type: 'histogram',
|
||||
aggs: [
|
||||
{
|
||||
type: 'date_histogram',
|
||||
schema: 'segment'
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
var a = vis.aggs.byTypeName.date_histogram[0];
|
||||
var b = new AggConfig(vis, {
|
||||
type: 'terms',
|
||||
schema: 'segment'
|
||||
});
|
||||
|
||||
expect(bucketCountBetween(a, b)).to.be(null);
|
||||
});
|
||||
|
||||
it('returns null when a is not in the aggs', function () {
|
||||
var vis = new Vis(indexPattern, {
|
||||
type: 'histogram',
|
||||
aggs: [
|
||||
{
|
||||
type: 'date_histogram',
|
||||
schema: 'segment'
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
var a = new AggConfig(vis, {
|
||||
type: 'terms',
|
||||
schema: 'segment'
|
||||
});
|
||||
var b = vis.aggs.byTypeName.date_histogram[0];
|
||||
|
||||
expect(bucketCountBetween(a, b)).to.be(null);
|
||||
});
|
||||
|
||||
it('returns null when a and b are not in the aggs', function () {
|
||||
var vis = new Vis(indexPattern, {
|
||||
type: 'histogram',
|
||||
aggs: []
|
||||
});
|
||||
|
||||
var a = new AggConfig(vis, {
|
||||
type: 'terms',
|
||||
schema: 'segment'
|
||||
});
|
||||
|
||||
var b = new AggConfig(vis, {
|
||||
type: 'date_histogram',
|
||||
schema: 'segment'
|
||||
});
|
||||
|
||||
expect(bucketCountBetween(a, b)).to.be(null);
|
||||
});
|
||||
|
||||
it('can count', function () {
|
||||
var schemas = visTypes.byName.histogram.schemas.buckets;
|
||||
|
||||
// slow for this test is actually somewhere around 1/2 a sec
|
||||
this.slow(500);
|
||||
|
||||
function randBucketAggForVis(vis) {
|
||||
var schema = _.sample(schemas);
|
||||
var aggType = _.sample(aggTypes.byType.buckets);
|
||||
|
||||
return new AggConfig(vis, {
|
||||
schema: schema,
|
||||
type: aggType
|
||||
});
|
||||
}
|
||||
|
||||
_.times(50, function (n) {
|
||||
var vis = new Vis(indexPattern, {
|
||||
type: 'histogram',
|
||||
aggs: []
|
||||
});
|
||||
|
||||
var randBucketAgg = _.partial(randBucketAggForVis, vis);
|
||||
|
||||
var a = randBucketAgg();
|
||||
var b = randBucketAgg();
|
||||
|
||||
// create n aggs between a and b
|
||||
var aggs = [];
|
||||
for (var i = 0; i < n; i++) {
|
||||
aggs.push(randBucketAgg());
|
||||
}
|
||||
|
||||
aggs.unshift(a);
|
||||
aggs.push(b);
|
||||
|
||||
vis.setState({
|
||||
type: 'histogram',
|
||||
aggs: aggs
|
||||
});
|
||||
|
||||
expect(bucketCountBetween(a, b)).to.be(n);
|
||||
});
|
||||
});
|
||||
}];
|
||||
});
|
5
test/unit/specs/components/agg_types/_metric_aggs.js
Normal file
5
test/unit/specs/components/agg_types/_metric_aggs.js
Normal file
|
@ -0,0 +1,5 @@
|
|||
define(function (require) {
|
||||
return ['AggParams', function () {
|
||||
|
||||
}];
|
||||
});
|
|
@ -0,0 +1,113 @@
|
|||
define(function (require) {
|
||||
return ['Date Histogram Agg', function () {
|
||||
var _ = require('lodash');
|
||||
|
||||
describe('ordered', function () {
|
||||
var histogram;
|
||||
|
||||
beforeEach(module('kibana'));
|
||||
beforeEach(inject(function (Private) {
|
||||
histogram = Private(require('components/agg_types/index')).byName.histogram;
|
||||
}));
|
||||
|
||||
it('is ordered', function () {
|
||||
expect(histogram.ordered).to.be.ok();
|
||||
});
|
||||
|
||||
it('is not ordered by date', function () {
|
||||
expect(histogram.ordered).to.not.have.property('date');
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
describe('params', function () {
|
||||
var paramWriter;
|
||||
|
||||
beforeEach(module('kibana'));
|
||||
beforeEach(inject(function (Private) {
|
||||
var AggParamWriter = Private(require('test_utils/agg_param_writer'));
|
||||
paramWriter = new AggParamWriter({ aggType: 'histogram' });
|
||||
}));
|
||||
|
||||
describe('interval', function () {
|
||||
// reads aggConfig.params.interval, writes to dsl.interval
|
||||
|
||||
it('accepts a number', function () {
|
||||
var output = paramWriter.write({ interval: 100 });
|
||||
expect(output.params).to.have.property('interval', 100);
|
||||
});
|
||||
|
||||
it('accepts a string', function () {
|
||||
var output = paramWriter.write({ interval: '10' });
|
||||
expect(output.params).to.have.property('interval', 10);
|
||||
});
|
||||
|
||||
it('fails on non-numeric values', function () {
|
||||
// template validation prevents this from users, not devs
|
||||
var output = paramWriter.write({ interval: [] });
|
||||
expect(isNaN(output.params.interval)).to.be.ok();
|
||||
});
|
||||
});
|
||||
|
||||
describe('min_doc_count', function () {
|
||||
it('casts true values to 0', function () {
|
||||
var output = paramWriter.write({ min_doc_count: true });
|
||||
expect(output.params).to.have.property('min_doc_count', 0);
|
||||
|
||||
output = paramWriter.write({ min_doc_count: 'yes' });
|
||||
expect(output.params).to.have.property('min_doc_count', 0);
|
||||
|
||||
output = paramWriter.write({ min_doc_count: 1 });
|
||||
expect(output.params).to.have.property('min_doc_count', 0);
|
||||
|
||||
output = paramWriter.write({ min_doc_count: {} });
|
||||
expect(output.params).to.have.property('min_doc_count', 0);
|
||||
});
|
||||
|
||||
it('writes nothing for false values', function () {
|
||||
var output = paramWriter.write({ min_doc_count: '' });
|
||||
expect(output.params).to.not.have.property('min_doc_count');
|
||||
|
||||
output = paramWriter.write({ min_doc_count: null });
|
||||
expect(output.params).to.not.have.property('min_doc_count');
|
||||
|
||||
output = paramWriter.write({ min_doc_count: undefined });
|
||||
expect(output.params).to.not.have.property('min_doc_count');
|
||||
});
|
||||
});
|
||||
|
||||
describe('extended_bounds', function () {
|
||||
it('writes when only eb.min is set', function () {
|
||||
var output = paramWriter.write({
|
||||
extended_bounds: { min: 0 }
|
||||
});
|
||||
expect(output.params.extended_bounds).to.have.property('min', 0);
|
||||
expect(output.params.extended_bounds).to.have.property('max', undefined);
|
||||
});
|
||||
|
||||
it('writes when only eb.max is set', function () {
|
||||
var output = paramWriter.write({
|
||||
extended_bounds: { max: 0 }
|
||||
});
|
||||
expect(output.params.extended_bounds).to.have.property('min', undefined);
|
||||
expect(output.params.extended_bounds).to.have.property('max', 0);
|
||||
});
|
||||
|
||||
it('writes when both eb.min and eb.max are set', function () {
|
||||
var output = paramWriter.write({
|
||||
extended_bounds: { min: 99, max: 100 }
|
||||
});
|
||||
expect(output.params.extended_bounds).to.have.property('min', 99);
|
||||
expect(output.params.extended_bounds).to.have.property('max', 100);
|
||||
});
|
||||
|
||||
it('does not write when nothing is set', function () {
|
||||
var output = paramWriter.write({
|
||||
extended_bounds: {}
|
||||
});
|
||||
expect(output.params).to.not.have.property('extended_bounds');
|
||||
});
|
||||
});
|
||||
});
|
||||
}];
|
||||
});
|
113
test/unit/specs/components/agg_types/bucket_aggs/histogram.js
Normal file
113
test/unit/specs/components/agg_types/bucket_aggs/histogram.js
Normal file
|
@ -0,0 +1,113 @@
|
|||
define(function (require) {
|
||||
return ['Histogram Agg', function () {
|
||||
var _ = require('lodash');
|
||||
|
||||
describe('ordered', function () {
|
||||
var histogram;
|
||||
|
||||
beforeEach(module('kibana'));
|
||||
beforeEach(inject(function (Private) {
|
||||
histogram = Private(require('components/agg_types/index')).byName.histogram;
|
||||
}));
|
||||
|
||||
it('is ordered', function () {
|
||||
expect(histogram.ordered).to.be.ok();
|
||||
});
|
||||
|
||||
it('is not ordered by date', function () {
|
||||
expect(histogram.ordered).to.not.have.property('date');
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
describe('params', function () {
|
||||
var paramWriter;
|
||||
|
||||
beforeEach(module('kibana'));
|
||||
beforeEach(inject(function (Private) {
|
||||
var AggParamWriter = Private(require('test_utils/agg_param_writer'));
|
||||
paramWriter = new AggParamWriter({ aggType: 'histogram' });
|
||||
}));
|
||||
|
||||
describe('interval', function () {
|
||||
// reads aggConfig.params.interval, writes to dsl.interval
|
||||
|
||||
it('accepts a number', function () {
|
||||
var output = paramWriter.write({ interval: 100 });
|
||||
expect(output.params).to.have.property('interval', 100);
|
||||
});
|
||||
|
||||
it('accepts a string', function () {
|
||||
var output = paramWriter.write({ interval: '10' });
|
||||
expect(output.params).to.have.property('interval', 10);
|
||||
});
|
||||
|
||||
it('fails on non-numeric values', function () {
|
||||
// template validation prevents this from users, not devs
|
||||
var output = paramWriter.write({ interval: [] });
|
||||
expect(isNaN(output.params.interval)).to.be.ok();
|
||||
});
|
||||
});
|
||||
|
||||
describe('min_doc_count', function () {
|
||||
it('casts true values to 0', function () {
|
||||
var output = paramWriter.write({ min_doc_count: true });
|
||||
expect(output.params).to.have.property('min_doc_count', 0);
|
||||
|
||||
output = paramWriter.write({ min_doc_count: 'yes' });
|
||||
expect(output.params).to.have.property('min_doc_count', 0);
|
||||
|
||||
output = paramWriter.write({ min_doc_count: 1 });
|
||||
expect(output.params).to.have.property('min_doc_count', 0);
|
||||
|
||||
output = paramWriter.write({ min_doc_count: {} });
|
||||
expect(output.params).to.have.property('min_doc_count', 0);
|
||||
});
|
||||
|
||||
it('writes nothing for false values', function () {
|
||||
var output = paramWriter.write({ min_doc_count: '' });
|
||||
expect(output.params).to.not.have.property('min_doc_count');
|
||||
|
||||
output = paramWriter.write({ min_doc_count: null });
|
||||
expect(output.params).to.not.have.property('min_doc_count');
|
||||
|
||||
output = paramWriter.write({ min_doc_count: undefined });
|
||||
expect(output.params).to.not.have.property('min_doc_count');
|
||||
});
|
||||
});
|
||||
|
||||
describe('extended_bounds', function () {
|
||||
it('writes when only eb.min is set', function () {
|
||||
var output = paramWriter.write({
|
||||
extended_bounds: { min: 0 }
|
||||
});
|
||||
expect(output.params.extended_bounds).to.have.property('min', 0);
|
||||
expect(output.params.extended_bounds).to.have.property('max', undefined);
|
||||
});
|
||||
|
||||
it('writes when only eb.max is set', function () {
|
||||
var output = paramWriter.write({
|
||||
extended_bounds: { max: 0 }
|
||||
});
|
||||
expect(output.params.extended_bounds).to.have.property('min', undefined);
|
||||
expect(output.params.extended_bounds).to.have.property('max', 0);
|
||||
});
|
||||
|
||||
it('writes when both eb.min and eb.max are set', function () {
|
||||
var output = paramWriter.write({
|
||||
extended_bounds: { min: 99, max: 100 }
|
||||
});
|
||||
expect(output.params.extended_bounds).to.have.property('min', 99);
|
||||
expect(output.params.extended_bounds).to.have.property('max', 100);
|
||||
});
|
||||
|
||||
it('does not write when nothing is set', function () {
|
||||
var output = paramWriter.write({
|
||||
extended_bounds: {}
|
||||
});
|
||||
expect(output.params).to.not.have.property('extended_bounds');
|
||||
});
|
||||
});
|
||||
});
|
||||
}];
|
||||
});
|
14
test/unit/specs/components/agg_types/index.js
Normal file
14
test/unit/specs/components/agg_types/index.js
Normal file
|
@ -0,0 +1,14 @@
|
|||
define(function (require) {
|
||||
describe('AggTypesComponent', function () {
|
||||
var childSuites = [
|
||||
require('specs/components/agg_types/_agg_type'),
|
||||
require('specs/components/agg_types/_agg_params'),
|
||||
require('specs/components/agg_types/_bucket_count_between'),
|
||||
require('specs/components/agg_types/bucket_aggs/histogram'),
|
||||
require('specs/components/agg_types/bucket_aggs/date_histogram'),
|
||||
require('specs/components/agg_types/_metric_aggs')
|
||||
].forEach(function (s) {
|
||||
describe(s[0], s[1]);
|
||||
});
|
||||
});
|
||||
});
|
|
@ -0,0 +1,45 @@
|
|||
define(function (require) {
|
||||
var _ = require('lodash');
|
||||
var sinon = require('test_utils/auto_release_sinon');
|
||||
|
||||
function ParamClassStub(parent, body) {
|
||||
var stub = sinon.spy(body || function () {
|
||||
stub.Super && stub.Super.call(this);
|
||||
});
|
||||
if (parent) _.inherits(stub, parent);
|
||||
return stub;
|
||||
}
|
||||
|
||||
/**
|
||||
* stub all of the param classes, but ensure that they still inherit properly.
|
||||
* This method should be passed directly to inject();
|
||||
*
|
||||
* ```js
|
||||
* var stubParamClasses = require('specs/components/agg_types/utils/stub_agg_params');
|
||||
* describe('something', function () {
|
||||
* beforeEach(inject(stubParamClasses));
|
||||
* })
|
||||
* ```
|
||||
*
|
||||
* @param {PrivateLoader} Private - The private module loader, inject by passing this function to inject()
|
||||
* @return {undefined}
|
||||
*/
|
||||
return function stubParamClasses(Private) {
|
||||
var BaseAggParam = Private.stub(
|
||||
require('components/agg_types/param_types/base'),
|
||||
ParamClassStub(null, function (config) {
|
||||
_.assign(this, config);
|
||||
})
|
||||
);
|
||||
|
||||
Private.stub(
|
||||
require('components/agg_types/param_types/field'),
|
||||
ParamClassStub(BaseAggParam)
|
||||
);
|
||||
|
||||
Private.stub(
|
||||
require('components/agg_types/param_types/optioned'),
|
||||
ParamClassStub(BaseAggParam)
|
||||
);
|
||||
};
|
||||
});
|
106
test/unit/specs/components/vis/_agg_config.js
Normal file
106
test/unit/specs/components/vis/_agg_config.js
Normal file
|
@ -0,0 +1,106 @@
|
|||
define(function (require) {
|
||||
return ['AggConfig', function () {
|
||||
var sinon = require('test_utils/auto_release_sinon');
|
||||
|
||||
var Vis;
|
||||
var AggConfig;
|
||||
var indexPattern;
|
||||
|
||||
beforeEach(module('kibana'));
|
||||
beforeEach(inject(function (Private) {
|
||||
Vis = Private(require('components/vis/vis'));
|
||||
AggConfig = Private(require('components/vis/_agg_config'));
|
||||
indexPattern = Private(require('fixtures/stubbed_logstash_index_pattern'));
|
||||
}));
|
||||
|
||||
describe('#toDsl', function () {
|
||||
it('calls #write()', function () {
|
||||
var vis = new Vis(indexPattern, {
|
||||
type: 'histogram',
|
||||
aggs: [
|
||||
{
|
||||
type: 'date_histogram',
|
||||
schema: 'segment'
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
var aggConfig = vis.aggs.byTypeName.date_histogram[0];
|
||||
var stub = sinon.stub(aggConfig, 'write').returns({ params: {} });
|
||||
|
||||
aggConfig.toDsl();
|
||||
expect(stub.callCount).to.be(1);
|
||||
});
|
||||
|
||||
it('uses the type name as the agg name', function () {
|
||||
var vis = new Vis(indexPattern, {
|
||||
type: 'histogram',
|
||||
aggs: [
|
||||
{
|
||||
type: 'date_histogram',
|
||||
schema: 'segment'
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
var aggConfig = vis.aggs.byTypeName.date_histogram[0];
|
||||
sinon.stub(aggConfig, 'write').returns({ params: {} });
|
||||
|
||||
var dsl = aggConfig.toDsl();
|
||||
expect(dsl).to.have.property('date_histogram');
|
||||
});
|
||||
|
||||
|
||||
it('uses the params from #write() output as the agg params', function () {
|
||||
var vis = new Vis(indexPattern, {
|
||||
type: 'histogram',
|
||||
aggs: [
|
||||
{
|
||||
type: 'date_histogram',
|
||||
schema: 'segment'
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
var aggConfig = vis.aggs.byTypeName.date_histogram[0];
|
||||
var football = {};
|
||||
|
||||
sinon.stub(aggConfig, 'write').returns({ params: football });
|
||||
|
||||
var dsl = aggConfig.toDsl();
|
||||
expect(dsl.date_histogram).to.be(football);
|
||||
});
|
||||
|
||||
it('includes subAggs from #write() output', function () {
|
||||
var vis = new Vis(indexPattern, {
|
||||
type: 'histogram',
|
||||
aggs: [
|
||||
{
|
||||
type: 'avg',
|
||||
schema: 'metric'
|
||||
},
|
||||
{
|
||||
type: 'date_histogram',
|
||||
schema: 'segment'
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
var histoConfig = vis.aggs.byTypeName.date_histogram[0];
|
||||
var avgConfig = vis.aggs.byTypeName.avg[0];
|
||||
var football = {};
|
||||
|
||||
sinon.stub(histoConfig, 'write').returns({ params: {}, subAggs: [avgConfig] });
|
||||
sinon.stub(avgConfig, 'write').returns({ params: football });
|
||||
|
||||
var dsl = histoConfig.toDsl();
|
||||
|
||||
// didn't use .eql() because of variable key names, and final check is strict
|
||||
expect(dsl).to.have.property('aggs');
|
||||
expect(dsl.aggs).to.have.property(avgConfig.id);
|
||||
expect(dsl.aggs[avgConfig.id]).to.have.property('avg');
|
||||
expect(dsl.aggs[avgConfig.id].avg).to.be(football);
|
||||
});
|
||||
});
|
||||
}];
|
||||
});
|
193
test/unit/specs/components/vis/_agg_configs.js
Normal file
193
test/unit/specs/components/vis/_agg_configs.js
Normal file
|
@ -0,0 +1,193 @@
|
|||
define(function (require) {
|
||||
return ['AggConfigs', function () {
|
||||
var _ = require('lodash');
|
||||
var sinon = require('test_utils/auto_release_sinon');
|
||||
|
||||
var Vis;
|
||||
var Registry;
|
||||
var AggConfig;
|
||||
var AggConfigs;
|
||||
var SpiedAggConfig;
|
||||
var indexPattern;
|
||||
|
||||
beforeEach(module('kibana'));
|
||||
beforeEach(inject(function (Private) {
|
||||
// replace the AggConfig module with a spy
|
||||
var RealAggConfigPM = require('components/vis/_agg_config');
|
||||
AggConfig = Private(RealAggConfigPM);
|
||||
Private.stub(RealAggConfigPM, sinon.spy(AggConfig));
|
||||
|
||||
// load main deps
|
||||
Vis = Private(require('components/vis/vis'));
|
||||
SpiedAggConfig = Private(require('components/vis/_agg_config'));
|
||||
AggConfigs = Private(require('components/vis/_agg_configs'));
|
||||
Registry = require('utils/registry/registry');
|
||||
indexPattern = Private(require('fixtures/stubbed_logstash_index_pattern'));
|
||||
}));
|
||||
|
||||
it('extends Registry', function () {
|
||||
var ac = new AggConfigs();
|
||||
expect(ac).to.be.a(Registry);
|
||||
});
|
||||
|
||||
describe('constructor', function () {
|
||||
it('handles passing just a vis', function () {
|
||||
var vis = new Vis(indexPattern, {
|
||||
type: 'histogram',
|
||||
aggs: []
|
||||
});
|
||||
|
||||
var ac = new AggConfigs(vis);
|
||||
expect(ac).to.have.length(0);
|
||||
});
|
||||
|
||||
it('converts configStates into AggConfig objects if they are not already', function () {
|
||||
var vis = new Vis(indexPattern, {
|
||||
type: 'histogram',
|
||||
aggs: []
|
||||
});
|
||||
|
||||
var ac = new AggConfigs(vis, [
|
||||
{
|
||||
type: 'date_histogram',
|
||||
schema: 'segment'
|
||||
},
|
||||
new AggConfig({
|
||||
type: 'terms',
|
||||
schema: 'split'
|
||||
})
|
||||
]);
|
||||
|
||||
expect(ac).to.have.length(2);
|
||||
expect(SpiedAggConfig).to.have.property('callCount', 1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('#getSorted', function () {
|
||||
it('performs a stable sort, but moves metrics to the bottom', function () {
|
||||
var vis = new Vis(indexPattern, {
|
||||
type: 'histogram',
|
||||
aggs: [
|
||||
{ type: 'avg', schema: 'metric' },
|
||||
{ type: 'terms', schema: 'split' },
|
||||
{ type: 'histogram', schema: 'split' },
|
||||
{ type: 'sum', schema: 'metric' },
|
||||
{ type: 'date_histogram', schema: 'segment' },
|
||||
{ type: 'filters', schema: 'split' },
|
||||
{ type: 'count', schema: 'metric' }
|
||||
]
|
||||
});
|
||||
|
||||
var avg = vis.aggs.byTypeName.avg[0];
|
||||
var sum = vis.aggs.byTypeName.sum[0];
|
||||
var count = vis.aggs.byTypeName.count[0];
|
||||
var terms = vis.aggs.byTypeName.terms[0];
|
||||
var histo = vis.aggs.byTypeName.histogram[0];
|
||||
var dateHisto = vis.aggs.byTypeName.date_histogram[0];
|
||||
var filters = vis.aggs.byTypeName.filters[0];
|
||||
|
||||
var sorted = vis.aggs.getSorted();
|
||||
|
||||
expect(sorted.shift()).to.be(terms);
|
||||
expect(sorted.shift()).to.be(histo);
|
||||
expect(sorted.shift()).to.be(dateHisto);
|
||||
expect(sorted.shift()).to.be(filters);
|
||||
expect(sorted.shift()).to.be(avg);
|
||||
expect(sorted.shift()).to.be(sum);
|
||||
expect(sorted.shift()).to.be(count);
|
||||
expect(sorted).to.have.length(0);
|
||||
});
|
||||
});
|
||||
|
||||
// Coverage for AggConfigs#toDsl: agg ordering, nested compilation, skipping
// of aggs without a DSL form, and sibling placement of multiple metrics.
describe('#toDsl', function () {

  it('uses the sorted aggs', function () {
    var vis = new Vis(indexPattern, { type: 'histogram' });

    // spy on the sort step, then compile
    sinon.spy(vis.aggs, 'getSorted');
    vis.aggs.toDsl();

    expect(vis.aggs.getSorted).to.have.property('callCount', 1);
  });

  it('calls aggConfig#toDsl() on each aggConfig and compiles the nested output', function () {
    var vis = new Vis(indexPattern, {
      type: 'histogram',
      aggs: [
        { type: 'date_histogram', schema: 'segment' },
        { type: 'filters', schema: 'split' }
      ]
    });

    // stub each agg's toDsl() to return a unique marker object so that we
    // can confirm every agg lands at the expected depth of the output
    var stubbed = vis.aggs.map(function (aggConfig) {
      var marker = {};

      sinon.stub(aggConfig, 'toDsl', function () {
        return marker;
      });

      return {
        id: aggConfig.id,
        football: marker
      };
    });

    // walk down the compiled dsl one nesting level at a time; each level
    // must hold the next stubbed agg's marker, in sorted order
    var level = vis.aggs.toDsl();
    while (level) {
      var expected = stubbed.shift();

      expect(level).to.have.property(expected.id);
      expect(level[expected.id]).to.be(expected.football);

      level = level[expected.id].aggs;
    }

    // every stubbed agg must have been consumed by the walk
    expect(stubbed).to.have.length(0);
  });

  it('skips aggs that don\'t have a dsl representation', function () {
    var vis = new Vis(indexPattern, {
      type: 'histogram',
      aggs: [
        { type: 'date_histogram', schema: 'segment', params: { field: '@timestamp' } },
        { type: 'count', schema: 'metric' }
      ]
    });

    var dsl = vis.aggs.toDsl();
    var dateHistoConfig = vis.aggs.byTypeName.date_histogram[0];
    var countConfig = vis.aggs.byTypeName.count[0];

    // the date_histogram is written out, but the count agg (which has no
    // dsl form) must not appear anywhere in the output
    expect(dsl).to.have.property(dateHistoConfig.id);
    expect(dsl[dateHistoConfig.id]).to.be.an('object');
    expect(dsl[dateHistoConfig.id]).to.not.have.property('aggs');
    expect(dsl).to.not.have.property(countConfig.id);
  });

  it('writes multiple metric aggregations at the same level', function () {
    var vis = new Vis(indexPattern, {
      type: 'histogram',
      aggs: [
        { type: 'date_histogram', schema: 'segment', params: { field: '@timestamp' } },
        { type: 'avg', schema: 'metric', params: { field: 'bytes' } },
        { type: 'sum', schema: 'metric', params: { field: 'bytes' } },
        { type: 'min', schema: 'metric', params: { field: 'bytes' } },
        { type: 'max', schema: 'metric', params: { field: 'bytes' } }
      ]
    });

    var dsl = vis.aggs.toDsl();
    var bucketConfig = vis.aggs.byTypeName.date_histogram[0];
    var metricConfigs = vis.aggs.bySchemaGroup.metrics;

    expect(dsl).to.have.property(bucketConfig.id);
    expect(dsl[bucketConfig.id]).to.be.an('object');
    expect(dsl[bucketConfig.id]).to.have.property('aggs');

    // all metrics are siblings nested directly under the bucket agg,
    // and none of them nests anything further
    metricConfigs.forEach(function (metric) {
      expect(dsl[bucketConfig.id].aggs).to.have.property(metric.id);
      expect(dsl[bucketConfig.id].aggs[metric.id]).to.not.have.property('aggs');
    });
  });
});
|
||||
}];
|
||||
});
|
10
test/unit/specs/components/vis/index.js
Normal file
10
test/unit/specs/components/vis/index.js
Normal file
|
@ -0,0 +1,10 @@
|
|||
define(function (require) {
  // Registers the Vis component's child test suites. Each required module
  // exports a [suiteName, suiteFn] pair that becomes a nested describe().
  describe('Vis Component', function () {
    // NOTE: the original code assigned the result of forEach() (always
    // undefined) to an unused `childSuites` variable; the dead local is
    // dropped here.
    [
      require('specs/components/vis/_agg_config'),
      require('specs/components/vis/_agg_configs')
    ].forEach(function (s) {
      describe(s[0], s[1]);
    });
  });
});
|
@ -171,8 +171,8 @@ define(function (require) {
|
|||
});
|
||||
|
||||
it('should return an error classed DOM element with a text message', function () {
|
||||
expect($('.error-wrapper').length).to.be(1);
|
||||
expect($('.error-wrapper p').html()).to.be('This is an error!');
|
||||
expect($('.error').length).to.be(1);
|
||||
expect($('.error p').html()).to.be('This is an error!');
|
||||
});
|
||||
});
|
||||
|
||||
|
|
100
test/utils/agg_param_writer.js
Normal file
100
test/utils/agg_param_writer.js
Normal file
|
@ -0,0 +1,100 @@
|
|||
define(function (require) {
  return function AggParamWriterHelper(Private) {
    var _ = require('lodash');
    var Vis = Private(require('components/vis/vis'));
    var aggTypes = Private(require('components/agg_types/index'));
    var visTypes = Private(require('components/vis_types/index'));
    var stubbedLogstashIndexPattern = Private(require('fixtures/stubbed_logstash_index_pattern'));

    /**
     * Helper object for writing aggParams. Specify an aggType and it will find a vis & schema, and
     * wire up the supporting objects required to feed in parameters, and get #write() output.
     *
     * Use cases:
     *  - Verify that the interval parameter of the histogram visualization casts it's input to a number
     *    ```js
     *    it('casts to a number', function () {
     *      var writer = new AggParamWriter({ aggType: 'histogram' });
     *      var output = writer.write({ interval : '100/10' });
     *      expect(output.params.interval).to.be.a('number');
     *      expect(output.params.interval).to.be(100);
     *    });
     *    ```
     *
     * @class AggParamWriter
     * @param {object} opts - describe the properties of this paramWriter
     * @param {string} opts.aggType - the name of the aggType we want to test. ('histogram', 'filter', etc.)
     * @throws {Error} when the aggType can not be resolved, or no visType provides a schema for it
     */
    function AggParamWriter(opts) {
      var self = this;

      self.aggType = opts.aggType;
      if (_.isString(self.aggType)) {
        self.aggType = aggTypes.byName[self.aggType];
      }

      // not configurable right now, but totally required
      self.indexPattern = stubbedLogstashIndexPattern;

      // the vis type we will use to write the aggParams
      self.visType = null;

      // the schema that the aggType satisfies
      self.visAggSchema = null;

      // find a suitable vis type and schema. Only search when the aggType
      // resolved — otherwise `self.aggType.type` below would throw a raw
      // TypeError instead of the descriptive Error at the bottom.
      if (self.aggType) {
        _.find(visTypes, function (visType) {
          var schema = _.find(visType.schemas.all, function (schema) {
            // type, type, type, type, type... :(
            return schema.group === self.aggType.type;
          });

          if (schema) {
            self.visType = visType;
            self.visAggSchema = schema;
            return true;
          }
        });
      }

      if (!self.aggType || !self.visType || !self.visAggSchema) {
        throw new Error('unable to find a usable visType and schema for the ' + opts.aggType + ' agg type');
      }

      self.vis = new Vis(self.indexPattern, {
        type: self.visType
      });
    }

    /**
     * Run the supplied param values through the aggType's param writers.
     *
     * @param {object} [paramValues] - params for the agg config; when the aggType
     *   declares a `field` param and none is given, a suitable field is sampled
     *   from the stubbed index pattern. The caller's object is never mutated.
     * @returns {object} the output of aggConfig.type.params.write()
     */
    AggParamWriter.prototype.write = function (paramValues) {
      var self = this;
      // clone to avoid mutating the caller's object; default to {} so that
      // write() may be called with no arguments at all
      paramValues = _.clone(paramValues) || {};

      if (self.aggType.params.byName.field && !paramValues.field) {
        // pick a field rather than force a field to be specified everywhere
        if (self.aggType.type === 'metrics') {
          paramValues.field = _.sample(self.indexPattern.fields.byType.number);
        } else {
          paramValues.field = _.sample(self.indexPattern.fields.byType.string);
        }
      }

      self.vis.setState({
        type: self.vis.type.name,
        aggs: [{
          type: self.aggType,
          schema: self.visAggSchema,
          params: paramValues
        }]
      });

      // locate the aggConfig that setState() built for our aggType
      var aggConfig = _.find(self.vis.aggs, function (aggConfig) {
        return aggConfig.type === self.aggType;
      });

      return aggConfig.type.params.write(aggConfig);
    };

    return AggParamWriter;

  };
});
|
27
test/utils/stub_index_pattern.js
Normal file
27
test/utils/stub_index_pattern.js
Normal file
|
@ -0,0 +1,27 @@
|
|||
define(function (require) {
  return function (Private) {
    var Registry = require('utils/registry/registry');
    var fieldFormats = Private(require('components/index_patterns/_field_formats'));

    /**
     * Minimal stand-in for an IndexPattern used by tests: exposes only a
     * field registry, indexed by field name and grouped by field type.
     *
     * @param {string} pattern - index pattern string (unused here; kept for signature parity)
     * @param {string} timeField - name of the time field (unused here; kept for signature parity)
     * @param {object[]} fields - field descriptors; each gains a `count` default
     *   and a lazy, non-enumerable `format` getter
     */
    function StubIndexPattern(pattern, timeField, fields) {
      this.fields = new Registry({
        index: ['name'],
        group: ['type'],
        initialSet: fields.map(function (field) {
          field.count = field.count || 0;

          // non-enumerable type so that it does not get included in the JSON
          Object.defineProperty(field, 'format', {
            enumerable: false,
            get: function () {
              // BUG FIX: the lookup result was computed but never returned,
              // so field.format was always undefined
              return fieldFormats.defaultByType[field.type];
            }
          });

          return field;
        })
      });
    }
    return StubIndexPattern;
  };
});
|
Loading…
Add table
Add a link
Reference in a new issue