mirror of
https://github.com/elastic/kibana.git
synced 2025-04-24 01:38:56 -04:00
parent
40d597908a
commit
a6c3ce58da
2 changed files with 77 additions and 30 deletions
|
@ -1,24 +1,43 @@
|
|||
const filename = require('path').basename(__filename);
// Load the implementation under test: the test file shares its basename with
// the source file one directory up (e.g. movingstd.js -> ../movingstd.js).
// NOTE(review): original line was scrape-garbled to `../$(unknown)`; restored
// to the `${filename}` template used by the sibling Timelion function tests.
const fn = require(`../${filename}`);
|
||||
|
||||
import _ from 'lodash';
|
||||
import moment from 'moment';
|
||||
const expect = require('chai').expect;
|
||||
import invoke from './helpers/invoke_series_fn.js';
|
||||
import getSeries from './helpers/get_series';
|
||||
import getSeriesList from './helpers/get_series_list';
|
||||
|
||||
describe(filename, () => {

  let seriesList;
  beforeEach(() => {
    // Shared fixture for tests that don't build their own series list.
    seriesList = require('./fixtures/seriesList.js')();
  });

  it('computes the moving standard deviation of a list', async () => {
    const points = [
      108.48, 111.56, 112.13, 113.75, 114.25, 110.79, 111.21, 116.82, 117.16, 120.38, 116.96,
      119.56, 118.97, 117.54, 114.42, 111.01, 114.20, 116.43, 117.74, 119.90, 124.65, 124.98,
      124.70, 123.60, 124.5, 126.85];
    const buckets = [];
    buckets[0] = moment('2018-01-01T00:00:00.000Z');
    for (let i = 1; i < points.length; i++) {
      // Clone before adding: moment#add mutates in place and returns the same
      // instance, so reusing buckets[i - 1] directly would make every bucket
      // alias one object holding only the final timestamp.
      buckets[i] = moment(buckets[i - 1]).add(1, 'hours');
    }
    const series = getSeries('test data', buckets, points);
    const seriesList = getSeriesList([series]);
    const numWindows = 5;
    const position = 'left';
    const results = await invoke(fn, [seriesList, numWindows, position]);

    const resultPoints = results.output.list[0].data.map((row) => {
      // row is an array; index 0 is the time bucket, index 1 is the value
      return row[1];
    });
    // First 5 result buckets are null since moving window is filling up.
    const trimmedResultPoints = resultPoints.slice(numWindows);

    const expectedPoints = [
      2.28, 1.46, 1.53, 2.46, 3.00, 4.14, 3.31, 1.67, 1.50, 1.41,
      2.01, 3.56, 3.12, 2.50, 2.56, 3.41, 3.97, 3.92, 3.35, 2.12, 0.52];

    expectedPoints.forEach((value, index) => {
      // Allow a +/-0.01 tolerance — never compare floats with exact equality.
      expect(trimmedResultPoints[index]).to.be.within(value - 0.01, value + 0.01);
    });
  });
});
|
|
@ -2,6 +2,9 @@ import alter from '../lib/alter.js';
|
|||
import _ from 'lodash';
|
||||
import Chainable from '../lib/classes/chainable';
|
||||
|
||||
// Valid values for the `position` argument; the first entry ('left') is the
// default used when the caller omits the argument.
const positions = ['left', 'right', 'center'];
const defaultPosition = positions[0];
|
||||
|
||||
export default new Chainable('movingstd', {
|
||||
args: [
|
||||
{
|
||||
|
@ -11,36 +14,61 @@ export default new Chainable('movingstd', {
|
|||
{
|
||||
name: 'window',
|
||||
types: ['number'],
|
||||
help: 'Number of points to compute the standard deviation over'
|
||||
help: 'Number of points to compute the standard deviation over.'
|
||||
},
|
||||
{
|
||||
name: 'position',
|
||||
types: ['string', 'null'],
|
||||
help: `Position of the window slice relative to the result time. Options are ${positions.join(', ')}. Default: ${defaultPosition}`
|
||||
}
|
||||
],
|
||||
aliases: ['mvstd'],
|
||||
help: 'Calculate the moving standard deviation over a given window. Uses naive two-pass algorithm. Rounding errors ' +
|
||||
'may become more noticeable with very long series, or series with very large numbers.',
|
||||
fn: function movingstdFn(args) {
|
||||
return alter(args, function (eachSeries, _window) {
|
||||
return alter(args, function (eachSeries, _window, _position) {
|
||||
|
||||
_position = _position || defaultPosition;
|
||||
|
||||
if (!_.contains(positions, _position)) throw new Error('Valid positions are: ' + positions.join(', '));
|
||||
|
||||
const pairs = eachSeries.data;
|
||||
const pairsLen = pairs.length;
|
||||
eachSeries.label = eachSeries.label + ' mvstd=' + _window;
|
||||
|
||||
eachSeries.data = _.map(pairs, function (point, i) {
|
||||
if (i < _window) { return [point[0], null]; }
|
||||
function toPoint(point, pairSlice) {
|
||||
const average = _.chain(pairSlice).map(1).reduce(function (memo, num) {
|
||||
return memo + num;
|
||||
}).value() / _window;
|
||||
|
||||
const average = _.chain(pairs.slice(i - _window, i))
|
||||
.map(function (point) {
|
||||
return point[1];
|
||||
}).reduce(function (memo, num) {
|
||||
return (memo + num);
|
||||
}).value() / _window;
|
||||
|
||||
const variance = _.chain(pairs.slice(i - _window, i))
|
||||
.map(function (point) {
|
||||
return point[1];
|
||||
}).reduce(function (memo, num) {
|
||||
return memo + Math.pow(num - average, 2);
|
||||
}).value() / (_window - 1);
|
||||
const variance = _.chain(pairSlice).map(function (point) {
|
||||
return Math.pow(point[1] - average, 2);
|
||||
}).reduce(function (memo, num) {
|
||||
return memo + num;
|
||||
}).value() / (_window - 1);
|
||||
|
||||
return [point[0], Math.sqrt(variance)];
|
||||
});
|
||||
}
|
||||
|
||||
if (_position === 'center') {
|
||||
const windowLeft = Math.floor(_window / 2);
|
||||
const windowRight = _window - windowLeft;
|
||||
eachSeries.data = _.map(pairs, function (point, i) {
|
||||
if (i < windowLeft || i >= pairsLen - windowRight) return [point[0], null];
|
||||
return toPoint(point, pairs.slice(i - windowLeft, i + windowRight));
|
||||
});
|
||||
} else if (_position === 'left') {
|
||||
eachSeries.data = _.map(pairs, function (point, i) {
|
||||
if (i < _window) return [point[0], null];
|
||||
return toPoint(point, pairs.slice(i - _window, i));
|
||||
});
|
||||
} else if (_position === 'right') {
|
||||
eachSeries.data = _.map(pairs, function (point, i) {
|
||||
if (i >= pairsLen - _window) return [point[0], null];
|
||||
return toPoint(point, pairs.slice(i, i + _window));
|
||||
});
|
||||
}
|
||||
|
||||
return eachSeries;
|
||||
});
|
||||
}
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue