Mirror of https://github.com/elastic/kibana.git, synced 2025-04-24 01:38:56 -04:00
Fixes for PR 14731 (#15352)
* Remove dependency on UI code in server
* Fixing tests
This commit is contained in:
parent 99076d1c28
commit de109fc344
4 changed files with 153 additions and 85 deletions
@@ -2,9 +2,8 @@ import { expect } from 'chai';
 import bucketTransform from '../../helpers/bucket_transform';

 describe('bucketTransform', () => {
-
   describe('count', () => {
-    it ('returns count agg', () => {
+    it('returns count agg', () => {
       const metric = { id: 'test', type: 'count' };
       const fn = bucketTransform.count;
       expect(fn(metric)).to.eql({
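The expected aggregation literal is cut off at this hunk boundary. For orientation only: the buildRequestBody hunk at the bottom of this diff shows the shape a count metric compiles to, and the sketch below is inferred from that hunk rather than from the lines elided here.

// Inferred from the buildRequestBody expectation later in this diff, not
// from this file: a count metric becomes a bucket_script over _count.
const expectedCountAgg = {
  bucket_script: {
    buckets_path: { count: '_count' },
    script: { source: 'count * 1', lang: 'expression' },
    gap_policy: 'skip'
  }
};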
@@ -19,7 +18,7 @@ describe('bucketTransform', () => {

   describe('std metric', () => {
     ['avg', 'max', 'min', 'sum', 'cardinality', 'value_count'].forEach(type => {
-      it (`returns ${type} agg`, () => {
+      it(`returns ${type} agg`, () => {
         const metric = { id: 'test', type: type, field: 'cpu.pct' };
         const fn = bucketTransform[type];
         const result = {};
@@ -41,7 +40,7 @@ describe('bucketTransform', () => {

   describe('extended stats', () => {
     ['std_deviation', 'variance', 'sum_of_squares'].forEach(type => {
-      it (`returns ${type} agg`, () => {
+      it(`returns ${type} agg`, () => {
         const fn = bucketTransform[type];
         const metric = { id: 'test', type: type, field: 'cpu.pct' };
         expect(fn(metric)).to.eql({ extended_stats: { field: 'cpu.pct' } });
@@ -50,17 +49,26 @@ describe('bucketTransform', () => {
     it('returns std_deviation agg with sigma', () => {
       const fn = bucketTransform.std_deviation;
-      const metric = { id: 'test', type: 'std_deviation', field: 'cpu.pct', sigma: 2 };
-      expect(fn(metric)).to.eql({ extended_stats: { field: 'cpu.pct', sigma: 2 } });
+      const metric = {
+        id: 'test',
+        type: 'std_deviation',
+        field: 'cpu.pct',
+        sigma: 2
+      };
+      expect(fn(metric)).to.eql({
+        extended_stats: { field: 'cpu.pct', sigma: 2 }
+      });
     });

     it('throws error if type is missing', () => {
-      const run = () => bucketTransform.std_deviation({ id: 'test', field: 'cpu.pct' });
+      const run = () =>
+        bucketTransform.std_deviation({ id: 'test', field: 'cpu.pct' });
       expect(run).to.throw(Error, 'Metric missing type');
     });

     it('throws error if field is missing', () => {
-      const run = () => bucketTransform.std_deviation({ id: 'test', type: 'avg' });
+      const run = () =>
+        bucketTransform.std_deviation({ id: 'test', type: 'avg' });
       expect(run).to.throw(Error, 'Metric missing field');
     });
   });
@@ -80,28 +88,38 @@ describe('bucketTransform', () => {
       expect(fn(metric)).to.eql({
         percentiles: {
           field: 'cpu.pct',
-          percents: [
-            50,
-            10,
-            90
-          ]
+          percents: [50, 10, 90]
         }
       });
     });

     it('throws error if type is missing', () => {
-      const run = () => bucketTransform.percentile({ id: 'test', field: 'cpu.pct', percentiles: [{ value: 50, mode: 'line' }] });
+      const run = () =>
+        bucketTransform.percentile({
+          id: 'test',
+          field: 'cpu.pct',
+          percentiles: [{ value: 50, mode: 'line' }]
+        });
       expect(run).to.throw(Error, 'Metric missing type');
     });

     it('throws error if field is missing', () => {
-      const run = () => bucketTransform.percentile({ id: 'test', type: 'avg', percentiles: [{ value: 50, mode: 'line' }] });
+      const run = () =>
+        bucketTransform.percentile({
+          id: 'test',
+          type: 'avg',
+          percentiles: [{ value: 50, mode: 'line' }]
+        });
       expect(run).to.throw(Error, 'Metric missing field');
     });

     it('throws error if percentiles is missing', () => {
-      const run = () => bucketTransform.percentile({ id: 'test', type: 'avg', field: 'cpu.pct' });
+      const run = () =>
+        bucketTransform.percentile({
+          id: 'test',
+          type: 'avg',
+          field: 'cpu.pct'
+        });
       expect(run).to.throw(Error, 'Metric missing percentiles');
     });
   });
@@ -111,7 +129,7 @@ describe('bucketTransform', () => {
       const metric = {
         id: '2',
         type: 'derivative',
-        field: '1',
+        field: '1'
       };
       const metrics = [{ id: '1', type: 'max', field: 'cpu.pct' }, metric];
       const fn = bucketTransform.derivative;
@@ -161,12 +179,14 @@ describe('bucketTransform', () => {
     });

     it('throws error if type is missing', () => {
-      const run = () => bucketTransform.derivative({ id: 'test', field: 'cpu.pct' });
+      const run = () =>
+        bucketTransform.derivative({ id: 'test', field: 'cpu.pct' });
       expect(run).to.throw(Error, 'Metric missing type');
     });

     it('throws error if field is missing', () => {
-      const run = () => bucketTransform.derivative({ id: 'test', type: 'derivative' });
+      const run = () =>
+        bucketTransform.derivative({ id: 'test', type: 'derivative' });
       expect(run).to.throw(Error, 'Metric missing field');
     });
   });
@@ -176,7 +196,7 @@ describe('bucketTransform', () => {
       const metric = {
         id: '2',
         type: 'serial_diff',
-        field: '1',
+        field: '1'
       };
       const metrics = [{ id: '1', type: 'max', field: 'cpu.pct' }, metric];
       const fn = bucketTransform.serial_diff;
@@ -226,12 +246,14 @@ describe('bucketTransform', () => {
     });

     it('throws error if type is missing', () => {
-      const run = () => bucketTransform.serial_diff({ id: 'test', field: 'cpu.pct' });
+      const run = () =>
+        bucketTransform.serial_diff({ id: 'test', field: 'cpu.pct' });
       expect(run).to.throw(Error, 'Metric missing type');
     });

     it('throws error if field is missing', () => {
-      const run = () => bucketTransform.serial_diff({ id: 'test', type: 'serial_diff' });
+      const run = () =>
+        bucketTransform.serial_diff({ id: 'test', type: 'serial_diff' });
       expect(run).to.throw(Error, 'Metric missing field');
     });
   });
@@ -247,12 +269,14 @@ describe('bucketTransform', () => {
     });

     it('throws error if type is missing', () => {
-      const run = () => bucketTransform.cumulative_sum({ id: 'test', field: 'cpu.pct' });
+      const run = () =>
+        bucketTransform.cumulative_sum({ id: 'test', field: 'cpu.pct' });
       expect(run).to.throw(Error, 'Metric missing type');
     });

     it('throws error if field is missing', () => {
-      const run = () => bucketTransform.cumulative_sum({ id: 'test', type: 'cumulative_sum' });
+      const run = () =>
+        bucketTransform.cumulative_sum({ id: 'test', type: 'cumulative_sum' });
       expect(run).to.throw(Error, 'Metric missing field');
     });
   });
@@ -272,7 +296,12 @@ describe('bucketTransform', () => {
     });

     it('returns moving_average agg with predict', () => {
-      const metric = { id: '2', type: 'moving_average', field: '1', predict: 10 };
+      const metric = {
+        id: '2',
+        type: 'moving_average',
+        field: '1',
+        predict: 10
+      };
       const metrics = [{ id: '1', type: 'avg', field: 'cpu.pct' }, metric];
       const fn = bucketTransform.moving_average;
       expect(fn(metric, metrics, '10s')).is.eql({
@@ -313,12 +342,14 @@ describe('bucketTransform', () => {
     });

     it('throws error if type is missing', () => {
-      const run = () => bucketTransform.moving_average({ id: 'test', field: 'cpu.pct' });
+      const run = () =>
+        bucketTransform.moving_average({ id: 'test', field: 'cpu.pct' });
       expect(run).to.throw(Error, 'Metric missing type');
     });

     it('throws error if field is missing', () => {
-      const run = () => bucketTransform.moving_average({ id: 'test', type: 'moving_average' });
+      const run = () =>
+        bucketTransform.moving_average({ id: 'test', type: 'moving_average' });
       expect(run).to.throw(Error, 'Metric missing field');
     });
   });
@@ -341,30 +372,32 @@ describe('bucketTransform', () => {
           gap_policy: 'skip',
           script: {
             source: 'params.idle != null ? 1 - params.idle : 0',
-            lang: 'painless'
-          },
-          params: {
-            _interval: 10000
+            lang: 'painless',
+            params: {
+              _interval: 10000
+            }
           }
         }
       });
     });

     it('throws error if variables is missing', () => {
-      const run = () => bucketTransform.calculation({
-        id: 'test',
-        type: 'calculation',
-        script: 'params.idle != null ? 1 - params.idle : null'
-      });
+      const run = () =>
+        bucketTransform.calculation({
+          id: 'test',
+          type: 'calculation',
+          script: 'params.idle != null ? 1 - params.idle : null'
+        });
       expect(run).to.throw(Error, 'Metric missing variables');
     });

     it('throws error if script is missing', () => {
-      const run = () => bucketTransform.calculation({
-        id: 'test',
-        type: 'calculation',
-        variables: [{ field: '1', name: 'idle' }]
-      });
+      const run = () =>
+        bucketTransform.calculation({
+          id: 'test',
+          type: 'calculation',
+          variables: [{ field: '1', name: 'idle' }]
+        });
       expect(run).to.throw(Error, 'Metric missing script');
     });
   });
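In an Elasticsearch bucket_script aggregation, params belongs inside the script object next to source and lang, which is exactly what the re-nested expectation above encodes. A minimal sketch of the corrected shape (the buckets_path value here is hypothetical):

// params sits inside script so the painless source can read
// params._interval at runtime.
const calculationAgg = {
  bucket_script: {
    buckets_path: { idle: '1' }, // hypothetical path to the backing metric
    script: {
      source: 'params.idle != null ? 1 - params.idle : 0',
      lang: 'painless',
      params: { _interval: 10000 }
    },
    gap_policy: 'skip'
  }
};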
@@ -392,6 +425,4 @@ describe('bucketTransform', () => {
       });
     });
   });
-
-
 });
@@ -1,6 +1,6 @@
 import parseSettings from './parse_settings';
 import getBucketsPath from './get_buckets_path';
-import { parseInterval } from '../../../../../../ui/public/utils/parse_interval';
+import { parseInterval } from './parse_interval';

 function checkMetric(metric, fields) {
   fields.forEach(field => {
@@ -0,0 +1,38 @@
+import _ from 'lodash';
+import moment from 'moment';
+import dateMath from '@elastic/datemath';
+
+// Assume interval is in the form (value)(unit), such as "1h"
+const INTERVAL_STRING_RE = new RegExp(
+  '^([0-9\\.]*)\\s*(' + dateMath.units.join('|') + ')$'
+);
+
+export function parseInterval(interval) {
+  const matches = String(interval)
+    .trim()
+    .match(INTERVAL_STRING_RE);
+
+  if (!matches) return null;
+
+  try {
+    const value = parseFloat(matches[1]) || 1;
+    const unit = matches[2];
+
+    const duration = moment.duration(value, unit);
+
+    // There is an error with moment, where if you have a fractional interval between 0 and 1, then when you add that
+    // interval to an existing moment object, it will remain unchanged, which causes problems in the ordered_x_keys
+    // code. To counteract this, we find the first unit that doesn't result in a value between 0 and 1.
+    // For example, if you have '0.5d', then when calculating the x-axis series, we take the start date and begin
+    // adding 0.5 days until we hit the end date. However, since there is a bug in moment, when you add 0.5 days to
+    // the start date, you get the same exact date (instead of being ahead by 12 hours). So instead of returning
+    // a duration corresponding to 0.5 days, we return a duration corresponding to 12 hours.
+    const selectedUnit = _.find(dateMath.units, unit => {
+      return Math.abs(duration.as(unit)) >= 1;
+    });
+
+    return moment.duration(duration.as(selectedUnit), selectedUnit);
+  } catch (e) {
+    return null;
+  }
+}
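A quick usage sketch of the behavior the comments above describe — not part of the commit, and assuming @elastic/datemath lists its units from largest to smallest (which the unit-selection logic above relies on):

import { parseInterval } from './parse_interval';

// '0.5d' parses as 0.5 days, then is re-expressed in the first unit whose
// magnitude is at least 1, sidestepping moment's fractional-duration bug.
parseInterval('0.5d').asHours();   // 12
parseInterval('10s').asSeconds(); // 10
parseInterval('nonsense');        // null — no match for INTERVAL_STRING_RE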
@@ -65,68 +65,68 @@ describe('buildRequestBody(req)', () => {
     const series = panel.series[0];
     const doc = buildRequestBody({ payload: body }, panel, series);
     expect(doc).to.eql({
-      'size': 0,
-      'query': {
-        'bool': {
-          'must': [
+      size: 0,
+      query: {
+        bool: {
+          must: [
             {
-              'range': {
+              range: {
                 '@timestamp': {
-                  'gte': 1485463055881,
-                  'lte': 1485463955881,
-                  'format': 'epoch_millis'
+                  gte: 1485463055881,
+                  lte: 1485463955881,
+                  format: 'epoch_millis'
                 }
               }
             },
             {
-              'bool': {
-                'must': [
+              bool: {
+                must: [
                   {
-                    'query_string': {
-                      'analyze_wildcard': true,
-                      'query': '*'
+                    query_string: {
+                      analyze_wildcard: true,
+                      query: '*'
                     }
                   }
                 ],
-                'must_not': []
+                must_not: []
              }
            }
          ]
        }
      },
-      'aggs': {
+      aggs: {
        'c9b5f9c0-e403-11e6-be91-6f7688e9fac7': {
-          'filter': {
-            'match_all': {}
+          filter: {
+            match_all: {}
          },
-          'meta': {
-            'timeField': '@timestamp',
-            'bucketSize': 10,
-            'intervalString': '10s'
+          meta: {
+            timeField: '@timestamp',
+            bucketSize: 10,
+            intervalString: '10s'
          },
-          'aggs': {
-            'timeseries': {
-              'date_histogram': {
-                'field': '@timestamp',
-                'interval': '10s',
-                'min_doc_count': 0,
-                'time_zone': 'UTC',
-                'extended_bounds': {
-                  'min': 1485463055881,
-                  'max': 1485463955881
+          aggs: {
+            timeseries: {
+              date_histogram: {
+                field: '@timestamp',
+                interval: '10s',
+                min_doc_count: 0,
+                time_zone: 'UTC',
+                extended_bounds: {
+                  min: 1485463055881,
+                  max: 1485463955881
                }
              },
-              'aggs': {
+              aggs: {
                'c9b5f9c1-e403-11e6-be91-6f7688e9fac7': {
-                  'bucket_script': {
-                    'buckets_path': {
-                      'count': '_count'
+                  bucket_script: {
+                    buckets_path: {
+                      count: '_count'
                    },
-                    'script': {
-                      'inline': 'count * 1',
-                      'lang': 'expression'
+                    script: {
+                      source: 'count * 1',
+                      lang: 'expression'
                    },
-                    'gap_policy': 'skip'
+                    gap_policy: 'skip'
                  }
                }
              }
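Most of the churn above is dropping quotes from identifier keys, but the inline → source swap is substantive: Elasticsearch deprecated script.inline in favor of script.source in the 5.x line, so the expectation tracks the newer request shape. An illustrative contrast, not part of the commit:

// Deprecated script syntax the old expectation encoded:
const oldShape = { script: { inline: 'count * 1', lang: 'expression' } };

// Current syntax, as the updated expectation asserts:
const newShape = { script: { source: 'count * 1', lang: 'expression' } };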
@@ -137,4 +137,3 @@ describe('buildRequestBody(req)', () => {
     });
   });
 });
-