mirror of https://github.com/elastic/kibana.git (synced 2025-04-24 17:59:23 -04:00)
parent 21770fd883
commit d2b85636c2
41 changed files with 1567 additions and 2019 deletions
@@ -1,103 +0,0 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import _ from 'lodash';
import { VisProvider } from '../../vis';
import { aggTypes } from '..';
import FixturesStubbedLogstashIndexPatternProvider from 'fixtures/stubbed_logstash_index_pattern';
import { AggGroupNames } from '../../vis/editors/default/agg_groups';

// eslint-disable-next-line import/no-default-export
export default function AggParamWriterHelper(Private) {
  const Vis = Private(VisProvider);
  const stubbedLogstashIndexPattern = Private(FixturesStubbedLogstashIndexPatternProvider);

  /**
   * Helper object for writing aggParams. Specify an aggType and it will find a vis & schema, and
   * wire up the supporting objects required to feed in parameters, and get #write() output.
   *
   * Use cases:
   *  - Verify that the interval parameter of the histogram visualization casts its input to a number
   * ```js
   * it('casts to a number', function () {
   *   let writer = new AggParamWriter({ aggType: 'histogram' });
   *   let output = writer.write({ interval : '100/10' });
   *   expect(output.params.interval).to.be.a('number');
   *   expect(output.params.interval).to.be(100);
   * });
   * ```
   *
   * @class AggParamWriter
   * @param {object} opts - describe the properties of this paramWriter
   * @param {string} opts.aggType - the name of the aggType we want to test. ('histogram', 'filter', etc.)
   */
  class AggParamWriter {

    constructor(opts) {
      this.aggType = opts.aggType;
      if (_.isString(this.aggType)) {
        this.aggType = aggTypes.buckets.find(agg => agg.name === this.aggType) || aggTypes.metrics.find(agg => agg.name === this.aggType);
      }

      // not configurable right now, but totally required
      this.indexPattern = stubbedLogstashIndexPattern;

      // the schema that the aggType satisfies
      this.visAggSchema = null;

      this.vis = new Vis(this.indexPattern, {
        type: 'histogram',
        aggs: [{
          id: 1,
          type: this.aggType.name,
          params: {}
        }]
      });
    }

    write(paramValues, modifyAggConfig = null) {
      paramValues = _.clone(paramValues);

      if (this.aggType.paramByName('field') && !paramValues.field) {
        // pick a field rather than force a field to be specified everywhere
        if (this.aggType.type === AggGroupNames.Metrics) {
          paramValues.field = _.sample(this.indexPattern.fields.getByType('number'));
        } else {
          const type = this.aggType.paramByName('field').filterFieldTypes || 'string';
          let field;
          do {
            field = _.sample(this.indexPattern.fields.getByType(type));
          } while (!field.aggregatable);
          paramValues.field = field.name;
        }
      }

      const aggConfig = this.vis.aggs.aggs[0];
      aggConfig.setParams(paramValues);

      if (modifyAggConfig) {
        modifyAggConfig(aggConfig);
      }

      return aggConfig.write(this.vis.aggs);
    }
  }

  return AggParamWriter;
}

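For reference, the doc comment above already sketches how this helper is meant to be used; combined with the ngMock wiring that the param tests below share, a minimal usage looks like this (a sketch; import paths are illustrative):

import ngMock from 'ng_mock';
import expect from '@kbn/expect';
import AggParamWriterProvider from '../agg_param_writer';

describe('histogram interval param', function () {
  let paramWriter;

  beforeEach(ngMock.module('kibana'));
  beforeEach(ngMock.inject(function (Private) {
    const AggParamWriter = Private(AggParamWriterProvider);
    paramWriter = new AggParamWriter({ aggType: 'histogram' });
  }));

  it('casts the interval to a number', function () {
    // write() feeds params into the single stubbed agg and returns its DSL output.
    const output = paramWriter.write({ interval: '100/10' });
    expect(output.params.interval).to.be.a('number');
    expect(output.params.interval).to.be(100);
  });
});
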
@@ -1,80 +0,0 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
import { set } from 'lodash';
import expect from '@kbn/expect';
import sinon from 'sinon';
import ngMock from 'ng_mock';
import { aggTypes } from '../..';
import AggParamWriterProvider from '../agg_param_writer';
import FixturesStubbedLogstashIndexPatternProvider from 'fixtures/stubbed_logstash_index_pattern';
import chrome from '../../../chrome';

const config = chrome.getUiSettingsClient();

describe('date_range params', function () {
  let paramWriter;
  let timeField;

  beforeEach(ngMock.module('kibana'));
  beforeEach(ngMock.inject(function (Private) {
    const AggParamWriter = Private(AggParamWriterProvider);
    const indexPattern = Private(FixturesStubbedLogstashIndexPatternProvider);

    timeField = indexPattern.timeFieldName;
    paramWriter = new AggParamWriter({ aggType: 'date_range' });
  }));

  describe('getKey', () => {
    const dateRange = aggTypes.buckets.find(agg => agg.name === 'date_range');
    it('should return object', () => {
      const bucket = { from: 'from-date', to: 'to-date', key: 'from-dateto-date' };
      expect(dateRange.getKey(bucket)).to.equal({ from: 'from-date', to: 'to-date' });
    });
  });

  describe('time_zone', () => {
    beforeEach(() => {
      sinon.stub(config, 'get');
      sinon.stub(config, 'isDefault');
    });

    it('should use the specified time_zone', () => {
      const output = paramWriter.write({ time_zone: 'Europe/Kiev' });
      expect(output.params).to.have.property('time_zone', 'Europe/Kiev');
    });

    it('should use the Kibana time_zone if no parameter specified', () => {
      config.isDefault.withArgs('dateFormat:tz').returns(false);
      config.get.withArgs('dateFormat:tz').returns('Europe/Riga');
      const output = paramWriter.write({});
      expect(output.params).to.have.property('time_zone', 'Europe/Riga');
    });

    it('should use the fixed time_zone from the index pattern typeMeta', () => {
      set(paramWriter.indexPattern, ['typeMeta', 'aggs', 'date_range', timeField, 'time_zone'], 'Europe/Rome');
      const output = paramWriter.write({ field: timeField });
      expect(output.params).to.have.property('time_zone', 'Europe/Rome');
    });

    afterEach(() => {
      config.get.restore();
      config.isDefault.restore();
    });
  });
});

@@ -1,257 +0,0 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import expect from '@kbn/expect';
import sinon from 'sinon';
import { geoHashBucketAgg } from '../../buckets/geo_hash';
import * as AggConfigModule from '../../agg_config';
import * as BucketAggTypeModule from '../../buckets/_bucket_agg_type';

describe('Geohash Agg', () => {

  const initialZoom = 10;
  const initialMapBounds = {
    top_left: { lat: 1.0, lon: -1.0 },
    bottom_right: { lat: -1.0, lon: 1.0 }
  };

  const BucketAggTypeMock = (aggOptions) => {
    return aggOptions;
  };
  const AggConfigMock = (parent, aggOptions) => {
    return aggOptions;
  };
  const createAggregationMock = (aggOptions) => {
    return new AggConfigMock(null, aggOptions);
  };

  const aggMock = {
    getField: () => {
      return {
        name: 'location'
      };
    },
    params: {
      isFilteredByCollar: true,
      useGeocentroid: true,
      mapZoom: initialZoom
    },
    aggConfigs: {},
    type: 'geohash_grid',
  };
  aggMock.aggConfigs.createAggConfig = createAggregationMock;

  before(function () {
    sinon.stub(AggConfigModule, 'AggConfig').callsFake(AggConfigMock);
    sinon.stub(BucketAggTypeModule, 'BucketAggType').callsFake(BucketAggTypeMock);
  });

  after(function () {
    AggConfigModule.AggConfig.restore();
    BucketAggTypeModule.BucketAggType.restore();
  });

  function initAggParams() {
    aggMock.params.isFilteredByCollar = true;
    aggMock.params.useGeocentroid = true;
    aggMock.params.mapBounds = initialMapBounds;
  }

  function zoomMap(zoomChange) {
    aggMock.params.mapZoom += zoomChange;
  }

  function moveMap(newBounds) {
    aggMock.params.mapBounds = newBounds;
  }

  function resetMap() {
    aggMock.params.mapZoom = initialZoom;
    aggMock.params.mapBounds = initialMapBounds;
    aggMock.params.mapCollar = {
      top_left: { lat: 1.5, lon: -1.5 },
      bottom_right: { lat: -1.5, lon: 1.5 },
      zoom: initialZoom
    };
  }

  describe('precision parameter', () => {

    const PRECISION_PARAM_INDEX = 2;
    let precisionParam;
    beforeEach(() => {
      precisionParam = geoHashBucketAgg.params[PRECISION_PARAM_INDEX];
    });

    it('should select precision parameter', () => {
      expect(precisionParam.name).to.equal('precision');
    });

    describe('precision parameter write', () => {

      const zoomToGeoHashPrecision = {
        0: 1, 1: 2, 2: 2, 3: 2, 4: 3, 5: 3, 6: 4, 7: 4, 8: 4, 9: 5, 10: 5,
        11: 6, 12: 6, 13: 6, 14: 7, 15: 7, 16: 7, 17: 7, 18: 7, 19: 7, 20: 7, 21: 7
      };

      Object.keys(zoomToGeoHashPrecision).forEach((zoomLevel) => {
        it(`zoom level ${zoomLevel} should correspond to correct geohash-precision`, () => {
          const output = { params: {} };
          precisionParam.write({
            params: {
              autoPrecision: true,
              mapZoom: zoomLevel
            }
          }, output);
          expect(output.params.precision).to.equal(zoomToGeoHashPrecision[zoomLevel]);
        });
      });
    });

  });

  describe('getRequestAggs', () => {

    describe('initial aggregation creation', () => {
      let requestAggs;
      beforeEach(() => {
        initAggParams();
        requestAggs = geoHashBucketAgg.getRequestAggs(aggMock);
      });

      it('should create filter, geohash_grid, and geo_centroid aggregations', () => {
        expect(requestAggs.length).to.equal(3);
        expect(requestAggs[0].type).to.equal('filter');
        expect(requestAggs[1].type).to.equal('geohash_grid');
        expect(requestAggs[2].type).to.equal('geo_centroid');
      });

      it('should set mapCollar in vis session state', () => {
        expect(aggMock).to.have.property('lastMapCollar');
        expect(aggMock.lastMapCollar).to.have.property('top_left');
        expect(aggMock.lastMapCollar).to.have.property('bottom_right');
        expect(aggMock.lastMapCollar).to.have.property('zoom');
      });

      // there was a bug because of an "&& mapZoom" check which excluded 0 as a valid mapZoom, but it is.
      it('should create filter, geohash_grid, and geo_centroid aggregations when zoom level 0', () => {
        aggMock.params.mapZoom = 0;
        requestAggs = geoHashBucketAgg.getRequestAggs(aggMock);
        expect(requestAggs.length).to.equal(3);
        expect(requestAggs[0].type).to.equal('filter');
        expect(requestAggs[1].type).to.equal('geohash_grid');
        expect(requestAggs[2].type).to.equal('geo_centroid');
      });
    });

    describe('aggregation options', () => {

      beforeEach(() => {
        initAggParams();
      });

      it('should only create geohash_grid and geo_centroid aggregations when isFilteredByCollar is false', () => {
        aggMock.params.isFilteredByCollar = false;
        const requestAggs = geoHashBucketAgg.getRequestAggs(aggMock);
        expect(requestAggs.length).to.equal(2);
        expect(requestAggs[0].type).to.equal('geohash_grid');
        expect(requestAggs[1].type).to.equal('geo_centroid');
      });

      it('should only create filter and geohash_grid aggregations when useGeocentroid is false', () => {
        aggMock.params.useGeocentroid = false;
        const requestAggs = geoHashBucketAgg.getRequestAggs(aggMock);
        expect(requestAggs.length).to.equal(2);
        expect(requestAggs[0].type).to.equal('filter');
        expect(requestAggs[1].type).to.equal('geohash_grid');
      });
    });

    describe('aggregation creation after map interaction', () => {

      let origRequestAggs;
      let origMapCollar;
      beforeEach(() => {
        resetMap();
        initAggParams();
        origRequestAggs = geoHashBucketAgg.getRequestAggs(aggMock);
        origMapCollar = JSON.stringify(aggMock.lastMapCollar, null, '');
      });

      it('should not change geo_bounding_box filter aggregation and vis session state when map movement is within map collar', () => {
        moveMap({
          top_left: { lat: 1.1, lon: -1.1 },
          bottom_right: { lat: -0.9, lon: 0.9 }
        });

        const newRequestAggs = geoHashBucketAgg.getRequestAggs(aggMock);
        expect(JSON.stringify(origRequestAggs[0].params, null, '')).to.equal(JSON.stringify(newRequestAggs[0].params, null, ''));

        const newMapCollar = JSON.stringify(aggMock.lastMapCollar, null, '');
        expect(origMapCollar).to.equal(newMapCollar);
      });

      it('should change geo_bounding_box filter aggregation and vis session state when map movement is outside map collar', () => {
        moveMap({
          top_left: { lat: 10.0, lon: -10.0 },
          bottom_right: { lat: 9.0, lon: -9.0 }
        });

        const newRequestAggs = geoHashBucketAgg.getRequestAggs(aggMock);
        expect(JSON.stringify(origRequestAggs[0].params, null, '')).not.to.equal(JSON.stringify(newRequestAggs[0].params, null, ''));

        const newMapCollar = JSON.stringify(aggMock.lastMapCollar, null, '');
        expect(origMapCollar).not.to.equal(newMapCollar);
      });

      it('should change geo_bounding_box filter aggregation and vis session state when map zoom level changes', () => {
        zoomMap(-1);

        geoHashBucketAgg.getRequestAggs(aggMock);

        const newMapCollar = JSON.stringify(aggMock.lastMapCollar, null, '');
        expect(origMapCollar).not.to.equal(newMapCollar);
      });

    });

  });
});

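The zoom-level-0 test above guards against the regression called out in its comment: a plain truthiness check treats mapZoom: 0 as "no zoom" even though 0 is a valid zoom level. A minimal illustration of the pitfall (hypothetical names, not Kibana's actual implementation):

const params = { mapBounds: initialMapBounds, mapZoom: 0 };

// Buggy guard: 0 is falsy, so the collar/filter is skipped at zoom level 0.
if (params.mapBounds && params.mapZoom) {
  // ...build the geo_bounding_box collar...
}

// Safer guard: test for presence explicitly instead of truthiness.
if (params.mapBounds && params.mapZoom != null) {
  // ...build the geo_bounding_box collar...
}
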
@@ -1,210 +0,0 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import expect from '@kbn/expect';
import sinon from 'sinon';
import ngMock from 'ng_mock';
import { aggTypes } from '../..';
import chrome from '../../../chrome';
import AggParamWriterProvider from '../agg_param_writer';

const config = chrome.getUiSettingsClient();
const histogram = aggTypes.buckets.find(agg => agg.name === 'histogram');

describe('Histogram Agg', function () {

  describe('ordered', function () {

    it('is ordered', function () {
      expect(histogram.ordered).to.be.ok();
    });

    it('is not ordered by date', function () {
      expect(histogram.ordered).to.not.have.property('date');
    });
  });

  describe('params', function () {
    let paramWriter;

    beforeEach(ngMock.module('kibana'));
    beforeEach(ngMock.inject(function (Private) {
      const AggParamWriter = Private(AggParamWriterProvider);
      paramWriter = new AggParamWriter({ aggType: 'histogram' });
    }));

    describe('intervalBase', () => {
      it('should not be written to the DSL', () => {
        const output = paramWriter.write({ intervalBase: 100 });
        expect(output.params).not.to.have.property('intervalBase');
      });
    });

    describe('interval', function () {
      // reads aggConfig.params.interval, writes to dsl.interval

      it('accepts a whole number', function () {
        const output = paramWriter.write({ interval: 100 });
        expect(output.params).to.have.property('interval', 100);
      });

      it('accepts a decimal number', function () {
        const output = paramWriter.write({ interval: 0.1 });
        expect(output.params).to.have.property('interval', 0.1);
      });

      it('accepts a decimal number string', function () {
        const output = paramWriter.write({ interval: '0.1' });
        expect(output.params).to.have.property('interval', 0.1);
      });

      it('accepts a whole number string', function () {
        const output = paramWriter.write({ interval: '10' });
        expect(output.params).to.have.property('interval', 10);
      });

      it('fails on non-numeric values', function () {
        // template validation prevents this from users, not devs
        const output = paramWriter.write({ interval: [] });
        expect(isNaN(output.params.interval)).to.be.ok();
      });

      describe('interval scaling', () => {

        beforeEach(() => {
          sinon.stub(config, 'get');
        });

        it('will respect the histogram:maxBars setting', () => {
          config.get.withArgs('histogram:maxBars').returns(5);
          const output = paramWriter.write({ interval: 5 },
            aggConfig => aggConfig.setAutoBounds({ min: 0, max: 10000 }));
          expect(output.params).to.have.property('interval', 2000);
        });

        it('will return specified interval, if bars are below histogram:maxBars config', () => {
          config.get.withArgs('histogram:maxBars').returns(10000);
          const output = paramWriter.write({ interval: 5 },
            aggConfig => aggConfig.setAutoBounds({ min: 0, max: 10000 }));
          expect(output.params).to.have.property('interval', 5);
        });

        it('will set to intervalBase if interval is below base', () => {
          const output = paramWriter.write({ interval: 3, intervalBase: 8 });
          expect(output.params).to.have.property('interval', 8);
        });

        it('will round to nearest intervalBase multiple if interval is above base', () => {
          const roundUp = paramWriter.write({ interval: 46, intervalBase: 10 });
          expect(roundUp.params).to.have.property('interval', 50);
          const roundDown = paramWriter.write({ interval: 43, intervalBase: 10 });
          expect(roundDown.params).to.have.property('interval', 40);
        });

        it('will not change interval if it is a multiple of base', () => {
          const output = paramWriter.write({ interval: 35, intervalBase: 5 });
          expect(output.params).to.have.property('interval', 35);
        });

        it('will round to intervalBase after scaling histogram:maxBars', () => {
          config.get.withArgs('histogram:maxBars').returns(100);
          const output = paramWriter.write({ interval: 5, intervalBase: 6 },
            aggConfig => aggConfig.setAutoBounds({ min: 0, max: 1000 }));
          // 100 buckets in 0 to 1000 would result in an interval of 10, so we should
          // round to the next multiple of 6 -> 12
          expect(output.params).to.have.property('interval', 12);
        });

        afterEach(() => {
          config.get.restore();
        });
      });
    });

    describe('min_doc_count', function () {
      it('casts true values to 0', function () {
        let output = paramWriter.write({ min_doc_count: true });
        expect(output.params).to.have.property('min_doc_count', 0);

        output = paramWriter.write({ min_doc_count: 'yes' });
        expect(output.params).to.have.property('min_doc_count', 0);

        output = paramWriter.write({ min_doc_count: 1 });
        expect(output.params).to.have.property('min_doc_count', 0);

        output = paramWriter.write({ min_doc_count: {} });
        expect(output.params).to.have.property('min_doc_count', 0);
      });

      it('writes 1 for falsy values', function () {
        let output = paramWriter.write({ min_doc_count: '' });
        expect(output.params).to.have.property('min_doc_count', 1);

        output = paramWriter.write({ min_doc_count: null });
        expect(output.params).to.have.property('min_doc_count', 1);

        output = paramWriter.write({ min_doc_count: undefined });
        expect(output.params).to.have.property('min_doc_count', 1);
      });
    });

    describe('extended_bounds', function () {
      it('does not write when only eb.min is set', function () {
        const output = paramWriter.write({
          has_extended_bounds: true,
          extended_bounds: { min: 0 }
        });
        expect(output.params).not.to.have.property('extended_bounds');
      });

      it('does not write when only eb.max is set', function () {
        const output = paramWriter.write({
          has_extended_bounds: true,
          extended_bounds: { max: 0 }
        });
        expect(output.params).not.to.have.property('extended_bounds');
      });

      it('writes when both eb.min and eb.max are set', function () {
        const output = paramWriter.write({
          has_extended_bounds: true,
          extended_bounds: { min: 99, max: 100 }
        });
        expect(output.params.extended_bounds).to.have.property('min', 99);
        expect(output.params.extended_bounds).to.have.property('max', 100);
      });

      it('does not write when nothing is set', function () {
        const output = paramWriter.write({
          has_extended_bounds: true,
          extended_bounds: {}
        });
        expect(output.params).to.not.have.property('extended_bounds');
      });

      it('does not write when has_extended_bounds is false', function () {
        const output = paramWriter.write({
          has_extended_bounds: false,
          extended_bounds: { min: 99, max: 100 }
        });
        expect(output.params).to.not.have.property('extended_bounds');
      });
    });
  });
});

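The interval-scaling expectations above follow straightforward arithmetic. A sketch of the calculation those tests exercise (an assumed reimplementation for illustration only, not Kibana's actual code):

function scaleInterval(interval, bounds, maxBars, intervalBase) {
  const range = bounds.max - bounds.min;
  let scaled = interval;
  // Too many buckets: widen the interval so at most maxBars buckets cover the range.
  if (range / scaled > maxBars) {
    scaled = Math.ceil(range / maxBars);                    // e.g. 10000 / 5 -> 2000
  }
  // Snap to intervalBase: never below the base, otherwise the nearest multiple.
  if (intervalBase) {
    scaled = scaled < intervalBase
      ? intervalBase                                        // 3 with base 8 -> 8
      : Math.round(scaled / intervalBase) * intervalBase;   // 46 -> 50, 43 -> 40
  }
  return scaled;
}

// Reproduces the expectations above: maxBars 5 over 0..10000 gives 2000, and
// maxBars 100 over 0..1000 gives 10, which snaps to the next multiple of 6 -> 12.
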
@@ -1,71 +0,0 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import { values } from 'lodash';
import ngMock from 'ng_mock';
import expect from '@kbn/expect';
import resp from 'fixtures/agg_resp/range';
import { VisProvider } from '../../../vis';
import FixturesStubbedLogstashIndexPatternProvider from 'fixtures/stubbed_logstash_index_pattern';

describe('Range Agg', function () {
  const buckets = values(resp.aggregations[1].buckets);

  let Vis;
  let indexPattern;

  beforeEach(ngMock.module('kibana'));
  beforeEach(ngMock.inject(function (Private) {
    Vis = Private(VisProvider);
    indexPattern = Private(FixturesStubbedLogstashIndexPatternProvider);
    indexPattern.stubSetFieldFormat('bytes', 'bytes', {
      pattern: '0,0.[000] b'
    });
  }));

  describe('formating', function () {
    it('formats bucket keys properly', function () {
      const vis = new Vis(indexPattern, {
        type: 'histogram',
        aggs: [
          {
            type: 'range',
            schema: 'segment',
            params: {
              field: 'bytes',
              ranges: [
                { from: 0, to: 1000 },
                { from: 1000, to: 2000 }
              ]
            }
          }
        ]
      });

      const agg = vis.aggs.byName('range')[0];
      const format = function (val) {
        return agg.fieldFormatter()(agg.getKey(val));
      };
      expect(format(buckets[0])).to.be('≥ -∞ and < 1 KB');
      expect(format(buckets[1])).to.be('≥ 1 KB and < 2.5 KB');
      expect(format(buckets[2])).to.be('≥ 2.5 KB and < +∞');
    });
  });
});

@@ -1,121 +0,0 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import _ from 'lodash';
import moment from 'moment';
import aggResp from 'fixtures/agg_resp/date_histogram';
import ngMock from 'ng_mock';
import expect from '@kbn/expect';
import { VisProvider } from '../../../../vis';
import FixturesStubbedLogstashIndexPatternProvider from 'fixtures/stubbed_logstash_index_pattern';
import { createFilterDateHistogram } from '../../../buckets/create_filter/date_histogram';
import { intervalOptions } from '../../../buckets/_interval_options';

describe('AggConfig Filters', function () {
  describe('date_histogram', function () {
    let vis;
    let agg;
    let field;
    let filter;
    let bucketKey;
    let bucketStart;

    let init;

    beforeEach(ngMock.module('kibana'));
    beforeEach(ngMock.inject(function (Private) {
      const Vis = Private(VisProvider);
      const indexPattern = Private(FixturesStubbedLogstashIndexPatternProvider);

      init = function (interval, duration) {
        interval = interval || 'auto';
        if (interval === 'custom') interval = agg.params.customInterval;
        duration = duration || moment.duration(15, 'minutes');
        field = _.sample(_.reject(indexPattern.fields.getByType('date'), 'scripted'));
        vis = new Vis(indexPattern, {
          type: 'histogram',
          aggs: [
            {
              type: 'date_histogram',
              schema: 'segment',
              params: { field: field.name, interval: interval, customInterval: '5d' }
            }
          ]
        });

        agg = vis.aggs.aggs[0];
        bucketKey = _.sample(aggResp.aggregations['1'].buckets).key;
        bucketStart = moment(bucketKey);

        const timePad = moment.duration(duration / 2);
        agg.buckets.setBounds({
          min: bucketStart.clone().subtract(timePad),
          max: bucketStart.clone().add(timePad),
        });
        agg.buckets.setInterval(interval);

        filter = createFilterDateHistogram(agg, bucketKey);
      };
    }));

    it('creates a valid range filter', function () {
      init();

      expect(filter).to.have.property('range');
      expect(filter.range).to.have.property(field.name);

      const fieldParams = filter.range[field.name];
      expect(fieldParams).to.have.property('gte');
      expect(fieldParams.gte).to.be.a('string');

      expect(fieldParams).to.have.property('lt');
      expect(fieldParams.lt).to.be.a('string');

      expect(fieldParams).to.have.property('format');
      expect(fieldParams.format).to.be('strict_date_optional_time');

      expect(fieldParams.gte).to.be.lessThan(fieldParams.lt);

      expect(filter).to.have.property('meta');
      expect(filter.meta).to.have.property('index', vis.indexPattern.id);
    });

    it('extends the filter edge to 1ms before the next bucket for all interval options', function () {
      intervalOptions.forEach(function (option) {
        let duration;
        if (option.val !== 'custom' && moment(1, option.val).isValid()) {
          duration = moment.duration(10, option.val);

          if (+duration < 10) {
            throw new Error('unable to create interval for ' + option.val);
          }
        }

        init(option.val, duration);

        const interval = agg.buckets.getInterval();
        const params = filter.range[field.name];

        expect(params.gte).to.be(bucketStart.toISOString());
        expect(params.lt).to.be(bucketStart.clone().add(interval).toISOString());
      });
    });
  });
});

@@ -1,67 +0,0 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import expect from '@kbn/expect';
import ngMock from 'ng_mock';
import moment from 'moment';
import { VisProvider } from '../../../../vis';
import FixturesStubbedLogstashIndexPatternProvider from 'fixtures/stubbed_logstash_index_pattern';
import { createFilterDateRange } from '../../../buckets/create_filter/date_range';

describe('AggConfig Filters', function () {
  describe('Date range', function () {
    let indexPattern;
    let Vis;

    beforeEach(ngMock.module('kibana'));
    beforeEach(ngMock.inject(function (Private) {
      Vis = Private(VisProvider);
      indexPattern = Private(FixturesStubbedLogstashIndexPatternProvider);
    }));

    it('should return a range filter for date_range agg', function () {
      const vis = new Vis(indexPattern, {
        type: 'histogram',
        aggs: [
          {
            type: 'date_range',
            params: {
              field: '@timestamp',
              ranges: [
                { from: '2014-01-01', to: '2014-12-31' }
              ]
            }
          }
        ]
      });

      const aggConfig = vis.aggs.byName('date_range')[0];
      const from = new Date('1 Feb 2015');
      const to = new Date('7 Feb 2015');
      const filter = createFilterDateRange(aggConfig, { from: from.valueOf(), to: to.valueOf() });
      expect(filter).to.have.property('range');
      expect(filter).to.have.property('meta');
      expect(filter.meta).to.have.property('index', indexPattern.id);
      expect(filter.range).to.have.property('@timestamp');
      expect(filter.range['@timestamp']).to.have.property('gte', moment(from).toISOString());
      expect(filter.range['@timestamp']).to.have.property('lt', moment(to).toISOString());
    });
  });
});

@@ -1,61 +0,0 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import expect from '@kbn/expect';
import ngMock from 'ng_mock';
import { VisProvider } from '../../../../vis';
import FixturesStubbedLogstashIndexPatternProvider from 'fixtures/stubbed_logstash_index_pattern';
import { createFilterFilters } from '../../../buckets/create_filter/filters';

describe('AggConfig Filters', function () {
  describe('filters', function () {
    let indexPattern;
    let Vis;

    beforeEach(ngMock.module('kibana'));
    beforeEach(ngMock.inject(function (Private) {
      Vis = Private(VisProvider);
      indexPattern = Private(FixturesStubbedLogstashIndexPatternProvider);
    }));

    it('should return a filters filter', function () {
      const vis = new Vis(indexPattern, {
        type: 'histogram',
        aggs: [
          {
            type: 'filters',
            schema: 'segment',
            params: {
              filters: [
                { input: { query: 'type:apache', language: 'lucene' } },
                { input: { query: 'type:nginx', language: 'lucene' } }
              ]
            }
          }
        ]
      });

      const aggConfig = vis.aggs.byName('filters')[0];
      const filter = createFilterFilters(aggConfig, 'type:nginx');
      expect(filter.query.bool.must[0].query_string.query).to.be('type:nginx');
      expect(filter.meta).to.have.property('index', indexPattern.id);
      expect(filter.meta).to.have.property('alias', 'type:nginx');
    });
  });
});

@@ -1,61 +0,0 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import expect from '@kbn/expect';
import ngMock from 'ng_mock';
import { VisProvider } from '../../../../vis';
import FixturesStubbedLogstashIndexPatternProvider from 'fixtures/stubbed_logstash_index_pattern';
import { createFilterHistogram } from '../../../buckets/create_filter/histogram';

describe('AggConfig Filters', function () {
  describe('histogram', function () {
    let indexPattern;
    let Vis;

    beforeEach(ngMock.module('kibana'));
    beforeEach(ngMock.inject(function (Private) {
      Vis = Private(VisProvider);
      indexPattern = Private(FixturesStubbedLogstashIndexPatternProvider);
    }));

    it('should return an range filter for histogram', function () {
      const vis = new Vis(indexPattern, {
        type: 'histogram',
        aggs: [
          {
            type: 'histogram',
            schema: 'segment',
            params: { field: 'bytes', interval: 1024 }
          }
        ]
      });

      const aggConfig = vis.aggs.byName('histogram')[0];
      const filter = createFilterHistogram(aggConfig, 2048);
      expect(filter).to.have.property('meta');
      expect(filter.meta).to.have.property('index', indexPattern.id);
      expect(filter).to.have.property('range');
      expect(filter.range).to.have.property('bytes');
      expect(filter.range.bytes).to.have.property('gte', 2048);
      expect(filter.range.bytes).to.have.property('lt', 3072);
      expect(filter.meta).to.have.property('formattedValue', '2,048');
    });
  });
});

@@ -1,97 +0,0 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import expect from '@kbn/expect';
import ngMock from 'ng_mock';
import { VisProvider } from '../../../../vis';
import FixturesStubbedLogstashIndexPatternProvider from 'fixtures/stubbed_logstash_index_pattern';
import { createFilterIpRange } from '../../../buckets/create_filter/ip_range';

describe('AggConfig Filters', function () {

  describe('IP range', function () {
    let indexPattern;
    let Vis;

    beforeEach(ngMock.module('kibana'));
    beforeEach(ngMock.inject(function (Private) {
      Vis = Private(VisProvider);
      indexPattern = Private(FixturesStubbedLogstashIndexPatternProvider);
    }));

    it('should return a range filter for ip_range agg', function () {
      const vis = new Vis(indexPattern, {
        type: 'histogram',
        aggs: [
          {
            type: 'ip_range',
            schema: 'segment',
            params: {
              field: 'ip',
              ipRangeType: 'fromTo',
              ranges: {
                fromTo: [
                  { from: '0.0.0.0', to: '1.1.1.1' }
                ]
              }
            }
          }
        ]
      });

      const aggConfig = vis.aggs.byName('ip_range')[0];
      const filter = createFilterIpRange(aggConfig, { type: 'fromTo', from: '0.0.0.0', to: '1.1.1.1' });
      expect(filter).to.have.property('range');
      expect(filter).to.have.property('meta');
      expect(filter.meta).to.have.property('index', indexPattern.id);
      expect(filter.range).to.have.property('ip');
      expect(filter.range.ip).to.have.property('gte', '0.0.0.0');
      expect(filter.range.ip).to.have.property('lte', '1.1.1.1');
    });

    it('should return a range filter for ip_range agg using a CIDR mask', function () {
      const vis = new Vis(indexPattern, {
        type: 'histogram',
        aggs: [
          {
            type: 'ip_range',
            schema: 'segment',
            params: {
              field: 'ip',
              ipRangeType: 'mask',
              ranges: {
                mask: [
                  { mask: '67.129.65.201/27' }
                ]
              }
            }
          }
        ]
      });

      const aggConfig = vis.aggs.byName('ip_range')[0];
      const filter = createFilterIpRange(aggConfig, { type: 'mask', mask: '67.129.65.201/27' });
      expect(filter).to.have.property('range');
      expect(filter).to.have.property('meta');
      expect(filter.meta).to.have.property('index', indexPattern.id);
      expect(filter.range).to.have.property('ip');
      expect(filter.range.ip).to.have.property('gte', '67.129.65.192');
      expect(filter.range.ip).to.have.property('lte', '67.129.65.223');
    });
  });
});

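The expected bounds in the CIDR test above come from standard subnet arithmetic; the worked numbers (plain math, not Kibana code):

// 67.129.65.201/27 -> 32 - 27 = 5 host bits -> a block of 2^5 = 32 addresses.
const blockSize = 2 ** (32 - 27);                         // 32
const network = Math.floor(201 / blockSize) * blockSize;  // 192 -> 67.129.65.192
const lastAddress = network + blockSize - 1;              // 223 -> 67.129.65.223
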
@@ -1,66 +0,0 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import expect from '@kbn/expect';
import ngMock from 'ng_mock';
import { VisProvider } from '../../../../vis';
import FixturesStubbedLogstashIndexPatternProvider from 'fixtures/stubbed_logstash_index_pattern';
import { createFilterRange } from '../../../buckets/create_filter/range';

describe('AggConfig Filters', function () {

  describe('range', function () {
    let indexPattern;
    let Vis;

    beforeEach(ngMock.module('kibana'));
    beforeEach(ngMock.inject(function (Private) {
      Vis = Private(VisProvider);
      indexPattern = Private(FixturesStubbedLogstashIndexPatternProvider);
    }));

    it('should return a range filter for range agg', function () {
      const vis = new Vis(indexPattern, {
        type: 'histogram',
        aggs: [
          {
            type: 'range',
            schema: 'segment',
            params: {
              field: 'bytes',
              ranges: [
                { from: 1024, to: 2048 }
              ]
            }
          }
        ]
      });

      const aggConfig = vis.aggs.byName('range')[0];
      const filter = createFilterRange(aggConfig, { gte: 1024, lt: 2048.0 });
      expect(filter).to.have.property('range');
      expect(filter).to.have.property('meta');
      expect(filter.meta).to.have.property('index', indexPattern.id);
      expect(filter.range).to.have.property('bytes');
      expect(filter.range.bytes).to.have.property('gte', 1024.0);
      expect(filter.range.bytes).to.have.property('lt', 2048.0);
      expect(filter.meta).to.have.property('formattedValue', '≥ 1,024 and < 2,048');
    });
  });
});

@@ -1,104 +0,0 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import expect from '@kbn/expect';
import ngMock from 'ng_mock';
import { VisProvider } from '../../../../vis';
import FixturesStubbedLogstashIndexPatternProvider from 'fixtures/stubbed_logstash_index_pattern';
import { createFilterTerms } from '../../../buckets/create_filter/terms';

describe('AggConfig Filters', function () {

  describe('terms', function () {
    let indexPattern;
    let Vis;

    beforeEach(ngMock.module('kibana'));
    beforeEach(ngMock.inject(function (Private) {
      Vis = Private(VisProvider);
      indexPattern = Private(FixturesStubbedLogstashIndexPatternProvider);
    }));

    it('should return a match_phrase filter for terms', function () {
      const vis = new Vis(indexPattern, {
        type: 'histogram',
        aggs: [ { type: 'terms', schema: 'segment', params: { field: '_type' } } ]
      });
      const aggConfig = vis.aggs.byName('terms')[0];
      const filter = createFilterTerms(aggConfig, 'apache');
      expect(filter).to.have.property('query');
      expect(filter.query).to.have.property('match_phrase');
      expect(filter.query.match_phrase).to.have.property('_type');
      expect(filter.query.match_phrase._type).to.be('apache');
      expect(filter).to.have.property('meta');
      expect(filter.meta).to.have.property('index', indexPattern.id);
    });

    it('should set query to true or false for boolean filter', () => {
      const vis = new Vis(indexPattern, {
        type: 'histogram',
        aggs: [ { type: 'terms', schema: 'segment', params: { field: 'ssl' } } ]
      });
      const aggConfig = vis.aggs.byName('terms')[0];
      const filterFalse = createFilterTerms(aggConfig, 0);
      expect(filterFalse).to.have.property('query');
      expect(filterFalse.query).to.have.property('match_phrase');
      expect(filterFalse.query.match_phrase).to.have.property('ssl');
      expect(filterFalse.query.match_phrase.ssl).to.be(false);

      const filterTrue = createFilterTerms(aggConfig, 1);
      expect(filterTrue).to.have.property('query');
      expect(filterTrue.query).to.have.property('match_phrase');
      expect(filterTrue.query.match_phrase).to.have.property('ssl');
      expect(filterTrue.query.match_phrase.ssl).to.be(true);
    });

    it('should generate correct __missing__ filter', () => {
      const vis = new Vis(indexPattern, {
        type: 'histogram',
        aggs: [ { type: 'terms', schema: 'segment', params: { field: '_type' } } ]
      });
      const aggConfig = vis.aggs.byName('terms')[0];
      const filter = createFilterTerms(aggConfig, '__missing__');
      expect(filter).to.have.property('exists');
      expect(filter.exists).to.have.property('field', '_type');
      expect(filter).to.have.property('meta');
      expect(filter.meta).to.have.property('index', indexPattern.id);
      expect(filter.meta).to.have.property('negate', true);
    });

    it('should generate correct __other__ filter', () => {
      const vis = new Vis(indexPattern, {
        type: 'histogram',
        aggs: [ { type: 'terms', schema: 'segment', params: { field: '_type' } } ]
      });
      const aggConfig = vis.aggs.byName('terms')[0];
      const filter = createFilterTerms(aggConfig, '__other__', { terms: ['apache'] })[0];
      expect(filter).to.have.property('query');
      expect(filter.query).to.have.property('bool');
      expect(filter.query.bool).to.have.property('should');
      expect(filter.query.bool.should[0]).to.have.property('match_phrase');
      expect(filter.query.bool.should[0].match_phrase).to.have.property('_type', 'apache');
      expect(filter).to.have.property('meta');
      expect(filter.meta).to.have.property('index', indexPattern.id);
      expect(filter.meta).to.have.property('negate', true);
    });
  });
});

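Read together, the __missing__ and __other__ assertions above imply the shape of the generated filters: __other__ becomes a negated bool/should of match_phrase clauses, one per displayed term. An illustrative literal built only from those assertions (the index id is hypothetical):

const otherFilter = {
  query: {
    bool: {
      should: [
        { match_phrase: { _type: 'apache' } },  // one clause per term shown in the vis
      ],
    },
  },
  meta: { index: 'logstash-*', negate: true },  // negated: everything except the listed terms
};
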
@@ -1,124 +0,0 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import _ from 'lodash';
import $ from 'jquery';
import ngMock from 'ng_mock';
import expect from '@kbn/expect';
import FixturesStubbedLogstashIndexPatternProvider from 'fixtures/stubbed_logstash_index_pattern';
import { VisProvider } from '../../../../vis';
import { intervalOptions } from '../../../buckets/_interval_options';

describe.skip('editor', function () {

  let indexPattern;
  let vis;
  let agg;
  let render;
  let $scope;

  beforeEach(ngMock.module('kibana'));
  beforeEach(ngMock.inject(function (Private, $injector, $compile) {
    indexPattern = Private(FixturesStubbedLogstashIndexPatternProvider);

    const Vis = Private(VisProvider);

    /**
     * Render the AggParams editor for the date histogram aggregation
     *
     * @param {object} params - the agg params to give to the date_histogram
     *                          by default
     * @return {object} - object pointing to the different inputs, keys
     *                    are the aggParam name and the value is an object
     *                    with $el, $scope, and a few helpers for getting
     *                    data from them.
     */
    render = function (params) {
      vis = new Vis(indexPattern, {
        type: 'histogram',
        aggs: [
          { schema: 'metric', type: 'avg', params: { field: 'bytes' } },
          { schema: 'segment', type: 'date_histogram', params: params || {} }
        ]
      });

      const $el = $('<vis-editor-agg-params agg="agg" ' +
        'index-pattern="agg.getIndexPattern()" ' +
        'group-name="groupName">' +
        '</vis-editor-agg-params>');
      const $parentScope = $injector.get('$rootScope').$new();

      agg = $parentScope.agg = vis.aggs.bySchemaName('segment')[0];
      $parentScope.groupName = 'buckets';
      $parentScope.vis = vis;

      $compile($el)($parentScope);
      $scope = $el.scope();
      $scope.$digest();

      const $inputs = $('vis-agg-param-editor', $el);
      return _.transform($inputs.toArray(), function (inputs, e) {
        const $el = $(e);
        const $scope = $el.scope();

        inputs[$scope.aggParam.name] = {
          $el: $el,
          $scope: $scope,
          $input: function () {
            return $el.find('[ng-model]').first();
          },
          modelValue: function () {
            return this.$input().controller('ngModel').$modelValue;
          }
        };
      }, {});
    };

  }));

  describe('random field/interval', function () {
    let params;
    let field;
    let interval;

    beforeEach(ngMock.inject(function () {
      field = _.sample(indexPattern.fields);
      interval = _.sample(intervalOptions);
      params = render({ field: field, interval: interval.val });
    }));

    it('renders the field editor', function () {
      expect(agg.params.field).to.be(field);

      expect(params).to.have.property('field');
      expect(params.field).to.have.property('$el');
      expect($scope.agg.params.field).to.be(field);
    });

    it('renders the interval editor', function () {
      expect(agg.params.interval).to.be(interval.val);

      expect(params).to.have.property('interval');
      expect(params.interval).to.have.property('$el');
      expect($scope.agg.params.interval).to.be(interval.val);
    });
  });

});

@ -1,215 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import _ from 'lodash';
import moment from 'moment';
import expect from '@kbn/expect';
import sinon from 'sinon';
import ngMock from 'ng_mock';
import AggParamWriterProvider from '../../agg_param_writer';
import FixturesStubbedLogstashIndexPatternProvider from 'fixtures/stubbed_logstash_index_pattern';
import chrome from '../../../../chrome';
import { aggTypes } from '../../..';
import { AggConfig } from '../../../agg_config';
import { timefilter } from 'ui/timefilter';

const config = chrome.getUiSettingsClient();

describe('date_histogram params', function () {

  let paramWriter;
  let writeInterval;
  let write;

  let getTimeBounds;
  let timeField;

  beforeEach(ngMock.module('kibana'));
  beforeEach(ngMock.inject(function (Private) {
    const AggParamWriter = Private(AggParamWriterProvider);
    const indexPattern = Private(FixturesStubbedLogstashIndexPatternProvider);

    timeField = indexPattern.timeFieldName;

    paramWriter = new AggParamWriter({ aggType: 'date_histogram' });
    writeInterval = function (interval, timeRange, params = {}) {
      return paramWriter.write({ ...params, interval: interval, field: timeField, timeRange: timeRange });
    };
    write = (params) => {
      return paramWriter.write({ interval: '10s', ...params });
    };

    const now = moment();
    getTimeBounds = function (n, units) {
      timefilter.enableAutoRefreshSelector();
      timefilter.enableTimeRangeSelector();
      return {
        from: now.clone().subtract(n, units),
        to: now.clone()
      };
    };
  }));

  describe('interval', function () {
    it('accepts a valid calendar interval', function () {
      const output = writeInterval('d');
      expect(output.params).to.have.property('calendar_interval', '1d');
    });

    it('accepts a valid fixed interval', () => {
      const output = writeInterval('100s');
      expect(output.params).to.have.property('fixed_interval', '100s');
    });

    it('throws error when interval is invalid', function () {
      expect(() => writeInterval('foo')).to.throw('TypeError: "foo" is not a valid interval.');
    });

    it('automatically picks an interval', function () {
      const timeBounds = getTimeBounds(15, 'm');
      const output = writeInterval('auto', timeBounds);
      expect(output.params).to.have.property('fixed_interval', '30s');
    });

    it('does not scale down the interval', () => {
      const timeBounds = getTimeBounds(1, 'm');
      const output = writeInterval('h', timeBounds);
      expect(output.params).to.have.property('calendar_interval', '1h');
      expect(output).not.to.have.property('metricScaleText');
      expect(output).not.to.have.property('metricScale');
    });

    describe('scaling behavior', () => {

      it('should not scale without scaleMetricValues: true', function () {
        const timeBounds = getTimeBounds(30, 'm');
        const output = writeInterval('s', timeBounds);
        expect(output.params).to.have.property('fixed_interval', '10s');
        expect(output).not.to.have.property('metricScaleText');
        expect(output).not.to.have.property('metricScale');
      });

      describe('only scales when all metrics are sum or count', function () {
        const tests = [
          [ false, 'avg', 'count', 'sum' ],
          [ true, 'count', 'sum' ],
          [ false, 'count', 'cardinality' ]
        ];

        tests.forEach(function (test) {
          const should = test.shift();
          const typeNames = test.slice();

          it(typeNames.join(', ') + ' should ' + (should ? '' : 'not') + ' scale', function () {
            const timeBounds = getTimeBounds(1, 'y');

            const vis = paramWriter.vis;
            vis.aggs.aggs.splice(0);

            const histoConfig = new AggConfig(vis.aggs, {
              type: aggTypes.buckets.find(agg => agg.name === 'date_histogram'),
              schema: 'segment',
              params: { interval: 's', field: timeField, timeRange: timeBounds, scaleMetricValues: true }
            });

            vis.aggs.aggs.push(histoConfig);

            typeNames.forEach(function (type) {
              vis.aggs.aggs.push(new AggConfig(vis.aggs, {
                type: aggTypes.metrics.find(agg => agg.name === type),
                schema: 'metric'
              }));
            });

            const output = histoConfig.write(vis.aggs);
            expect(_.has(output, 'metricScale')).to.be(should);
          });
        });
      });
    });
  });

  describe('time_zone', () => {
    beforeEach(() => {
      sinon.stub(config, 'get');
      sinon.stub(config, 'isDefault');
    });

    it('should use the specified time_zone', () => {
      const output = write({ time_zone: 'Europe/Kiev' });
      expect(output.params).to.have.property('time_zone', 'Europe/Kiev');
    });

    it('should use the Kibana time_zone if no parameter specified', () => {
      config.isDefault.withArgs('dateFormat:tz').returns(false);
      config.get.withArgs('dateFormat:tz').returns('Europe/Riga');
      const output = write({});
      expect(output.params).to.have.property('time_zone', 'Europe/Riga');
    });

    it('should use the fixed time_zone from the index pattern typeMeta', () => {
      _.set(paramWriter.indexPattern, ['typeMeta', 'aggs', 'date_histogram', timeField, 'time_zone'], 'Europe/Rome');
      const output = write({ field: timeField });
      expect(output.params).to.have.property('time_zone', 'Europe/Rome');
    });

    afterEach(() => {
      config.get.restore();
      config.isDefault.restore();
    });
  });

  describe('extended_bounds', function () {
    it('should write a long value if a moment passed in', function () {
      const then = moment(0);
      const now = moment(500);
      const output = write({
        extended_bounds: {
          min: then,
          max: now
        }
      });

      expect(typeof output.params.extended_bounds.min).to.be('number');
      expect(typeof output.params.extended_bounds.max).to.be('number');
      expect(output.params.extended_bounds.min).to.be(then.valueOf());
      expect(output.params.extended_bounds.max).to.be(now.valueOf());
    });

    it('should write a long if a long is passed', function () {
      const then = 0;
      const now = 500;
      const output = write({
        extended_bounds: {
          min: then,
          max: now
        }
      });

      expect(typeof output.params.extended_bounds.min).to.be('number');
      expect(typeof output.params.extended_bounds.max).to.be('number');
      expect(output.params.extended_bounds.min).to.be(then.valueOf());
      expect(output.params.extended_bounds.max).to.be(now.valueOf());
    });
  });
});
@ -1,95 +0,0 @@
import expect from '@kbn/expect';
import ngMock from 'ng_mock';
import { aggTypes } from '../..';

describe('Significant Terms Agg', function () {

  describe('order agg editor UI', function () {

    describe('convert include/exclude from old format', function () {

      let $rootScope;

      function init({ aggParams = {} }) {
        ngMock.module('kibana');
        ngMock.inject(function (_$rootScope_) {
          const significantTerms = aggTypes.buckets.find(agg => agg.name === 'significant_terms');

          $rootScope = _$rootScope_;
          $rootScope.agg = {
            id: 'test',
            params: aggParams,
            type: significantTerms,
            getParam: key => aggParams[key],
          };
        });
      }

      function testSerializeAndWrite(aggConfig) {
        const includeArg = $rootScope.agg.type.paramByName('include');
        const excludeArg = $rootScope.agg.type.paramByName('exclude');

        expect(includeArg.serialize(aggConfig.params.include, aggConfig)).to.equal('404');
        expect(excludeArg.serialize(aggConfig.params.exclude, aggConfig)).to.equal('400');

        const output = { params: {} };

        includeArg.write(aggConfig, output);
        excludeArg.write(aggConfig, output);

        expect(output.params.include).to.equal('404');
        expect(output.params.exclude).to.equal('400');
      }

      it('it doesnt do anything with string type', function () {
        init({
          aggParams: {
            include: '404',
            exclude: '400',
            field: {
              type: 'string'
            },
          }
        });

        testSerializeAndWrite($rootScope.agg);
      });

      it('converts object to string type', function () {
        init({
          aggParams: {
            include: {
              pattern: '404'
            }, exclude: {
              pattern: '400'
            },
            field: {
              type: 'string'
            },
          }
        });

        testSerializeAndWrite($rootScope.agg);
      });
    });
  });
});
@ -1,193 +0,0 @@
import expect from '@kbn/expect';
import ngMock from 'ng_mock';
import { aggTypes } from '../..';

describe('Terms Agg', function () {
  describe('order agg editor UI', function () {

    let $rootScope;

    function init({ metricAggs = [], aggParams = {} }) {
      ngMock.module('kibana');
      ngMock.inject(function ($controller, _$rootScope_) {
        const terms = aggTypes.buckets.find(agg => agg.name === 'terms');
        const orderAggController = terms.paramByName('orderAgg').controller;

        $rootScope = _$rootScope_;
        $rootScope.agg = {
          id: 'test',
          params: aggParams,
          type: terms,
          vis: {
            aggs: []
          },
          getParam: key => aggParams[key],
        };
        $rootScope.metricAggs = metricAggs;
        $controller(orderAggController, { $scope: $rootScope });
        $rootScope.$digest();
      });
    }

    // should be rewritten after EUIficate order_agg.html
    it.skip('selects _key if the selected metric becomes incompatible', function () {
      init({
        metricAggs: [
          {
            id: 'agg1',
            type: {
              name: 'count'
            }
          }
        ]
      });

      expect($rootScope.agg.params.orderBy).to.be('agg1');
      $rootScope.metricAggs = [
        {
          id: 'agg1',
          type: {
            name: 'top_hits'
          }
        }
      ];
      $rootScope.$digest();
      expect($rootScope.agg.params.orderBy).to.be('_key');
    });

    // should be rewritten after EUIficate order_agg.html
    it.skip('selects _key if the selected metric is removed', function () {
      init({
        metricAggs: [
          {
            id: 'agg1',
            type: {
              name: 'count'
            }
          }
        ]
      });
      expect($rootScope.agg.params.orderBy).to.be('agg1');
      $rootScope.metricAggs = [];
      $rootScope.$digest();
      expect($rootScope.agg.params.orderBy).to.be('_key');
    });

    describe.skip('custom field formatter', () => {
      beforeEach(() => {
        init({
          metricAggs: [
            {
              id: 'agg1',
              type: {
                name: 'count'
              }
            }
          ],
          aggParams: {
            otherBucketLabel: 'Other',
            missingBucketLabel: 'Missing'
          }
        });
        $rootScope.$digest();
      });

      it('converts __other__ key', () => {
        const formatter = $rootScope.agg.type.getFormat($rootScope.agg).getConverterFor('text');
        expect(formatter('__other__')).to.be('Other');
      });

      it('converts __missing__ key', () => {
        const formatter = $rootScope.agg.type.getFormat($rootScope.agg).getConverterFor('text');
        expect(formatter('__missing__')).to.be('Missing');
      });
    });

    it('adds "custom metric" option');
    it('lists all metric agg responses');
    it('lists individual values of a multi-value metric');
    it('displays a metric editor if "custom metric" is selected');
    it('saves the "custom metric" to state and refreshes from it');
    it('invalidates the form if the metric agg form is not complete');

    describe.skip('convert include/exclude from old format', function () {

      it('it doesnt do anything with string type', function () {
        init({
          aggParams: {
            include: '404',
            exclude: '400',
            field: {
              type: 'string'
            },
          }
        });

        const aggConfig = $rootScope.agg;
        const includeArg = $rootScope.agg.type.params.byName.include;
        const excludeArg = $rootScope.agg.type.params.byName.exclude;

        expect(includeArg.serialize(aggConfig.params.include, aggConfig)).to.equal('404');
        expect(excludeArg.serialize(aggConfig.params.exclude, aggConfig)).to.equal('400');

        const output = { params: {} };

        includeArg.write(aggConfig, output);
        excludeArg.write(aggConfig, output);

        expect(output.params.include).to.equal('404');
        expect(output.params.exclude).to.equal('400');
      });

      it('converts object to string type', function () {
        init({
          aggParams: {
            include: {
              pattern: '404'
            }, exclude: {
              pattern: '400'
            },
            field: {
              type: 'string'
            },
          }
        });

        const aggConfig = $rootScope.agg;
        const includeArg = $rootScope.agg.type.params.byName.include;
        const excludeArg = $rootScope.agg.type.params.byName.exclude;

        expect(includeArg.serialize(aggConfig.params.include, aggConfig)).to.equal('404');
        expect(excludeArg.serialize(aggConfig.params.exclude, aggConfig)).to.equal('400');

        const output = { params: {} };

        includeArg.write(aggConfig, output);
        excludeArg.write(aggConfig, output);

        expect(output.params.include).to.equal('404');
        expect(output.params.exclude).to.equal('400');
      });

    });
  });
});
@ -1,67 +0,0 @@
import _ from 'lodash';
import sinon from 'sinon';
import { BaseParamType } from '../../param_types/base';
import { FieldParamType } from '../../param_types/field';
import { OptionedParamType } from '../../param_types/optioned';
import { createLegacyClass } from '../../../utils/legacy_class';

function ParamClassStub(parent, body) {
  const stub = sinon.spy(body || function () {
    stub.Super && stub.Super.call(this);
  });
  if (parent) createLegacyClass(stub).inherits(parent);
  return stub;
}

/**
 * Stub all of the param classes, but ensure that they still inherit properly.
 * This method should be passed directly to ngMock.inject();
 *
 * ```js
 * let stubParamClasses = require('./utils/_stub_agg_params');
 * describe('something', function () {
 *   beforeEach(ngMock.inject(stubParamClasses));
 * })
 * ```
 *
 * @param {PrivateLoader} Private - The private module loader, injected by passing this function to ngMock.inject()
 * @return {undefined}
 */
// eslint-disable-next-line import/no-default-export
export default function stubParamClasses(Private) {
  const BaseAggParam = Private.stub(
    BaseParamType,
    new ParamClassStub(null, function (config) {
      _.assign(this, config);
    })
  );

  Private.stub(
    FieldParamType,
    new ParamClassStub(BaseAggParam)
  );

  Private.stub(
    OptionedParamType,
    new ParamClassStub(BaseAggParam)
  );
}
@ -332,7 +332,7 @@ export class AggConfig {
     return this.type.getValue(this, bucket);
   }
 
-  getKey(bucket: any, key: string) {
+  getKey(bucket: any, key?: string) {
     if (this.type.getKey) {
       return this.type.getKey(bucket, key, this);
     } else {
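Note: the hunk above only makes the second argument of AggConfig.getKey optional. A minimal TypeScript sketch of the effect, using a hypothetical stand-in class rather than the real AggConfig (the date_range test added later in this commit relies on the one-argument call form):

interface DateRangeBucket {
  from: string;
  to: string;
  key: string;
}

class FakeDateRangeAgg {
  // After this change the signature is `getKey(bucket, key?)`, so callers may omit `key`.
  getKey(bucket: DateRangeBucket, key?: string) {
    // For date_range buckets the synthetic `key` string is dropped and the
    // { from, to } pair is returned, which is what date_range.test.ts asserts below.
    const { from, to } = bucket;
    return { from, to };
  }
}

const agg = new FakeDateRangeAgg();
agg.getKey({ from: 'from-date', to: 'to-date', key: 'from-dateto-date' }); // -> { from: 'from-date', to: 'to-date' }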
@ -0,0 +1,122 @@
import moment from 'moment';
import { RangeFilter } from '@kbn/es-query';
import { createFilterDateHistogram } from './date_histogram';
import { intervalOptions } from '../_interval_options';
import { AggConfigs } from '../../agg_configs';
import { IBucketDateHistogramAggConfig } from '../date_histogram';
import { BUCKET_TYPES } from '../bucket_agg_types';

jest.mock('ui/new_platform');

describe('AggConfig Filters', () => {
  describe('date_histogram', () => {
    let agg: IBucketDateHistogramAggConfig;
    let filter: RangeFilter;
    let bucketStart: any;
    let field: any;

    const init = (interval: string = 'auto', duration: any = moment.duration(15, 'minutes')) => {
      field = {
        name: 'date',
      };

      const indexPattern = {
        id: '1234',
        title: 'logstash-*',
        fields: {
          getByName: () => field,
          filter: () => [field],
        },
      } as any;
      const aggConfigs = new AggConfigs(
        indexPattern,
        [
          {
            type: BUCKET_TYPES.DATE_HISTOGRAM,
            schema: 'segment',
            params: { field: field.name, interval, customInterval: '5d' },
          },
        ],
        null
      );
      const bucketKey = 1422579600000;

      agg = aggConfigs.aggs[0] as IBucketDateHistogramAggConfig;
      bucketStart = moment(bucketKey);

      const timePad = moment.duration(duration / 2);

      agg.buckets.setBounds({
        min: bucketStart.clone().subtract(timePad),
        max: bucketStart.clone().add(timePad),
      });
      agg.buckets.setInterval(interval);
      filter = createFilterDateHistogram(agg, bucketKey);
    };

    it('creates a valid range filter', () => {
      init();

      expect(filter).toHaveProperty('range');
      expect(filter.range).toHaveProperty(field.name);

      const fieldParams = filter.range[field.name];
      expect(fieldParams).toHaveProperty('gte');
      expect(typeof fieldParams.gte).toBe('string');

      expect(fieldParams).toHaveProperty('lt');
      expect(typeof fieldParams.lt).toBe('string');

      expect(fieldParams).toHaveProperty('format');
      expect(fieldParams.format).toBe('strict_date_optional_time');

      expect(filter).toHaveProperty('meta');
      expect(filter.meta).toHaveProperty('index', '1234');
    });

    it('extends the filter edge to 1ms before the next bucket for all interval options', () => {
      intervalOptions.forEach(option => {
        let duration;
        if (option.val !== 'custom' && moment(1, option.val).isValid()) {
          // @ts-ignore
          duration = moment.duration(10, option.val);

          if (+duration < 10) {
            throw new Error('unable to create interval for ' + option.val);
          }
        }
        init(option.val, duration);

        const interval = agg.buckets.getInterval();
        const params = filter.range[field.name];

        expect(params.gte).toBe(bucketStart.toISOString());
        expect(params.lt).toBe(
          bucketStart
            .clone()
            .add(interval)
            .toISOString()
        );
      });
    });
  });
});
@ -21,7 +21,10 @@ import moment from 'moment';
 import { buildRangeFilter } from '@kbn/es-query';
 import { IBucketDateHistogramAggConfig } from '../date_histogram';
 
-export const createFilterDateHistogram = (agg: IBucketDateHistogramAggConfig, key: string) => {
+export const createFilterDateHistogram = (
+  agg: IBucketDateHistogramAggConfig,
+  key: string | number
+) => {
   const start = moment(key);
   const interval = agg.buckets.getInterval();
 
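Note: the hunk above widens createFilterDateHistogram so the bucket key may be a number (the epoch value used in the new date_histogram filter test) as well as a string. A small sketch of why both now type-check; this is an assumption-level illustration only, since the real function also uses agg.buckets.getInterval() and buildRangeFilter, which are omitted here:

import moment from 'moment';

// Both key shapes resolve to a moment the same way inside the filter builder.
const bucketStartIso = (key: string | number) => moment(key).toISOString();

bucketStartIso(1422579600000);          // numeric epoch key, as in the test above
bucketStartIso('2015-01-29T17:00:00Z'); // string key is still accepted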
@ -0,0 +1,77 @@
import moment from 'moment';
import { createFilterDateRange } from './date_range';
import { DateFormat } from '../../../../../../plugins/data/common';
import { AggConfigs } from '../../agg_configs';
import { BUCKET_TYPES } from '../bucket_agg_types';

jest.mock('ui/new_platform');

describe('AggConfig Filters', () => {
  describe('Date range', () => {
    const getAggConfigs = () => {
      const field = {
        name: '@timestamp',
        format: new DateFormat({}, () => {}),
      };

      const indexPattern = {
        id: '1234',
        title: 'logstash-*',
        fields: {
          getByName: () => field,
          filter: () => [field],
        },
      } as any;

      return new AggConfigs(
        indexPattern,
        [
          {
            type: BUCKET_TYPES.DATE_RANGE,
            params: {
              field: '@timestamp',
              ranges: [{ from: '2014-01-01', to: '2014-12-31' }],
            },
          },
        ],
        null
      );
    };

    it('should return a range filter for date_range agg', () => {
      const aggConfigs = getAggConfigs();
      const from = new Date('1 Feb 2015');
      const to = new Date('7 Feb 2015');
      const filter = createFilterDateRange(aggConfigs.aggs[0], {
        from: from.valueOf(),
        to: to.valueOf(),
      });

      expect(filter).toHaveProperty('range');
      expect(filter).toHaveProperty('meta');
      expect(filter.meta).toHaveProperty('index', '1234');
      expect(filter.range).toHaveProperty('@timestamp');
      expect(filter.range['@timestamp']).toHaveProperty('gte', moment(from).toISOString());
      expect(filter.range['@timestamp']).toHaveProperty('lt', moment(to).toISOString());
    });
  });
});
@ -0,0 +1,66 @@
import { createFilterFilters } from './filters';
import { AggConfigs } from '../../agg_configs';

jest.mock('ui/new_platform');

describe('AggConfig Filters', () => {
  describe('filters', () => {
    const getAggConfigs = () => {
      const field = {
        name: 'bytes',
      };

      const indexPattern = {
        id: '1234',
        title: 'logstash-*',
        fields: {
          getByName: () => field,
          filter: () => [field],
        },
      } as any;

      return new AggConfigs(
        indexPattern,
        [
          {
            type: 'filters',
            schema: 'segment',
            params: {
              filters: [
                { input: { query: 'type:apache', language: 'lucene' } },
                { input: { query: 'type:nginx', language: 'lucene' } },
              ],
            },
          },
        ],
        null
      );
    };

    it('should return a filters filter', () => {
      const aggConfigs = getAggConfigs();
      const filter = createFilterFilters(aggConfigs.aggs[0], 'type:nginx');

      expect(filter!.query.bool.must[0].query_string.query).toBe('type:nginx');
      expect(filter!.meta).toHaveProperty('index', '1234');
      expect(filter!.meta).toHaveProperty('alias', 'type:nginx');
    });
  });
});
@ -0,0 +1,73 @@
import { createFilterHistogram } from './histogram';
import { AggConfigs } from '../../agg_configs';
import { BUCKET_TYPES } from '../bucket_agg_types';
import { BytesFormat } from '../../../../../../plugins/data/common';

jest.mock('ui/new_platform');

describe('AggConfig Filters', () => {
  describe('histogram', () => {
    const getAggConfigs = () => {
      const field = {
        name: 'bytes',
        format: new BytesFormat({}, () => {}),
      };

      const indexPattern = {
        id: '1234',
        title: 'logstash-*',
        fields: {
          getByName: () => field,
          filter: () => [field],
        },
      } as any;

      return new AggConfigs(
        indexPattern,
        [
          {
            id: BUCKET_TYPES.HISTOGRAM,
            type: BUCKET_TYPES.HISTOGRAM,
            schema: 'buckets',
            params: {
              field: 'bytes',
              interval: 1024,
            },
          },
        ],
        null
      );
    };

    it('should return a range filter for histogram', () => {
      const aggConfigs = getAggConfigs();
      const filter = createFilterHistogram(aggConfigs.aggs[0], '2048');

      expect(filter).toHaveProperty('meta');
      expect(filter.meta).toHaveProperty('index', '1234');
      expect(filter).toHaveProperty('range');
      expect(filter.range).toHaveProperty('bytes');
      expect(filter.range.bytes).toHaveProperty('gte', 2048);
      expect(filter.range.bytes).toHaveProperty('lt', 3072);
      expect(filter.meta).toHaveProperty('formattedValue', '2,048');
    });
  });
});
@ -0,0 +1,104 @@
import { createFilterIpRange } from './ip_range';
import { AggConfigs } from '../../agg_configs';
import { IpFormat } from '../../../../../../plugins/data/common';
import { BUCKET_TYPES } from '../bucket_agg_types';

jest.mock('ui/new_platform');

describe('AggConfig Filters', () => {
  describe('IP range', () => {
    const getAggConfigs = (aggs: Array<Record<string, any>>) => {
      const field = {
        name: 'ip',
        format: IpFormat,
      };

      const indexPattern = {
        id: '1234',
        title: 'logstash-*',
        fields: {
          getByName: () => field,
          filter: () => [field],
        },
      } as any;

      return new AggConfigs(indexPattern, aggs, null);
    };

    it('should return a range filter for ip_range agg', () => {
      const aggConfigs = getAggConfigs([
        {
          type: BUCKET_TYPES.IP_RANGE,
          schema: 'segment',
          params: {
            field: 'ip',
            ipRangeType: 'range',
            ranges: {
              fromTo: [{ from: '0.0.0.0', to: '1.1.1.1' }],
            },
          },
        },
      ]);

      const filter = createFilterIpRange(aggConfigs.aggs[0], {
        type: 'range',
        from: '0.0.0.0',
        to: '1.1.1.1',
      });

      expect(filter).toHaveProperty('range');
      expect(filter).toHaveProperty('meta');
      expect(filter.meta).toHaveProperty('index', '1234');
      expect(filter.range).toHaveProperty('ip');
      expect(filter.range.ip).toHaveProperty('gte', '0.0.0.0');
      expect(filter.range.ip).toHaveProperty('lte', '1.1.1.1');
    });

    it('should return a range filter for ip_range agg using a CIDR mask', () => {
      const aggConfigs = getAggConfigs([
        {
          type: BUCKET_TYPES.IP_RANGE,
          schema: 'segment',
          params: {
            field: 'ip',
            ipRangeType: 'mask',
            ranges: {
              mask: [{ mask: '67.129.65.201/27' }],
            },
          },
        },
      ]);

      const filter = createFilterIpRange(aggConfigs.aggs[0], {
        type: 'mask',
        mask: '67.129.65.201/27',
      });

      expect(filter).toHaveProperty('range');
      expect(filter).toHaveProperty('meta');
      expect(filter.meta).toHaveProperty('index', '1234');
      expect(filter.range).toHaveProperty('ip');
      expect(filter.range.ip).toHaveProperty('gte', '67.129.65.192');
      expect(filter.range.ip).toHaveProperty('lte', '67.129.65.223');
    });
  });
});
@ -0,0 +1,74 @@
import { createFilterRange } from './range';
import { BytesFormat } from '../../../../../../plugins/data/common';
import { AggConfigs } from '../../agg_configs';
import { BUCKET_TYPES } from '../bucket_agg_types';

jest.mock('ui/new_platform');

describe('AggConfig Filters', () => {
  describe('range', () => {
    const getAggConfigs = () => {
      const field = {
        name: 'bytes',
        format: new BytesFormat({}, () => {}),
      };

      const indexPattern = {
        id: '1234',
        title: 'logstash-*',
        fields: {
          getByName: () => field,
          filter: () => [field],
        },
      } as any;

      return new AggConfigs(
        indexPattern,
        [
          {
            id: BUCKET_TYPES.RANGE,
            type: BUCKET_TYPES.RANGE,
            schema: 'buckets',
            params: {
              field: 'bytes',
              ranges: [{ from: 1024, to: 2048 }],
            },
          },
        ],
        null
      );
    };

    it('should return a range filter for range agg', () => {
      const aggConfigs = getAggConfigs();
      const filter = createFilterRange(aggConfigs.aggs[0], { gte: 1024, lt: 2048.0 });

      expect(filter).toHaveProperty('range');
      expect(filter).toHaveProperty('meta');
      expect(filter.meta).toHaveProperty('index', '1234');
      expect(filter.range).toHaveProperty('bytes');
      expect(filter.range.bytes).toHaveProperty('gte', 1024.0);
      expect(filter.range.bytes).toHaveProperty('lt', 2048.0);
      expect(filter.meta).toHaveProperty('formattedValue', '≥ 1,024 and < 2,048');
    });
  });
});
@ -0,0 +1,113 @@
import { ExistsFilter, Filter } from '@kbn/es-query';
import { createFilterTerms } from './terms';
import { AggConfigs } from '../../agg_configs';
import { BUCKET_TYPES } from '../bucket_agg_types';

jest.mock('ui/new_platform');

describe('AggConfig Filters', () => {
  describe('terms', () => {
    const getAggConfigs = (aggs: Array<Record<string, any>>) => {
      const indexPattern = {
        id: '1234',
        title: 'logstash-*',
        fields: {
          getByName: () => field,
          filter: () => [field],
        },
      } as any;

      const field = {
        name: 'field',
        indexPattern,
      };

      return new AggConfigs(indexPattern, aggs, null);
    };

    it('should return a match_phrase filter for terms', () => {
      const aggConfigs = getAggConfigs([
        { type: BUCKET_TYPES.TERMS, schema: 'segment', params: { field: 'field' } },
      ]);

      const filter = createFilterTerms(aggConfigs.aggs[0], 'apache', {}) as Filter;

      expect(filter).toHaveProperty('query');
      expect(filter.query).toHaveProperty('match_phrase');
      expect(filter.query.match_phrase).toHaveProperty('field');
      expect(filter.query.match_phrase.field).toBe('apache');
      expect(filter).toHaveProperty('meta');
      expect(filter.meta).toHaveProperty('index', '1234');
    });

    it('should set query to true or false for boolean filter', () => {
      const aggConfigs = getAggConfigs([
        { type: BUCKET_TYPES.TERMS, schema: 'segment', params: { field: 'field' } },
      ]);

      const filterFalse = createFilterTerms(aggConfigs.aggs[0], '', {}) as Filter;

      expect(filterFalse).toHaveProperty('query');
      expect(filterFalse.query).toHaveProperty('match_phrase');
      expect(filterFalse.query.match_phrase).toHaveProperty('field');
      expect(filterFalse.query.match_phrase.field).toBeFalsy();

      const filterTrue = createFilterTerms(aggConfigs.aggs[0], '1', {}) as Filter;

      expect(filterTrue).toHaveProperty('query');
      expect(filterTrue.query).toHaveProperty('match_phrase');
      expect(filterTrue.query.match_phrase).toHaveProperty('field');
      expect(filterTrue.query.match_phrase.field).toBeTruthy();
    });

    it('should generate correct __missing__ filter', () => {
      const aggConfigs = getAggConfigs([
        { type: BUCKET_TYPES.TERMS, schema: 'segment', params: { field: 'field' } },
      ]);
      const filter = createFilterTerms(aggConfigs.aggs[0], '__missing__', {}) as ExistsFilter;

      expect(filter).toHaveProperty('exists');
      expect(filter.exists).toHaveProperty('field', 'field');
      expect(filter).toHaveProperty('meta');
      expect(filter.meta).toHaveProperty('index', '1234');
      expect(filter.meta).toHaveProperty('negate', true);
    });

    it('should generate correct __other__ filter', () => {
      const aggConfigs = getAggConfigs([
        { type: BUCKET_TYPES.TERMS, schema: 'segment', params: { field: 'field' } },
      ]);

      const [filter] = createFilterTerms(aggConfigs.aggs[0], '__other__', {
        terms: ['apache'],
      }) as Filter[];

      expect(filter).toHaveProperty('query');
      expect(filter.query).toHaveProperty('bool');
      expect(filter.query.bool).toHaveProperty('should');
      expect(filter.query.bool.should[0]).toHaveProperty('match_phrase');
      expect(filter.query.bool.should[0].match_phrase).toHaveProperty('field', 'apache');
      expect(filter).toHaveProperty('meta');
      expect(filter.meta).toHaveProperty('index', '1234');
      expect(filter.meta).toHaveProperty('negate', true);
    });
  });
});
src/legacy/ui/public/agg_types/buckets/date_range.test.ts
@ -0,0 +1,112 @@
import { AggConfigs } from '../agg_configs';
import { BUCKET_TYPES } from './bucket_agg_types';
import { npStart } from 'ui/new_platform';

jest.mock('ui/new_platform');

describe('date_range params', () => {
  const getAggConfigs = (params: Record<string, any> = {}, hasIncludeTypeMeta: boolean = true) => {
    const field = {
      name: 'bytes',
    };

    const indexPattern = {
      id: '1234',
      title: 'logstash-*',
      fields: {
        getByName: () => field,
        filter: () => [field],
      },
      typeMeta: hasIncludeTypeMeta
        ? {
            aggs: {
              date_range: {
                bytes: {
                  time_zone: 'defaultTimeZone',
                },
              },
            },
          }
        : undefined,
    } as any;

    return new AggConfigs(
      indexPattern,
      [
        {
          id: BUCKET_TYPES.DATE_RANGE,
          type: BUCKET_TYPES.DATE_RANGE,
          schema: 'buckets',
          params,
        },
      ],
      null
    );
  };

  describe('getKey', () => {
    it('should return object', () => {
      const aggConfigs = getAggConfigs();
      const dateRange = aggConfigs.aggs[0];
      const bucket = { from: 'from-date', to: 'to-date', key: 'from-dateto-date' };

      expect(dateRange.getKey(bucket)).toEqual({ from: 'from-date', to: 'to-date' });
    });
  });

  describe('time_zone', () => {
    it('should use the specified time_zone', () => {
      const aggConfigs = getAggConfigs({
        time_zone: 'Europe/Minsk',
        field: 'bytes',
      });
      const dateRange = aggConfigs.aggs[0];
      const params = dateRange.toDsl()[BUCKET_TYPES.DATE_RANGE];

      expect(params.time_zone).toBe('Europe/Minsk');
    });

    it('should use the fixed time_zone from the index pattern typeMeta', () => {
      const aggConfigs = getAggConfigs({
        field: 'bytes',
      });
      const dateRange = aggConfigs.aggs[0];
      const params = dateRange.toDsl()[BUCKET_TYPES.DATE_RANGE];

      expect(params.time_zone).toBe('defaultTimeZone');
    });

    it('should use the Kibana time_zone if no parameter specified', () => {
      npStart.core.uiSettings.get = jest.fn(() => 'kibanaTimeZone');

      const aggConfigs = getAggConfigs(
        {
          field: 'bytes',
        },
        false
      );
      const dateRange = aggConfigs.aggs[0];
      const params = dateRange.toDsl()[BUCKET_TYPES.DATE_RANGE];

      expect(params.time_zone).toBe('kibanaTimeZone');
    });
  });
});
src/legacy/ui/public/agg_types/buckets/geo_hash.test.ts
@ -0,0 +1,216 @@
import { geoHashBucketAgg, IBucketGeoHashGridAggConfig } from './geo_hash';
import { AggConfigs } from '../agg_configs';
import { BUCKET_TYPES } from './bucket_agg_types';

jest.mock('ui/new_platform');

describe('Geohash Agg', () => {
  const getAggConfigs = (params?: Record<string, any>) => {
    const indexPattern = {
      id: '1234',
      title: 'logstash-*',
      fields: {
        getByName: () => field,
        filter: () => [field],
      },
    } as any;

    const field = {
      name: 'location',
      indexPattern,
    };

    return new AggConfigs(
      indexPattern,
      [
        {
          id: BUCKET_TYPES.GEOHASH_GRID,
          type: BUCKET_TYPES.GEOHASH_GRID,
          schema: 'segment',
          params: {
            field: {
              name: 'location',
            },
            isFilteredByCollar: true,
            useGeocentroid: true,
            mapZoom: 10,
            mapBounds: {
              top_left: { lat: 1.0, lon: -1.0 },
              bottom_right: { lat: -1.0, lon: 1.0 },
            },
            ...params,
          },
        },
      ],
      null
    );
  };

  describe('precision parameter', () => {
    const PRECISION_PARAM_INDEX = 2;

    let precisionParam: any;

    beforeEach(() => {
      precisionParam = geoHashBucketAgg.params[PRECISION_PARAM_INDEX];
    });

    it('should select precision parameter', () => {
      expect(precisionParam.name).toEqual('precision');
    });

    describe('precision parameter write', () => {
      const zoomToGeoHashPrecision: Record<string, any> = {
        0: 1,
        1: 2,
        2: 2,
        3: 2,
        4: 3,
        5: 3,
        6: 4,
        7: 4,
        8: 4,
        9: 5,
        10: 5,
        11: 6,
        12: 6,
        13: 6,
        14: 7,
        15: 7,
        16: 8,
        17: 8,
        18: 8,
        19: 9,
        20: 9,
        21: 10,
      };

      Object.keys(zoomToGeoHashPrecision).forEach((zoomLevel: string) => {
        it(`zoom level ${zoomLevel} should correspond to correct geohash-precision`, () => {
          const aggConfigs = getAggConfigs({
            autoPrecision: true,
            mapZoom: zoomLevel,
          });

          const { [BUCKET_TYPES.GEOHASH_GRID]: params } = aggConfigs.aggs[0].toDsl();

          expect(params.precision).toEqual(zoomToGeoHashPrecision[zoomLevel]);
        });
      });
    });
  });

  describe('getRequestAggs', () => {
    describe('initial aggregation creation', () => {
      let aggConfigs: AggConfigs;
      let geoHashGridAgg: IBucketGeoHashGridAggConfig;

      beforeEach(() => {
        aggConfigs = getAggConfigs();
        geoHashGridAgg = aggConfigs.aggs[0] as IBucketGeoHashGridAggConfig;
      });

      it('should create filter, geohash_grid, and geo_centroid aggregations', () => {
        const requestAggs = geoHashBucketAgg.getRequestAggs(
          geoHashGridAgg
        ) as IBucketGeoHashGridAggConfig[];

        expect(requestAggs.length).toEqual(3);
        expect(requestAggs[0].type.name).toEqual('filter');
        expect(requestAggs[1].type.name).toEqual('geohash_grid');
        expect(requestAggs[2].type.name).toEqual('geo_centroid');
      });

      it('should set mapCollar in vis session state', () => {
        const [, geoHashAgg] = geoHashBucketAgg.getRequestAggs(
          geoHashGridAgg
        ) as IBucketGeoHashGridAggConfig[];

        expect(geoHashAgg).toHaveProperty('lastMapCollar');
        expect(geoHashAgg.lastMapCollar).toHaveProperty('top_left');
        expect(geoHashAgg.lastMapCollar).toHaveProperty('bottom_right');
        expect(geoHashAgg.lastMapCollar).toHaveProperty('zoom');
      });
    });
  });

  describe('aggregation options', () => {
    it('should only create geohash_grid and geo_centroid aggregations when isFilteredByCollar is false', () => {
      const aggConfigs = getAggConfigs({ isFilteredByCollar: false });
      const requestAggs = geoHashBucketAgg.getRequestAggs(aggConfigs
        .aggs[0] as IBucketGeoHashGridAggConfig) as IBucketGeoHashGridAggConfig[];

      expect(requestAggs.length).toEqual(2);
      expect(requestAggs[0].type.name).toEqual('geohash_grid');
      expect(requestAggs[1].type.name).toEqual('geo_centroid');
    });

    it('should only create filter and geohash_grid aggregations when useGeocentroid is false', () => {
      const aggConfigs = getAggConfigs({ useGeocentroid: false });
      const requestAggs = geoHashBucketAgg.getRequestAggs(aggConfigs
        .aggs[0] as IBucketGeoHashGridAggConfig) as IBucketGeoHashGridAggConfig[];

      expect(requestAggs.length).toEqual(2);
      expect(requestAggs[0].type.name).toEqual('filter');
      expect(requestAggs[1].type.name).toEqual('geohash_grid');
    });
  });

  describe('aggregation creation after map interaction', () => {
    let originalRequestAggs: IBucketGeoHashGridAggConfig[];

    beforeEach(() => {
      originalRequestAggs = geoHashBucketAgg.getRequestAggs(getAggConfigs()
        .aggs[0] as IBucketGeoHashGridAggConfig) as IBucketGeoHashGridAggConfig[];
    });

    it('should change geo_bounding_box filter aggregation and vis session state when map movement is outside map collar', () => {
      const [, geoBoxingBox] = geoHashBucketAgg.getRequestAggs(getAggConfigs({
        mapBounds: {
          top_left: { lat: 10.0, lon: -10.0 },
          bottom_right: { lat: 9.0, lon: -9.0 },
        },
      }).aggs[0] as IBucketGeoHashGridAggConfig) as IBucketGeoHashGridAggConfig[];

      expect(originalRequestAggs[1].params).not.toEqual(geoBoxingBox.params);
    });

    it('should not change geo_bounding_box filter aggregation and vis session state when map movement is within map collar', () => {
      const [, geoBoxingBox] = geoHashBucketAgg.getRequestAggs(getAggConfigs({
        mapBounds: {
          top_left: { lat: 1, lon: -1 },
          bottom_right: { lat: -1, lon: 1 },
        },
      }).aggs[0] as IBucketGeoHashGridAggConfig) as IBucketGeoHashGridAggConfig[];

      expect(originalRequestAggs[1].params).toEqual(geoBoxingBox.params);
    });

    it('should change geo_bounding_box filter aggregation and vis session state when map zoom level changes', () => {
      const [, geoBoxingBox] = geoHashBucketAgg.getRequestAggs(getAggConfigs({
        mapZoom: -1,
      }).aggs[0] as IBucketGeoHashGridAggConfig) as IBucketGeoHashGridAggConfig[];

      expect(originalRequestAggs[1].lastMapCollar).not.toEqual(geoBoxingBox.lastMapCollar);
    });
  });
});
src/legacy/ui/public/agg_types/buckets/histogram.test.ts
@ -0,0 +1,292 @@
import { npStart } from 'ui/new_platform';
import { AggConfigs } from '../index';
import { BUCKET_TYPES } from './bucket_agg_types';
import { IBucketHistogramAggConfig, histogramBucketAgg, AutoBounds } from './histogram';
import { BucketAggType } from './_bucket_agg_type';

jest.mock('ui/new_platform');

describe('Histogram Agg', () => {
  const getAggConfigs = (params: Record<string, any> = {}) => {
    const indexPattern = {
      id: '1234',
      title: 'logstash-*',
      fields: {
        getByName: () => field,
        filter: () => [field],
      },
    } as any;

    const field = {
      name: 'field',
      indexPattern,
    };

    return new AggConfigs(
      indexPattern,
      [
        {
          field: {
            name: 'field',
          },
          id: 'test',
          type: BUCKET_TYPES.HISTOGRAM,
          schema: 'segment',
          params,
        },
      ],
      null
    );
  };

  const getParams = (options: Record<string, any>) => {
    const aggConfigs = getAggConfigs({
      ...options,
      field: {
        name: 'field',
      },
    });
    return aggConfigs.aggs[0].toDsl()[BUCKET_TYPES.HISTOGRAM];
  };

  describe('ordered', () => {
    let histogramType: BucketAggType<IBucketHistogramAggConfig>;

    beforeEach(() => {
      histogramType = histogramBucketAgg;
    });

    it('is ordered', () => {
      expect(histogramType.ordered).toBeDefined();
    });

    it('is not ordered by date', () => {
      expect(histogramType.ordered).not.toHaveProperty('date');
    });
  });

  describe('params', () => {
    describe('intervalBase', () => {
      it('should not be written to the DSL', () => {
        const aggConfigs = getAggConfigs({
          intervalBase: 100,
          field: {
            name: 'field',
          },
        });
        const { [BUCKET_TYPES.HISTOGRAM]: params } = aggConfigs.aggs[0].toDsl();

        expect(params).not.toHaveProperty('intervalBase');
      });
    });

    describe('interval', () => {
      it('accepts a whole number', () => {
        const params = getParams({
          interval: 100,
        });

        expect(params).toHaveProperty('interval', 100);
      });

      it('accepts a decimal number', function() {
        const params = getParams({
          interval: 0.1,
        });

        expect(params).toHaveProperty('interval', 0.1);
      });

      it('accepts a decimal number string', function() {
        const params = getParams({
          interval: '0.1',
        });

        expect(params).toHaveProperty('interval', 0.1);
      });

      it('accepts a whole number string', function() {
        const params = getParams({
          interval: '10',
        });

        expect(params).toHaveProperty('interval', 10);
      });

      it('fails on non-numeric values', function() {
        const params = getParams({
          interval: [],
        });

        expect(params.interval).toBeNaN();
      });

      describe('interval scaling', () => {
        const getInterval = (
          maxBars: number,
          params?: Record<string, any>,
          autoBounds?: AutoBounds
        ) => {
          const aggConfigs = getAggConfigs({
            ...params,
            field: {
              name: 'field',
            },
          });
          const aggConfig = aggConfigs.aggs[0] as IBucketHistogramAggConfig;

          if (autoBounds) {
            aggConfig.setAutoBounds(autoBounds);
          }

          // mock histogram:maxBars value;
          npStart.core.uiSettings.get = jest.fn(() => maxBars);

          return aggConfig.write(aggConfigs).params;
        };

        it('will respect the histogram:maxBars setting', () => {
          const params = getInterval(
            5,
            { interval: 5 },
            {
              min: 0,
              max: 10000,
            }
          );

          expect(params).toHaveProperty('interval', 2000);
        });

        it('will return specified interval, if bars are below histogram:maxBars config', () => {
          const params = getInterval(100, { interval: 5 });

          expect(params).toHaveProperty('interval', 5);
        });

        it('will set to intervalBase if interval is below base', () => {
          const params = getInterval(1000, { interval: 3, intervalBase: 8 });

          expect(params).toHaveProperty('interval', 8);
        });

        it('will round to nearest intervalBase multiple if interval is above base', () => {
          const roundUp = getInterval(1000, { interval: 46, intervalBase: 10 });
          expect(roundUp).toHaveProperty('interval', 50);

          const roundDown = getInterval(1000, { interval: 43, intervalBase: 10 });
          expect(roundDown).toHaveProperty('interval', 40);
        });

        it('will not change interval if it is a multiple of base', () => {
          const output = getInterval(1000, { interval: 35, intervalBase: 5 });

          expect(output).toHaveProperty('interval', 35);
        });

        it('will round to intervalBase after scaling histogram:maxBars', () => {
          const output = getInterval(100, { interval: 5, intervalBase: 6 }, { min: 0, max: 1000 });

          // 100 buckets in 0 to 1000 would result in an interval of 10, so we should
          // round to the next multiple of 6 -> 12
          expect(output).toHaveProperty('interval', 12);
        });
      });

      describe('min_doc_count', () => {
        let output: Record<string, any>;

        it('casts true values to 0', () => {
          output = getParams({ min_doc_count: true });
          expect(output).toHaveProperty('min_doc_count', 0);

          output = getParams({ min_doc_count: 'yes' });
          expect(output).toHaveProperty('min_doc_count', 0);

          output = getParams({ min_doc_count: 1 });
          expect(output).toHaveProperty('min_doc_count', 0);

          output = getParams({ min_doc_count: {} });
          expect(output).toHaveProperty('min_doc_count', 0);
        });

        it('writes 1 for falsy values', () => {
          output = getParams({ min_doc_count: '' });
          expect(output).toHaveProperty('min_doc_count', 1);

          output = getParams({ min_doc_count: null });
          expect(output).toHaveProperty('min_doc_count', 1);

          output = getParams({ min_doc_count: undefined });
          expect(output).toHaveProperty('min_doc_count', 1);
        });
      });

      describe('extended_bounds', function() {
        it('does not write when only eb.min is set', function() {
          const output = getParams({
|
||||
has_extended_bounds: true,
|
||||
extended_bounds: { min: 0 },
|
||||
});
|
||||
expect(output).not.toHaveProperty('extended_bounds');
|
||||
});
|
||||
|
||||
it('does not write when only eb.max is set', function() {
|
||||
const output = getParams({
|
||||
has_extended_bounds: true,
|
||||
extended_bounds: { max: 0 },
|
||||
});
|
||||
|
||||
expect(output).not.toHaveProperty('extended_bounds');
|
||||
});
|
||||
|
||||
it('writes when both eb.min and eb.max are set', function() {
|
||||
const output = getParams({
|
||||
has_extended_bounds: true,
|
||||
extended_bounds: { min: 99, max: 100 },
|
||||
});
|
||||
|
||||
expect(output.extended_bounds).toHaveProperty('min', 99);
|
||||
expect(output.extended_bounds).toHaveProperty('max', 100);
|
||||
});
|
||||
|
||||
it('does not write when nothing is set', function() {
|
||||
const output = getParams({
|
||||
has_extended_bounds: true,
|
||||
extended_bounds: {},
|
||||
});
|
||||
|
||||
expect(output).not.toHaveProperty('extended_bounds');
|
||||
});
|
||||
|
||||
it('does not write when has_extended_bounds is false', function() {
|
||||
const output = getParams({
|
||||
has_extended_bounds: false,
|
||||
extended_bounds: { min: 99, max: 100 },
|
||||
});
|
||||
|
||||
expect(output).not.toHaveProperty('extended_bounds');
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
|
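The extended_bounds cases above reduce to a single rule: bounds are only written when the editor flag is on and both endpoints are present. A minimal standalone sketch of that rule, assuming a hypothetical `writeExtendedBounds` helper (this is not the actual Kibana write handler):

interface ExtendedBounds {
  min?: number;
  max?: number;
}

// Hypothetical helper mirroring the behaviour asserted by the tests above:
// only emit extended_bounds when the flag is set and both endpoints exist.
function writeExtendedBounds(
  hasExtendedBounds: boolean,
  bounds: ExtendedBounds,
  output: Record<string, any>
): void {
  if (hasExtendedBounds && bounds.min != null && bounds.max != null) {
    output.extended_bounds = { min: bounds.min, max: bounds.max };
  }
}

// writeExtendedBounds(true, { min: 99, max: 100 }, params)  -> extended_bounds is written
// writeExtendedBounds(true, { min: 0 }, params)             -> nothing is written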
@@ -21,7 +21,7 @@ import _ from 'lodash';
 import { i18n } from '@kbn/i18n';
 import { toastNotifications } from 'ui/notify';

-import chrome from '../../chrome';
+import { npStart } from 'ui/new_platform';
 import { BucketAggType, IBucketAggConfig, BucketAggParam } from './_bucket_agg_type';
 import { createFilterHistogram } from './create_filter/histogram';
 import { NumberIntervalParamEditor } from '../../vis/editors/default/controls/number_interval';

@@ -32,7 +32,7 @@ import { AggConfig } from '../agg_config';
 import { KBN_FIELD_TYPES } from '../../../../../plugins/data/common';
 import { BUCKET_TYPES } from './bucket_agg_types';

-interface AutoBounds {
+export interface AutoBounds {
   min: number;
   max: number;
 }

@@ -42,7 +42,8 @@ export interface IBucketHistogramAggConfig extends IBucketAggConfig {
   getAutoBounds: () => AutoBounds;
 }

-const config = chrome.getUiSettingsClient();
+const getUIConfig = () => npStart.core.uiSettings;

 export const histogramBucketAgg = new BucketAggType<IBucketHistogramAggConfig>({
   name: BUCKET_TYPES.HISTOGRAM,
   title: i18n.translate('common.ui.aggTypes.buckets.histogramTitle', {

@@ -135,25 +136,30 @@ export const histogramBucketAgg = new BucketAggType<IBucketHistogramAggConfig>({
       if (interval <= 0) {
         interval = 1;
       }
+      const autoBounds = aggConfig.getAutoBounds();

       // ensure the interval does not create too many buckets and crash the browser
-      if (aggConfig.getAutoBounds()) {
-        const range = aggConfig.getAutoBounds().max - aggConfig.getAutoBounds().min;
+      if (autoBounds) {
+        const range = autoBounds.max - autoBounds.min;
         const bars = range / interval;

+        const config = getUIConfig();
         if (bars > config.get('histogram:maxBars')) {
           const minInterval = range / config.get('histogram:maxBars');

           // Round the interval by order of magnitude to provide clean intervals.
           // Always round the interval up so there will always be fewer buckets than histogram:maxBars.
           const orderOfMagnitude = Math.pow(10, Math.floor(Math.log10(minInterval)));
           let roundInterval = orderOfMagnitude;

           while (roundInterval < minInterval) {
             roundInterval += orderOfMagnitude;
           }
           interval = roundInterval;
         }
       }

       const base = aggConfig.params.intervalBase;

       if (base) {
         if (interval < base) {
           // In case the specified interval is below the base, just increase it to its base
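Combined with the intervalBase expectations in the tests, the scaled interval can be reproduced with a short standalone sketch. The order-of-magnitude rounding follows the hunk above; the base snapping is inferred from the test expectations, so treat this as illustrative rather than the exact Kibana implementation:

// Cap the number of buckets at maxBars, round the interval up to a clean
// order-of-magnitude step, then snap it to a multiple of intervalBase.
function scaleInterval(
  interval: number,
  range: number,
  maxBars: number,
  intervalBase?: number
): number {
  if (range / interval > maxBars) {
    const minInterval = range / maxBars;
    // round up in steps of the interval's order of magnitude
    const orderOfMagnitude = Math.pow(10, Math.floor(Math.log10(minInterval)));
    let rounded = orderOfMagnitude;
    while (rounded < minInterval) {
      rounded += orderOfMagnitude;
    }
    interval = rounded;
  }

  if (intervalBase) {
    // below the base: raise to the base; otherwise snap to the nearest multiple
    interval =
      interval < intervalBase ? intervalBase : Math.round(interval / intervalBase) * intervalBase;
  }

  return interval;
}

// scaleInterval(5, 10000, 5)      -> 2000 (capped by histogram:maxBars)
// scaleInterval(46, 0, 1000, 10)  -> 50   (range 0 = no auto bounds; snapped to the base)
// scaleInterval(5, 1000, 100, 6)  -> 12   (scaled to 10 by maxBars, then snapped to 6)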
95 src/legacy/ui/public/agg_types/buckets/range.test.ts Normal file
@@ -0,0 +1,95 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import { AggConfigs } from '../agg_configs';
import { BUCKET_TYPES } from './bucket_agg_types';
import { NumberFormat } from '../../../../../plugins/data/common/';

jest.mock('ui/new_platform');

const buckets = [
  {
    to: 1024,
    to_as_string: '1024.0',
    doc_count: 20904,
  },
  {
    from: 1024,
    from_as_string: '1024.0',
    to: 2560,
    to_as_string: '2560.0',
    doc_count: 23358,
  },
  {
    from: 2560,
    from_as_string: '2560.0',
    doc_count: 174250,
  },
];

describe('Range Agg', () => {
  const getAggConfigs = () => {
    const field = {
      name: 'bytes',
      format: new NumberFormat(
        {
          pattern: '0,0.[000] b',
        },
        () => {}
      ),
    };

    const indexPattern = {
      id: '1234',
      title: 'logstash-*',
      fields: {
        getByName: () => field,
        filter: () => [field],
      },
    } as any;

    return new AggConfigs(
      indexPattern,
      [
        {
          type: BUCKET_TYPES.RANGE,
          schema: 'segment',
          params: {
            field: 'bytes',
            ranges: [{ from: 0, to: 1000 }, { from: 1000, to: 2000 }],
          },
        },
      ],
      null
    );
  };

  describe('formatting', () => {
    it('formats bucket keys properly', () => {
      const aggConfigs = getAggConfigs();
      const agg = aggConfigs.aggs[0];

      const format = (val: any) => agg.fieldFormatter()(agg.getKey(val));

      expect(format(buckets[0])).toBe('≥ -∞ and < 1 KB');
      expect(format(buckets[1])).toBe('≥ 1 KB and < 2.5 KB');
      expect(format(buckets[2])).toBe('≥ 2.5 KB and < +∞');
    });
  });
});
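The label format asserted here can be read as: each endpoint of the bucket key goes through the field's number formatter, and an open end falls back to -∞ / +∞. A rough sketch under those assumptions (the `bytes` formatter in the usage comments is hypothetical, and this is not the actual fieldFormatter implementation):

interface RangeBucketKey {
  from?: number;
  to?: number;
}

// Build the '≥ x and < y' label the test expects from a range bucket key.
function formatRangeKey(key: RangeBucketKey, format: (value: number) => string): string {
  const gte = key.from == null ? '-∞' : format(key.from);
  const lt = key.to == null ? '+∞' : format(key.to);
  return `≥ ${gte} and < ${lt}`;
}

// formatRangeKey({ to: 1024 }, bytes)              -> '≥ -∞ and < 1 KB'
// formatRangeKey({ from: 1024, to: 2560 }, bytes)  -> '≥ 1 KB and < 2.5 KB'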
110 src/legacy/ui/public/agg_types/buckets/significant_terms.test.ts Normal file
@@ -0,0 +1,110 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import { AggConfigs } from '../index';
import { BUCKET_TYPES } from './bucket_agg_types';
import { significantTermsBucketAgg } from './significant_terms';

jest.mock('ui/new_platform');

describe('Significant Terms Agg', () => {
  describe('order agg editor UI', () => {
    describe('convert include/exclude from old format', () => {
      const getAggConfigs = (params: Record<string, any> = {}) => {
        const indexPattern = {
          id: '1234',
          title: 'logstash-*',
          fields: {
            getByName: () => field,
            filter: () => [field],
          },
        } as any;

        const field = {
          name: 'field',
          indexPattern,
        };

        return new AggConfigs(
          indexPattern,
          [
            {
              id: 'test',
              type: BUCKET_TYPES.SIGNIFICANT_TERMS,
              schema: 'segment',
              params,
            },
          ],
          null
        );
      };

      const testSerializeAndWrite = (aggs: AggConfigs) => {
        const agg = aggs.aggs[0];
        const { [BUCKET_TYPES.SIGNIFICANT_TERMS]: params } = agg.toDsl();

        expect(params.field).toBe('field');
        expect(params.include).toBe('404');
        expect(params.exclude).toBe('400');
      };

      it('should generate correct label', () => {
        const aggConfigs = getAggConfigs({
          size: 'SIZE',
          field: {
            name: 'FIELD',
          },
        });
        const label = significantTermsBucketAgg.makeLabel(aggConfigs.aggs[0]);

        expect(label).toBe('Top SIZE unusual terms in FIELD');
      });

      it('should not do anything with string type', () => {
        const aggConfigs = getAggConfigs({
          include: '404',
          exclude: '400',
          field: {
            name: 'field',
            type: 'string',
          },
        });

        testSerializeAndWrite(aggConfigs);
      });

      it('should convert object to string type', () => {
        const aggConfigs = getAggConfigs({
          include: {
            pattern: '404',
          },
          exclude: {
            pattern: '400',
          },
          field: {
            name: 'field',
            type: 'string',
          },
        });

        testSerializeAndWrite(aggConfigs);
      });
    });
  });
});
78 src/legacy/ui/public/agg_types/buckets/terms.test.ts Normal file
@@ -0,0 +1,78 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import { AggConfigs } from '../index';
import { BUCKET_TYPES } from './bucket_agg_types';

jest.mock('ui/new_platform');

describe('Terms Agg', () => {
  describe('order agg editor UI', () => {
    const getAggConfigs = (params: Record<string, any> = {}) => {
      const indexPattern = {
        id: '1234',
        title: 'logstash-*',
        fields: {
          getByName: () => field,
          filter: () => [field],
        },
      } as any;

      const field = {
        name: 'field',
        indexPattern,
      };

      return new AggConfigs(
        indexPattern,
        [
          {
            id: 'test',
            params,
            type: BUCKET_TYPES.TERMS,
          },
        ],
        null
      );
    };

    it('converts object to string type', function() {
      const aggConfigs = getAggConfigs({
        include: {
          pattern: '404',
        },
        exclude: {
          pattern: '400',
        },
        field: {
          name: 'field',
        },
        orderAgg: {
          type: 'count',
        },
      });

      const { [BUCKET_TYPES.TERMS]: params } = aggConfigs.aggs[0].toDsl();

      expect(params.field).toBe('field');
      expect(params.include).toBe('404');
      expect(params.exclude).toBe('400');
    });
  });
});
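Both the terms and significant_terms tests assert the same conversion: an include/exclude value stored in the legacy `{ pattern: ... }` object form is flattened to a plain string before it is written to the DSL. A minimal sketch of that conversion, for illustration only (not the actual Kibana serializer):

type IncludeExcludeValue = string | { pattern?: string };

// Flatten the legacy object form to the string form asserted by the tests
// ({ pattern: '404' } -> '404'); plain strings pass through unchanged.
function toIncludeExcludeString(value: IncludeExcludeValue): string | undefined {
  return typeof value === 'object' ? value.pattern : value;
}

// toIncludeExcludeString({ pattern: '404' }) -> '404'
// toIncludeExcludeString('400')              -> '400'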
@@ -27,7 +27,7 @@ describe('prop filter', () => {
     nameFilter = propFilter('name');
   });

-  function getObjects(...names: string[]) {
+  const getObjects = (...names: string[]) => {
     const count = new Map();
     const objects = [];

@@ -41,8 +41,9 @@ describe('prop filter', () => {
       });
       count.set(name, count.get(name) + 1);
     }

     return objects;
-  }
+  };

   it('returns list when no filters are provided', () => {
     const objects = getObjects('table', 'table', 'pie');
@@ -18,7 +18,7 @@
  */

 import { makeNestedLabel } from './make_nested_label';
-import { IMetricAggConfig } from 'ui/agg_types/metrics/metric_agg_type';
+import { IMetricAggConfig } from '../metric_agg_type';

 describe('metric agg make_nested_label', () => {
   const generateAggConfig = (metricLabel: string): IMetricAggConfig => {
@@ -22,8 +22,8 @@ import { derivativeMetricAgg } from './derivative';
 import { cumulativeSumMetricAgg } from './cumulative_sum';
 import { movingAvgMetricAgg } from './moving_avg';
 import { serialDiffMetricAgg } from './serial_diff';
-import { AggConfigs } from 'ui/agg_types';
-import { IMetricAggConfig, MetricAggType } from 'ui/agg_types/metrics/metric_agg_type';
+import { AggConfigs } from '../agg_configs';
+import { IMetricAggConfig, MetricAggType } from './metric_agg_type';

 jest.mock('../../vis/editors/default/schemas', () => {
   class MockedSchemas {
@@ -23,8 +23,8 @@ import { bucketAvgMetricAgg } from './bucket_avg';
 import { bucketMinMetricAgg } from './bucket_min';
 import { bucketMaxMetricAgg } from './bucket_max';

-import { AggConfigs } from 'ui/agg_types';
-import { IMetricAggConfig, MetricAggType } from 'ui/agg_types/metrics/metric_agg_type';
+import { AggConfigs } from '../agg_configs';
+import { IMetricAggConfig, MetricAggType } from './metric_agg_type';

 jest.mock('../../vis/editors/default/schemas', () => {
   class MockedSchemas {
@@ -18,8 +18,8 @@
  */

 import { IStdDevAggConfig, stdDeviationMetricAgg } from './std_deviation';
-import { AggConfigs } from 'ui/agg_types';
-import { METRIC_TYPES } from 'ui/agg_types/metrics/metric_agg_types';
+import { AggConfigs } from '../agg_configs';
+import { METRIC_TYPES } from './metric_agg_types';

 jest.mock('ui/new_platform');
@@ -19,8 +19,8 @@

 import { dropRight, last } from 'lodash';
 import { topHitMetricAgg } from './top_hit';
-import { AggConfigs } from 'ui/agg_types';
-import { IMetricAggConfig } from 'ui/agg_types/metrics/metric_agg_type';
+import { AggConfigs } from '../agg_configs';
+import { IMetricAggConfig } from './metric_agg_type';
 import { KBN_FIELD_TYPES } from '../../../../../plugins/data/common';

 jest.mock('ui/new_platform');
@@ -19,7 +19,7 @@

 import { BaseParamType } from './base';
 import { JsonParamType } from './json';
-import { AggConfig } from 'ui/agg_types';
+import { AggConfig } from '../agg_config';

 jest.mock('ui/new_platform');

@@ -28,13 +28,12 @@ describe('JSON', function() {
   let aggConfig: AggConfig;
   let output: Record<string, any>;

-  function initAggParam(config: Record<string, any> = {}) {
-    return new JsonParamType({
+  const initAggParam = (config: Record<string, any> = {}) =>
+    new JsonParamType({
       ...config,
       type: 'json',
       name: paramName,
     });
-  }

   beforeEach(function() {
     aggConfig = { params: {} } as AggConfig;
@@ -19,7 +19,7 @@

 import { BaseParamType } from './base';
 import { StringParamType } from './string';
-import { AggConfig } from 'ui/agg_types';
+import { AggConfig } from '../agg_config';

 jest.mock('ui/new_platform');

@@ -28,13 +28,12 @@ describe('String', function() {
   let aggConfig: AggConfig;
   let output: Record<string, any>;

-  function initAggParam(config: Record<string, any> = {}) {
-    return new StringParamType({
+  const initAggParam = (config: Record<string, any> = {}) =>
+    new StringParamType({
       ...config,
       type: 'string',
       name: paramName,
     });
-  }

   beforeEach(() => {
     aggConfig = { params: {} } as AggConfig;