mirror of
https://github.com/elastic/kibana.git
synced 2025-04-23 17:28:26 -04:00
[Visualizations] Navigate to lens agg based vis library tests. (#141353)
* Added tests for convert_to_lens lib at vis_types. Co-authored-by: Uladzislau Lasitsa <vlad.lasitsa@gmail.com>
This commit is contained in:
parent
4bab191faf
commit
94fe1e6353
22 changed files with 3484 additions and 5 deletions
|
@ -0,0 +1,267 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { stubLogstashDataView } from '@kbn/data-views-plugin/common/data_view.stub';
|
||||
import { BUCKET_TYPES, METRIC_TYPES } from '@kbn/data-plugin/common';
|
||||
import { convertBucketToColumns } from '.';
|
||||
import { DateHistogramColumn, FiltersColumn, RangeColumn, TermsColumn } from '../../types';
|
||||
import { AggBasedColumn, SchemaConfig } from '../../..';
|
||||
|
||||
const mockConvertToDateHistogramColumn = jest.fn();
|
||||
const mockConvertToFiltersColumn = jest.fn();
|
||||
const mockConvertToTermsColumn = jest.fn();
|
||||
const mockConvertToRangeColumn = jest.fn();
|
||||
|
||||
jest.mock('../convert', () => ({
|
||||
convertToDateHistogramColumn: jest.fn(() => mockConvertToDateHistogramColumn()),
|
||||
convertToFiltersColumn: jest.fn(() => mockConvertToFiltersColumn()),
|
||||
convertToTermsColumn: jest.fn(() => mockConvertToTermsColumn()),
|
||||
convertToRangeColumn: jest.fn(() => mockConvertToRangeColumn()),
|
||||
}));
|
||||
|
||||
describe('convertBucketToColumns', () => {
|
||||
const field = stubLogstashDataView.fields[0].name;
|
||||
const dateField = stubLogstashDataView.fields.find((f) => f.type === 'date')!.name;
|
||||
const bucketAggs: SchemaConfig[] = [
|
||||
{
|
||||
accessor: 0,
|
||||
label: '',
|
||||
format: {
|
||||
id: undefined,
|
||||
params: undefined,
|
||||
},
|
||||
params: {},
|
||||
aggType: BUCKET_TYPES.FILTERS,
|
||||
aggParams: {
|
||||
filters: [],
|
||||
},
|
||||
},
|
||||
{
|
||||
accessor: 0,
|
||||
label: '',
|
||||
format: {
|
||||
id: undefined,
|
||||
params: undefined,
|
||||
},
|
||||
params: {},
|
||||
aggType: BUCKET_TYPES.DATE_HISTOGRAM,
|
||||
aggParams: {
|
||||
field,
|
||||
},
|
||||
},
|
||||
{
|
||||
accessor: 0,
|
||||
label: '',
|
||||
format: {
|
||||
id: undefined,
|
||||
params: undefined,
|
||||
},
|
||||
params: {},
|
||||
aggType: BUCKET_TYPES.TERMS,
|
||||
aggParams: {
|
||||
field,
|
||||
orderBy: '_key',
|
||||
},
|
||||
},
|
||||
{
|
||||
accessor: 0,
|
||||
label: '',
|
||||
format: {
|
||||
id: undefined,
|
||||
params: undefined,
|
||||
},
|
||||
params: {},
|
||||
aggType: BUCKET_TYPES.TERMS,
|
||||
aggParams: {
|
||||
field: dateField,
|
||||
orderBy: '_key',
|
||||
},
|
||||
},
|
||||
{
|
||||
accessor: 0,
|
||||
label: '',
|
||||
format: {
|
||||
id: undefined,
|
||||
params: undefined,
|
||||
},
|
||||
params: {},
|
||||
aggType: BUCKET_TYPES.HISTOGRAM,
|
||||
aggParams: {
|
||||
field,
|
||||
interval: '1h',
|
||||
},
|
||||
},
|
||||
{
|
||||
accessor: 0,
|
||||
label: '',
|
||||
format: {
|
||||
id: undefined,
|
||||
params: undefined,
|
||||
},
|
||||
params: {},
|
||||
aggType: BUCKET_TYPES.RANGE,
|
||||
aggParams: {
|
||||
field,
|
||||
},
|
||||
},
|
||||
{
|
||||
accessor: 0,
|
||||
label: '',
|
||||
format: {
|
||||
id: undefined,
|
||||
params: undefined,
|
||||
},
|
||||
params: {},
|
||||
aggType: BUCKET_TYPES.DATE_RANGE,
|
||||
aggParams: {
|
||||
field,
|
||||
},
|
||||
},
|
||||
];
|
||||
const aggs: Array<SchemaConfig<METRIC_TYPES>> = [
|
||||
{
|
||||
accessor: 0,
|
||||
label: '',
|
||||
format: {
|
||||
id: undefined,
|
||||
params: undefined,
|
||||
},
|
||||
params: {},
|
||||
aggType: METRIC_TYPES.AVG,
|
||||
aggParams: {
|
||||
field,
|
||||
},
|
||||
},
|
||||
];
|
||||
const metricColumns: AggBasedColumn[] = [
|
||||
{
|
||||
columnId: 'column-1',
|
||||
operationType: 'average',
|
||||
isBucketed: false,
|
||||
isSplit: false,
|
||||
sourceField: field,
|
||||
dataType: 'number',
|
||||
params: {},
|
||||
meta: {
|
||||
aggId: '1',
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
afterEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
test.each<
|
||||
[
|
||||
string,
|
||||
Parameters<typeof convertBucketToColumns>,
|
||||
() => void,
|
||||
Partial<TermsColumn | DateHistogramColumn | FiltersColumn | RangeColumn> | null
|
||||
]
|
||||
>([
|
||||
[
|
||||
'null if bucket agg type is not supported',
|
||||
[{ dataView: stubLogstashDataView, agg: bucketAggs[6], aggs, metricColumns }],
|
||||
() => {},
|
||||
null,
|
||||
],
|
||||
[
|
||||
'null if bucket agg does not have aggParams',
|
||||
[
|
||||
{
|
||||
dataView: stubLogstashDataView,
|
||||
agg: { ...bucketAggs[0], aggParams: undefined },
|
||||
aggs,
|
||||
metricColumns,
|
||||
},
|
||||
],
|
||||
() => {},
|
||||
null,
|
||||
],
|
||||
[
|
||||
'filters column if bucket agg is valid filters agg',
|
||||
[{ dataView: stubLogstashDataView, agg: bucketAggs[0], aggs, metricColumns }],
|
||||
() => {
|
||||
mockConvertToFiltersColumn.mockReturnValue({
|
||||
operationType: 'filters',
|
||||
});
|
||||
},
|
||||
{
|
||||
operationType: 'filters',
|
||||
},
|
||||
],
|
||||
[
|
||||
'date histogram column if bucket agg is valid date histogram agg',
|
||||
[{ dataView: stubLogstashDataView, agg: bucketAggs[1], aggs, metricColumns }],
|
||||
() => {
|
||||
mockConvertToDateHistogramColumn.mockReturnValue({
|
||||
operationType: 'date_histogram',
|
||||
});
|
||||
},
|
||||
{
|
||||
operationType: 'date_histogram',
|
||||
},
|
||||
],
|
||||
[
|
||||
'date histogram column if bucket agg is valid terms agg with date field',
|
||||
[{ dataView: stubLogstashDataView, agg: bucketAggs[3], aggs, metricColumns }],
|
||||
() => {
|
||||
mockConvertToDateHistogramColumn.mockReturnValue({
|
||||
operationType: 'date_histogram',
|
||||
});
|
||||
},
|
||||
{
|
||||
operationType: 'date_histogram',
|
||||
},
|
||||
],
|
||||
[
|
||||
'terms column if bucket agg is valid terms agg with no date field',
|
||||
[{ dataView: stubLogstashDataView, agg: bucketAggs[2], aggs, metricColumns }],
|
||||
() => {
|
||||
mockConvertToTermsColumn.mockReturnValue({
|
||||
operationType: 'terms',
|
||||
});
|
||||
},
|
||||
{
|
||||
operationType: 'terms',
|
||||
},
|
||||
],
|
||||
[
|
||||
'range column if bucket agg is valid histogram agg',
|
||||
[{ dataView: stubLogstashDataView, agg: bucketAggs[4], aggs, metricColumns }],
|
||||
() => {
|
||||
mockConvertToRangeColumn.mockReturnValue({
|
||||
operationType: 'range',
|
||||
});
|
||||
},
|
||||
{
|
||||
operationType: 'range',
|
||||
},
|
||||
],
|
||||
[
|
||||
'range column if bucket agg is valid range agg',
|
||||
[{ dataView: stubLogstashDataView, agg: bucketAggs[5], aggs, metricColumns }],
|
||||
() => {
|
||||
mockConvertToRangeColumn.mockReturnValue({
|
||||
operationType: 'range',
|
||||
});
|
||||
},
|
||||
{
|
||||
operationType: 'range',
|
||||
},
|
||||
],
|
||||
])('should return %s', (_, input, actions, expected) => {
|
||||
actions();
|
||||
if (expected === null) {
|
||||
expect(convertBucketToColumns(...input)).toBeNull();
|
||||
} else {
|
||||
expect(convertBucketToColumns(...input)).toEqual(expect.objectContaining(expected));
|
||||
}
|
||||
});
|
||||
});
|
|
@ -0,0 +1,105 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { METRIC_TYPES } from '@kbn/data-plugin/public';
|
||||
import { stubLogstashDataView } from '@kbn/data-views-plugin/common/data_view.stub';
|
||||
import { SchemaConfig } from '../../..';
|
||||
import { createColumn } from './column';
|
||||
import { GeneralColumnWithMeta } from './types';
|
||||
|
||||
describe('createColumn', () => {
|
||||
const field = stubLogstashDataView.fields[0];
|
||||
const aggId = `some-id`;
|
||||
const customLabel = 'some-custom-label';
|
||||
const label = 'some label';
|
||||
const timeShift = '1h';
|
||||
|
||||
const agg: SchemaConfig<METRIC_TYPES.AVG> = {
|
||||
accessor: 0,
|
||||
label,
|
||||
format: {
|
||||
id: undefined,
|
||||
params: undefined,
|
||||
},
|
||||
params: {},
|
||||
aggType: METRIC_TYPES.AVG,
|
||||
aggId,
|
||||
aggParams: {
|
||||
field: field.name,
|
||||
},
|
||||
};
|
||||
|
||||
const aggWithCustomLabel: SchemaConfig<METRIC_TYPES.AVG> = {
|
||||
...agg,
|
||||
aggParams: {
|
||||
field: field.name,
|
||||
customLabel,
|
||||
},
|
||||
};
|
||||
|
||||
const aggWithTimeShift: SchemaConfig<METRIC_TYPES.AVG> = {
|
||||
...agg,
|
||||
aggParams: {
|
||||
field: field.name,
|
||||
timeShift,
|
||||
},
|
||||
};
|
||||
|
||||
const extraColumnFields = { isBucketed: true, isSplit: true, reducedTimeRange: '1m' };
|
||||
|
||||
test.each<[string, Parameters<typeof createColumn>, Partial<GeneralColumnWithMeta>]>([
|
||||
[
|
||||
'with default params',
|
||||
[agg, field],
|
||||
{
|
||||
dataType: 'number',
|
||||
isBucketed: false,
|
||||
isSplit: false,
|
||||
label,
|
||||
meta: { aggId },
|
||||
},
|
||||
],
|
||||
[
|
||||
'with custom label',
|
||||
[aggWithCustomLabel, field],
|
||||
{
|
||||
dataType: 'number',
|
||||
isBucketed: false,
|
||||
isSplit: false,
|
||||
label: customLabel,
|
||||
meta: { aggId },
|
||||
},
|
||||
],
|
||||
[
|
||||
'with timeShift',
|
||||
[aggWithTimeShift, field],
|
||||
{
|
||||
dataType: 'number',
|
||||
isBucketed: false,
|
||||
isSplit: false,
|
||||
label,
|
||||
meta: { aggId },
|
||||
timeShift,
|
||||
},
|
||||
],
|
||||
[
|
||||
'with extra column fields',
|
||||
[agg, field, extraColumnFields],
|
||||
{
|
||||
dataType: 'number',
|
||||
isBucketed: extraColumnFields.isBucketed,
|
||||
isSplit: extraColumnFields.isSplit,
|
||||
reducedTimeRange: extraColumnFields.reducedTimeRange,
|
||||
label,
|
||||
meta: { aggId },
|
||||
},
|
||||
],
|
||||
])('should create column by agg %s', (_, input, expected) => {
|
||||
expect(createColumn(...input)).toEqual(expect.objectContaining(expected));
|
||||
});
|
||||
});
|
|
@ -0,0 +1,90 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { stubLogstashDataView } from '@kbn/data-views-plugin/common/data_view.stub';
|
||||
import { AggParamsDateHistogram } from '@kbn/data-plugin/common';
|
||||
import { convertToDateHistogramColumn } from './date_histogram';
|
||||
import { DateHistogramColumn } from './types';
|
||||
import { DataType } from '../../types';
|
||||
|
||||
describe('convertToDateHistogramColumn', () => {
|
||||
const aggId = `some-id`;
|
||||
const timeShift = '1h';
|
||||
const aggParams: AggParamsDateHistogram = {
|
||||
interval: '1d',
|
||||
drop_partials: true,
|
||||
field: stubLogstashDataView.fields[0].name,
|
||||
};
|
||||
|
||||
test.each<
|
||||
[string, Parameters<typeof convertToDateHistogramColumn>, Partial<DateHistogramColumn> | null]
|
||||
>([
|
||||
[
|
||||
'date histogram column if field is provided',
|
||||
[aggId, aggParams, stubLogstashDataView, false, false],
|
||||
{
|
||||
dataType: stubLogstashDataView.fields[0].type as DataType,
|
||||
isBucketed: true,
|
||||
isSplit: false,
|
||||
timeShift: undefined,
|
||||
sourceField: stubLogstashDataView.fields[0].name,
|
||||
meta: { aggId },
|
||||
params: {
|
||||
interval: '1d',
|
||||
includeEmptyRows: true,
|
||||
dropPartials: true,
|
||||
},
|
||||
},
|
||||
],
|
||||
[
|
||||
'null if field is not provided',
|
||||
[aggId, { interval: '1d', field: undefined }, stubLogstashDataView, false, false],
|
||||
null,
|
||||
],
|
||||
[
|
||||
'date histogram column with isSplit and timeShift if specified',
|
||||
[aggId, { ...aggParams, timeShift }, stubLogstashDataView, true, false],
|
||||
{
|
||||
dataType: stubLogstashDataView.fields[0].type as DataType,
|
||||
isBucketed: true,
|
||||
isSplit: true,
|
||||
sourceField: stubLogstashDataView.fields[0].name,
|
||||
timeShift,
|
||||
meta: { aggId },
|
||||
params: {
|
||||
interval: '1d',
|
||||
includeEmptyRows: true,
|
||||
dropPartials: true,
|
||||
},
|
||||
},
|
||||
],
|
||||
[
|
||||
'date histogram column with dropEmptyRowsInDateHistogram if specified',
|
||||
[aggId, aggParams, stubLogstashDataView, true, true],
|
||||
{
|
||||
dataType: stubLogstashDataView.fields[0].type as DataType,
|
||||
isBucketed: true,
|
||||
isSplit: true,
|
||||
sourceField: stubLogstashDataView.fields[0].name,
|
||||
timeShift: undefined,
|
||||
meta: { aggId },
|
||||
params: {
|
||||
interval: '1d',
|
||||
includeEmptyRows: false,
|
||||
dropPartials: true,
|
||||
},
|
||||
},
|
||||
],
|
||||
])('should return %s', (_, input, expected) => {
|
||||
if (expected === null) {
|
||||
expect(convertToDateHistogramColumn(...input)).toBeNull();
|
||||
} else {
|
||||
expect(convertToDateHistogramColumn(...input)).toEqual(expect.objectContaining(expected));
|
||||
}
|
||||
});
|
||||
});
|
|
@ -0,0 +1,54 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { AggParamsFilters } from '@kbn/data-plugin/common';
|
||||
import { convertToFiltersColumn } from './filters';
|
||||
import { FiltersColumn } from './types';
|
||||
|
||||
describe('convertToFiltersColumn', () => {
|
||||
const aggId = `some-id`;
|
||||
const timeShift = '1h';
|
||||
const filters = [{ input: { language: 'lucene', query: 'some other query' }, label: 'split' }];
|
||||
const aggParams: AggParamsFilters = {
|
||||
filters,
|
||||
};
|
||||
|
||||
test.each<[string, Parameters<typeof convertToFiltersColumn>, Partial<FiltersColumn> | null]>([
|
||||
[
|
||||
'filters column if filters are provided',
|
||||
[aggId, aggParams],
|
||||
{
|
||||
dataType: 'string',
|
||||
isBucketed: true,
|
||||
isSplit: false,
|
||||
timeShift: undefined,
|
||||
meta: { aggId },
|
||||
params: { filters: aggParams.filters! },
|
||||
},
|
||||
],
|
||||
['null if filters are not provided', [aggId, {}], null],
|
||||
[
|
||||
'filters column with isSplit and timeShift if specified',
|
||||
[aggId, { ...aggParams, timeShift }, true],
|
||||
{
|
||||
dataType: 'string',
|
||||
isBucketed: true,
|
||||
isSplit: true,
|
||||
timeShift,
|
||||
meta: { aggId },
|
||||
params: { filters: aggParams.filters! },
|
||||
},
|
||||
],
|
||||
])('should return %s', (_, input, expected) => {
|
||||
if (expected === null) {
|
||||
expect(convertToFiltersColumn(...input)).toBeNull();
|
||||
} else {
|
||||
expect(convertToFiltersColumn(...input)).toEqual(expect.objectContaining(expected));
|
||||
}
|
||||
});
|
||||
});
|
|
@ -0,0 +1,54 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { IAggConfig, METRIC_TYPES } from '@kbn/data-plugin/common';
|
||||
import { stubLogstashDataView } from '@kbn/data-views-plugin/common/data_view.stub';
|
||||
import { SchemaConfig } from '../../..';
|
||||
import { createFormulaColumn } from './formula';
|
||||
|
||||
describe('createFormulaColumn', () => {
|
||||
const aggId = `some-id`;
|
||||
const label = 'some label';
|
||||
const agg: SchemaConfig<METRIC_TYPES.CUMULATIVE_SUM> = {
|
||||
accessor: 0,
|
||||
label,
|
||||
format: {
|
||||
id: undefined,
|
||||
params: undefined,
|
||||
},
|
||||
params: {},
|
||||
aggType: METRIC_TYPES.CUMULATIVE_SUM,
|
||||
aggId,
|
||||
aggParams: {
|
||||
customMetric: {
|
||||
id: 'some-id-metric',
|
||||
enabled: true,
|
||||
type: { name: METRIC_TYPES.AVG },
|
||||
params: {
|
||||
field: stubLogstashDataView.fields[0],
|
||||
},
|
||||
} as IAggConfig,
|
||||
},
|
||||
};
|
||||
test('should return formula column', () => {
|
||||
expect(createFormulaColumn('test-formula', agg)).toEqual(
|
||||
expect.objectContaining({
|
||||
isBucketed: false,
|
||||
isSplit: false,
|
||||
meta: {
|
||||
aggId,
|
||||
},
|
||||
operationType: 'formula',
|
||||
params: {
|
||||
formula: 'test-formula',
|
||||
},
|
||||
references: [],
|
||||
})
|
||||
);
|
||||
});
|
||||
});
|
|
@ -0,0 +1,120 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { AggParamsTopHit, METRIC_TYPES } from '@kbn/data-plugin/common';
|
||||
import { stubLogstashDataView } from '@kbn/data-views-plugin/common/data_view.stub';
|
||||
import { SchemaConfig } from '../../..';
|
||||
import { convertToLastValueColumn } from './last_value';
|
||||
import { FiltersColumn } from './types';
|
||||
|
||||
const mockGetFieldNameFromField = jest.fn();
|
||||
const mockGetFieldByName = jest.fn();
|
||||
const mockGetLabel = jest.fn();
|
||||
|
||||
jest.mock('../utils', () => ({
|
||||
getFieldNameFromField: jest.fn(() => mockGetFieldNameFromField()),
|
||||
getLabel: jest.fn(() => mockGetLabel()),
|
||||
}));
|
||||
|
||||
describe('convertToLastValueColumn', () => {
|
||||
const dataView = stubLogstashDataView;
|
||||
const sortField = dataView.fields[0];
|
||||
|
||||
const topHitAggParams: AggParamsTopHit = {
|
||||
sortOrder: {
|
||||
value: 'desc',
|
||||
text: 'some text',
|
||||
},
|
||||
sortField,
|
||||
field: '',
|
||||
aggregate: 'min',
|
||||
size: 1,
|
||||
};
|
||||
|
||||
const topHitAgg: SchemaConfig<METRIC_TYPES.TOP_HITS> = {
|
||||
accessor: 0,
|
||||
label: '',
|
||||
format: {
|
||||
id: undefined,
|
||||
params: undefined,
|
||||
},
|
||||
params: {},
|
||||
aggType: METRIC_TYPES.TOP_HITS,
|
||||
aggParams: topHitAggParams,
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
mockGetFieldNameFromField.mockReturnValue(dataView.fields[0]);
|
||||
mockGetFieldByName.mockReturnValue(dataView.fields[0]);
|
||||
mockGetLabel.mockReturnValue('someLabel');
|
||||
dataView.getFieldByName = mockGetFieldByName;
|
||||
});
|
||||
|
||||
test.each<[string, Parameters<typeof convertToLastValueColumn>, Partial<FiltersColumn> | null]>([
|
||||
[
|
||||
'null if top hits size is more than 1',
|
||||
[{ agg: { ...topHitAgg, aggParams: { ...topHitAgg.aggParams!, size: 2 } }, dataView }],
|
||||
null,
|
||||
],
|
||||
[
|
||||
'null if top hits sord order is not desc',
|
||||
[
|
||||
{
|
||||
agg: {
|
||||
...topHitAgg,
|
||||
aggParams: {
|
||||
...topHitAgg.aggParams!,
|
||||
sortOrder: { ...topHitAgg.aggParams!.sortOrder!, value: 'asc' },
|
||||
},
|
||||
},
|
||||
dataView,
|
||||
},
|
||||
],
|
||||
null,
|
||||
],
|
||||
])('should return %s', (_, input, expected) => {
|
||||
if (expected === null) {
|
||||
expect(convertToLastValueColumn(...input)).toBeNull();
|
||||
} else {
|
||||
expect(convertToLastValueColumn(...input)).toEqual(expect.objectContaining(expected));
|
||||
}
|
||||
});
|
||||
|
||||
test('should skip if top hit field is not specified', () => {
|
||||
mockGetFieldNameFromField.mockReturnValue(null);
|
||||
expect(convertToLastValueColumn({ agg: topHitAgg, dataView })).toBeNull();
|
||||
expect(mockGetFieldNameFromField).toBeCalledTimes(1);
|
||||
expect(dataView.getFieldByName).toBeCalledTimes(0);
|
||||
});
|
||||
|
||||
test('should skip if top hit field is not present in index pattern', () => {
|
||||
mockGetFieldByName.mockReturnValue(null);
|
||||
dataView.getFieldByName = mockGetFieldByName;
|
||||
|
||||
expect(convertToLastValueColumn({ agg: topHitAgg, dataView })).toBeNull();
|
||||
expect(mockGetFieldNameFromField).toBeCalledTimes(1);
|
||||
expect(dataView.getFieldByName).toBeCalledTimes(1);
|
||||
expect(mockGetLabel).toBeCalledTimes(0);
|
||||
});
|
||||
|
||||
test('should return top hit column if top hit field is not present in index pattern', () => {
|
||||
expect(convertToLastValueColumn({ agg: topHitAgg, dataView })).toEqual(
|
||||
expect.objectContaining({
|
||||
dataType: 'number',
|
||||
label: 'someLabel',
|
||||
operationType: 'last_value',
|
||||
params: { showArrayValues: true, sortField: 'bytes' },
|
||||
sourceField: 'bytes',
|
||||
})
|
||||
);
|
||||
expect(mockGetFieldNameFromField).toBeCalledTimes(1);
|
||||
expect(dataView.getFieldByName).toBeCalledTimes(1);
|
||||
expect(mockGetLabel).toBeCalledTimes(1);
|
||||
});
|
||||
});
|
|
@ -0,0 +1,92 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { METRIC_TYPES } from '@kbn/data-plugin/common';
|
||||
import { stubLogstashDataView } from '@kbn/data-views-plugin/common/data_view.stub';
|
||||
import { SchemaConfig } from '../../..';
|
||||
import { convertMetricAggregationColumnWithoutSpecialParams } from './metric';
|
||||
import { SUPPORTED_METRICS } from './supported_metrics';
|
||||
|
||||
const mockGetFieldByName = jest.fn();
|
||||
|
||||
describe('convertToLastValueColumn', () => {
|
||||
const dataView = stubLogstashDataView;
|
||||
|
||||
const agg: SchemaConfig<METRIC_TYPES.AVG> = {
|
||||
accessor: 0,
|
||||
label: '',
|
||||
format: {
|
||||
id: undefined,
|
||||
params: undefined,
|
||||
},
|
||||
params: {},
|
||||
aggType: METRIC_TYPES.AVG,
|
||||
aggParams: {
|
||||
field: dataView.fields[0].displayName,
|
||||
},
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
mockGetFieldByName.mockReturnValue(dataView.fields[0]);
|
||||
dataView.getFieldByName = mockGetFieldByName;
|
||||
});
|
||||
|
||||
test('should return null metric is not supported', () => {
|
||||
expect(
|
||||
convertMetricAggregationColumnWithoutSpecialParams(SUPPORTED_METRICS[METRIC_TYPES.TOP_HITS], {
|
||||
agg,
|
||||
dataView,
|
||||
})
|
||||
).toBeNull();
|
||||
});
|
||||
|
||||
test('should skip if field is not present and is required for the aggregation', () => {
|
||||
mockGetFieldByName.mockReturnValue(null);
|
||||
dataView.getFieldByName = mockGetFieldByName;
|
||||
|
||||
expect(
|
||||
convertMetricAggregationColumnWithoutSpecialParams(SUPPORTED_METRICS[METRIC_TYPES.AVG], {
|
||||
agg,
|
||||
dataView,
|
||||
})
|
||||
).toBeNull();
|
||||
expect(dataView.getFieldByName).toBeCalledTimes(1);
|
||||
});
|
||||
|
||||
test('should return column if field is not present and is not required for the aggregation', () => {
|
||||
mockGetFieldByName.mockReturnValue(null);
|
||||
dataView.getFieldByName = mockGetFieldByName;
|
||||
|
||||
expect(
|
||||
convertMetricAggregationColumnWithoutSpecialParams(SUPPORTED_METRICS[METRIC_TYPES.COUNT], {
|
||||
agg,
|
||||
dataView,
|
||||
})
|
||||
).toEqual(expect.objectContaining({ operationType: 'count' }));
|
||||
expect(dataView.getFieldByName).toBeCalledTimes(1);
|
||||
});
|
||||
|
||||
test('should return column if field is present and is required for the aggregation', () => {
|
||||
mockGetFieldByName.mockReturnValue(dataView.fields[0]);
|
||||
dataView.getFieldByName = mockGetFieldByName;
|
||||
|
||||
expect(
|
||||
convertMetricAggregationColumnWithoutSpecialParams(SUPPORTED_METRICS[METRIC_TYPES.AVG], {
|
||||
agg,
|
||||
dataView,
|
||||
})
|
||||
).toEqual(
|
||||
expect.objectContaining({
|
||||
dataType: 'number',
|
||||
operationType: 'average',
|
||||
})
|
||||
);
|
||||
expect(dataView.getFieldByName).toBeCalledTimes(1);
|
||||
});
|
||||
});
|
|
@ -0,0 +1,438 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { METRIC_TYPES } from '@kbn/data-plugin/common';
|
||||
import { stubLogstashDataView } from '@kbn/data-views-plugin/common/data_view.stub';
|
||||
import { FormulaColumn, AggBasedColumn } from './types';
|
||||
import { SchemaConfig } from '../../..';
|
||||
import {
|
||||
convertToOtherParentPipelineAggColumns,
|
||||
ParentPipelineAggColumn,
|
||||
convertToCumulativeSumAggColumn,
|
||||
} from './parent_pipeline';
|
||||
|
||||
const mockGetMetricFromParentPipelineAgg = jest.fn();
|
||||
const mockGetFormulaForPipelineAgg = jest.fn();
|
||||
const mockConvertMetricToColumns = jest.fn();
|
||||
const mockGetFieldByName = jest.fn();
|
||||
const mockConvertMetricAggregationColumnWithoutSpecialParams = jest.fn();
|
||||
|
||||
jest.mock('../utils', () => ({
|
||||
getMetricFromParentPipelineAgg: jest.fn(() => mockGetMetricFromParentPipelineAgg()),
|
||||
getLabel: jest.fn(() => 'label'),
|
||||
getFieldNameFromField: jest.fn(() => 'document'),
|
||||
}));
|
||||
|
||||
jest.mock('./metric', () => ({
|
||||
convertMetricAggregationColumnWithoutSpecialParams: jest.fn(() =>
|
||||
mockConvertMetricAggregationColumnWithoutSpecialParams()
|
||||
),
|
||||
}));
|
||||
|
||||
jest.mock('../metrics', () => ({
|
||||
getFormulaForPipelineAgg: jest.fn(() => mockGetFormulaForPipelineAgg()),
|
||||
convertMetricToColumns: jest.fn(() => mockConvertMetricToColumns()),
|
||||
}));
|
||||
|
||||
describe('convertToOtherParentPipelineAggColumns', () => {
|
||||
const field = stubLogstashDataView.fields[0].name;
|
||||
const aggs: Array<SchemaConfig<METRIC_TYPES>> = [
|
||||
{
|
||||
aggId: '1',
|
||||
aggType: METRIC_TYPES.AVG,
|
||||
aggParams: { field },
|
||||
accessor: 0,
|
||||
params: {},
|
||||
label: 'average',
|
||||
format: {},
|
||||
},
|
||||
{
|
||||
aggId: '1',
|
||||
aggType: METRIC_TYPES.MOVING_FN,
|
||||
aggParams: { metricAgg: '2' },
|
||||
accessor: 0,
|
||||
params: {},
|
||||
label: 'Moving Average of Average',
|
||||
format: {},
|
||||
},
|
||||
];
|
||||
|
||||
afterEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
test.each<
|
||||
[
|
||||
string,
|
||||
Parameters<typeof convertToOtherParentPipelineAggColumns>,
|
||||
() => void,
|
||||
Partial<FormulaColumn> | [Partial<ParentPipelineAggColumn>, Partial<AggBasedColumn>] | null
|
||||
]
|
||||
>([
|
||||
[
|
||||
'null if getMetricFromParentPipelineAgg returns null',
|
||||
[
|
||||
{
|
||||
dataView: stubLogstashDataView,
|
||||
aggs,
|
||||
agg: aggs[1] as SchemaConfig<METRIC_TYPES.MOVING_FN>,
|
||||
},
|
||||
],
|
||||
() => {
|
||||
mockGetMetricFromParentPipelineAgg.mockReturnValue(null);
|
||||
},
|
||||
null,
|
||||
],
|
||||
[
|
||||
'null if cutom metric of parent pipeline agg is not supported',
|
||||
[
|
||||
{
|
||||
dataView: stubLogstashDataView,
|
||||
aggs,
|
||||
agg: aggs[1] as SchemaConfig<METRIC_TYPES.MOVING_FN>,
|
||||
},
|
||||
],
|
||||
() => {
|
||||
mockGetMetricFromParentPipelineAgg.mockReturnValue({
|
||||
aggId: '2-metric',
|
||||
aggType: METRIC_TYPES.GEO_BOUNDS,
|
||||
});
|
||||
},
|
||||
null,
|
||||
],
|
||||
[
|
||||
'null if cutom metric of parent pipeline agg is sibling pipeline agg',
|
||||
[
|
||||
{
|
||||
dataView: stubLogstashDataView,
|
||||
aggs,
|
||||
agg: aggs[1] as SchemaConfig<METRIC_TYPES.MOVING_FN>,
|
||||
},
|
||||
],
|
||||
() => {
|
||||
mockGetMetricFromParentPipelineAgg.mockReturnValue({
|
||||
aggId: '2-metric',
|
||||
aggType: METRIC_TYPES.AVG_BUCKET,
|
||||
});
|
||||
},
|
||||
null,
|
||||
],
|
||||
[
|
||||
'null if cannot build formula if cutom metric of parent pipeline agg is parent pipeline agg',
|
||||
[
|
||||
{
|
||||
dataView: stubLogstashDataView,
|
||||
aggs,
|
||||
agg: aggs[1] as SchemaConfig<METRIC_TYPES.MOVING_FN>,
|
||||
},
|
||||
],
|
||||
() => {
|
||||
mockGetMetricFromParentPipelineAgg.mockReturnValue({
|
||||
aggId: '2-metric',
|
||||
aggType: METRIC_TYPES.MOVING_FN,
|
||||
});
|
||||
mockGetFormulaForPipelineAgg.mockReturnValue(null);
|
||||
},
|
||||
null,
|
||||
],
|
||||
[
|
||||
'formula column if cutom metric of parent pipeline agg is valid parent pipeline agg',
|
||||
[
|
||||
{
|
||||
dataView: stubLogstashDataView,
|
||||
aggs,
|
||||
agg: aggs[1] as SchemaConfig<METRIC_TYPES.MOVING_FN>,
|
||||
},
|
||||
],
|
||||
() => {
|
||||
mockGetMetricFromParentPipelineAgg.mockReturnValue({
|
||||
aggId: '2-metric',
|
||||
aggType: METRIC_TYPES.MOVING_FN,
|
||||
});
|
||||
mockGetFormulaForPipelineAgg.mockReturnValue('test-formula');
|
||||
},
|
||||
{
|
||||
operationType: 'formula',
|
||||
params: {
|
||||
formula: 'test-formula',
|
||||
},
|
||||
},
|
||||
],
|
||||
[
|
||||
'null if cutom metric of parent pipeline agg is invalid not pipeline agg',
|
||||
[
|
||||
{
|
||||
dataView: stubLogstashDataView,
|
||||
aggs,
|
||||
agg: aggs[1] as SchemaConfig<METRIC_TYPES.MOVING_FN>,
|
||||
},
|
||||
],
|
||||
() => {
|
||||
mockGetMetricFromParentPipelineAgg.mockReturnValue({
|
||||
aggId: '2-metric',
|
||||
aggType: METRIC_TYPES.AVG,
|
||||
});
|
||||
mockConvertMetricToColumns.mockReturnValue(null);
|
||||
},
|
||||
null,
|
||||
],
|
||||
[
|
||||
'parent pipeline and metric columns if cutom metric of parent pipeline agg is valid not pipeline agg',
|
||||
[
|
||||
{
|
||||
dataView: stubLogstashDataView,
|
||||
aggs,
|
||||
agg: aggs[1] as SchemaConfig<METRIC_TYPES.MOVING_FN>,
|
||||
},
|
||||
],
|
||||
() => {
|
||||
mockGetMetricFromParentPipelineAgg.mockReturnValue({
|
||||
aggId: '2-metric',
|
||||
aggType: METRIC_TYPES.AVG,
|
||||
});
|
||||
mockConvertMetricToColumns.mockReturnValue([
|
||||
{
|
||||
columnId: 'test-id-1',
|
||||
operationType: 'average',
|
||||
sourceField: field,
|
||||
},
|
||||
]);
|
||||
},
|
||||
[
|
||||
{ operationType: 'moving_average', references: ['test-id-1'] },
|
||||
{
|
||||
columnId: 'test-id-1',
|
||||
operationType: 'average',
|
||||
sourceField: field,
|
||||
},
|
||||
],
|
||||
],
|
||||
])('should return %s', (_, input, actions, expected) => {
|
||||
actions();
|
||||
if (expected === null) {
|
||||
expect(convertToOtherParentPipelineAggColumns(...input)).toBeNull();
|
||||
} else if (Array.isArray(expected)) {
|
||||
expect(convertToOtherParentPipelineAggColumns(...input)).toEqual(
|
||||
expected.map(expect.objectContaining)
|
||||
);
|
||||
} else {
|
||||
expect(convertToOtherParentPipelineAggColumns(...input)).toEqual(
|
||||
expect.objectContaining(expected)
|
||||
);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('convertToCumulativeSumAggColumn', () => {
|
||||
const field = stubLogstashDataView.fields[0].name;
|
||||
const aggs: Array<SchemaConfig<METRIC_TYPES>> = [
|
||||
{
|
||||
aggId: '1',
|
||||
aggType: METRIC_TYPES.AVG,
|
||||
aggParams: { field },
|
||||
accessor: 0,
|
||||
params: {},
|
||||
label: 'average',
|
||||
format: {},
|
||||
},
|
||||
{
|
||||
aggId: '1',
|
||||
aggType: METRIC_TYPES.CUMULATIVE_SUM,
|
||||
aggParams: { metricAgg: '2' },
|
||||
accessor: 0,
|
||||
params: {},
|
||||
label: 'Moving Average of Average',
|
||||
format: {},
|
||||
},
|
||||
];
|
||||
|
||||
beforeEach(() => {
|
||||
mockGetFieldByName.mockReturnValue({
|
||||
aggregatable: true,
|
||||
type: 'number',
|
||||
sourceField: 'bytes',
|
||||
});
|
||||
|
||||
stubLogstashDataView.getFieldByName = mockGetFieldByName;
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
test.each<
|
||||
[
|
||||
string,
|
||||
Parameters<typeof convertToCumulativeSumAggColumn>,
|
||||
() => void,
|
||||
Partial<FormulaColumn> | [Partial<ParentPipelineAggColumn>, Partial<AggBasedColumn>] | null
|
||||
]
|
||||
>([
|
||||
[
|
||||
'null if cumulative sum does not have aggParams',
|
||||
[
|
||||
{
|
||||
dataView: stubLogstashDataView,
|
||||
aggs,
|
||||
agg: { ...aggs[1], aggParams: undefined } as SchemaConfig<METRIC_TYPES.CUMULATIVE_SUM>,
|
||||
},
|
||||
],
|
||||
() => {
|
||||
mockGetMetricFromParentPipelineAgg.mockReturnValue(null);
|
||||
},
|
||||
null,
|
||||
],
|
||||
[
|
||||
'null if getMetricFromParentPipelineAgg returns null',
|
||||
[
|
||||
{
|
||||
dataView: stubLogstashDataView,
|
||||
aggs,
|
||||
agg: aggs[1] as SchemaConfig<METRIC_TYPES.CUMULATIVE_SUM>,
|
||||
},
|
||||
],
|
||||
() => {
|
||||
mockGetMetricFromParentPipelineAgg.mockReturnValue(null);
|
||||
},
|
||||
null,
|
||||
],
|
||||
[
|
||||
'null if cutom metric of parent pipeline agg is not supported',
|
||||
[
|
||||
{
|
||||
dataView: stubLogstashDataView,
|
||||
aggs,
|
||||
agg: aggs[1] as SchemaConfig<METRIC_TYPES.CUMULATIVE_SUM>,
|
||||
},
|
||||
],
|
||||
() => {
|
||||
mockGetMetricFromParentPipelineAgg.mockReturnValue({
|
||||
aggId: '2-metric',
|
||||
aggType: METRIC_TYPES.GEO_BOUNDS,
|
||||
});
|
||||
},
|
||||
null,
|
||||
],
|
||||
[
|
||||
'null if cutom metric of parent pipeline agg is sibling pipeline agg',
|
||||
[
|
||||
{
|
||||
dataView: stubLogstashDataView,
|
||||
aggs,
|
||||
agg: aggs[1] as SchemaConfig<METRIC_TYPES.CUMULATIVE_SUM>,
|
||||
},
|
||||
],
|
||||
() => {
|
||||
mockGetMetricFromParentPipelineAgg.mockReturnValue({
|
||||
aggId: '2-metric',
|
||||
aggType: METRIC_TYPES.AVG_BUCKET,
|
||||
});
|
||||
},
|
||||
null,
|
||||
],
|
||||
[
|
||||
'null if cannot build formula if cutom metric of parent pipeline agg is parent pipeline agg',
|
||||
[
|
||||
{
|
||||
dataView: stubLogstashDataView,
|
||||
aggs,
|
||||
agg: aggs[1] as SchemaConfig<METRIC_TYPES.CUMULATIVE_SUM>,
|
||||
},
|
||||
],
|
||||
() => {
|
||||
mockGetMetricFromParentPipelineAgg.mockReturnValue({
|
||||
aggId: '2-metric',
|
||||
aggType: METRIC_TYPES.MOVING_FN,
|
||||
});
|
||||
mockGetFormulaForPipelineAgg.mockReturnValue(null);
|
||||
},
|
||||
null,
|
||||
],
|
||||
[
|
||||
'formula column if cutom metric of parent pipeline agg is valid parent pipeline agg',
|
||||
[
|
||||
{
|
||||
dataView: stubLogstashDataView,
|
||||
aggs,
|
||||
agg: aggs[1] as SchemaConfig<METRIC_TYPES.CUMULATIVE_SUM>,
|
||||
},
|
||||
],
|
||||
() => {
|
||||
mockGetMetricFromParentPipelineAgg.mockReturnValue({
|
||||
aggId: '2-metric',
|
||||
aggType: METRIC_TYPES.MOVING_FN,
|
||||
});
|
||||
mockGetFormulaForPipelineAgg.mockReturnValue('test-formula');
|
||||
},
|
||||
{
|
||||
operationType: 'formula',
|
||||
params: {
|
||||
formula: 'test-formula',
|
||||
},
|
||||
},
|
||||
],
|
||||
[
|
||||
'null if cutom metric of parent pipeline agg is invalid sum or count agg',
|
||||
[
|
||||
{
|
||||
dataView: stubLogstashDataView,
|
||||
aggs,
|
||||
agg: aggs[1] as SchemaConfig<METRIC_TYPES.CUMULATIVE_SUM>,
|
||||
},
|
||||
],
|
||||
() => {
|
||||
mockGetMetricFromParentPipelineAgg.mockReturnValue({
|
||||
aggId: '2-metric',
|
||||
aggType: METRIC_TYPES.SUM,
|
||||
});
|
||||
mockConvertMetricAggregationColumnWithoutSpecialParams.mockReturnValue(null);
|
||||
},
|
||||
null,
|
||||
],
|
||||
[
|
||||
'cumulative sum and metric columns if cutom metric of parent pipeline agg is valid sum or count agg',
|
||||
[
|
||||
{
|
||||
dataView: stubLogstashDataView,
|
||||
aggs,
|
||||
agg: aggs[1] as SchemaConfig<METRIC_TYPES.CUMULATIVE_SUM>,
|
||||
},
|
||||
],
|
||||
() => {
|
||||
mockGetMetricFromParentPipelineAgg.mockReturnValue({
|
||||
aggId: '2-metric',
|
||||
aggType: METRIC_TYPES.SUM,
|
||||
});
|
||||
mockConvertMetricAggregationColumnWithoutSpecialParams.mockReturnValue({
|
||||
columnId: 'test-id-1',
|
||||
operationType: 'sum',
|
||||
sourceField: field,
|
||||
});
|
||||
},
|
||||
[
|
||||
{ operationType: 'cumulative_sum', references: ['test-id-1'] },
|
||||
{
|
||||
columnId: 'test-id-1',
|
||||
operationType: 'sum',
|
||||
sourceField: field,
|
||||
},
|
||||
],
|
||||
],
|
||||
])('should return %s', (_, input, actions, expected) => {
|
||||
actions();
|
||||
if (expected === null) {
|
||||
expect(convertToCumulativeSumAggColumn(...input)).toBeNull();
|
||||
} else if (Array.isArray(expected)) {
|
||||
expect(convertToCumulativeSumAggColumn(...input)).toEqual(
|
||||
expected.map(expect.objectContaining)
|
||||
);
|
||||
} else {
|
||||
expect(convertToCumulativeSumAggColumn(...input)).toEqual(expect.objectContaining(expected));
|
||||
}
|
||||
});
|
||||
});
|
|
@ -122,6 +122,7 @@ export const convertToCumulativeSumAggColumn = (
|
|||
{ agg: metric as SchemaConfig<METRIC_TYPES.SUM | METRIC_TYPES.COUNT>, dataView },
|
||||
reducedTimeRange
|
||||
);
|
||||
|
||||
if (subMetric === null) {
|
||||
return null;
|
||||
}
|
||||
|
@ -134,8 +135,8 @@ export const convertToCumulativeSumAggColumn = (
|
|||
return [
|
||||
{
|
||||
operationType: op.name,
|
||||
references: [subMetric?.columnId],
|
||||
...createColumn(agg),
|
||||
references: [subMetric?.columnId],
|
||||
params: {},
|
||||
timeShift: agg.aggParams?.timeShift,
|
||||
} as ParentPipelineAggColumn,
|
||||
|
|
|
@ -0,0 +1,142 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { METRIC_TYPES } from '@kbn/data-plugin/common';
|
||||
import { stubLogstashDataView } from '@kbn/data-views-plugin/common/data_view.stub';
|
||||
import { SchemaConfig } from '../../..';
|
||||
import { convertToPercentileColumn } from './percentile';
|
||||
import { PercentileColumn } from './types';
|
||||
|
||||
const mockGetFieldNameFromField = jest.fn();
|
||||
const mockGetFieldByName = jest.fn();
|
||||
const mockGetLabel = jest.fn();
|
||||
const mockGetLabelForPercentile = jest.fn();
|
||||
|
||||
jest.mock('../utils', () => ({
|
||||
getFieldNameFromField: jest.fn(() => mockGetFieldNameFromField()),
|
||||
getLabel: jest.fn(() => mockGetLabel()),
|
||||
getLabelForPercentile: jest.fn(() => mockGetLabelForPercentile()),
|
||||
}));
|
||||
|
||||
describe('convertToPercentileColumn', () => {
|
||||
const dataView = stubLogstashDataView;
|
||||
const field = dataView.fields[0].displayName;
|
||||
const aggId = 'pr.10';
|
||||
const percentile = 10;
|
||||
const percents = [percentile];
|
||||
|
||||
const agg: SchemaConfig<METRIC_TYPES.PERCENTILES> = {
|
||||
accessor: 0,
|
||||
label: '',
|
||||
format: {
|
||||
id: undefined,
|
||||
params: undefined,
|
||||
},
|
||||
params: {},
|
||||
aggType: METRIC_TYPES.PERCENTILES,
|
||||
aggParams: { field, percents },
|
||||
aggId,
|
||||
};
|
||||
const singlePercentileRankAgg: SchemaConfig<METRIC_TYPES.SINGLE_PERCENTILE> = {
|
||||
accessor: 0,
|
||||
label: '',
|
||||
format: {
|
||||
id: undefined,
|
||||
params: undefined,
|
||||
},
|
||||
params: {},
|
||||
aggType: METRIC_TYPES.SINGLE_PERCENTILE,
|
||||
aggParams: { field, percentile },
|
||||
aggId,
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
mockGetFieldNameFromField.mockReturnValue(dataView.fields[0]);
|
||||
mockGetFieldByName.mockReturnValue(dataView.fields[0]);
|
||||
mockGetLabel.mockReturnValue('someLabel');
|
||||
mockGetLabelForPercentile.mockReturnValue('someOtherLabel');
|
||||
dataView.getFieldByName = mockGetFieldByName;
|
||||
});
|
||||
|
||||
test.each<
|
||||
[string, Parameters<typeof convertToPercentileColumn>, Partial<PercentileColumn> | null]
|
||||
>([
|
||||
['null if no percents', [{ agg: { ...agg, aggId: 'pr' }, dataView }], null],
|
||||
[
|
||||
'null if no value',
|
||||
[{ agg: { ...singlePercentileRankAgg, aggParams: undefined }, dataView }],
|
||||
null,
|
||||
],
|
||||
['null if no aggId', [{ agg: { ...agg, aggId: undefined }, dataView }], null],
|
||||
['null if no aggParams', [{ agg: { ...agg, aggParams: undefined }, dataView }], null],
|
||||
['null if aggId is invalid', [{ agg: { ...agg, aggId: 'pr.invalid' }, dataView }], null],
|
||||
[
|
||||
'null if values are undefined',
|
||||
[{ agg: { ...agg, aggParams: { percents: undefined, field } }, dataView }],
|
||||
null,
|
||||
],
|
||||
[
|
||||
'null if values are empty',
|
||||
[{ agg: { ...agg, aggParams: { percents: [], field } }, dataView }],
|
||||
null,
|
||||
],
|
||||
])('should return %s', (_, input, expected) => {
|
||||
if (expected === null) {
|
||||
expect(convertToPercentileColumn(...input)).toBeNull();
|
||||
} else {
|
||||
expect(convertToPercentileColumn(...input)).toEqual(expect.objectContaining(expected));
|
||||
}
|
||||
});
|
||||
|
||||
test('should return null if field is not specified', () => {
|
||||
mockGetFieldNameFromField.mockReturnValue(null);
|
||||
expect(convertToPercentileColumn({ agg, dataView })).toBeNull();
|
||||
expect(mockGetFieldNameFromField).toBeCalledTimes(1);
|
||||
expect(dataView.getFieldByName).toBeCalledTimes(0);
|
||||
});
|
||||
|
||||
test('should return null if field absent at the index pattern', () => {
|
||||
mockGetFieldByName.mockReturnValueOnce(null);
|
||||
dataView.getFieldByName = mockGetFieldByName;
|
||||
|
||||
expect(convertToPercentileColumn({ agg, dataView })).toBeNull();
|
||||
expect(mockGetFieldNameFromField).toBeCalledTimes(1);
|
||||
expect(dataView.getFieldByName).toBeCalledTimes(1);
|
||||
});
|
||||
|
||||
test('should return percentile rank column for percentiles', () => {
|
||||
expect(convertToPercentileColumn({ agg, dataView })).toEqual(
|
||||
expect.objectContaining({
|
||||
dataType: 'number',
|
||||
label: 'someOtherLabel',
|
||||
meta: { aggId: 'pr.10' },
|
||||
operationType: 'percentile',
|
||||
params: { percentile: 10 },
|
||||
sourceField: 'bytes',
|
||||
})
|
||||
);
|
||||
expect(mockGetFieldNameFromField).toBeCalledTimes(1);
|
||||
expect(dataView.getFieldByName).toBeCalledTimes(1);
|
||||
});
|
||||
|
||||
test('should return percentile rank column for single percentile', () => {
|
||||
expect(convertToPercentileColumn({ agg: singlePercentileRankAgg, dataView })).toEqual(
|
||||
expect.objectContaining({
|
||||
dataType: 'number',
|
||||
label: 'someOtherLabel',
|
||||
meta: { aggId: 'pr.10' },
|
||||
operationType: 'percentile',
|
||||
params: { percentile: 10 },
|
||||
sourceField: 'bytes',
|
||||
})
|
||||
);
|
||||
expect(mockGetFieldNameFromField).toBeCalledTimes(1);
|
||||
expect(dataView.getFieldByName).toBeCalledTimes(1);
|
||||
});
|
||||
});
|
|
@ -0,0 +1,146 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { METRIC_TYPES } from '@kbn/data-plugin/common';
|
||||
import { stubLogstashDataView } from '@kbn/data-views-plugin/common/data_view.stub';
|
||||
import { SchemaConfig } from '../../..';
|
||||
import { convertToPercentileRankColumn } from './percentile_rank';
|
||||
import { PercentileRanksColumn } from './types';
|
||||
|
||||
const mockGetFieldNameFromField = jest.fn();
|
||||
const mockGetFieldByName = jest.fn();
|
||||
const mockGetLabel = jest.fn();
|
||||
const mockGetLabelForPercentile = jest.fn();
|
||||
|
||||
jest.mock('../utils', () => ({
|
||||
getFieldNameFromField: jest.fn(() => mockGetFieldNameFromField()),
|
||||
getLabel: jest.fn(() => mockGetLabel()),
|
||||
getLabelForPercentile: jest.fn(() => mockGetLabelForPercentile()),
|
||||
}));
|
||||
|
||||
describe('convertToPercentileRankColumn', () => {
|
||||
const dataView = stubLogstashDataView;
|
||||
const field = dataView.fields[0].displayName;
|
||||
const aggId = 'pr.10';
|
||||
const value = 10;
|
||||
const values = [value];
|
||||
|
||||
const agg: SchemaConfig<METRIC_TYPES.PERCENTILE_RANKS> = {
|
||||
accessor: 0,
|
||||
label: '',
|
||||
format: {
|
||||
id: undefined,
|
||||
params: undefined,
|
||||
},
|
||||
params: {},
|
||||
aggType: METRIC_TYPES.PERCENTILE_RANKS,
|
||||
aggParams: { field, values },
|
||||
aggId,
|
||||
};
|
||||
const singlePercentileRankAgg: SchemaConfig<METRIC_TYPES.SINGLE_PERCENTILE_RANK> = {
|
||||
accessor: 0,
|
||||
label: '',
|
||||
format: {
|
||||
id: undefined,
|
||||
params: undefined,
|
||||
},
|
||||
params: {},
|
||||
aggType: METRIC_TYPES.SINGLE_PERCENTILE_RANK,
|
||||
aggParams: { field, value },
|
||||
aggId,
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
mockGetFieldNameFromField.mockReturnValue(dataView.fields[0]);
|
||||
mockGetFieldByName.mockReturnValue(dataView.fields[0]);
|
||||
mockGetLabel.mockReturnValue('someLabel');
|
||||
mockGetLabelForPercentile.mockReturnValue('someOtherLabel');
|
||||
dataView.getFieldByName = mockGetFieldByName;
|
||||
});
|
||||
|
||||
test.each<
|
||||
[
|
||||
string,
|
||||
Parameters<typeof convertToPercentileRankColumn>,
|
||||
Partial<PercentileRanksColumn> | null
|
||||
]
|
||||
>([
|
||||
['null if no percents', [{ agg: { ...agg, aggId: 'pr' }, dataView }], null],
|
||||
[
|
||||
'null if no value',
|
||||
[{ agg: { ...singlePercentileRankAgg, aggParams: undefined }, dataView }],
|
||||
null,
|
||||
],
|
||||
['null if no aggId', [{ agg: { ...agg, aggId: undefined }, dataView }], null],
|
||||
['null if no aggParams', [{ agg: { ...agg, aggParams: undefined }, dataView }], null],
|
||||
['null if aggId is invalid', [{ agg: { ...agg, aggId: 'pr.invalid' }, dataView }], null],
|
||||
[
|
||||
'null if values are undefined',
|
||||
[{ agg: { ...agg, aggParams: { values: undefined, field } }, dataView }],
|
||||
null,
|
||||
],
|
||||
[
|
||||
'null if values are empty',
|
||||
[{ agg: { ...agg, aggParams: { values: [], field } }, dataView }],
|
||||
null,
|
||||
],
|
||||
])('should return %s', (_, input, expected) => {
|
||||
if (expected === null) {
|
||||
expect(convertToPercentileRankColumn(...input)).toBeNull();
|
||||
} else {
|
||||
expect(convertToPercentileRankColumn(...input)).toEqual(expect.objectContaining(expected));
|
||||
}
|
||||
});
|
||||
|
||||
test('should return null if field is not specified', () => {
|
||||
mockGetFieldNameFromField.mockReturnValue(null);
|
||||
expect(convertToPercentileRankColumn({ agg, dataView })).toBeNull();
|
||||
expect(mockGetFieldNameFromField).toBeCalledTimes(1);
|
||||
expect(dataView.getFieldByName).toBeCalledTimes(0);
|
||||
});
|
||||
|
||||
test('should return null if field absent at the index pattern', () => {
|
||||
mockGetFieldByName.mockReturnValueOnce(null);
|
||||
dataView.getFieldByName = mockGetFieldByName;
|
||||
|
||||
expect(convertToPercentileRankColumn({ agg, dataView })).toBeNull();
|
||||
expect(mockGetFieldNameFromField).toBeCalledTimes(1);
|
||||
expect(dataView.getFieldByName).toBeCalledTimes(1);
|
||||
});
|
||||
|
||||
test('should return percentile rank column for percentile ranks', () => {
|
||||
expect(convertToPercentileRankColumn({ agg, dataView })).toEqual(
|
||||
expect.objectContaining({
|
||||
dataType: 'number',
|
||||
label: 'someOtherLabel',
|
||||
meta: { aggId: 'pr.10' },
|
||||
operationType: 'percentile_rank',
|
||||
params: { value: 10 },
|
||||
sourceField: 'bytes',
|
||||
})
|
||||
);
|
||||
expect(mockGetFieldNameFromField).toBeCalledTimes(1);
|
||||
expect(dataView.getFieldByName).toBeCalledTimes(1);
|
||||
});
|
||||
|
||||
test('should return percentile rank column for single percentile rank', () => {
|
||||
expect(convertToPercentileRankColumn({ agg: singlePercentileRankAgg, dataView })).toEqual(
|
||||
expect.objectContaining({
|
||||
dataType: 'number',
|
||||
label: 'someOtherLabel',
|
||||
meta: { aggId: 'pr.10' },
|
||||
operationType: 'percentile_rank',
|
||||
params: { value: 10 },
|
||||
sourceField: 'bytes',
|
||||
})
|
||||
);
|
||||
expect(mockGetFieldNameFromField).toBeCalledTimes(1);
|
||||
expect(dataView.getFieldByName).toBeCalledTimes(1);
|
||||
});
|
||||
});
|
|
@ -0,0 +1,74 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { stubLogstashDataView } from '@kbn/data-views-plugin/common/data_view.stub';
|
||||
import { AggParamsRange, AggParamsHistogram } from '@kbn/data-plugin/common';
|
||||
import { convertToRangeColumn } from './range';
|
||||
import { RangeColumn } from './types';
|
||||
import { DataType } from '../../types';
|
||||
import { RANGE_MODES } from '../../constants';
|
||||
|
||||
describe('convertToRangeColumn', () => {
|
||||
const aggId = `some-id`;
|
||||
const ranges = [
|
||||
{
|
||||
from: 1,
|
||||
to: 1000,
|
||||
label: '1',
|
||||
},
|
||||
];
|
||||
const aggParamsRange: AggParamsRange = {
|
||||
field: stubLogstashDataView.fields[0].name,
|
||||
ranges,
|
||||
};
|
||||
const aggParamsHistogram: AggParamsHistogram = {
|
||||
interval: '1d',
|
||||
field: stubLogstashDataView.fields[0].name,
|
||||
};
|
||||
|
||||
test.each<[string, Parameters<typeof convertToRangeColumn>, Partial<RangeColumn> | null]>([
|
||||
[
|
||||
'range column if provide valid range agg',
|
||||
[aggId, aggParamsRange, '', stubLogstashDataView],
|
||||
{
|
||||
dataType: stubLogstashDataView.fields[0].type as DataType,
|
||||
isBucketed: true,
|
||||
isSplit: false,
|
||||
sourceField: stubLogstashDataView.fields[0].name,
|
||||
meta: { aggId },
|
||||
params: {
|
||||
type: RANGE_MODES.Range,
|
||||
maxBars: 'auto',
|
||||
ranges,
|
||||
},
|
||||
},
|
||||
],
|
||||
[
|
||||
'range column if provide valid histogram agg',
|
||||
[aggId, aggParamsHistogram, '', stubLogstashDataView, true],
|
||||
{
|
||||
dataType: stubLogstashDataView.fields[0].type as DataType,
|
||||
isBucketed: true,
|
||||
isSplit: true,
|
||||
sourceField: stubLogstashDataView.fields[0].name,
|
||||
meta: { aggId },
|
||||
params: {
|
||||
type: RANGE_MODES.Histogram,
|
||||
maxBars: 'auto',
|
||||
ranges: [],
|
||||
},
|
||||
},
|
||||
],
|
||||
])('should return %s', (_, input, expected) => {
|
||||
if (expected === null) {
|
||||
expect(convertToRangeColumn(...input)).toBeNull();
|
||||
} else {
|
||||
expect(convertToRangeColumn(...input)).toEqual(expect.objectContaining(expected));
|
||||
}
|
||||
});
|
||||
});
|
|
@ -0,0 +1,79 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { IAggConfig, METRIC_TYPES } from '@kbn/data-plugin/common';
|
||||
import { stubLogstashDataView } from '@kbn/data-views-plugin/common/data_view.stub';
|
||||
import { SchemaConfig } from '../../..';
|
||||
import { convertToSiblingPipelineColumns } from './sibling_pipeline';
|
||||
|
||||
const mockConvertMetricToColumns = jest.fn();
|
||||
const mockConvertToSchemaConfig = jest.fn();
|
||||
|
||||
jest.mock('../metrics', () => ({
|
||||
convertMetricToColumns: jest.fn(() => mockConvertMetricToColumns()),
|
||||
}));
|
||||
|
||||
jest.mock('../../../vis_schemas', () => ({
|
||||
convertToSchemaConfig: jest.fn(() => mockConvertToSchemaConfig()),
|
||||
}));
|
||||
|
||||
describe('convertToSiblingPipelineColumns', () => {
|
||||
const dataView = stubLogstashDataView;
|
||||
const aggId = 'agg-id-1';
|
||||
const agg: SchemaConfig<METRIC_TYPES.AVG_BUCKET> = {
|
||||
accessor: 0,
|
||||
label: '',
|
||||
format: {
|
||||
id: undefined,
|
||||
params: undefined,
|
||||
},
|
||||
params: {},
|
||||
aggType: METRIC_TYPES.AVG_BUCKET,
|
||||
aggParams: { customMetric: {} as IAggConfig },
|
||||
aggId,
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
mockConvertMetricToColumns.mockReturnValue([{}]);
|
||||
mockConvertToSchemaConfig.mockReturnValue({});
|
||||
});
|
||||
|
||||
test('should return null if aggParams are not defined', () => {
|
||||
expect(
|
||||
convertToSiblingPipelineColumns({ agg: { ...agg, aggParams: undefined }, aggs: [], dataView })
|
||||
).toBeNull();
|
||||
expect(mockConvertMetricToColumns).toBeCalledTimes(0);
|
||||
});
|
||||
|
||||
test('should return null if customMetric is not defined', () => {
|
||||
expect(
|
||||
convertToSiblingPipelineColumns({
|
||||
agg: { ...agg, aggParams: { customMetric: undefined } },
|
||||
aggs: [],
|
||||
dataView,
|
||||
})
|
||||
).toBeNull();
|
||||
expect(mockConvertMetricToColumns).toBeCalledTimes(0);
|
||||
});
|
||||
|
||||
test('should return null if sibling agg is not supported', () => {
|
||||
mockConvertMetricToColumns.mockReturnValue(null);
|
||||
expect(convertToSiblingPipelineColumns({ agg, aggs: [], dataView })).toBeNull();
|
||||
expect(mockConvertToSchemaConfig).toBeCalledTimes(1);
|
||||
expect(mockConvertMetricToColumns).toBeCalledTimes(1);
|
||||
});
|
||||
|
||||
test('should return column', () => {
|
||||
const column = { operationType: 'formula' };
|
||||
mockConvertMetricToColumns.mockReturnValue([column]);
|
||||
expect(convertToSiblingPipelineColumns({ agg, aggs: [], dataView })).toEqual(column);
|
||||
expect(mockConvertToSchemaConfig).toBeCalledTimes(1);
|
||||
expect(mockConvertMetricToColumns).toBeCalledTimes(1);
|
||||
});
|
||||
});
|
|
@ -0,0 +1,115 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { METRIC_TYPES } from '@kbn/data-plugin/common';
|
||||
import { stubLogstashDataView } from '@kbn/data-views-plugin/common/data_view.stub';
|
||||
import { SchemaConfig } from '../../..';
|
||||
import { convertToStdDeviationFormulaColumns } from './std_deviation';
|
||||
import { FormulaColumn } from './types';
|
||||
|
||||
const mockGetFieldNameFromField = jest.fn();
|
||||
const mockGetFieldByName = jest.fn();
|
||||
const mockGetLabel = jest.fn();
|
||||
|
||||
jest.mock('../utils', () => ({
|
||||
getFieldNameFromField: jest.fn(() => mockGetFieldNameFromField()),
|
||||
getLabel: jest.fn(() => mockGetLabel()),
|
||||
}));
|
||||
|
||||
describe('convertToStdDeviationFormulaColumns', () => {
|
||||
const dataView = stubLogstashDataView;
|
||||
const stdLowerAggId = 'agg-id.std_lower';
|
||||
const stdUpperAggId = 'agg-id.std_upper';
|
||||
const label = 'std label';
|
||||
const agg: SchemaConfig<METRIC_TYPES.STD_DEV> = {
|
||||
accessor: 0,
|
||||
label,
|
||||
format: {
|
||||
id: undefined,
|
||||
params: undefined,
|
||||
},
|
||||
params: {},
|
||||
aggType: METRIC_TYPES.STD_DEV,
|
||||
aggId: stdLowerAggId,
|
||||
aggParams: {
|
||||
field: dataView.fields[0].displayName,
|
||||
},
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
mockGetFieldNameFromField.mockReturnValue(dataView.fields[0].displayName);
|
||||
mockGetFieldByName.mockReturnValue(dataView.fields[0]);
|
||||
mockGetLabel.mockReturnValue('some label');
|
||||
dataView.getFieldByName = mockGetFieldByName;
|
||||
});
|
||||
|
||||
test.each<
|
||||
[string, Parameters<typeof convertToStdDeviationFormulaColumns>, Partial<FormulaColumn> | null]
|
||||
>([['null if no aggId is passed', [{ agg: { ...agg, aggId: undefined }, dataView }], null]])(
|
||||
'should return %s',
|
||||
(_, input, expected) => {
|
||||
if (expected === null) {
|
||||
expect(convertToStdDeviationFormulaColumns(...input)).toBeNull();
|
||||
} else {
|
||||
expect(convertToStdDeviationFormulaColumns(...input)).toEqual(
|
||||
expect.objectContaining(expected)
|
||||
);
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
test('should return null if field is not present', () => {
|
||||
mockGetFieldNameFromField.mockReturnValue(null);
|
||||
expect(convertToStdDeviationFormulaColumns({ agg, dataView })).toBeNull();
|
||||
expect(mockGetFieldNameFromField).toBeCalledTimes(1);
|
||||
expect(dataView.getFieldByName).toBeCalledTimes(0);
|
||||
});
|
||||
|
||||
test("should return null if field doesn't exist in dataView", () => {
|
||||
mockGetFieldByName.mockReturnValue(null);
|
||||
dataView.getFieldByName = mockGetFieldByName;
|
||||
expect(convertToStdDeviationFormulaColumns({ agg, dataView })).toBeNull();
|
||||
expect(mockGetFieldNameFromField).toBeCalledTimes(1);
|
||||
expect(dataView.getFieldByName).toBeCalledTimes(1);
|
||||
});
|
||||
|
||||
test('should return null if agg id is invalid', () => {
|
||||
expect(
|
||||
convertToStdDeviationFormulaColumns({ agg: { ...agg, aggId: 'some-id' }, dataView })
|
||||
).toBeNull();
|
||||
expect(mockGetFieldNameFromField).toBeCalledTimes(1);
|
||||
expect(dataView.getFieldByName).toBeCalledTimes(1);
|
||||
});
|
||||
|
||||
test('should return formula column for lower std deviation', () => {
|
||||
expect(
|
||||
convertToStdDeviationFormulaColumns({ agg: { ...agg, aggId: stdLowerAggId }, dataView })
|
||||
).toEqual(
|
||||
expect.objectContaining({
|
||||
label,
|
||||
meta: { aggId: 'agg-id.std_lower' },
|
||||
operationType: 'formula',
|
||||
params: { formula: 'average(bytes) - 2 * standard_deviation(bytes)' },
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
test('should return formula column for upper std deviation', () => {
|
||||
expect(
|
||||
convertToStdDeviationFormulaColumns({ agg: { ...agg, aggId: stdUpperAggId }, dataView })
|
||||
).toEqual(
|
||||
expect.objectContaining({
|
||||
label,
|
||||
meta: { aggId: 'agg-id.std_upper' },
|
||||
operationType: 'formula',
|
||||
params: { formula: 'average(bytes) + 2 * standard_deviation(bytes)' },
|
||||
})
|
||||
);
|
||||
});
|
||||
});
|
|
@ -0,0 +1,241 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { stubLogstashDataView } from '@kbn/data-views-plugin/common/data_view.stub';
|
||||
import { AggParamsTerms, IAggConfig, METRIC_TYPES, BUCKET_TYPES } from '@kbn/data-plugin/common';
|
||||
import { convertToTermsColumn } from './terms';
|
||||
import { AggBasedColumn, TermsColumn } from './types';
|
||||
import { SchemaConfig } from '../../..';
|
||||
|
||||
const mockConvertMetricToColumns = jest.fn();
|
||||
|
||||
jest.mock('../metrics', () => ({
|
||||
convertMetricToColumns: jest.fn(() => mockConvertMetricToColumns()),
|
||||
}));
|
||||
|
||||
jest.mock('../../../vis_schemas', () => ({
|
||||
convertToSchemaConfig: jest.fn(() => ({})),
|
||||
}));
|
||||
|
||||
describe('convertToDateHistogramColumn', () => {
|
||||
const aggId = `some-id`;
|
||||
const aggParams: AggParamsTerms = {
|
||||
field: stubLogstashDataView.fields[0].name,
|
||||
orderBy: '_key',
|
||||
order: {
|
||||
value: 'asc',
|
||||
text: '',
|
||||
},
|
||||
size: 5,
|
||||
};
|
||||
const aggs: Array<SchemaConfig<METRIC_TYPES>> = [
|
||||
{
|
||||
accessor: 0,
|
||||
label: '',
|
||||
format: {
|
||||
id: undefined,
|
||||
params: undefined,
|
||||
},
|
||||
params: {},
|
||||
aggType: METRIC_TYPES.AVG,
|
||||
aggParams: {
|
||||
field: stubLogstashDataView.fields[0].name,
|
||||
},
|
||||
},
|
||||
];
|
||||
const metricColumns: AggBasedColumn[] = [
|
||||
{
|
||||
columnId: 'column-1',
|
||||
operationType: 'average',
|
||||
isBucketed: false,
|
||||
isSplit: false,
|
||||
sourceField: stubLogstashDataView.fields[0].name,
|
||||
dataType: 'number',
|
||||
params: {},
|
||||
meta: {
|
||||
aggId: '1',
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
afterEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
test.each<
|
||||
[string, Parameters<typeof convertToTermsColumn>, Partial<TermsColumn> | null, () => void]
|
||||
>([
|
||||
[
|
||||
'null if dataview does not include field from terms params',
|
||||
[
|
||||
aggId,
|
||||
{
|
||||
agg: { aggParams: { ...aggParams, field: '' } } as SchemaConfig<BUCKET_TYPES.TERMS>,
|
||||
dataView: stubLogstashDataView,
|
||||
aggs,
|
||||
metricColumns,
|
||||
},
|
||||
'',
|
||||
false,
|
||||
],
|
||||
null,
|
||||
() => {},
|
||||
],
|
||||
[
|
||||
'terms column with alphabetical orderBy',
|
||||
[
|
||||
aggId,
|
||||
{
|
||||
agg: { aggParams } as SchemaConfig<BUCKET_TYPES.TERMS>,
|
||||
dataView: stubLogstashDataView,
|
||||
aggs,
|
||||
metricColumns,
|
||||
},
|
||||
'',
|
||||
false,
|
||||
],
|
||||
{
|
||||
operationType: 'terms',
|
||||
sourceField: stubLogstashDataView.fields[0].name,
|
||||
isBucketed: true,
|
||||
params: {
|
||||
size: 5,
|
||||
include: [],
|
||||
exclude: [],
|
||||
parentFormat: { id: 'terms' },
|
||||
orderBy: { type: 'alphabetical' },
|
||||
orderDirection: 'asc',
|
||||
},
|
||||
},
|
||||
() => {},
|
||||
],
|
||||
[
|
||||
'terms column with column orderBy if provided column for orderBy is exist',
|
||||
[
|
||||
aggId,
|
||||
{
|
||||
agg: { aggParams: { ...aggParams, orderBy: '1' } } as SchemaConfig<BUCKET_TYPES.TERMS>,
|
||||
dataView: stubLogstashDataView,
|
||||
aggs,
|
||||
metricColumns,
|
||||
},
|
||||
'',
|
||||
false,
|
||||
],
|
||||
{
|
||||
operationType: 'terms',
|
||||
sourceField: stubLogstashDataView.fields[0].name,
|
||||
isBucketed: true,
|
||||
params: {
|
||||
size: 5,
|
||||
include: [],
|
||||
exclude: [],
|
||||
parentFormat: { id: 'terms' },
|
||||
orderBy: { type: 'column', columnId: metricColumns[0].columnId },
|
||||
orderAgg: metricColumns[0],
|
||||
orderDirection: 'asc',
|
||||
},
|
||||
},
|
||||
() => {},
|
||||
],
|
||||
[
|
||||
'null if provided column for orderBy is not exist',
|
||||
[
|
||||
aggId,
|
||||
{
|
||||
agg: { aggParams: { ...aggParams, orderBy: '2' } } as SchemaConfig<BUCKET_TYPES.TERMS>,
|
||||
dataView: stubLogstashDataView,
|
||||
aggs,
|
||||
metricColumns,
|
||||
},
|
||||
'',
|
||||
false,
|
||||
],
|
||||
null,
|
||||
() => {},
|
||||
],
|
||||
[
|
||||
'null if provided custom orderBy without orderAgg',
|
||||
[
|
||||
aggId,
|
||||
{
|
||||
agg: {
|
||||
aggParams: { ...aggParams, orderBy: 'custom', orderAgg: undefined },
|
||||
} as SchemaConfig<BUCKET_TYPES.TERMS>,
|
||||
dataView: stubLogstashDataView,
|
||||
aggs,
|
||||
metricColumns,
|
||||
},
|
||||
'',
|
||||
false,
|
||||
],
|
||||
null,
|
||||
() => {},
|
||||
],
|
||||
[
|
||||
'null if provided custom orderBy and not valid orderAgg',
|
||||
[
|
||||
aggId,
|
||||
{
|
||||
agg: {
|
||||
aggParams: { ...aggParams, orderBy: 'custom', orderAgg: {} as IAggConfig },
|
||||
} as SchemaConfig<BUCKET_TYPES.TERMS>,
|
||||
dataView: stubLogstashDataView,
|
||||
aggs,
|
||||
metricColumns,
|
||||
},
|
||||
'',
|
||||
false,
|
||||
],
|
||||
null,
|
||||
() => {
|
||||
mockConvertMetricToColumns.mockReturnValue(null);
|
||||
},
|
||||
],
|
||||
[
|
||||
'terms column with custom orderBy and prepared orderAgg',
|
||||
[
|
||||
aggId,
|
||||
{
|
||||
agg: {
|
||||
aggParams: { ...aggParams, orderBy: 'custom', orderAgg: {} as IAggConfig },
|
||||
} as SchemaConfig<BUCKET_TYPES.TERMS>,
|
||||
dataView: stubLogstashDataView,
|
||||
aggs,
|
||||
metricColumns,
|
||||
},
|
||||
'',
|
||||
false,
|
||||
],
|
||||
{
|
||||
operationType: 'terms',
|
||||
sourceField: stubLogstashDataView.fields[0].name,
|
||||
isBucketed: true,
|
||||
params: {
|
||||
size: 5,
|
||||
include: [],
|
||||
exclude: [],
|
||||
parentFormat: { id: 'terms' },
|
||||
orderBy: { type: 'custom' },
|
||||
orderAgg: metricColumns[0],
|
||||
orderDirection: 'asc',
|
||||
},
|
||||
},
|
||||
() => {
|
||||
mockConvertMetricToColumns.mockReturnValue(metricColumns);
|
||||
},
|
||||
],
|
||||
])('should return %s', (_, input, expected, actions) => {
|
||||
actions();
|
||||
if (expected === null) {
|
||||
expect(convertToTermsColumn(...input)).toBeNull();
|
||||
} else {
|
||||
expect(convertToTermsColumn(...input)).toEqual(expect.objectContaining(expected));
|
||||
}
|
||||
});
|
||||
});
|
|
@ -0,0 +1,474 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { stubLogstashDataView } from '@kbn/data-views-plugin/common/data_view.stub';
|
||||
import { DataViewField, IAggConfig, METRIC_TYPES } from '@kbn/data-plugin/common';
|
||||
import { SchemaConfig } from '../../..';
|
||||
import { getFormulaForPipelineAgg, getFormulaForAgg } from './formula';
|
||||
|
||||
const mockGetMetricFromParentPipelineAgg = jest.fn();
|
||||
const mockIsPercentileAgg = jest.fn();
|
||||
const mockIsPercentileRankAgg = jest.fn();
|
||||
const mockIsPipeline = jest.fn();
|
||||
const mockIsStdDevAgg = jest.fn();
|
||||
const mockGetFieldByName = jest.fn();
|
||||
const originalGetFieldByName = stubLogstashDataView.getFieldByName;
|
||||
|
||||
jest.mock('../utils', () => ({
|
||||
getFieldNameFromField: jest.fn((field) => field),
|
||||
getMetricFromParentPipelineAgg: jest.fn(() => mockGetMetricFromParentPipelineAgg()),
|
||||
isPercentileAgg: jest.fn(() => mockIsPercentileAgg()),
|
||||
isPercentileRankAgg: jest.fn(() => mockIsPercentileRankAgg()),
|
||||
isPipeline: jest.fn(() => mockIsPipeline()),
|
||||
isStdDevAgg: jest.fn(() => mockIsStdDevAgg()),
|
||||
}));
|
||||
|
||||
const dataView = stubLogstashDataView;
|
||||
|
||||
const field = stubLogstashDataView.fields[0].name;
|
||||
const aggs: Array<SchemaConfig<METRIC_TYPES>> = [
|
||||
{
|
||||
aggId: '1',
|
||||
aggType: METRIC_TYPES.CUMULATIVE_SUM,
|
||||
aggParams: { customMetric: {} as IAggConfig },
|
||||
accessor: 0,
|
||||
params: {},
|
||||
label: 'cumulative sum',
|
||||
format: {},
|
||||
},
|
||||
{
|
||||
aggId: '2',
|
||||
aggType: METRIC_TYPES.AVG_BUCKET,
|
||||
aggParams: { customMetric: {} as IAggConfig },
|
||||
accessor: 0,
|
||||
params: {},
|
||||
label: 'overall average',
|
||||
format: {},
|
||||
},
|
||||
{
|
||||
aggId: '3.10',
|
||||
aggType: METRIC_TYPES.PERCENTILES,
|
||||
aggParams: { percents: [0, 10], field },
|
||||
accessor: 0,
|
||||
params: {},
|
||||
label: 'percentile',
|
||||
format: {},
|
||||
},
|
||||
{
|
||||
aggId: '4.5',
|
||||
aggType: METRIC_TYPES.PERCENTILE_RANKS,
|
||||
aggParams: { values: [0, 5], field },
|
||||
accessor: 0,
|
||||
params: {},
|
||||
label: 'percintile rank',
|
||||
format: {},
|
||||
},
|
||||
{
|
||||
aggId: '5.std_upper',
|
||||
aggType: METRIC_TYPES.STD_DEV,
|
||||
aggParams: { field },
|
||||
accessor: 0,
|
||||
params: {},
|
||||
label: 'std dev',
|
||||
format: {},
|
||||
},
|
||||
{
|
||||
aggId: '6',
|
||||
aggType: METRIC_TYPES.AVG,
|
||||
aggParams: { field },
|
||||
accessor: 0,
|
||||
params: {},
|
||||
label: 'average',
|
||||
format: {},
|
||||
},
|
||||
];
|
||||
|
||||
describe('getFormulaForPipelineAgg', () => {
|
||||
afterEach(() => {
|
||||
jest.clearAllMocks();
|
||||
dataView.getFieldByName = originalGetFieldByName;
|
||||
});
|
||||
|
||||
test.each<[string, Parameters<typeof getFormulaForPipelineAgg>, () => void, string | null]>([
|
||||
[
|
||||
'null if custom metric is invalid',
|
||||
[{ agg: aggs[0] as SchemaConfig<METRIC_TYPES.CUMULATIVE_SUM>, aggs, dataView }],
|
||||
() => {
|
||||
mockGetMetricFromParentPipelineAgg.mockReturnValue(null);
|
||||
},
|
||||
null,
|
||||
],
|
||||
[
|
||||
'null if custom metric type is not supported',
|
||||
[{ agg: aggs[0] as SchemaConfig<METRIC_TYPES.CUMULATIVE_SUM>, aggs, dataView }],
|
||||
() => {
|
||||
mockGetMetricFromParentPipelineAgg.mockReturnValue({
|
||||
aggType: METRIC_TYPES.GEO_BOUNDS,
|
||||
});
|
||||
},
|
||||
null,
|
||||
],
|
||||
[
|
||||
'correct formula if agg is parent pipeline agg and custom metric is valid and supported pipeline agg',
|
||||
[{ agg: aggs[0] as SchemaConfig<METRIC_TYPES.CUMULATIVE_SUM>, aggs, dataView }],
|
||||
() => {
|
||||
mockGetMetricFromParentPipelineAgg
|
||||
.mockReturnValueOnce({
|
||||
aggType: METRIC_TYPES.MOVING_FN,
|
||||
aggParams: {},
|
||||
aggId: '2',
|
||||
})
|
||||
.mockReturnValueOnce({
|
||||
aggType: METRIC_TYPES.AVG,
|
||||
aggParams: {
|
||||
field,
|
||||
},
|
||||
aggId: '3',
|
||||
});
|
||||
},
|
||||
'cumulative_sum(moving_average(average(bytes)))',
|
||||
],
|
||||
[
|
||||
'correct formula if agg is parent pipeline agg and custom metric is valid and supported not pipeline agg',
|
||||
[{ agg: aggs[0] as SchemaConfig<METRIC_TYPES.CUMULATIVE_SUM>, aggs, dataView }],
|
||||
() => {
|
||||
mockGetMetricFromParentPipelineAgg.mockReturnValueOnce({
|
||||
aggType: METRIC_TYPES.AVG,
|
||||
aggParams: {
|
||||
field,
|
||||
},
|
||||
aggId: '2',
|
||||
});
|
||||
},
|
||||
'cumulative_sum(average(bytes))',
|
||||
],
|
||||
[
|
||||
'correct formula if agg is parent pipeline agg and custom metric is valid and supported percentile rank agg',
|
||||
[{ agg: aggs[0] as SchemaConfig<METRIC_TYPES.CUMULATIVE_SUM>, aggs, dataView }],
|
||||
() => {
|
||||
mockGetMetricFromParentPipelineAgg.mockReturnValueOnce({
|
||||
aggType: METRIC_TYPES.PERCENTILE_RANKS,
|
||||
aggParams: {
|
||||
field,
|
||||
},
|
||||
aggId: '3.10',
|
||||
});
|
||||
},
|
||||
'cumulative_sum(percentile_rank(bytes, value=10))',
|
||||
],
|
||||
[
|
||||
'correct formula if agg is sibling pipeline agg and custom metric is valid and supported agg',
|
||||
[{ agg: aggs[1] as SchemaConfig<METRIC_TYPES.CUMULATIVE_SUM>, aggs, dataView }],
|
||||
() => {
|
||||
mockGetMetricFromParentPipelineAgg.mockReturnValueOnce({
|
||||
aggType: METRIC_TYPES.AVG,
|
||||
aggParams: {
|
||||
field,
|
||||
},
|
||||
aggId: '3',
|
||||
});
|
||||
},
|
||||
'average(bytes)',
|
||||
],
|
||||
])('should return %s', (_, input, actions, expected) => {
|
||||
actions();
|
||||
if (expected === null) {
|
||||
expect(getFormulaForPipelineAgg(...input)).toBeNull();
|
||||
} else {
|
||||
expect(getFormulaForPipelineAgg(...input)).toEqual(expected);
|
||||
}
|
||||
});
|
||||
|
||||
test('null if agg is sibling pipeline agg, custom metric is valid, agg is supported and field type is not supported', () => {
|
||||
mockGetMetricFromParentPipelineAgg.mockReturnValueOnce({
|
||||
aggType: METRIC_TYPES.AVG,
|
||||
aggParams: {
|
||||
field,
|
||||
},
|
||||
aggId: '3',
|
||||
});
|
||||
|
||||
const field1: DataViewField = {
|
||||
name: 'bytes',
|
||||
type: 'geo',
|
||||
esTypes: ['long'],
|
||||
aggregatable: true,
|
||||
searchable: true,
|
||||
count: 10,
|
||||
readFromDocValues: true,
|
||||
scripted: false,
|
||||
isMapped: true,
|
||||
} as DataViewField;
|
||||
|
||||
mockGetFieldByName.mockReturnValueOnce(field1);
|
||||
|
||||
dataView.getFieldByName = mockGetFieldByName;
|
||||
const agg = getFormulaForPipelineAgg({
|
||||
agg: aggs[1] as SchemaConfig<METRIC_TYPES.CUMULATIVE_SUM>,
|
||||
aggs,
|
||||
dataView,
|
||||
});
|
||||
expect(agg).toBeNull();
|
||||
});
|
||||
|
||||
test('null if agg is sibling pipeline agg, custom metric is valid, agg is supported, field type is supported and field is not aggregatable', () => {
|
||||
mockGetMetricFromParentPipelineAgg.mockReturnValueOnce({
|
||||
aggType: METRIC_TYPES.AVG,
|
||||
aggParams: {
|
||||
field,
|
||||
},
|
||||
aggId: '3',
|
||||
});
|
||||
|
||||
const field1: DataViewField = {
|
||||
name: 'str',
|
||||
type: 'string',
|
||||
esTypes: ['text'],
|
||||
aggregatable: false,
|
||||
searchable: true,
|
||||
count: 10,
|
||||
readFromDocValues: true,
|
||||
scripted: false,
|
||||
isMapped: true,
|
||||
} as DataViewField;
|
||||
|
||||
mockGetFieldByName.mockReturnValueOnce(field1);
|
||||
|
||||
dataView.getFieldByName = mockGetFieldByName;
|
||||
const agg = getFormulaForPipelineAgg({
|
||||
agg: aggs[1] as SchemaConfig<METRIC_TYPES.CUMULATIVE_SUM>,
|
||||
aggs,
|
||||
dataView,
|
||||
});
|
||||
expect(agg).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe('getFormulaForAgg', () => {
|
||||
beforeEach(() => {
|
||||
mockIsPercentileAgg.mockReturnValue(false);
|
||||
mockIsPipeline.mockReturnValue(false);
|
||||
mockIsStdDevAgg.mockReturnValue(false);
|
||||
mockIsPercentileRankAgg.mockReturnValue(false);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
jest.clearAllMocks();
|
||||
dataView.getFieldByName = originalGetFieldByName;
|
||||
});
|
||||
|
||||
test.each<[string, Parameters<typeof getFormulaForAgg>, () => void, string | null]>([
|
||||
[
|
||||
'null if agg type is not supported',
|
||||
[
|
||||
{
|
||||
agg: { ...aggs[0], aggType: METRIC_TYPES.GEO_BOUNDS, aggParams: { field } },
|
||||
aggs,
|
||||
dataView,
|
||||
},
|
||||
],
|
||||
() => {},
|
||||
null,
|
||||
],
|
||||
[
|
||||
'correct pipeline formula if agg is valid pipeline agg',
|
||||
[{ agg: aggs[0], aggs, dataView }],
|
||||
() => {
|
||||
mockIsPipeline.mockReturnValue(true);
|
||||
mockGetMetricFromParentPipelineAgg.mockReturnValueOnce({
|
||||
aggType: METRIC_TYPES.AVG,
|
||||
aggParams: {
|
||||
field,
|
||||
},
|
||||
aggId: '2',
|
||||
});
|
||||
},
|
||||
'cumulative_sum(average(bytes))',
|
||||
],
|
||||
[
|
||||
'correct percentile formula if agg is valid percentile agg',
|
||||
[{ agg: aggs[2], aggs, dataView }],
|
||||
() => {
|
||||
mockIsPercentileAgg.mockReturnValue(true);
|
||||
},
|
||||
'percentile(bytes, percentile=10)',
|
||||
],
|
||||
[
|
||||
'correct percentile rank formula if agg is valid percentile rank agg',
|
||||
[{ agg: aggs[3], aggs, dataView }],
|
||||
() => {
|
||||
mockIsPercentileRankAgg.mockReturnValue(true);
|
||||
},
|
||||
'percentile_rank(bytes, value=5)',
|
||||
],
|
||||
[
|
||||
'correct standart deviation formula if agg is valid standart deviation agg',
|
||||
[{ agg: aggs[4], aggs, dataView }],
|
||||
() => {
|
||||
mockIsStdDevAgg.mockReturnValue(true);
|
||||
},
|
||||
'average(bytes) + 2 * standard_deviation(bytes)',
|
||||
],
|
||||
[
|
||||
'correct metric formula if agg is valid other metric agg',
|
||||
[{ agg: aggs[5], aggs, dataView }],
|
||||
() => {},
|
||||
'average(bytes)',
|
||||
],
|
||||
])('should return %s', (_, input, actions, expected) => {
|
||||
actions();
|
||||
if (expected === null) {
|
||||
expect(getFormulaForAgg(...input)).toBeNull();
|
||||
} else {
|
||||
expect(getFormulaForAgg(...input)).toEqual(expected);
|
||||
}
|
||||
});
|
||||
|
||||
test.each([
|
||||
[
|
||||
'null if agg is valid pipeline agg',
|
||||
aggs[0],
|
||||
() => {
|
||||
mockIsPipeline.mockReturnValue(true);
|
||||
mockGetMetricFromParentPipelineAgg.mockReturnValueOnce({
|
||||
aggType: METRIC_TYPES.AVG,
|
||||
aggParams: {
|
||||
field,
|
||||
},
|
||||
aggId: '2',
|
||||
});
|
||||
},
|
||||
],
|
||||
[
|
||||
'null if percentile rank agg is valid percentile agg',
|
||||
aggs[2],
|
||||
() => {
|
||||
mockIsPercentileAgg.mockReturnValue(true);
|
||||
},
|
||||
],
|
||||
[
|
||||
'null if agg is valid percentile rank agg',
|
||||
aggs[3],
|
||||
() => {
|
||||
mockIsPercentileRankAgg.mockReturnValue(true);
|
||||
},
|
||||
],
|
||||
[
|
||||
'null if agg is valid standart deviation agg',
|
||||
aggs[4],
|
||||
() => {
|
||||
mockIsStdDevAgg.mockReturnValue(true);
|
||||
},
|
||||
],
|
||||
['null if agg is valid other metric agg', aggs[5], () => {}],
|
||||
])('should return %s and field type is not supported', (_, agg, actions) => {
|
||||
actions();
|
||||
const field1: DataViewField = {
|
||||
name: 'bytes',
|
||||
type: 'geo',
|
||||
esTypes: ['long'],
|
||||
aggregatable: true,
|
||||
searchable: true,
|
||||
count: 10,
|
||||
readFromDocValues: true,
|
||||
scripted: false,
|
||||
isMapped: true,
|
||||
} as DataViewField;
|
||||
|
||||
mockGetFieldByName.mockReturnValueOnce(field1);
|
||||
|
||||
dataView.getFieldByName = mockGetFieldByName;
|
||||
const result = getFormulaForPipelineAgg({
|
||||
agg: agg as SchemaConfig<
|
||||
| METRIC_TYPES.CUMULATIVE_SUM
|
||||
| METRIC_TYPES.DERIVATIVE
|
||||
| METRIC_TYPES.MOVING_FN
|
||||
| METRIC_TYPES.AVG_BUCKET
|
||||
| METRIC_TYPES.MAX_BUCKET
|
||||
| METRIC_TYPES.MIN_BUCKET
|
||||
| METRIC_TYPES.SUM_BUCKET
|
||||
>,
|
||||
aggs,
|
||||
dataView,
|
||||
});
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
test.each([
|
||||
[
|
||||
'null if agg is valid pipeline agg',
|
||||
aggs[0],
|
||||
() => {
|
||||
mockIsPipeline.mockReturnValue(true);
|
||||
mockGetMetricFromParentPipelineAgg.mockReturnValueOnce({
|
||||
aggType: METRIC_TYPES.AVG,
|
||||
aggParams: {
|
||||
field,
|
||||
},
|
||||
aggId: '2',
|
||||
});
|
||||
},
|
||||
],
|
||||
[
|
||||
'null if percentile rank agg is valid percentile agg',
|
||||
aggs[2],
|
||||
() => {
|
||||
mockIsPercentileAgg.mockReturnValue(true);
|
||||
},
|
||||
],
|
||||
[
|
||||
'null if agg is valid percentile rank agg',
|
||||
aggs[3],
|
||||
() => {
|
||||
mockIsPercentileRankAgg.mockReturnValue(true);
|
||||
},
|
||||
],
|
||||
[
|
||||
'null if agg is valid standart deviation agg',
|
||||
aggs[4],
|
||||
() => {
|
||||
mockIsStdDevAgg.mockReturnValue(true);
|
||||
},
|
||||
],
|
||||
['null if agg is valid other metric agg', aggs[5], () => {}],
|
||||
])(
|
||||
'should return %s, field type is supported and field is not aggregatable',
|
||||
(_, agg, actions) => {
|
||||
actions();
|
||||
const field1: DataViewField = {
|
||||
name: 'str',
|
||||
type: 'string',
|
||||
esTypes: ['text'],
|
||||
aggregatable: false,
|
||||
searchable: true,
|
||||
count: 10,
|
||||
readFromDocValues: true,
|
||||
scripted: false,
|
||||
isMapped: true,
|
||||
} as DataViewField;
|
||||
|
||||
mockGetFieldByName.mockReturnValueOnce(field1);
|
||||
|
||||
dataView.getFieldByName = mockGetFieldByName;
|
||||
const result = getFormulaForPipelineAgg({
|
||||
agg: agg as SchemaConfig<
|
||||
| METRIC_TYPES.CUMULATIVE_SUM
|
||||
| METRIC_TYPES.DERIVATIVE
|
||||
| METRIC_TYPES.MOVING_FN
|
||||
| METRIC_TYPES.AVG_BUCKET
|
||||
| METRIC_TYPES.MAX_BUCKET
|
||||
| METRIC_TYPES.MIN_BUCKET
|
||||
| METRIC_TYPES.SUM_BUCKET
|
||||
>,
|
||||
aggs,
|
||||
dataView,
|
||||
});
|
||||
expect(result).toBeNull();
|
||||
}
|
||||
);
|
||||
});
|
|
@ -0,0 +1,368 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { METRIC_TYPES } from '@kbn/data-plugin/common';
|
||||
import { stubLogstashDataView } from '@kbn/data-views-plugin/common/data_view.stub';
|
||||
import { SchemaConfig } from '../../..';
|
||||
import { convertMetricToColumns } from './metrics';
|
||||
|
||||
const mockConvertMetricAggregationColumnWithoutSpecialParams = jest.fn();
|
||||
const mockConvertToOtherParentPipelineAggColumns = jest.fn();
|
||||
const mockConvertToPercentileColumn = jest.fn();
|
||||
const mockConvertToPercentileRankColumn = jest.fn();
|
||||
const mockConvertToSiblingPipelineColumns = jest.fn();
|
||||
const mockConvertToStdDeviationFormulaColumns = jest.fn();
|
||||
const mockConvertToLastValueColumn = jest.fn();
|
||||
const mockConvertToCumulativeSumAggColumn = jest.fn();
|
||||
|
||||
jest.mock('../convert', () => ({
|
||||
convertMetricAggregationColumnWithoutSpecialParams: jest.fn(() =>
|
||||
mockConvertMetricAggregationColumnWithoutSpecialParams()
|
||||
),
|
||||
convertToOtherParentPipelineAggColumns: jest.fn(() =>
|
||||
mockConvertToOtherParentPipelineAggColumns()
|
||||
),
|
||||
convertToPercentileColumn: jest.fn(() => mockConvertToPercentileColumn()),
|
||||
convertToPercentileRankColumn: jest.fn(() => mockConvertToPercentileRankColumn()),
|
||||
convertToSiblingPipelineColumns: jest.fn(() => mockConvertToSiblingPipelineColumns()),
|
||||
convertToStdDeviationFormulaColumns: jest.fn(() => mockConvertToStdDeviationFormulaColumns()),
|
||||
convertToLastValueColumn: jest.fn(() => mockConvertToLastValueColumn()),
|
||||
convertToCumulativeSumAggColumn: jest.fn(() => mockConvertToCumulativeSumAggColumn()),
|
||||
}));
|
||||
|
||||
describe('convertMetricToColumns invalid cases', () => {
|
||||
const dataView = stubLogstashDataView;
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
beforeAll(() => {
|
||||
mockConvertMetricAggregationColumnWithoutSpecialParams.mockReturnValue(null);
|
||||
mockConvertToOtherParentPipelineAggColumns.mockReturnValue(null);
|
||||
mockConvertToPercentileColumn.mockReturnValue(null);
|
||||
mockConvertToPercentileRankColumn.mockReturnValue(null);
|
||||
mockConvertToSiblingPipelineColumns.mockReturnValue(null);
|
||||
mockConvertToStdDeviationFormulaColumns.mockReturnValue(null);
|
||||
mockConvertToLastValueColumn.mockReturnValue(null);
|
||||
mockConvertToCumulativeSumAggColumn.mockReturnValue(null);
|
||||
});
|
||||
|
||||
test.each<[string, Parameters<typeof convertMetricToColumns>, null, jest.Mock | undefined]>([
|
||||
[
|
||||
'null if agg is not supported',
|
||||
[{ aggType: METRIC_TYPES.GEO_BOUNDS } as unknown as SchemaConfig, dataView, []],
|
||||
null,
|
||||
undefined,
|
||||
],
|
||||
[
|
||||
'null if supported agg AVG is not valid',
|
||||
[{ aggType: METRIC_TYPES.AVG } as SchemaConfig, dataView, []],
|
||||
null,
|
||||
mockConvertMetricAggregationColumnWithoutSpecialParams,
|
||||
],
|
||||
[
|
||||
'null if supported agg MIN is not valid',
|
||||
[{ aggType: METRIC_TYPES.MIN } as SchemaConfig, dataView, []],
|
||||
null,
|
||||
mockConvertMetricAggregationColumnWithoutSpecialParams,
|
||||
],
|
||||
[
|
||||
'null if supported agg MAX is not valid',
|
||||
[{ aggType: METRIC_TYPES.MAX } as SchemaConfig, dataView, []],
|
||||
null,
|
||||
mockConvertMetricAggregationColumnWithoutSpecialParams,
|
||||
],
|
||||
[
|
||||
'null if supported agg SUM is not valid',
|
||||
[{ aggType: METRIC_TYPES.SUM } as SchemaConfig, dataView, []],
|
||||
null,
|
||||
mockConvertMetricAggregationColumnWithoutSpecialParams,
|
||||
],
|
||||
[
|
||||
'null if supported agg COUNT is not valid',
|
||||
[{ aggType: METRIC_TYPES.COUNT } as SchemaConfig, dataView, []],
|
||||
null,
|
||||
mockConvertMetricAggregationColumnWithoutSpecialParams,
|
||||
],
|
||||
[
|
||||
'null if supported agg CARDINALITY is not valid',
|
||||
[{ aggType: METRIC_TYPES.CARDINALITY } as SchemaConfig, dataView, []],
|
||||
null,
|
||||
mockConvertMetricAggregationColumnWithoutSpecialParams,
|
||||
],
|
||||
[
|
||||
'null if supported agg VALUE_COUNT is not valid',
|
||||
[{ aggType: METRIC_TYPES.VALUE_COUNT } as SchemaConfig, dataView, []],
|
||||
null,
|
||||
mockConvertMetricAggregationColumnWithoutSpecialParams,
|
||||
],
|
||||
[
|
||||
'null if supported agg MEDIAN is not valid',
|
||||
[{ aggType: METRIC_TYPES.MEDIAN } as SchemaConfig, dataView, []],
|
||||
null,
|
||||
mockConvertMetricAggregationColumnWithoutSpecialParams,
|
||||
],
|
||||
[
|
||||
'null if supported agg STD_DEV is not valid',
|
||||
[{ aggType: METRIC_TYPES.STD_DEV } as SchemaConfig, dataView, []],
|
||||
null,
|
||||
mockConvertToStdDeviationFormulaColumns,
|
||||
],
|
||||
[
|
||||
'null if supported agg PERCENTILES is not valid',
|
||||
[{ aggType: METRIC_TYPES.PERCENTILES } as SchemaConfig, dataView, []],
|
||||
null,
|
||||
mockConvertToPercentileColumn,
|
||||
],
|
||||
[
|
||||
'null if supported agg SINGLE_PERCENTILE is not valid',
|
||||
[{ aggType: METRIC_TYPES.SINGLE_PERCENTILE } as SchemaConfig, dataView, []],
|
||||
null,
|
||||
mockConvertToPercentileColumn,
|
||||
],
|
||||
[
|
||||
'null if supported agg PERCENTILE_RANKS is not valid',
|
||||
[{ aggType: METRIC_TYPES.PERCENTILE_RANKS } as SchemaConfig, dataView, []],
|
||||
null,
|
||||
mockConvertToPercentileRankColumn,
|
||||
],
|
||||
[
|
||||
'null if supported agg SINGLE_PERCENTILE_RANK is not valid',
|
||||
[{ aggType: METRIC_TYPES.SINGLE_PERCENTILE_RANK } as SchemaConfig, dataView, []],
|
||||
null,
|
||||
mockConvertToPercentileRankColumn,
|
||||
],
|
||||
[
|
||||
'null if supported agg TOP_HITS is not valid',
|
||||
[{ aggType: METRIC_TYPES.TOP_HITS } as SchemaConfig, dataView, []],
|
||||
null,
|
||||
mockConvertToLastValueColumn,
|
||||
],
|
||||
[
|
||||
'null if supported agg TOP_METRICS is not valid',
|
||||
[{ aggType: METRIC_TYPES.TOP_METRICS } as SchemaConfig, dataView, []],
|
||||
null,
|
||||
mockConvertToLastValueColumn,
|
||||
],
|
||||
[
|
||||
'null if supported agg CUMULATIVE_SUM is not valid',
|
||||
[{ aggType: METRIC_TYPES.CUMULATIVE_SUM } as SchemaConfig, dataView, []],
|
||||
null,
|
||||
mockConvertToCumulativeSumAggColumn,
|
||||
],
|
||||
[
|
||||
'null if supported agg DERIVATIVE is not valid',
|
||||
[{ aggType: METRIC_TYPES.DERIVATIVE } as SchemaConfig, dataView, []],
|
||||
null,
|
||||
mockConvertToOtherParentPipelineAggColumns,
|
||||
],
|
||||
[
|
||||
'null if supported agg MOVING_FN is not valid',
|
||||
[{ aggType: METRIC_TYPES.MOVING_FN } as SchemaConfig, dataView, []],
|
||||
null,
|
||||
mockConvertToOtherParentPipelineAggColumns,
|
||||
],
|
||||
[
|
||||
'null if supported agg SUM_BUCKET is not valid',
|
||||
[{ aggType: METRIC_TYPES.SUM_BUCKET } as SchemaConfig, dataView, []],
|
||||
null,
|
||||
mockConvertToSiblingPipelineColumns,
|
||||
],
|
||||
[
|
||||
'null if supported agg MIN_BUCKET is not valid',
|
||||
[{ aggType: METRIC_TYPES.MIN_BUCKET } as SchemaConfig, dataView, []],
|
||||
null,
|
||||
mockConvertToSiblingPipelineColumns,
|
||||
],
|
||||
[
|
||||
'null if supported agg MAX_BUCKET is not valid',
|
||||
[{ aggType: METRIC_TYPES.MAX_BUCKET } as SchemaConfig, dataView, []],
|
||||
null,
|
||||
mockConvertToSiblingPipelineColumns,
|
||||
],
|
||||
[
|
||||
'null if supported agg AVG_BUCKET is not valid',
|
||||
[{ aggType: METRIC_TYPES.AVG_BUCKET } as SchemaConfig, dataView, []],
|
||||
null,
|
||||
mockConvertToSiblingPipelineColumns,
|
||||
],
|
||||
[
|
||||
'null if supported agg SERIAL_DIFF is not valid',
|
||||
[{ aggType: METRIC_TYPES.SERIAL_DIFF } as SchemaConfig, dataView, []],
|
||||
null,
|
||||
undefined,
|
||||
],
|
||||
])('should return %s', (_, input, expected, mock) => {
|
||||
expect(convertMetricToColumns(...input)).toBeNull();
|
||||
|
||||
if (mock) {
|
||||
expect(mock).toBeCalledTimes(1);
|
||||
}
|
||||
});
|
||||
});
|
||||
describe('convertMetricToColumns valid cases', () => {
|
||||
const dataView = stubLogstashDataView;
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
const result = [{}];
|
||||
|
||||
beforeAll(() => {
|
||||
mockConvertMetricAggregationColumnWithoutSpecialParams.mockReturnValue(result);
|
||||
mockConvertToOtherParentPipelineAggColumns.mockReturnValue(result);
|
||||
mockConvertToPercentileColumn.mockReturnValue(result);
|
||||
mockConvertToPercentileRankColumn.mockReturnValue(result);
|
||||
mockConvertToSiblingPipelineColumns.mockReturnValue(result);
|
||||
mockConvertToStdDeviationFormulaColumns.mockReturnValue(result);
|
||||
mockConvertToLastValueColumn.mockReturnValue(result);
|
||||
mockConvertToCumulativeSumAggColumn.mockReturnValue(result);
|
||||
});
|
||||
|
||||
test.each<[string, Parameters<typeof convertMetricToColumns>, Array<{}>, jest.Mock]>([
|
||||
[
|
||||
'array of columns if supported agg AVG is valid',
|
||||
[{ aggType: METRIC_TYPES.AVG } as SchemaConfig, dataView, []],
|
||||
result,
|
||||
mockConvertMetricAggregationColumnWithoutSpecialParams,
|
||||
],
|
||||
[
|
||||
'array of columns if supported agg MIN is valid',
|
||||
[{ aggType: METRIC_TYPES.MIN } as SchemaConfig, dataView, []],
|
||||
result,
|
||||
mockConvertMetricAggregationColumnWithoutSpecialParams,
|
||||
],
|
||||
[
|
||||
'array of columns if supported agg MAX is valid',
|
||||
[{ aggType: METRIC_TYPES.MAX } as SchemaConfig, dataView, []],
|
||||
result,
|
||||
mockConvertMetricAggregationColumnWithoutSpecialParams,
|
||||
],
|
||||
[
|
||||
'array of columns if supported agg SUM is valid',
|
||||
[{ aggType: METRIC_TYPES.SUM } as SchemaConfig, dataView, []],
|
||||
result,
|
||||
mockConvertMetricAggregationColumnWithoutSpecialParams,
|
||||
],
|
||||
[
|
||||
'array of columns if supported agg COUNT is valid',
|
||||
[{ aggType: METRIC_TYPES.COUNT } as SchemaConfig, dataView, []],
|
||||
result,
|
||||
mockConvertMetricAggregationColumnWithoutSpecialParams,
|
||||
],
|
||||
[
|
||||
'array of columns if supported agg CARDINALITY is valid',
|
||||
[{ aggType: METRIC_TYPES.CARDINALITY } as SchemaConfig, dataView, []],
|
||||
result,
|
||||
mockConvertMetricAggregationColumnWithoutSpecialParams,
|
||||
],
|
||||
[
|
||||
'array of columns if supported agg VALUE_COUNT is valid',
|
||||
[{ aggType: METRIC_TYPES.VALUE_COUNT } as SchemaConfig, dataView, []],
|
||||
result,
|
||||
mockConvertMetricAggregationColumnWithoutSpecialParams,
|
||||
],
|
||||
[
|
||||
'array of columns if supported agg MEDIAN is valid',
|
||||
[{ aggType: METRIC_TYPES.MEDIAN } as SchemaConfig, dataView, []],
|
||||
result,
|
||||
mockConvertMetricAggregationColumnWithoutSpecialParams,
|
||||
],
|
||||
[
|
||||
'array of columns if supported agg STD_DEV is valid',
|
||||
[{ aggType: METRIC_TYPES.STD_DEV } as SchemaConfig, dataView, []],
|
||||
result,
|
||||
mockConvertToStdDeviationFormulaColumns,
|
||||
],
|
||||
[
|
||||
'array of columns if supported agg PERCENTILES is valid',
|
||||
[{ aggType: METRIC_TYPES.PERCENTILES } as SchemaConfig, dataView, []],
|
||||
result,
|
||||
mockConvertToPercentileColumn,
|
||||
],
|
||||
[
|
||||
'array of columns if supported agg SINGLE_PERCENTILE is valid',
|
||||
[{ aggType: METRIC_TYPES.SINGLE_PERCENTILE } as SchemaConfig, dataView, []],
|
||||
result,
|
||||
mockConvertToPercentileColumn,
|
||||
],
|
||||
[
|
||||
'array of columns if supported agg PERCENTILE_RANKS is valid',
|
||||
[{ aggType: METRIC_TYPES.PERCENTILE_RANKS } as SchemaConfig, dataView, []],
|
||||
result,
|
||||
mockConvertToPercentileRankColumn,
|
||||
],
|
||||
[
|
||||
'array of columns if supported agg SINGLE_PERCENTILE_RANK is valid',
|
||||
[{ aggType: METRIC_TYPES.SINGLE_PERCENTILE_RANK } as SchemaConfig, dataView, []],
|
||||
result,
|
||||
mockConvertToPercentileRankColumn,
|
||||
],
|
||||
[
|
||||
'array of columns if supported agg TOP_HITS is valid',
|
||||
[{ aggType: METRIC_TYPES.TOP_HITS } as SchemaConfig, dataView, []],
|
||||
result,
|
||||
mockConvertToLastValueColumn,
|
||||
],
|
||||
[
|
||||
'array of columns if supported agg TOP_METRICS is valid',
|
||||
[{ aggType: METRIC_TYPES.TOP_METRICS } as SchemaConfig, dataView, []],
|
||||
result,
|
||||
mockConvertToLastValueColumn,
|
||||
],
|
||||
[
|
||||
'array of columns if supported agg CUMULATIVE_SUM is valid',
|
||||
[{ aggType: METRIC_TYPES.CUMULATIVE_SUM } as SchemaConfig, dataView, []],
|
||||
result,
|
||||
mockConvertToCumulativeSumAggColumn,
|
||||
],
|
||||
[
|
||||
'array of columns if supported agg DERIVATIVE is valid',
|
||||
[{ aggType: METRIC_TYPES.DERIVATIVE } as SchemaConfig, dataView, []],
|
||||
result,
|
||||
mockConvertToOtherParentPipelineAggColumns,
|
||||
],
|
||||
[
|
||||
'array of columns if supported agg MOVING_FN is valid',
|
||||
[{ aggType: METRIC_TYPES.MOVING_FN } as SchemaConfig, dataView, []],
|
||||
result,
|
||||
mockConvertToOtherParentPipelineAggColumns,
|
||||
],
|
||||
[
|
||||
'array of columns if supported agg SUM_BUCKET is valid',
|
||||
[{ aggType: METRIC_TYPES.SUM_BUCKET } as SchemaConfig, dataView, []],
|
||||
result,
|
||||
mockConvertToSiblingPipelineColumns,
|
||||
],
|
||||
[
|
||||
'array of columns if supported agg MIN_BUCKET is valid',
|
||||
[{ aggType: METRIC_TYPES.MIN_BUCKET } as SchemaConfig, dataView, []],
|
||||
result,
|
||||
mockConvertToSiblingPipelineColumns,
|
||||
],
|
||||
[
|
||||
'array of columns if supported agg MAX_BUCKET is valid',
|
||||
[{ aggType: METRIC_TYPES.MAX_BUCKET } as SchemaConfig, dataView, []],
|
||||
result,
|
||||
mockConvertToSiblingPipelineColumns,
|
||||
],
|
||||
[
|
||||
'array of columns if supported agg AVG_BUCKET is valid',
|
||||
[{ aggType: METRIC_TYPES.AVG_BUCKET } as SchemaConfig, dataView, []],
|
||||
result,
|
||||
mockConvertToSiblingPipelineColumns,
|
||||
],
|
||||
])('should return %s', (_, input, expected, mock) => {
|
||||
expect(convertMetricToColumns(...input)).toEqual(expected.map(expect.objectContaining));
|
||||
if (mock) {
|
||||
expect(mock).toBeCalledTimes(1);
|
||||
}
|
||||
});
|
||||
});
|
|
@ -0,0 +1,98 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { stubLogstashDataView } from '@kbn/data-views-plugin/common/data_view.stub';
|
||||
import { METRIC_TYPES } from '@kbn/data-plugin/common';
|
||||
import { getPercentageColumnFormulaColumn } from './percentage_formula';
|
||||
import { FormulaColumn } from '../../types';
|
||||
import { SchemaConfig } from '../../..';
|
||||
|
||||
const mockGetFormulaForAgg = jest.fn();
|
||||
const mockCreateFormulaColumn = jest.fn();
|
||||
|
||||
jest.mock('./formula', () => ({
|
||||
getFormulaForAgg: jest.fn(() => mockGetFormulaForAgg()),
|
||||
}));
|
||||
|
||||
jest.mock('../convert', () => ({
|
||||
createFormulaColumn: jest.fn((formula) => mockCreateFormulaColumn(formula)),
|
||||
}));
|
||||
|
||||
describe('getPercentageColumnFormulaColumn', () => {
|
||||
const dataView = stubLogstashDataView;
|
||||
const field = stubLogstashDataView.fields[0].name;
|
||||
const aggs: Array<SchemaConfig<METRIC_TYPES>> = [
|
||||
{
|
||||
aggId: '1',
|
||||
aggType: METRIC_TYPES.AVG,
|
||||
aggParams: { field },
|
||||
accessor: 0,
|
||||
params: {},
|
||||
label: 'average',
|
||||
format: {},
|
||||
},
|
||||
];
|
||||
|
||||
afterEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
test.each<
|
||||
[
|
||||
string,
|
||||
Parameters<typeof getPercentageColumnFormulaColumn>,
|
||||
() => void,
|
||||
Partial<FormulaColumn> | null
|
||||
]
|
||||
>([
|
||||
[
|
||||
'null if cannot build formula for provided agg',
|
||||
[{ agg: aggs[0], aggs, dataView }],
|
||||
() => {
|
||||
mockGetFormulaForAgg.mockReturnValue(null);
|
||||
},
|
||||
null,
|
||||
],
|
||||
[
|
||||
'null if cannot create formula column for provided arguments',
|
||||
[{ agg: aggs[0], aggs, dataView }],
|
||||
() => {
|
||||
mockGetFormulaForAgg.mockReturnValue('test-formula');
|
||||
mockCreateFormulaColumn.mockReturnValue(null);
|
||||
},
|
||||
null,
|
||||
],
|
||||
[
|
||||
'formula column if provided arguments are valid',
|
||||
[{ agg: aggs[0], aggs, dataView }],
|
||||
() => {
|
||||
mockGetFormulaForAgg.mockReturnValue('test-formula');
|
||||
mockCreateFormulaColumn.mockImplementation((formula) => ({
|
||||
operationType: 'formula',
|
||||
params: { formula },
|
||||
label: 'Average',
|
||||
}));
|
||||
},
|
||||
{
|
||||
operationType: 'formula',
|
||||
params: {
|
||||
formula: `(test-formula) / overall_sum(test-formula)`,
|
||||
format: { id: 'percent' },
|
||||
},
|
||||
label: `Average percentages`,
|
||||
},
|
||||
],
|
||||
])('should return %s', (_, input, actions, expected) => {
|
||||
actions();
|
||||
if (expected === null) {
|
||||
expect(getPercentageColumnFormulaColumn(...input)).toBeNull();
|
||||
} else {
|
||||
expect(getPercentageColumnFormulaColumn(...input)).toEqual(expect.objectContaining(expected));
|
||||
}
|
||||
});
|
||||
});
|
|
@ -0,0 +1,521 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { stubLogstashDataView } from '@kbn/data-views-plugin/common/data_view.stub';
|
||||
import { IAggConfig, METRIC_TYPES } from '@kbn/data-plugin/common';
|
||||
import { AggBasedColumn, ColumnWithMeta, Operations } from '../..';
|
||||
import { SchemaConfig } from '../../types';
|
||||
import {
|
||||
getCustomBucketsFromSiblingAggs,
|
||||
getFieldNameFromField,
|
||||
getLabel,
|
||||
getLabelForPercentile,
|
||||
getMetricFromParentPipelineAgg,
|
||||
getValidColumns,
|
||||
isColumnWithMeta,
|
||||
isMetricAggWithoutParams,
|
||||
isPercentileAgg,
|
||||
isPercentileRankAgg,
|
||||
isPipeline,
|
||||
isSchemaConfig,
|
||||
isSiblingPipeline,
|
||||
isStdDevAgg,
|
||||
} from './utils';
|
||||
|
||||
describe('getLabel', () => {
|
||||
const label = 'some label';
|
||||
const customLabel = 'some custom label';
|
||||
|
||||
const agg: SchemaConfig<METRIC_TYPES.AVG> = {
|
||||
accessor: 0,
|
||||
label,
|
||||
format: {
|
||||
id: undefined,
|
||||
params: undefined,
|
||||
},
|
||||
params: {},
|
||||
aggType: METRIC_TYPES.AVG,
|
||||
aggId: 'id',
|
||||
aggParams: { field: 'some-field' },
|
||||
};
|
||||
|
||||
test('should return label', () => {
|
||||
const { aggParams, ...aggWithoutAggParams } = agg;
|
||||
expect(getLabel(aggWithoutAggParams)).toEqual(label);
|
||||
expect(getLabel(agg)).toEqual(label);
|
||||
expect(getLabel({ ...agg, aggParams: { ...aggParams!, customLabel: undefined } })).toEqual(
|
||||
label
|
||||
);
|
||||
});
|
||||
|
||||
test('should return customLabel', () => {
|
||||
const aggParams = { ...agg.aggParams!, customLabel };
|
||||
const aggWithCustomLabel = { ...agg, aggParams };
|
||||
expect(getLabel(aggWithCustomLabel)).toEqual(customLabel);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getLabelForPercentile', () => {
|
||||
const label = 'some label';
|
||||
const customLabel = 'some custom label';
|
||||
|
||||
const agg: SchemaConfig<METRIC_TYPES.PERCENTILES> = {
|
||||
accessor: 0,
|
||||
label,
|
||||
format: {
|
||||
id: undefined,
|
||||
params: undefined,
|
||||
},
|
||||
params: {},
|
||||
aggType: METRIC_TYPES.PERCENTILES,
|
||||
aggId: 'id',
|
||||
aggParams: { field: 'some-field' },
|
||||
};
|
||||
|
||||
test('should return empty string if no custom label is specified', () => {
|
||||
const { aggParams, ...aggWithoutAggParams } = agg;
|
||||
expect(getLabelForPercentile(aggWithoutAggParams)).toEqual('');
|
||||
expect(getLabel({ ...agg, aggParams: { ...aggParams!, customLabel: '' } })).toEqual('');
|
||||
});
|
||||
|
||||
test('should return label if custom label is specified', () => {
|
||||
const aggParams = { ...agg.aggParams!, customLabel };
|
||||
const aggWithCustomLabel = { ...agg, aggParams };
|
||||
expect(getLabelForPercentile(aggWithCustomLabel)).toEqual(label);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getValidColumns', () => {
|
||||
const dataView = stubLogstashDataView;
|
||||
const columns: AggBasedColumn[] = [
|
||||
{
|
||||
operationType: Operations.AVERAGE,
|
||||
sourceField: dataView.fields[0].name,
|
||||
columnId: 'some-id-0',
|
||||
dataType: 'number',
|
||||
params: {},
|
||||
meta: { aggId: 'aggId-0' },
|
||||
isSplit: false,
|
||||
isBucketed: true,
|
||||
},
|
||||
{
|
||||
operationType: Operations.SUM,
|
||||
sourceField: dataView.fields[0].name,
|
||||
columnId: 'some-id-1',
|
||||
dataType: 'number',
|
||||
params: {},
|
||||
meta: { aggId: 'aggId-1' },
|
||||
isSplit: false,
|
||||
isBucketed: true,
|
||||
},
|
||||
];
|
||||
test.each<[string, Parameters<typeof getValidColumns>, AggBasedColumn[] | null]>([
|
||||
['null if array contains null', [[null, ...columns]], null],
|
||||
['null if columns is null', [null], null],
|
||||
['null if columns is undefined', [undefined], null],
|
||||
['columns', [columns], columns],
|
||||
['columns if one column is passed', [columns[0]], [columns[0]]],
|
||||
])('should return %s', (_, input, expected) => {
|
||||
if (expected === null) {
|
||||
expect(getValidColumns(...input)).toBeNull();
|
||||
} else {
|
||||
expect(getValidColumns(...input)).toEqual(expect.objectContaining(expected));
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('getFieldNameFromField', () => {
|
||||
test('should return null if no field is passed', () => {
|
||||
expect(getFieldNameFromField(undefined)).toBeNull();
|
||||
});
|
||||
|
||||
test('should return field name if field is string', () => {
|
||||
const fieldName = 'some-field-name';
|
||||
expect(getFieldNameFromField(fieldName)).toEqual(fieldName);
|
||||
});
|
||||
|
||||
test('should return field name if field is DataViewField', () => {
|
||||
const field = stubLogstashDataView.fields[0];
|
||||
expect(getFieldNameFromField(field)).toEqual(field.name);
|
||||
});
|
||||
});
|
||||
|
||||
describe('isSchemaConfig', () => {
|
||||
const iAggConfig = {
|
||||
id: '',
|
||||
enabled: false,
|
||||
params: {},
|
||||
} as IAggConfig;
|
||||
|
||||
const schemaConfig: SchemaConfig<METRIC_TYPES.AVG> = {
|
||||
accessor: 0,
|
||||
label: '',
|
||||
format: {
|
||||
id: undefined,
|
||||
params: undefined,
|
||||
},
|
||||
params: {},
|
||||
aggType: METRIC_TYPES.AVG,
|
||||
};
|
||||
|
||||
test('should be false if is IAggConfig', () => {
|
||||
expect(isSchemaConfig(iAggConfig)).toBeFalsy();
|
||||
});
|
||||
|
||||
test('should be false if is SchemaConfig', () => {
|
||||
expect(isSchemaConfig(schemaConfig)).toBeTruthy();
|
||||
});
|
||||
});
|
||||
|
||||
describe('isColumnWithMeta', () => {
|
||||
const column: AggBasedColumn = {
|
||||
sourceField: '',
|
||||
columnId: '',
|
||||
operationType: 'terms',
|
||||
isBucketed: false,
|
||||
isSplit: false,
|
||||
dataType: 'string',
|
||||
} as AggBasedColumn;
|
||||
|
||||
const columnWithMeta: ColumnWithMeta = {
|
||||
sourceField: '',
|
||||
columnId: '',
|
||||
operationType: 'average',
|
||||
isBucketed: false,
|
||||
isSplit: false,
|
||||
dataType: 'string',
|
||||
params: {},
|
||||
meta: { aggId: 'some-agg-id' },
|
||||
};
|
||||
|
||||
test('should return false if column without meta', () => {
|
||||
expect(isColumnWithMeta(column)).toBeFalsy();
|
||||
});
|
||||
|
||||
test('should return true if column with meta', () => {
|
||||
expect(isColumnWithMeta(columnWithMeta)).toBeTruthy();
|
||||
});
|
||||
});
|
||||
|
||||
describe('isSiblingPipeline', () => {
|
||||
const metric: Omit<SchemaConfig, 'aggType'> = {
|
||||
accessor: 0,
|
||||
label: '',
|
||||
format: {
|
||||
id: undefined,
|
||||
params: undefined,
|
||||
},
|
||||
params: {},
|
||||
};
|
||||
|
||||
test.each<[METRIC_TYPES, boolean]>([
|
||||
[METRIC_TYPES.AVG_BUCKET, true],
|
||||
[METRIC_TYPES.SUM_BUCKET, true],
|
||||
[METRIC_TYPES.MAX_BUCKET, true],
|
||||
[METRIC_TYPES.MIN_BUCKET, true],
|
||||
[METRIC_TYPES.CUMULATIVE_SUM, false],
|
||||
])('for %s should return %s', (aggType, expected) => {
|
||||
expect(isSiblingPipeline({ ...metric, aggType } as SchemaConfig<typeof aggType>)).toBe(
|
||||
expected
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('isPipeline', () => {
|
||||
const metric: Omit<SchemaConfig, 'aggType'> = {
|
||||
accessor: 0,
|
||||
label: '',
|
||||
format: {
|
||||
id: undefined,
|
||||
params: undefined,
|
||||
},
|
||||
params: {},
|
||||
};
|
||||
|
||||
test.each<[METRIC_TYPES, boolean]>([
|
||||
[METRIC_TYPES.AVG_BUCKET, true],
|
||||
[METRIC_TYPES.SUM_BUCKET, true],
|
||||
[METRIC_TYPES.MAX_BUCKET, true],
|
||||
[METRIC_TYPES.MIN_BUCKET, true],
|
||||
[METRIC_TYPES.CUMULATIVE_SUM, true],
|
||||
[METRIC_TYPES.DERIVATIVE, true],
|
||||
[METRIC_TYPES.MOVING_FN, true],
|
||||
[METRIC_TYPES.AVG, false],
|
||||
])('for %s should return %s', (aggType, expected) => {
|
||||
expect(isPipeline({ ...metric, aggType } as SchemaConfig<typeof aggType>)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('isMetricAggWithoutParams', () => {
|
||||
const metric: Omit<SchemaConfig, 'aggType'> = {
|
||||
accessor: 0,
|
||||
label: '',
|
||||
format: {
|
||||
id: undefined,
|
||||
params: undefined,
|
||||
},
|
||||
params: {},
|
||||
};
|
||||
|
||||
test.each<[METRIC_TYPES, boolean]>([
|
||||
[METRIC_TYPES.AVG, true],
|
||||
[METRIC_TYPES.COUNT, true],
|
||||
[METRIC_TYPES.MAX, true],
|
||||
[METRIC_TYPES.MIN, true],
|
||||
[METRIC_TYPES.SUM, true],
|
||||
[METRIC_TYPES.MEDIAN, true],
|
||||
[METRIC_TYPES.CARDINALITY, true],
|
||||
[METRIC_TYPES.VALUE_COUNT, true],
|
||||
[METRIC_TYPES.DERIVATIVE, false],
|
||||
])('for %s should return %s', (aggType, expected) => {
|
||||
expect(isMetricAggWithoutParams({ ...metric, aggType } as SchemaConfig<typeof aggType>)).toBe(
|
||||
expected
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('isPercentileAgg', () => {
|
||||
const metric: Omit<SchemaConfig, 'aggType'> = {
|
||||
accessor: 0,
|
||||
label: '',
|
||||
format: {
|
||||
id: undefined,
|
||||
params: undefined,
|
||||
},
|
||||
params: {},
|
||||
};
|
||||
|
||||
test.each<[METRIC_TYPES, boolean]>([
|
||||
[METRIC_TYPES.PERCENTILES, true],
|
||||
[METRIC_TYPES.DERIVATIVE, false],
|
||||
])('for %s should return %s', (aggType, expected) => {
|
||||
expect(isPercentileAgg({ ...metric, aggType } as SchemaConfig<typeof aggType>)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('isPercentileRankAgg', () => {
|
||||
const metric: Omit<SchemaConfig, 'aggType'> = {
|
||||
accessor: 0,
|
||||
label: '',
|
||||
format: {
|
||||
id: undefined,
|
||||
params: undefined,
|
||||
},
|
||||
params: {},
|
||||
};
|
||||
|
||||
test.each<[METRIC_TYPES, boolean]>([
|
||||
[METRIC_TYPES.PERCENTILE_RANKS, true],
|
||||
[METRIC_TYPES.PERCENTILES, false],
|
||||
])('for %s should return %s', (aggType, expected) => {
|
||||
expect(isPercentileRankAgg({ ...metric, aggType } as SchemaConfig<typeof aggType>)).toBe(
|
||||
expected
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('isStdDevAgg', () => {
|
||||
const metric: Omit<SchemaConfig, 'aggType'> = {
|
||||
accessor: 0,
|
||||
label: '',
|
||||
format: {
|
||||
id: undefined,
|
||||
params: undefined,
|
||||
},
|
||||
params: {},
|
||||
};
|
||||
|
||||
test.each<[METRIC_TYPES, boolean]>([
|
||||
[METRIC_TYPES.STD_DEV, true],
|
||||
[METRIC_TYPES.PERCENTILES, false],
|
||||
])('for %s should return %s', (aggType, expected) => {
|
||||
expect(isStdDevAgg({ ...metric, aggType } as SchemaConfig<typeof aggType>)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getCustomBucketsFromSiblingAggs', () => {
|
||||
const bucket1 = {
|
||||
id: 'some-id',
|
||||
params: { type: 'some-type' },
|
||||
type: 'type1',
|
||||
enabled: true,
|
||||
} as unknown as IAggConfig;
|
||||
const serialize1 = () => bucket1;
|
||||
|
||||
const bucket2 = {
|
||||
id: 'some-id-1',
|
||||
params: { type: 'some-type-1' },
|
||||
type: 'type2',
|
||||
enabled: false,
|
||||
} as unknown as IAggConfig;
|
||||
const serialize2 = () => bucket2;
|
||||
|
||||
const bucketWithSerialize1 = { ...bucket1, serialize: serialize1 } as unknown as IAggConfig;
|
||||
const metric1: SchemaConfig<METRIC_TYPES.AVG_BUCKET> = {
|
||||
accessor: 0,
|
||||
label: '',
|
||||
format: {
|
||||
id: undefined,
|
||||
params: undefined,
|
||||
},
|
||||
params: {},
|
||||
aggType: METRIC_TYPES.AVG_BUCKET,
|
||||
aggId: 'some-agg-id',
|
||||
aggParams: {
|
||||
customBucket: bucketWithSerialize1,
|
||||
},
|
||||
};
|
||||
|
||||
const bucketWithSerialize2 = { ...bucket2, serialize: serialize2 } as unknown as IAggConfig;
|
||||
const metric2: SchemaConfig<METRIC_TYPES.AVG_BUCKET> = {
|
||||
accessor: 0,
|
||||
label: '',
|
||||
format: {
|
||||
id: undefined,
|
||||
params: undefined,
|
||||
},
|
||||
params: {},
|
||||
aggType: METRIC_TYPES.AVG_BUCKET,
|
||||
aggId: 'some-agg-id',
|
||||
aggParams: {
|
||||
customBucket: bucketWithSerialize2,
|
||||
},
|
||||
};
|
||||
const bucket3 = { ...bucket1, id: 'other id' } as unknown as IAggConfig;
|
||||
const serialize3 = () => bucket3;
|
||||
|
||||
const bucketWithSerialize3 = { ...bucket3, serialize: serialize3 } as unknown as IAggConfig;
|
||||
const metric3: SchemaConfig<METRIC_TYPES.AVG_BUCKET> = {
|
||||
accessor: 0,
|
||||
label: '',
|
||||
format: {
|
||||
id: undefined,
|
||||
params: undefined,
|
||||
},
|
||||
params: {},
|
||||
aggType: METRIC_TYPES.AVG_BUCKET,
|
||||
aggId: 'some-agg-id',
|
||||
aggParams: {
|
||||
customBucket: bucketWithSerialize3,
|
||||
},
|
||||
};
|
||||
|
||||
test("should filter out duplicated custom buckets, ignoring id's", () => {
|
||||
expect(getCustomBucketsFromSiblingAggs([metric1, metric2, metric3])).toEqual([
|
||||
bucketWithSerialize1,
|
||||
bucketWithSerialize2,
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
// Shared mock for convertToSchemaConfig; suites below control its return value.
const mockConvertToSchemaConfig = jest.fn();

// NOTE: jest.mock calls are hoisted above imports, so the factory must call
// the mock lazily through a wrapper rather than referencing it directly.
jest.mock('../../vis_schemas', () => ({
  convertToSchemaConfig: jest.fn(() => mockConvertToSchemaConfig()),
}));
|
||||
|
||||
describe('getMetricFromParentPipelineAgg', () => {
|
||||
const metricAggId = 'agg-id-0';
|
||||
const aggId = 'agg-id-1';
|
||||
const plainAgg: SchemaConfig<METRIC_TYPES.AVG> = {
|
||||
accessor: 0,
|
||||
label: 'some-label',
|
||||
format: {
|
||||
id: undefined,
|
||||
params: undefined,
|
||||
},
|
||||
params: {},
|
||||
aggType: METRIC_TYPES.AVG,
|
||||
aggId: metricAggId,
|
||||
};
|
||||
const agg: SchemaConfig<METRIC_TYPES.AVG_BUCKET> = {
|
||||
accessor: 0,
|
||||
label: '',
|
||||
format: {
|
||||
id: undefined,
|
||||
params: undefined,
|
||||
},
|
||||
params: {},
|
||||
aggType: METRIC_TYPES.AVG_BUCKET,
|
||||
aggParams: { customMetric: {} as IAggConfig },
|
||||
aggId,
|
||||
};
|
||||
|
||||
const parentPipelineAgg: SchemaConfig<METRIC_TYPES.CUMULATIVE_SUM> = {
|
||||
accessor: 0,
|
||||
label: '',
|
||||
format: {
|
||||
id: undefined,
|
||||
params: undefined,
|
||||
},
|
||||
params: {},
|
||||
aggType: METRIC_TYPES.CUMULATIVE_SUM,
|
||||
aggParams: { metricAgg: 'custom' },
|
||||
aggId,
|
||||
};
|
||||
|
||||
const metric = { aggType: METRIC_TYPES.CUMULATIVE_SUM };
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
beforeAll(() => {
|
||||
mockConvertToSchemaConfig.mockReturnValue(metric);
|
||||
});
|
||||
|
||||
test('should return null if aggParams are undefined', () => {
|
||||
expect(getMetricFromParentPipelineAgg({ ...agg, aggParams: undefined }, [])).toBeNull();
|
||||
expect(mockConvertToSchemaConfig).toBeCalledTimes(0);
|
||||
});
|
||||
|
||||
test('should return null if is sibling pipeline agg and custom metric is not defined', () => {
|
||||
expect(
|
||||
getMetricFromParentPipelineAgg({ ...agg, aggParams: { customMetric: undefined } }, [])
|
||||
).toBeNull();
|
||||
expect(mockConvertToSchemaConfig).toBeCalledTimes(0);
|
||||
});
|
||||
|
||||
test('should return null if is parent pipeline agg, metricAgg is custom and custom metric is not defined', () => {
|
||||
expect(getMetricFromParentPipelineAgg(parentPipelineAgg, [])).toBeNull();
|
||||
expect(mockConvertToSchemaConfig).toBeCalledTimes(0);
|
||||
});
|
||||
|
||||
test('should return metric if is parent pipeline agg, metricAgg is equal to aggId and custom metric is not defined', () => {
|
||||
const parentPipelineAggWithLink = {
|
||||
...parentPipelineAgg,
|
||||
aggParams: {
|
||||
metricAgg: metricAggId,
|
||||
},
|
||||
};
|
||||
expect(
|
||||
getMetricFromParentPipelineAgg(parentPipelineAggWithLink, [
|
||||
parentPipelineAggWithLink,
|
||||
plainAgg,
|
||||
])
|
||||
).toEqual(plainAgg);
|
||||
expect(mockConvertToSchemaConfig).toBeCalledTimes(0);
|
||||
});
|
||||
|
||||
test('should return metric if sibling pipeline agg with custom metric', () => {
|
||||
expect(getMetricFromParentPipelineAgg(agg, [agg])).toEqual(metric);
|
||||
expect(mockConvertToSchemaConfig).toBeCalledTimes(1);
|
||||
});
|
||||
|
||||
test('should return metric if parent pipeline agg with custom metric', () => {
|
||||
expect(
|
||||
getMetricFromParentPipelineAgg(
|
||||
{
|
||||
...parentPipelineAgg,
|
||||
aggParams: { ...parentPipelineAgg.aggParams, customMetric: {} as IAggConfig },
|
||||
},
|
||||
[agg]
|
||||
)
|
||||
).toEqual(metric);
|
||||
expect(mockConvertToSchemaConfig).toBeCalledTimes(1);
|
||||
});
|
||||
});
|
|
@ -150,7 +150,7 @@ export const isStdDevAgg = (metric: SchemaConfig): metric is SchemaConfig<METRIC
|
|||
return metric.aggType === METRIC_TYPES.STD_DEV;
|
||||
};
|
||||
|
||||
export const getCutomBucketsFromSiblingAggs = (metrics: SchemaConfig[]) => {
|
||||
export const getCustomBucketsFromSiblingAggs = (metrics: SchemaConfig[]) => {
|
||||
return metrics.reduce<IAggConfig[]>((acc, metric) => {
|
||||
if (
|
||||
isSiblingPipeline(metric) &&
|
||||
|
|
|
@ -40,7 +40,7 @@ jest.mock('../../common/convert_to_lens/lib/buckets', () => ({
|
|||
}));
|
||||
|
||||
jest.mock('../../common/convert_to_lens/lib/utils', () => ({
|
||||
getCutomBucketsFromSiblingAggs: jest.fn(() => mockGetCutomBucketsFromSiblingAggs()),
|
||||
getCustomBucketsFromSiblingAggs: jest.fn(() => mockGetCutomBucketsFromSiblingAggs()),
|
||||
}));
|
||||
|
||||
jest.mock('../vis_schemas', () => ({
|
||||
|
|
|
@ -11,7 +11,7 @@ import { METRIC_TYPES, TimefilterContract } from '@kbn/data-plugin/public';
|
|||
import { AggBasedColumn, SchemaConfig } from '../../common';
|
||||
import { convertMetricToColumns } from '../../common/convert_to_lens/lib/metrics';
|
||||
import { convertBucketToColumns } from '../../common/convert_to_lens/lib/buckets';
|
||||
import { getCutomBucketsFromSiblingAggs } from '../../common/convert_to_lens/lib/utils';
|
||||
import { getCustomBucketsFromSiblingAggs } from '../../common/convert_to_lens/lib/utils';
|
||||
import type { Vis } from '../types';
|
||||
import { getVisSchemas, Schemas } from '../vis_schemas';
|
||||
import {
|
||||
|
@ -57,7 +57,7 @@ export const getColumnsFromVis = <T>(
|
|||
return null;
|
||||
}
|
||||
|
||||
const customBuckets = getCutomBucketsFromSiblingAggs(visSchemas.metric);
|
||||
const customBuckets = getCustomBucketsFromSiblingAggs(visSchemas.metric);
|
||||
|
||||
// doesn't support sibbling pipeline aggs with different bucket aggs
|
||||
if (customBuckets.length > 1) {
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue