Mirror of https://github.com/elastic/kibana.git (synced 2025-04-24 09:48:58 -04:00)
[TSVB] [Table] js -> ts conversion (#105094)

* table js -> ts
* remove any's
* fix CI

Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com>
This commit is contained in:
parent
a6211f86f2
commit
fcaa4aaf40
66 changed files with 1189 additions and 987 deletions
@@ -8,6 +8,7 @@
 import { getLastValue, isEmptyValue, EMPTY_VALUE } from './last_value_utils';
 import { clone } from 'lodash';
+import { PanelDataArray } from './types/vis_data';

 describe('getLastValue(data)', () => {
   test('should return data, if data is not an array', () => {
@@ -40,7 +41,7 @@ describe('getLastValue(data)', () => {
       getLastValue([
         [1, null],
         [2, undefined],
-      ])
+      ] as PanelDataArray[])
     ).toBe(EMPTY_VALUE);
   });
 });
@@ -7,18 +7,19 @@
  */

 import { isArray, last, isEqual } from 'lodash';
+import type { PanelDataArray } from './types/vis_data';

 export const EMPTY_VALUE = null;
 export const DISPLAY_EMPTY_VALUE = '-';

-const extractValue = (data: unknown[] | void) => (data && data[1]) ?? EMPTY_VALUE;
+const extractValue = (data: PanelDataArray) => (data && data[1]) ?? EMPTY_VALUE;

-export const getLastValue = (data: unknown) => {
+export const getLastValue = (data: PanelDataArray[] | string | number) => {
   if (!isArray(data)) {
     return data;
   }

-  return extractValue(last(data));
+  return extractValue(last(data)!);
 };

 export const isEmptyValue = (value: unknown) => isEqual(value, EMPTY_VALUE);
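As a usage sketch (illustration only, not part of the commit), the newly typed helpers behave like this:

import { getLastValue, isEmptyValue, EMPTY_VALUE } from './last_value_utils';
import type { PanelDataArray } from './types/vis_data';

const data: PanelDataArray[] = [
  [1622505600000, 10],
  [1622505660000, null],
];

getLastValue(data); // null — the value slot of the last tuple is empty, so EMPTY_VALUE is returned
getLastValue(42); // 42 — non-array input is passed through unchanged
isEmptyValue(EMPTY_VALUE); // true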
@@ -8,5 +8,5 @@

 const percentileNumberTest = /\d+\.\d+/;

-export const toPercentileNumber = (value: string) =>
+export const toPercentileNumber = (value: number | string) =>
   percentileNumberTest.test(`${value}`) ? value : `${value}.0`;
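A quick sketch (illustration only) of what the widened signature accepts:

toPercentileNumber('50'); // '50.0' — no decimal part, so '.0' is appended
toPercentileNumber(99.9); // 99.9 — already matches /\d+\.\d+/ and is returned as-is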
@@ -39,13 +39,15 @@ export interface PanelSeries {
 export interface PanelData {
   id: string;
   label: string;
-  data: Array<[number, number]>;
+  data: PanelDataArray[];
   seriesId: string;
   splitByLabel: string;
   isSplitByTerms: boolean;
   error?: string;
 }

+export type PanelDataArray = [number | undefined | string, number | string | null];
+
 export interface Annotation {
   key: number;
   docs: Array<Record<string, string>>;
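For reference, a brief sketch (not part of the commit) of tuples that satisfy the new PanelDataArray type introduced in this hunk:

const points: PanelDataArray[] = [
  [1622505600000, 10.5], // timestamp + numeric value
  ['term-a', '42'], // string key + string value
  [undefined, null], // both slots may be empty
];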
@@ -10,13 +10,14 @@ import moment from 'moment';
 import { i18n } from '@kbn/i18n';
 import { get } from 'lodash';
 import { search } from '../../../../../../plugins/data/public';
-const { parseEsInterval } = search.aggs;
 import { GTE_INTERVAL_RE } from '../../../../common/interval_regexp';
 import { AUTO_INTERVAL } from '../../../../common/constants';
 import { isVisTableData } from '../../../../common/vis_data_utils';
 import type { PanelData, TimeseriesVisData } from '../../../../common/types';
 import { TimeseriesVisParams } from '../../../types';

+const { parseEsInterval } = search.aggs;
+
 export const unitLookup = {
   s: i18n.translate('visTypeTimeseries.getInterval.secondsLabel', { defaultMessage: 'seconds' }),
   m: i18n.translate('visTypeTimeseries.getInterval.minutesLabel', { defaultMessage: 'minutes' }),
@@ -76,7 +77,11 @@ export const getInterval = (visData: TimeseriesVisData, model: TimeseriesVisParams
   ) as PanelData[];

   return series.reduce((currentInterval, item) => {
-    if (item.data.length > 1) {
+    if (
+      item.data.length > 1 &&
+      typeof item.data[1][0] === 'number' &&
+      typeof item.data[0][0] === 'number'
+    ) {
       const seriesInterval = item.data[1][0] - item.data[0][0];
       if (!currentInterval || seriesInterval < currentInterval) return seriesInterval;
     }
@@ -9,11 +9,8 @@
 import { i18n } from '@kbn/i18n';
 import { get } from 'lodash';

-// not typed yet
-// @ts-expect-error
-import { buildRequestBody } from './table/build_request_body';
+import { buildTableRequest } from './table/build_request_body';
 import { handleErrorResponse } from './handle_error_response';
-// @ts-expect-error
 import { processBucket } from './table/process_bucket';

 import { createFieldsFetcher } from '../search_strategies/lib/fields_fetcher';
@@ -74,15 +71,16 @@ export async function getTableData(
   const handleError = handleErrorResponse(panel);

   try {
-    const body = await buildRequestBody(
+    const body = await buildTableRequest({
       req,
       panel,
-      services.esQueryConfig,
-      panelIndex,
+      esQueryConfig: services.esQueryConfig,
+      seriesIndex: panelIndex,
       capabilities,
-      services.uiSettings,
-      () => services.buildSeriesMetaParams(panelIndex, Boolean(panel.use_kibana_indexes))
-    );
+      uiSettings: services.uiSettings,
+      buildSeriesMetaParams: () =>
+        services.buildSeriesMetaParams(panelIndex, Boolean(panel.use_kibana_indexes)),
+    });

     const [resp] = await searchStrategy.search(requestContext, req, [
       {
@@ -100,9 +98,7 @@ export async function getTableData(
       []
     );

-    const series = await Promise.all(
-      buckets.map(processBucket(panel, req, searchStrategy, capabilities, extractFields))
-    );
+    const series = await Promise.all(buckets.map(processBucket({ panel, extractFields })));

     return {
       ...meta,
@@ -6,8 +6,10 @@
  * Side Public License, v 1.
  */

-export function formatKey(key, series) {
-  if (/{{\s*key\s*}}/.test(series.label)) {
+import { Series } from '../../../../common/types';
+
+export function formatKey(key: string, series: Series) {
+  if (series.label && /{{\s*key\s*}}/.test(series.label)) {
     return series.label.replace(/{{\s*key\s*}}/, key);
   }
   return key;
@@ -1,47 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import { startsWith } from 'lodash';
-import { toPercentileNumber } from '../../../../common/to_percentile_number';
-import { METRIC_TYPES } from '../../../../common/enums';
-
-const percentileTest = /\[[0-9\.]+\]$/;
-
-export const getBucketsPath = (id, metrics) => {
-  const metric = metrics.find((m) => startsWith(id, m.id));
-  let bucketsPath = String(id);
-
-  switch (metric.type) {
-    case METRIC_TYPES.DERIVATIVE:
-      bucketsPath += '[normalized_value]';
-      break;
-    // For percentiles we need to breakout the percentile key that the user
-    // specified. This information is stored in the key using the following pattern
-    // {metric.id}[{percentile}]
-    case METRIC_TYPES.PERCENTILE:
-      if (percentileTest.test(bucketsPath)) break;
-      const percent = metric.percentiles[0];
-      bucketsPath += `[${toPercentileNumber(percent.value)}]`;
-      break;
-    case METRIC_TYPES.PERCENTILE_RANK:
-      if (percentileTest.test(bucketsPath)) break;
-      bucketsPath += `[${toPercentileNumber(metric.value)}]`;
-      break;
-    case METRIC_TYPES.STD_DEVIATION:
-    case METRIC_TYPES.VARIANCE:
-    case METRIC_TYPES.SUM_OF_SQUARES:
-      if (/^std_deviation/.test(metric.type) && ~['upper', 'lower'].indexOf(metric.mode)) {
-        bucketsPath += `[std_${metric.mode}]`;
-      } else {
-        bucketsPath += `[${metric.type}]`;
-      }
-      break;
-  }
-
-  return bucketsPath;
-};
@@ -7,9 +7,10 @@
  */

 import { getBucketsPath } from './get_buckets_path';
+import type { Metric } from '../../../../common/types';

 describe('getBucketsPath', () => {
-  const metrics = [
+  const metrics = ([
     { id: 1, type: 'derivative' },
     { id: 2, type: 'percentile', percentiles: [{ value: '50' }] },
     { id: 3, type: 'percentile', percentiles: [{ value: '20.0' }, { value: '10.0' }] },
@@ -19,18 +20,18 @@ describe('getBucketsPath', () => {
     { id: 7, type: 'sum_of_squares' },
     { id: 8, type: 'variance' },
     { id: 9, type: 'max' },
-  ];
+  ] as unknown) as Metric[];

   test('return path for derivative', () => {
-    expect(getBucketsPath(1, metrics)).toEqual('1[normalized_value]');
+    expect(getBucketsPath('1', metrics)).toEqual('1[normalized_value]');
   });

   test('return path for percentile(50)', () => {
-    expect(getBucketsPath(2, metrics)).toEqual('2[50.0]');
+    expect(getBucketsPath('2', metrics)).toEqual('2[50.0]');
   });

   test('return path for percentile(20.0)', () => {
-    expect(getBucketsPath(3, metrics)).toEqual('3[20.0]');
+    expect(getBucketsPath('3', metrics)).toEqual('3[20.0]');
   });

   test('return path for percentile(10.0) with alt id', () => {
@@ -38,26 +39,26 @@ describe('getBucketsPath', () => {
   });

   test('return path for std_deviation(raw)', () => {
-    expect(getBucketsPath(4, metrics)).toEqual('4[std_deviation]');
+    expect(getBucketsPath('4', metrics)).toEqual('4[std_deviation]');
   });

   test('return path for std_deviation(upper)', () => {
-    expect(getBucketsPath(5, metrics)).toEqual('5[std_upper]');
+    expect(getBucketsPath('5', metrics)).toEqual('5[std_upper]');
   });

   test('return path for std_deviation(lower)', () => {
-    expect(getBucketsPath(6, metrics)).toEqual('6[std_lower]');
+    expect(getBucketsPath('6', metrics)).toEqual('6[std_lower]');
   });

   test('return path for sum_of_squares', () => {
-    expect(getBucketsPath(7, metrics)).toEqual('7[sum_of_squares]');
+    expect(getBucketsPath('7', metrics)).toEqual('7[sum_of_squares]');
   });

   test('return path for variance', () => {
-    expect(getBucketsPath(8, metrics)).toEqual('8[variance]');
+    expect(getBucketsPath('8', metrics)).toEqual('8[variance]');
   });

   test('return path for basic metric', () => {
-    expect(getBucketsPath(9, metrics)).toEqual('9');
+    expect(getBucketsPath('9', metrics)).toEqual('9');
   });
 });
@@ -0,0 +1,53 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { startsWith } from 'lodash';
+import { toPercentileNumber } from '../../../../common/to_percentile_number';
+import { METRIC_TYPES } from '../../../../common/enums';
+import type { Metric } from '../../../../common/types';
+
+const percentileTest = /\[[0-9\.]+\]$/;
+
+export const getBucketsPath = (id: string, metrics: Metric[]) => {
+  const metric = metrics.find((m) => startsWith(id, m.id));
+  let bucketsPath = String(id);
+
+  if (metric) {
+    switch (metric.type) {
+      case METRIC_TYPES.DERIVATIVE:
+        bucketsPath += '[normalized_value]';
+        break;
+      // For percentiles we need to breakout the percentile key that the user
+      // specified. This information is stored in the key using the following pattern
+      // {metric.id}[{percentile}]
+      case METRIC_TYPES.PERCENTILE:
+        if (percentileTest.test(bucketsPath)) break;
+        if (metric.percentiles?.length) {
+          const percent = metric.percentiles[0];
+
+          bucketsPath += `[${toPercentileNumber(percent.value!)}]`;
+        }
+        break;
+      case METRIC_TYPES.PERCENTILE_RANK:
+        if (percentileTest.test(bucketsPath)) break;
+        bucketsPath += `[${toPercentileNumber(metric.value!)}]`;
+        break;
+      case METRIC_TYPES.STD_DEVIATION:
+      case METRIC_TYPES.VARIANCE:
+      case METRIC_TYPES.SUM_OF_SQUARES:
+        if (/^std_deviation/.test(metric.type) && ['upper', 'lower'].includes(metric.mode!)) {
+          bucketsPath += `[std_${metric.mode}]`;
+        } else {
+          bucketsPath += `[${metric.type}]`;
+        }
+        break;
+    }
+  }
+
+  return bucketsPath;
+};
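As a usage sketch mirroring the unit tests above (illustration only), the typed helper resolves a percentile path like so:

const metrics = ([{ id: '2', type: 'percentile', percentiles: [{ value: '50' }] }] as unknown) as Metric[];
getBucketsPath('2', metrics); // '2[50.0]' — the first percentile key is appended to the path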
@@ -7,24 +7,28 @@
  */

 import { getLastMetric } from './get_last_metric';
+import type { Series } from '../../../../common/types';

 describe('getLastMetric(series)', () => {
   test('returns the last metric', () => {
-    const series = {
+    const series = ({
       metrics: [
         { id: 1, type: 'avg' },
         { id: 2, type: 'moving_average' },
       ],
-    };
+    } as unknown) as Series;

     expect(getLastMetric(series)).toEqual({ id: 2, type: 'moving_average' });
   });

   test('returns the last metric that not a series_agg', () => {
-    const series = {
+    const series = ({
       metrics: [
         { id: 1, type: 'avg' },
         { id: 2, type: 'series_agg' },
       ],
-    };
+    } as unknown) as Series;

     expect(getLastMetric(series)).toEqual({ id: 1, type: 'avg' });
   });
 });
@@ -6,11 +6,9 @@
  * Side Public License, v 1.
  */

-import { mathAgg } from '../series/math';
+import { last } from 'lodash';

-export function math(bucket, panel, series, meta, extractFields) {
-  return (next) => (results) => {
-    const mathFn = mathAgg({ aggregations: bucket }, panel, series, meta, extractFields);
-    return mathFn(next)(results);
-  };
-}
+import type { Series, Metric } from '../../../../common/types';
+
+export const getLastMetric = (series: Series) =>
+  last(series.metrics.filter((s) => s.type !== 'series_agg')) as Metric;
@@ -7,22 +7,51 @@
  */

 import Color from 'color';
+import { get, isPlainObject } from 'lodash';
+import { overwrite } from '../helpers';
+
 import { calculateLabel } from '../../../../common/calculate_label';
-import _ from 'lodash';
 import { getLastMetric } from './get_last_metric';
 import { formatKey } from './format_key';

-const getTimeSeries = (resp, series) =>
-  _.get(resp, `aggregations.timeseries`) || _.get(resp, `aggregations.${series.id}.timeseries`);
+import type { Panel, Series } from '../../../../common/types';
+import type { BaseMeta } from '../request_processors/types';

-export async function getSplits(resp, panel, series, meta, extractFields) {
+const getTimeSeries = <TRawResponse = unknown>(resp: TRawResponse, series: Series) =>
+  get(resp, `aggregations.timeseries`) || get(resp, `aggregations.${series.id}.timeseries`);
+
+interface SplittedData<TMeta extends BaseMeta = BaseMeta> {
+  id: string;
+  splitByLabel: string;
+  label: string;
+  color: string;
+  meta: TMeta;
+  timeseries: {
+    buckets: [
+      {
+        [s: string]: {
+          // should be typed
+          values: Record<string, unknown>;
+        };
+      } & { key: string | number }
+    ];
+  };
+}
+
+export async function getSplits<TRawResponse = unknown, TMeta extends BaseMeta = BaseMeta>(
+  resp: TRawResponse,
+  panel: Panel,
+  series: Series,
+  meta: TMeta,
+  extractFields: Function
+): Promise<Array<SplittedData<TMeta>>> {
   if (!meta) {
-    meta = _.get(resp, `aggregations.${series.id}.meta`);
+    meta = get(resp, `aggregations.${series.id}.meta`);
   }

   const color = new Color(series.color);
   const metric = getLastMetric(series);
-  const buckets = _.get(resp, `aggregations.${series.id}.buckets`);
+  const buckets = get(resp, `aggregations.${series.id}.buckets`);
   const fieldsForSeries = meta.index ? await extractFields({ id: meta.index }) : [];
   const splitByLabel = calculateLabel(metric, series.metrics, fieldsForSeries);

@@ -39,29 +68,30 @@ export async function getSplits(resp, panel, series, meta, extractFields) {
     });
   }

-  if (series.split_mode === 'filters' && _.isPlainObject(buckets)) {
-    return series.split_filters.map((filter) => {
-      const bucket = _.get(resp, `aggregations.${series.id}.buckets.${filter.id}`);
+  if (series.split_mode === 'filters' && isPlainObject(buckets)) {
+    return (series.split_filters || []).map((filter) => {
+      const bucket = get(resp, `aggregations.${series.id}.buckets.${filter.id}`);
       bucket.id = `${series.id}:${filter.id}`;
       bucket.key = filter.id;
       bucket.splitByLabel = splitByLabel;
       bucket.color = filter.color;
-      bucket.label = filter.label || filter.filter.query || '*';
+      bucket.label = (filter.label || filter.filter?.query) ?? '*';
       bucket.meta = meta;
       return bucket;
     });
   }
 }

-  const timeseries = getTimeSeries(resp, series);
+  const timeseries: SplittedData<TMeta>['timeseries'] = getTimeSeries<TRawResponse>(resp, series);

   const mergeObj = {
     timeseries,
   };

   series.metrics
     .filter((m) => /_bucket/.test(m.type))
     .forEach((m) => {
-      mergeObj[m.id] = _.get(resp, `aggregations.${series.id}.${m.id}`);
+      overwrite(mergeObj, m.id, get(resp, `aggregations.${series.id}.${m.id}`));
     });

   return [
@@ -7,48 +7,42 @@
  */

 import { PANEL_TYPES, TIME_RANGE_DATA_MODES, TIME_RANGE_MODE_KEY } from '../../../../common/enums';
+import type { Series, Panel } from '../../../../common/types';

 const OVERRIDE_INDEX_PATTERN_KEY = 'override_index_pattern';

 /**
  * Check if passed 'series' has overridden index pattern or not.
  * @private
  * @param series - specific series
  * @return {boolean}
  */
-const hasOverriddenIndexPattern = (series) => Boolean(series[OVERRIDE_INDEX_PATTERN_KEY]);
+const hasOverriddenIndexPattern = (series?: Series) =>
+  Boolean(series?.[OVERRIDE_INDEX_PATTERN_KEY]);

 /**
  * Get value of Time Range Mode for panel
  * @private
  * @param panel - panel configuration
  * @return {string} - value of TIME_RANGE_DATA_MODES type
  */
-const getPanelTimeRangeMode = (panel) => panel[TIME_RANGE_MODE_KEY];
+const getPanelTimeRangeMode = (panel: Panel) => panel[TIME_RANGE_MODE_KEY];

 /**
  * Get value of Time Range Mode for series
  * @private
  * @param series - specific series
  * @return {string} - value of TIME_RANGE_DATA_MODES type
  */
-const getSeriesTimeRangeMode = (series) => series[TIME_RANGE_MODE_KEY];
+const getSeriesTimeRangeMode = (series: Series) => series[TIME_RANGE_MODE_KEY];

 /**
  * Check if 'Entire Time Range' mode active or not.
  * @public
  * @param panel - panel configuration
  * @param series - specific series
  * @return {boolean}
  */
-export const isEntireTimeRangeMode = (panel, series = {}) => {
+export const isEntireTimeRangeMode = (panel: Panel, series?: Series) => {
   if (panel.type === PANEL_TYPES.TIMESERIES) {
     return false;
   }

-  const timeRangeMode = hasOverriddenIndexPattern(series)
-    ? getSeriesTimeRangeMode(series)
-    : getPanelTimeRangeMode(panel);
+  const timeRangeMode =
+    series && hasOverriddenIndexPattern(series)
+      ? getSeriesTimeRangeMode(series)
+      : getPanelTimeRangeMode(panel);

   return timeRangeMode === TIME_RANGE_DATA_MODES.ENTIRE_TIME_RANGE;
 };
@@ -56,8 +50,6 @@ export const isEntireTimeRangeMode = (panel, series = {}) => {
 /**
  * Check if 'Last Value Time Range' mode active or not.
  * @public
  * @param panel - panel configuration
  * @param series - specific series
  * @return {boolean}
- */
-export const isLastValueTimerangeMode = (panel, series) => !isEntireTimeRangeMode(panel, series);
+ **/
+export const isLastValueTimerangeMode = (panel: Panel, series?: Series) =>
+  !isEntireTimeRangeMode(panel, series);
@@ -10,20 +10,17 @@ export { overwrite } from './overwrite';
 export { getTimerange } from './get_timerange';
 export { getBucketSize } from './get_bucket_size';
 export { mapEmptyToZero } from './map_empty_to_zero';

-// @ts-expect-error
-export { bucketTransform } from './bucket_transform';
-// @ts-expect-error
-export { getAggValue } from './get_agg_value';
-// @ts-expect-error
 export { getActiveSeries } from './get_active_series';
 export { getBucketsPath } from './get_buckets_path';
-// @ts-expect-error
-export { getDefaultDecoration } from './get_default_decoration';
-// @ts-expect-error
 export { isEntireTimeRangeMode, isLastValueTimerangeMode } from './get_timerange_mode';
 export { getLastMetric } from './get_last_metric';
-// @ts-expect-error
-export { getSiblingAggValue } from './get_sibling_agg_value';
-// @ts-expect-error
 export { getSplits } from './get_splits';
-// @ts-expect-error
 export { parseSettings } from './parse_settings';

+// @ts-expect-error no typed yet
+export { bucketTransform } from './bucket_transform';
+// @ts-expect-error no typed yet
+export { getAggValue } from './get_agg_value';
+// @ts-expect-error no typed yet
+export { getDefaultDecoration } from './get_default_decoration';
+// @ts-expect-error no typed yet
+export { getSiblingAggValue } from './get_sibling_agg_value';
@@ -10,8 +10,16 @@
 import { getAggValue } from './get_agg_value';
 import { METRIC_TYPES } from '../../../../../data/common';
 import type { Metric } from '../../../../common/types';
+import type { PanelDataArray } from '../../../../common/types/vis_data';

-export const mapEmptyToZero = (metric: Metric, buckets: any[]) => {
+export const mapEmptyToZero = (
+  metric: Metric,
+  buckets: Array<
+    {
+      key: number | string;
+    } & Record<string, unknown>
+  >
+): PanelDataArray[] => {
   // Metric types where an empty set equals `zero`
   const isSettableToZero = [
     METRIC_TYPES.COUNT,
@@ -1,31 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-const numericKeys = ['alpha', 'beta', 'gamma', 'period'];
-const booleanKeys = ['pad'];
-function castBasedOnKey(key, val) {
-  if (~numericKeys.indexOf(key)) return Number(val);
-  if (~booleanKeys.indexOf(key)) {
-    switch (val) {
-      case 'true':
-      case 1:
-      case '1':
-        return true;
-      default:
-        return false;
-    }
-  }
-  return val;
-}
-export const parseSettings = (settingsStr) => {
-  return settingsStr.split(/\s/).reduce((acc, value) => {
-    const [key, val] = value.split(/=/);
-    acc[key] = castBasedOnKey(key, val);
-    return acc;
-  }, {});
-};
@@ -1,51 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import { parseSettings } from './parse_settings';
-
-describe('parseSettings', () => {
-  test('returns the true for "true"', () => {
-    const settings = 'pad=true';
-    expect(parseSettings(settings)).toEqual({
-      pad: true,
-    });
-  });
-
-  test('returns the false for "false"', () => {
-    const settings = 'pad=false';
-    expect(parseSettings(settings)).toEqual({
-      pad: false,
-    });
-  });
-
-  test('returns the true for 1', () => {
-    const settings = 'pad=1';
-    expect(parseSettings(settings)).toEqual({
-      pad: true,
-    });
-  });
-
-  test('returns the false for 0', () => {
-    const settings = 'pad=0';
-    expect(parseSettings(settings)).toEqual({
-      pad: false,
-    });
-  });
-
-  test('returns the settings as an object', () => {
-    const settings = 'alpha=0.9 beta=0.4 gamma=0.2 period=5 pad=false type=add';
-    expect(parseSettings(settings)).toEqual({
-      alpha: 0.9,
-      beta: 0.4,
-      gamma: 0.2,
-      period: 5,
-      pad: false,
-      type: 'add',
-    });
-  });
-});
@@ -37,7 +37,10 @@ export function getLastSeriesTimestamp(seriesGroup: Array<PanelSeries['series']>

     if (lastValue) {
       const [dataLastTimestamp] = lastValue;
-      lastTimestamp = Math.max(dataLastTimestamp, lastTimestamp ?? dataLastTimestamp);
+
+      if (typeof dataLastTimestamp === 'number') {
+        lastTimestamp = Math.max(dataLastTimestamp, lastTimestamp ?? dataLastTimestamp);
+      }
     }
   });
 });
@@ -6,11 +6,15 @@
  * Side Public License, v 1.
  */

-import _ from 'lodash';
+import { has } from 'lodash';

-export function calculateAggRoot(doc, column) {
+import type { TableSearchRequest } from '../table/types';
+import type { Series } from '../../../../../common/types';
+
+export function calculateAggRoot(doc: TableSearchRequest, column: Series) {
   let aggRoot = `aggs.pivot.aggs.${column.id}.aggs`;
-  if (_.has(doc, `aggs.pivot.aggs.${column.id}.aggs.column_filter`)) {
+
+  if (has(doc, `aggs.pivot.aggs.${column.id}.aggs.column_filter`)) {
     aggRoot = `aggs.pivot.aggs.${column.id}.aggs.column_filter.aggs`;
   }
   return aggRoot;
@@ -1,87 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import { overwrite } from '../../helpers';
-import { getBucketSize } from '../../helpers/get_bucket_size';
-import { isLastValueTimerangeMode } from '../../helpers/get_timerange_mode';
-import { getTimerange } from '../../helpers/get_timerange';
-import { calculateAggRoot } from './calculate_agg_root';
-import { search, UI_SETTINGS } from '../../../../../../../plugins/data/server';
-
-const { dateHistogramInterval } = search.aggs;
-
-export function dateHistogram(
-  req,
-  panel,
-  esQueryConfig,
-  seriesIndex,
-  capabilities,
-  uiSettings,
-  buildSeriesMetaParams
-) {
-  return (next) => async (doc) => {
-    const barTargetUiSettings = await uiSettings.get(UI_SETTINGS.HISTOGRAM_BAR_TARGET);
-    const { timeField, interval } = await buildSeriesMetaParams();
-    const { from, to } = getTimerange(req);
-
-    const meta = {
-      timeField,
-      index: panel.use_kibana_indexes ? seriesIndex.indexPattern?.id : undefined,
-      panelId: panel.id,
-    };
-
-    const overwriteDateHistogramForLastBucketMode = () => {
-      const { intervalString } = getBucketSize(req, interval, capabilities, barTargetUiSettings);
-      const { timezone } = capabilities;
-
-      panel.series.forEach((column) => {
-        const aggRoot = calculateAggRoot(doc, column);
-
-        overwrite(doc, `${aggRoot}.timeseries.date_histogram`, {
-          field: timeField,
-          min_doc_count: 0,
-          time_zone: timezone,
-          extended_bounds: {
-            min: from.valueOf(),
-            max: to.valueOf(),
-          },
-          ...dateHistogramInterval(intervalString),
-        });
-
-        overwrite(doc, aggRoot.replace(/\.aggs$/, '.meta'), {
-          ...meta,
-          intervalString,
-        });
-      });
-    };
-
-    const overwriteDateHistogramForEntireTimerangeMode = () => {
-      const intervalString = `${to.valueOf() - from.valueOf()}ms`;
-
-      panel.series.forEach((column) => {
-        const aggRoot = calculateAggRoot(doc, column);
-
-        overwrite(doc, `${aggRoot}.timeseries.auto_date_histogram`, {
-          field: timeField,
-          buckets: 1,
-        });
-
-        overwrite(doc, aggRoot.replace(/\.aggs$/, '.meta'), {
-          ...meta,
-          intervalString,
-        });
-      });
-    };
-
-    isLastValueTimerangeMode(panel)
-      ? overwriteDateHistogramForLastBucketMode()
-      : overwriteDateHistogramForEntireTimerangeMode();
-
-    return next(doc);
-  };
-}
@@ -0,0 +1,85 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { overwrite, getBucketSize, isLastValueTimerangeMode, getTimerange } from '../../helpers';
+import { calculateAggRoot } from './calculate_agg_root';
+import { search, UI_SETTINGS } from '../../../../../../../plugins/data/server';
+
+import type { TableRequestProcessorsFunction, TableSearchRequestMeta } from './types';
+
+const { dateHistogramInterval } = search.aggs;
+
+export const dateHistogram: TableRequestProcessorsFunction = ({
+  req,
+  panel,
+  seriesIndex,
+  capabilities,
+  uiSettings,
+  buildSeriesMetaParams,
+}) => (next) => async (doc) => {
+  const barTargetUiSettings = await uiSettings.get(UI_SETTINGS.HISTOGRAM_BAR_TARGET);
+  const { timeField, interval } = await buildSeriesMetaParams();
+  const { from, to } = getTimerange(req);
+
+  const meta: TableSearchRequestMeta = {
+    timeField,
+    index: panel.use_kibana_indexes ? seriesIndex.indexPattern?.id : undefined,
+    panelId: panel.id,
+  };
+
+  const overwriteDateHistogramForLastBucketMode = () => {
+    const { intervalString } = getBucketSize(req, interval, capabilities, barTargetUiSettings);
+    const { timezone } = capabilities;
+
+    panel.series.forEach((column) => {
+      const aggRoot = calculateAggRoot(doc, column);
+
+      overwrite(doc, `${aggRoot}.timeseries.date_histogram`, {
+        field: timeField,
+        min_doc_count: 0,
+        time_zone: timezone,
+        extended_bounds: {
+          min: from.valueOf(),
+          max: to.valueOf(),
+        },
+        ...dateHistogramInterval(intervalString),
+      });
+
+      overwrite(doc, aggRoot.replace(/\.aggs$/, '.meta'), {
+        ...meta,
+        intervalString,
+      });
+    });
+  };
+
+  const overwriteDateHistogramForEntireTimerangeMode = () => {
+    const intervalString = `${to.valueOf() - from.valueOf()}ms`;
+
+    panel.series.forEach((column) => {
+      const aggRoot = calculateAggRoot(doc, column);
+
+      overwrite(doc, `${aggRoot}.timeseries.auto_date_histogram`, {
+        field: timeField,
+        buckets: 1,
+      });
+
+      overwrite(doc, aggRoot.replace(/\.aggs$/, '.meta'), {
+        ...meta,
+        intervalString,
+      });
+    });
+  };
+
+  if (isLastValueTimerangeMode(panel)) {
+    overwriteDateHistogramForLastBucketMode();
+  } else {
+    overwriteDateHistogramForEntireTimerangeMode();
+  }
+
+  return next(doc);
+};
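All of the converted table request processors share this curried shape: the request params are bound first, then the chain's `next` handler, then the request body `doc`. As a minimal sketch (illustration only; `TableRequestProcessorsFunction` is the type from the './types' module referenced above):

const noop: TableRequestProcessorsFunction = (params) => (next) => async (doc) => {
  // a processor may read the bound params and mutate `doc` before delegating
  return next(doc);
};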
@@ -7,13 +7,21 @@
  */

 import { esQuery } from '../../../../../../data/server';
-import { bucketTransform } from '../../helpers/bucket_transform';
-import { overwrite } from '../../helpers';
+import { overwrite, bucketTransform } from '../../helpers';
 import { calculateAggRoot } from './calculate_agg_root';

-const filter = (metric) => metric.type === 'filter_ratio';
+import type { TableRequestProcessorsFunction } from './types';
+import type { Metric } from '../../../../../common/types';
+
+const filter = (metric: Metric) => metric.type === 'filter_ratio';
+
+export const filterRatios: TableRequestProcessorsFunction = ({
+  panel,
+  esQueryConfig,
+  seriesIndex,
+}) => {
+  const indexPattern = seriesIndex.indexPattern || undefined;

-export function ratios(req, panel, esQueryConfig, seriesIndex) {
   return (next) => (doc) => {
     panel.series.forEach((column) => {
       const aggRoot = calculateAggRoot(doc, column);
@@ -22,12 +30,12 @@ export function ratios(req, panel, esQueryConfig, seriesIndex) {
           overwrite(
             doc,
             `${aggRoot}.timeseries.aggs.${metric.id}-numerator.filter`,
-            esQuery.buildEsQuery(seriesIndex.indexPattern, metric.numerator, [], esQueryConfig)
+            esQuery.buildEsQuery(indexPattern, metric.numerator!, [], esQueryConfig)
           );
           overwrite(
             doc,
             `${aggRoot}.timeseries.aggs.${metric.id}-denominator.filter`,
-            esQuery.buildEsQuery(seriesIndex.indexPattern, metric.denominator, [], esQueryConfig)
+            esQuery.buildEsQuery(indexPattern, metric.denominator!, [], esQueryConfig)
           );

           let numeratorPath = `${metric.id}-numerator>_count`;
@@ -61,4 +69,4 @@ export function ratios(req, panel, esQueryConfig, seriesIndex) {
     });
     return next(doc);
   };
-}
+};
@@ -1,31 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import { pivot } from './pivot';
-import { query } from './query';
-import { splitByEverything } from './split_by_everything';
-import { splitByTerms } from './split_by_terms';
-import { dateHistogram } from './date_histogram';
-import { metricBuckets } from './metric_buckets';
-import { siblingBuckets } from './sibling_buckets';
-import { ratios as filterRatios } from './filter_ratios';
-import { normalizeQuery } from './normalize_query';
-import { positiveRate } from './positive_rate';
-
-export const processors = [
-  query,
-  pivot,
-  splitByTerms,
-  splitByEverything,
-  dateHistogram,
-  metricBuckets,
-  siblingBuckets,
-  filterRatios,
-  positiveRate,
-  normalizeQuery,
-];
@@ -0,0 +1,18 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+export { pivot } from './pivot';
+export { query } from './query';
+export { splitByEverything } from './split_by_everything';
+export { splitByTerms } from './split_by_terms';
+export { dateHistogram } from './date_histogram';
+export { metricBuckets } from './metric_buckets';
+export { siblingBuckets } from './sibling_buckets';
+export { filterRatios } from './filter_ratios';
+export { normalizeQuery } from './normalize_query';
+export { positiveRate } from './positive_rate';
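For illustration only (the code that reduces these processors into a single chain is not part of this diff), two of the re-exported processors can be composed by hand, each receiving its successor through `next`:

const params = {} as TableRequestProcessorsParams; // hypothetical empty params, for the sketch only
const chain = query(params)(pivot(params)(async (doc) => doc));
// `await chain(doc)` runs `query`, which forwards the mutated doc to `pivot` via `next`.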
@@ -1,36 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import { get } from 'lodash';
-import { overwrite } from '../../helpers';
-import { bucketTransform } from '../../helpers/bucket_transform';
-import { calculateAggRoot } from './calculate_agg_root';
-
-export function metricBuckets(req, panel) {
-  return (next) => async (doc) => {
-    panel.series.forEach((column) => {
-      const aggRoot = calculateAggRoot(doc, column);
-      column.metrics
-        .filter((row) => !/_bucket$/.test(row.type) && !/^series/.test(row.type))
-        .forEach((metric) => {
-          const fn = bucketTransform[metric.type];
-          if (fn) {
-            try {
-              const intervalString = get(doc, aggRoot.replace(/\.aggs$/, '.meta.intervalString'));
-              const bucket = fn(metric, column.metrics, intervalString);
-
-              overwrite(doc, `${aggRoot}.timeseries.aggs.${metric.id}`, bucket);
-            } catch (e) {
-              // meh
-            }
-          }
-        });
-    });
-    return next(doc);
-  };
-}
@@ -0,0 +1,38 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { get } from 'lodash';
+import { overwrite, bucketTransform } from '../../helpers';
+import { calculateAggRoot } from './calculate_agg_root';
+
+import type { TableRequestProcessorsFunction } from './types';
+
+export const metricBuckets: TableRequestProcessorsFunction = ({ req, panel }) => (next) => async (
+  doc
+) => {
+  panel.series.forEach((column) => {
+    const aggRoot = calculateAggRoot(doc, column);
+    column.metrics
+      .filter((row) => !/_bucket$/.test(row.type) && !/^series/.test(row.type))
+      .forEach((metric) => {
+        const fn = bucketTransform[metric.type];
+        if (fn) {
+          try {
+            const intervalString = get(doc, aggRoot.replace(/\.aggs$/, '.meta.intervalString'));
+            const bucket = fn(metric, column.metrics, intervalString);
+
+            overwrite(doc, `${aggRoot}.timeseries.aggs.${metric.id}`, bucket);
+          } catch (e) {
+            // meh
+          }
+        }
+      });
+  });
+
+  return next(doc);
+};
@@ -7,17 +7,24 @@
  */

 import { normalizeQuery } from './normalize_query';
+import { overwrite } from '../../helpers';
+
+import {
+  TableRequestProcessorsFunction,
+  TableRequestProcessorsParams,
+  TableSearchRequest,
+} from './types';

 describe('normalizeQuery', () => {
-  const req = 'req';
   const seriesId = '61ca57f1-469d-11e7-af02-69e470af7417';
   const panelId = '39d49073-a924-426b-aa32-35acb40a9bb7';
+  const tableRequestProcessorsParams = {} as TableRequestProcessorsParams;

-  let next;
-  let panel;
-  let series;
+  const next = (jest.fn((x) => x) as unknown) as ReturnType<
+    ReturnType<TableRequestProcessorsFunction>
+  >;

-  const getMockedDoc = () => ({
+  const getMockedDoc = (): TableSearchRequest => ({
     size: 0,
     query: {},
     aggs: {
@@ -65,22 +72,14 @@ describe('normalizeQuery', () => {
     },
   });

-  beforeEach(() => {
-    next = jest.fn((x) => x);
-    panel = {};
-    series = {
-      id: seriesId,
-    };
-  });
-
-  test('should remove the top level aggregation if filter.match_all is empty', () => {
+  test('should remove the top level aggregation if filter.match_all is empty', async () => {
     const doc = getMockedDoc();

-    doc.aggs.pivot.aggs[seriesId].filter = {
+    overwrite(doc, `aggs.pivot.aggs.${seriesId}.filter`, {
       match_all: {},
-    };
+    });

-    const modifiedDoc = normalizeQuery(req, panel, series)(next)(doc);
+    const modifiedDoc = await normalizeQuery(tableRequestProcessorsParams)(next)(doc);
     expect(modifiedDoc.aggs.pivot.aggs[seriesId].aggs.timeseries).toBeUndefined();
     expect(modifiedDoc.aggs.pivot.aggs[seriesId].aggs[seriesId]).toBeDefined();

@@ -93,33 +92,34 @@ describe('normalizeQuery', () => {
     });
   });

-  test('should not remove the top level aggregation if filter.match_all is not empty', () => {
+  test('should not remove the top level aggregation if filter.match_all is not empty', async () => {
     const doc = getMockedDoc();

-    doc.aggs.pivot.aggs[seriesId].filter = {
+    overwrite(doc, `aggs.pivot.aggs.${seriesId}.filter`, {
       match_all: { filter: 1 },
-    };
+    });

-    const modifiedDoc = normalizeQuery(req, panel, series)(next)(doc);
+    const modifiedDoc = await normalizeQuery(tableRequestProcessorsParams)(next)(doc);

     expect(modifiedDoc.aggs.pivot.aggs[seriesId].aggs.timeseries).toBeDefined();
     expect(modifiedDoc.aggs.pivot.aggs[seriesId].aggs[seriesId]).toBeUndefined();
   });

-  test('should not remove the top level aggregation for Sibling Pipeline queries', () => {
+  test('should not remove the top level aggregation for Sibling Pipeline queries', async () => {
     const doc = getMockedDoc();
     const pipelineId = 'd4167fe0-afb0-11e9-b141-7b94c69f37eb';

-    doc.aggs.pivot.aggs[seriesId].filter = {
+    overwrite(doc, `aggs.pivot.aggs[${seriesId}].filter`, {
       match_all: {},
-    };
-    doc.aggs.pivot.aggs[seriesId].aggs[pipelineId] = {
+    });
+
+    overwrite(doc, `aggs.pivot.aggs[${seriesId}].aggs[${pipelineId}]`, {
       extended_stats_bucket: {
         buckets_path: 'timeseries>61ca57f2-469d-11e7-af02-69e470af7417',
       },
-    };
+    });

-    const modifiedDoc = normalizeQuery(req, panel, series)(next)(doc);
+    const modifiedDoc = await normalizeQuery(tableRequestProcessorsParams)(next)(doc);

     expect(modifiedDoc.aggs.pivot.aggs[seriesId].aggs.timeseries).toBeDefined();
     expect(modifiedDoc.aggs.pivot.aggs[seriesId].aggs[seriesId]).toBeUndefined();
@@ -6,10 +6,16 @@
  * Side Public License, v 1.
  */

-import _ from 'lodash';
+import { get, forEach, isEmpty } from 'lodash';
 import { overwrite } from '../../helpers';
-const isEmptyFilter = (filter = {}) => Boolean(filter.match_all) && _.isEmpty(filter.match_all);
-const hasSiblingPipelineAggregation = (aggs = {}) => Object.keys(aggs).length > 1;

+import type { TableRequestProcessorsFunction } from './types';
+
+const isEmptyFilter = (filter: { match_all?: string }) =>
+  filter && Boolean(filter.match_all) && isEmpty(filter.match_all);
+
+const hasSiblingPipelineAggregation = (aggs: Record<string, unknown> = {}) =>
+  Object.keys(aggs).length > 1;
+
 /* Last query handler in the chain. You can use this handler
  * as the last place where you can modify the "doc" (request body) object before sending it to ES.
@@ -17,20 +23,23 @@ const hasSiblingPipelineAggregation = (aggs = {}) => Object.keys(aggs).length >
  * Important: for Sibling Pipeline aggregation we cannot apply this logic
  *
  */
-export function normalizeQuery() {
+export const normalizeQuery: TableRequestProcessorsFunction = () => {
   return () => (doc) => {
-    const series = _.get(doc, 'aggs.pivot.aggs');
+    const series = get(doc, 'aggs.pivot.aggs') as Array<{
+      aggs: Record<string, unknown>;
+    }>;
     const normalizedSeries = {};

-    _.forEach(series, (value, seriesId) => {
-      const filter = _.get(value, `filter`);
+    forEach(series, (value, seriesId) => {
+      const filter = get(value, `filter`);

       if (isEmptyFilter(filter) && !hasSiblingPipelineAggregation(value.aggs)) {
-        const agg = _.get(value, 'aggs.timeseries');
+        const agg = get(value, 'aggs.timeseries');
         const meta = {
-          ..._.get(value, 'meta'),
+          ...get(value, 'meta'),
           seriesId,
         };

         overwrite(normalizedSeries, `${seriesId}`, agg);
         overwrite(normalizedSeries, `${seriesId}.meta`, meta);
       } else {
@@ -42,4 +51,4 @@ export function normalizeQuery() {

     return doc;
   };
-}
+};
@@ -1,48 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import { get, last } from 'lodash';
-import { overwrite } from '../../helpers';
-
-import { basicAggs } from '../../../../../common/basic_aggs';
-import { getBucketsPath } from '../../helpers/get_buckets_path';
-import { bucketTransform } from '../../helpers/bucket_transform';
-
-export function pivot(req, panel) {
-  return (next) => (doc) => {
-    const { sort } = req.body.state;
-
-    if (panel.pivot_id) {
-      overwrite(doc, 'aggs.pivot.terms.field', panel.pivot_id);
-      overwrite(doc, 'aggs.pivot.terms.size', panel.pivot_rows);
-      if (sort) {
-        const series = panel.series.find((item) => item.id === sort.column);
-        const metric = series && last(series.metrics);
-        if (metric && metric.type === 'count') {
-          overwrite(doc, 'aggs.pivot.terms.order', { _count: sort.order });
-        } else if (metric && basicAggs.includes(metric.type)) {
-          const sortAggKey = `${metric.id}-SORT`;
-          const fn = bucketTransform[metric.type];
-          const bucketPath = getBucketsPath(metric.id, series.metrics).replace(
-            metric.id,
-            sortAggKey
-          );
-          overwrite(doc, `aggs.pivot.terms.order`, { [bucketPath]: sort.order });
-          overwrite(doc, `aggs.pivot.aggs`, { [sortAggKey]: fn(metric) });
-        } else {
-          overwrite(doc, 'aggs.pivot.terms.order', {
-            _key: get(sort, 'order', 'asc'),
-          });
-        }
-      }
-    } else {
-      overwrite(doc, 'aggs.pivot.filter.match_all', {});
-    }
-    return next(doc);
-  };
-}
@@ -0,0 +1,44 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { get, last } from 'lodash';
+import { overwrite, getBucketsPath, bucketTransform } from '../../helpers';
+
+import { basicAggs } from '../../../../../common/basic_aggs';
+
+import type { TableRequestProcessorsFunction } from './types';
+
+export const pivot: TableRequestProcessorsFunction = ({ req, panel }) => (next) => (doc) => {
+  const { sort } = req.body.state;
+
+  if (panel.pivot_id) {
+    overwrite(doc, 'aggs.pivot.terms.field', panel.pivot_id);
+    overwrite(doc, 'aggs.pivot.terms.size', panel.pivot_rows);
+    if (sort) {
+      const series = panel.series.find((item) => item.id === sort.column);
+      const metric = series && last(series.metrics);
+      if (metric && metric.type === 'count') {
+        overwrite(doc, 'aggs.pivot.terms.order', { _count: sort.order });
+      } else if (metric && series && basicAggs.includes(metric.type)) {
+        const sortAggKey = `${metric.id}-SORT`;
+        const fn = bucketTransform[metric.type];
+        const bucketPath = getBucketsPath(metric.id, series.metrics).replace(metric.id, sortAggKey);
+        overwrite(doc, `aggs.pivot.terms.order`, { [bucketPath]: sort.order });
+        overwrite(doc, `aggs.pivot.aggs`, { [sortAggKey]: fn(metric) });
+      } else {
+        overwrite(doc, 'aggs.pivot.terms.order', {
+          _key: get(sort, 'order', 'asc'),
+        });
+      }
+    }
+  } else {
+    overwrite(doc, 'aggs.pivot.filter.match_all', {});
+  }
+
+  return next(doc);
+};
@@ -1,34 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import { getBucketSize } from '../../helpers/get_bucket_size';
-import { calculateAggRoot } from './calculate_agg_root';
-import { createPositiveRate, filter } from '../series/positive_rate';
-import { UI_SETTINGS } from '../../../../../../data/common';
-
-export function positiveRate(
-  req,
-  panel,
-  esQueryConfig,
-  seriesIndex,
-  capabilities,
-  uiSettings,
-  buildSeriesMetaParams
-) {
-  return (next) => async (doc) => {
-    const barTargetUiSettings = await uiSettings.get(UI_SETTINGS.HISTOGRAM_BAR_TARGET);
-    const { interval } = await buildSeriesMetaParams();
-    const { intervalString } = getBucketSize(req, interval, capabilities, barTargetUiSettings);
-
-    panel.series.forEach((column) => {
-      const aggRoot = calculateAggRoot(doc, column);
-      column.metrics.filter(filter).forEach(createPositiveRate(doc, intervalString, aggRoot));
-    });
-    return next(doc);
-  };
-}
@@ -0,0 +1,34 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { getBucketSize } from '../../helpers/get_bucket_size';
+import { calculateAggRoot } from './calculate_agg_root';
+import { UI_SETTINGS } from '../../../../../../data/common';
+
+import type { TableRequestProcessorsFunction } from './types';
+
+// @ts-expect-error not typed yet
+import { createPositiveRate, filter } from '../series/positive_rate';
+
+export const positiveRate: TableRequestProcessorsFunction = ({
+  req,
+  panel,
+  capabilities,
+  uiSettings,
+  buildSeriesMetaParams,
+}) => (next) => async (doc) => {
+  const barTargetUiSettings = await uiSettings.get(UI_SETTINGS.HISTOGRAM_BAR_TARGET);
+  const { interval } = await buildSeriesMetaParams();
+  const { intervalString } = getBucketSize(req, interval, capabilities, barTargetUiSettings);
+
+  panel.series.forEach((column) => {
+    const aggRoot = calculateAggRoot(doc, column);
+    column.metrics.filter(filter).forEach(createPositiveRate(doc, intervalString, aggRoot));
+  });
+  return next(doc);
+};
@@ -1,49 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import { getTimerange } from '../../helpers/get_timerange';
-import { esQuery } from '../../../../../../data/server';
-
-export function query(
-  req,
-  panel,
-  esQueryConfig,
-  seriesIndex,
-  capabilities,
-  uiSettings,
-  buildSeriesMetaParams
-) {
-  return (next) => async (doc) => {
-    const { timeField } = await buildSeriesMetaParams();
-    const { from, to } = getTimerange(req);
-
-    doc.size = 0;
-
-    const queries = !panel.ignore_global_filter ? req.body.query : [];
-    const filters = !panel.ignore_global_filter ? req.body.filters : [];
-    doc.query = esQuery.buildEsQuery(seriesIndex.indexPattern, queries, filters, esQueryConfig);
-
-    const timerange = {
-      range: {
-        [timeField]: {
-          gte: from.toISOString(),
-          lte: to.toISOString(),
-          format: 'strict_date_optional_time',
-        },
-      },
-    };
-    doc.query.bool.must.push(timerange);
-    if (panel.filter) {
-      doc.query.bool.must.push(
-        esQuery.buildEsQuery(seriesIndex.indexPattern, [panel.filter], [], esQueryConfig)
-      );
-    }
-
-    return next(doc);
-  };
-}
@@ -0,0 +1,52 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { getTimerange, overwrite } from '../../helpers';
+import { esQuery } from '../../../../../../data/server';
+import type { TableRequestProcessorsFunction } from './types';
+
+export const query: TableRequestProcessorsFunction = ({
+  req,
+  panel,
+  esQueryConfig,
+  seriesIndex,
+  buildSeriesMetaParams,
+}) => (next) => async (doc) => {
+  const { timeField } = await buildSeriesMetaParams();
+  const { from, to } = getTimerange(req);
+  const indexPattern = seriesIndex.indexPattern || undefined;
+
+  doc.size = 0;
+
+  const queries = !panel.ignore_global_filter ? req.body.query : [];
+  const filters = !panel.ignore_global_filter ? req.body.filters : [];
+  doc.query = esQuery.buildEsQuery(indexPattern, queries, filters, esQueryConfig);
+
+  const boolFilters: unknown[] = [];
+
+  if (timeField) {
+    const timerange = {
+      range: {
+        [timeField]: {
+          gte: from.toISOString(),
+          lte: to.toISOString(),
+          format: 'strict_date_optional_time',
+        },
+      },
+    };
+
+    boolFilters.push(timerange);
+  }
+  if (panel.filter) {
+    boolFilters.push(esQuery.buildEsQuery(indexPattern, [panel.filter], [], esQueryConfig));
+  }
+
+  overwrite(doc, 'query.bool.must', boolFilters);
+
+  return next(doc);
+};
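A detail worth noting in the converted `query` processor above: the time range and panel filter are collected into `boolFilters` and written with `overwrite(doc, 'query.bool.must', boolFilters)` rather than pushed onto `doc.query.bool.must` directly. Assuming `overwrite` behaves like a lodash-style deep `set` (which is how these processors use it throughout), the nested path is created even when it does not exist yet:

const doc: Record<string, unknown> = {};
overwrite(doc, 'query.bool.must', []); // doc becomes { query: { bool: { must: [] } } } — assumed set-like behavior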
@@ -1,36 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import { overwrite } from '../../helpers';
-import { bucketTransform } from '../../helpers/bucket_transform';
-import { calculateAggRoot } from './calculate_agg_root';
-import { get } from 'lodash';
-
-export function siblingBuckets(req, panel) {
-  return (next) => async (doc) => {
-    panel.series.forEach((column) => {
-      const aggRoot = calculateAggRoot(doc, column);
-      column.metrics
-        .filter((row) => /_bucket$/.test(row.type))
-        .forEach((metric) => {
-          const fn = bucketTransform[metric.type];
-          if (fn) {
-            try {
-              const intervalString = get(doc, aggRoot.replace(/\.aggs$/, '.meta.intervalString'));
-              const bucket = fn(metric, column.metrics, intervalString);
-
-              overwrite(doc, `${aggRoot}.${metric.id}`, bucket);
-            } catch (e) {
-              // meh
-            }
-          }
-        });
-    });
-    return next(doc);
-  };
-}
@@ -0,0 +1,38 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */
import { get } from 'lodash';
import { overwrite, bucketTransform } from '../../helpers';
import { calculateAggRoot } from './calculate_agg_root';
import type { TableRequestProcessorsFunction } from './types';

export const siblingBuckets: TableRequestProcessorsFunction = ({ panel }) => (next) => async (
  doc
) => {
  panel.series.forEach((column) => {
    const aggRoot = calculateAggRoot(doc, column);

    column.metrics
      .filter((row) => /_bucket$/.test(row.type))
      .forEach((metric) => {
        const fn = bucketTransform[metric.type];

        if (fn) {
          try {
            const intervalString = get(doc, aggRoot.replace(/\.aggs$/, '.meta.intervalString'));
            const bucket = fn(metric, column.metrics, intervalString);

            overwrite(doc, `${aggRoot}.${metric.id}`, bucket);
          } catch (e) {
            // ignore a failed bucket transform; the metric is simply skipped
          }
        }
      });
  });

  return next(doc);
};
@@ -1,29 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import { overwrite } from '../../helpers';
import { esQuery } from '../../../../../../data/server';

export function splitByEverything(req, panel, esQueryConfig, seriesIndex) {
  return (next) => (doc) => {
    panel.series
      .filter((c) => !(c.aggregate_by && c.aggregate_function))
      .forEach((column) => {
        if (column.filter) {
          overwrite(
            doc,
            `aggs.pivot.aggs.${column.id}.filter`,
            esQuery.buildEsQuery(seriesIndex.indexPattern, [column.filter], [], esQueryConfig)
          );
        } else {
          overwrite(doc, `aggs.pivot.aggs.${column.id}.filter.match_all`, {});
        }
      });
    return next(doc);
  };
}
@@ -0,0 +1,36 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import { overwrite } from '../../helpers';
import { esQuery } from '../../../../../../data/server';

import type { TableRequestProcessorsFunction } from './types';

export const splitByEverything: TableRequestProcessorsFunction = ({
  panel,
  esQueryConfig,
  seriesIndex,
}) => (next) => (doc) => {
  const indexPattern = seriesIndex.indexPattern || undefined;

  panel.series
    .filter((c) => !(c.aggregate_by && c.aggregate_function))
    .forEach((column) => {
      if (column.filter) {
        overwrite(
          doc,
          `aggs.pivot.aggs.${column.id}.filter`,
          esQuery.buildEsQuery(indexPattern, [column.filter], [], esQueryConfig)
        );
      } else {
        overwrite(doc, `aggs.pivot.aggs.${column.id}.filter.match_all`, {});
      }
    });

  return next(doc);
};
@@ -9,7 +9,15 @@
import { overwrite } from '../../helpers';
import { esQuery } from '../../../../../../data/server';

export function splitByTerms(req, panel, esQueryConfig, seriesIndex) {
import type { TableRequestProcessorsFunction } from './types';

export const splitByTerms: TableRequestProcessorsFunction = ({
  panel,
  esQueryConfig,
  seriesIndex,
}) => {
  const indexPattern = seriesIndex.indexPattern || undefined;

  return (next) => (doc) => {
    panel.series
      .filter((c) => c.aggregate_by && c.aggregate_function)

@@ -21,10 +29,10 @@ export function splitByTerms(req, panel, esQueryConfig, seriesIndex) {
          overwrite(
            doc,
            `aggs.pivot.aggs.${column.id}.column_filter.filter`,
            esQuery.buildEsQuery(seriesIndex.indexPattern, [column.filter], [], esQueryConfig)
            esQuery.buildEsQuery(indexPattern, [column.filter], [], esQueryConfig)
          );
        }
      });
    return next(doc);
  };
}
};
@@ -0,0 +1,42 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import type { IUiSettingsClient } from 'kibana/server';
import type { FetchedIndexPattern, Panel } from '../../../../../common/types';
import type { EsQueryConfig } from '../../../../../../data/common';
import type { SearchCapabilities } from '../../../search_strategies';
import type { VisTypeTimeseriesVisDataRequest } from '../../../../types';

import type { ProcessorFunction } from '../../build_processor_function';
import type { BaseMeta } from '../types';

export interface TableRequestProcessorsParams {
  req: VisTypeTimeseriesVisDataRequest;
  panel: Panel;
  esQueryConfig: EsQueryConfig;
  seriesIndex: FetchedIndexPattern;
  capabilities: SearchCapabilities;
  uiSettings: IUiSettingsClient;
  buildSeriesMetaParams: () => Promise<{
    maxBars: number;
    timeField?: string;
    interval: string;
  }>;
}

export interface TableSearchRequestMeta extends BaseMeta {
  panelId?: string;
  timeField?: string;
}

export type TableSearchRequest = Record<string, any>;

export type TableRequestProcessorsFunction = ProcessorFunction<
  TableRequestProcessorsParams,
  TableSearchRequest
>;
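These types encode the curried middleware shape every table request processor shares: a processor receives the shared params once, then the `next` processor, then the request document being built. A minimal conforming processor might look like this (a sketch for illustration, not part of this commit; `setFixedSize` is a made-up name):

import type { TableRequestProcessorsFunction } from './types';

// Hypothetical processor: writes one field, then hands the doc down the chain.
export const setFixedSize: TableRequestProcessorsFunction = () => (next) => async (doc) => {
  doc.size = 0; // TableSearchRequest is a plain Record<string, any>, so any key may be set
  return next(doc);
};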
@@ -6,8 +6,6 @@
 * Side Public License, v 1.
 */

import _ from 'lodash';

export function getLastMetric(series) {
  return _.last(series.metrics.filter((s) => s.type !== 'series_agg'));
export interface BaseMeta {
  index?: string;
}
@@ -1,24 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import { dropLastBucket } from '../series/drop_last_bucket';
import { isLastValueTimerangeMode } from '../../helpers/get_timerange_mode';

export function dropLastBucketFn(bucket, panel, series) {
  return (next) => (results) => {
    const shouldDropLastBucket = isLastValueTimerangeMode(panel);

    if (shouldDropLastBucket) {
      const fn = dropLastBucket({ aggregations: bucket }, panel, series);

      return fn(next)(results);
    }

    return next(results);
  };
}
@@ -0,0 +1,28 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import { isLastValueTimerangeMode } from '../../helpers';

// @ts-expect-error not typed yet
import { dropLastBucket } from '../series/drop_last_bucket';

import type { TableResponseProcessorsFunction } from './types';

export const dropLastBucketFn: TableResponseProcessorsFunction = ({ bucket, panel, series }) => (
  next
) => (results) => {
  const shouldDropLastBucket = isLastValueTimerangeMode(panel);

  if (shouldDropLastBucket) {
    const fn = dropLastBucket({ aggregations: bucket }, panel, series);

    return fn(next)(results);
  }

  return next(results);
};
@@ -1,26 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import { stdMetric } from './std_metric';
import { stdSibling } from './std_sibling';
import { seriesAgg } from './series_agg';
import { percentile } from './percentile';
import { percentileRank } from './percentile_rank';

import { math } from './math';
import { dropLastBucketFn } from './drop_last_bucket';

export const processors = [
  percentile,
  percentileRank,
  stdMetric,
  stdSibling,
  math,
  seriesAgg,
  dropLastBucketFn,
];
@@ -0,0 +1,15 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

export { stdMetric } from './std_metric';
export { stdSibling } from './std_sibling';
export { seriesAgg } from './series_agg';
export { percentile } from './percentile';
export { percentileRank } from './percentile_rank';
export { math } from './math';
export { dropLastBucketFn } from './drop_last_bucket';
@@ -0,0 +1,23 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

// @ts-expect-error not typed yet
import { mathAgg } from '../series/math';

import type { TableResponseProcessorsFunction } from './types';

export const math: TableResponseProcessorsFunction = ({
  bucket,
  panel,
  series,
  meta,
  extractFields,
}) => (next) => (results) => {
  const mathFn = mathAgg({ aggregations: bucket }, panel, series, meta, extractFields);
  return mathFn(next)(results);
};
@@ -1,45 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import { last } from 'lodash';
import { getSplits } from '../../helpers/get_splits';
import { getLastMetric } from '../../helpers/get_last_metric';
import { toPercentileNumber } from '../../../../../common/to_percentile_number';
import { METRIC_TYPES } from '../../../../../common/enums';

export function percentile(bucket, panel, series, meta, extractFields) {
  return (next) => async (results) => {
    const metric = getLastMetric(series);

    if (metric.type !== METRIC_TYPES.PERCENTILE) {
      return next(results);
    }

    const fakeResp = {
      aggregations: bucket,
    };

    (await getSplits(fakeResp, panel, series, meta, extractFields)).forEach((split) => {
      // table allows only one percentile in a series (the last one will be chosen in case of several)
      const percentile = last(metric.percentiles);
      const percentileKey = toPercentileNumber(percentile.value);
      const data = split.timeseries.buckets.map((bucket) => [
        bucket.key,
        bucket[metric.id].values[percentileKey],
      ]);

      results.push({
        id: split.id,
        label: `${split.label} (${percentile.value ?? 0})`,
        data,
      });
    });

    return next(results);
  };
}
@@ -0,0 +1,51 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import { last } from 'lodash';
import { getSplits, getLastMetric } from '../../helpers';
import { toPercentileNumber } from '../../../../../common/to_percentile_number';
import { METRIC_TYPES } from '../../../../../common/enums';

import type { TableResponseProcessorsFunction } from './types';
import type { PanelDataArray } from '../../../../../common/types/vis_data';

export const percentile: TableResponseProcessorsFunction = ({
  bucket,
  panel,
  series,
  meta,
  extractFields,
}) => (next) => async (results) => {
  const metric = getLastMetric(series);

  if (metric.type !== METRIC_TYPES.PERCENTILE) {
    return next(results);
  }

  const fakeResp = {
    aggregations: bucket,
  };

  (await getSplits(fakeResp, panel, series, meta, extractFields)).forEach((split) => {
    // table allows only one percentile in a series (the last one will be chosen in case of several)
    const lastPercentile = last(metric.percentiles)?.value ?? 0;
    const percentileKey = toPercentileNumber(lastPercentile);
    const data = split.timeseries.buckets.map((b) => [
      b.key,
      b[metric.id].values[percentileKey],
    ]) as PanelDataArray[];

    results.push({
      id: split.id,
      label: `${split.label} (${lastPercentile ?? 0})`,
      data: data!,
    });
  });

  return next(results);
};
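The `toPercentileNumber` round-trip matters here because Elasticsearch keys percentile results by a decimal string, so a stored percentile of `50` has to be looked up as `'50.0'`. A quick illustration (the response values are hypothetical):

// A percentiles agg response keys its values like: { values: { '50.0': 123.4, '95.5': 567.8 } }
toPercentileNumber(50);     // -> '50.0'  (padded so it matches the response key)
toPercentileNumber('95.5'); // -> '95.5'  (already a decimal string, returned as-is)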
@@ -1,50 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import { last } from 'lodash';
import { getSplits } from '../../helpers/get_splits';
import { getLastMetric } from '../../helpers/get_last_metric';
import { toPercentileNumber } from '../../../../../common/to_percentile_number';
import { getAggValue } from '../../helpers/get_agg_value';
import { METRIC_TYPES } from '../../../../../common/enums';

export function percentileRank(bucket, panel, series, meta, extractFields) {
  return (next) => async (results) => {
    const metric = getLastMetric(series);

    if (metric.type !== METRIC_TYPES.PERCENTILE_RANK) {
      return next(results);
    }

    const fakeResp = {
      aggregations: bucket,
    };

    (await getSplits(fakeResp, panel, series, meta, extractFields)).forEach((split) => {
      // table allows only one percentile rank in a series (the last one will be chosen in case of several)
      const lastRankValue = last(metric.values);
      const percentileRank = toPercentileNumber(lastRankValue);

      const data = split.timeseries.buckets.map((bucket) => [
        bucket.key,
        getAggValue(bucket, {
          ...metric,
          value: percentileRank,
        }),
      ]);

      results.push({
        data,
        id: split.id,
        label: `${split.label} (${lastRankValue ?? 0})`,
      });
    });

    return next(results);
  };
}
@@ -0,0 +1,55 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import { last } from 'lodash';
import { getSplits, getAggValue, getLastMetric } from '../../helpers';
import { toPercentileNumber } from '../../../../../common/to_percentile_number';
import { METRIC_TYPES } from '../../../../../common/enums';

import type { TableResponseProcessorsFunction } from './types';
import type { PanelDataArray } from '../../../../../common/types/vis_data';

export const percentileRank: TableResponseProcessorsFunction = ({
  bucket,
  panel,
  series,
  meta,
  extractFields,
}) => (next) => async (results) => {
  const metric = getLastMetric(series);

  if (metric.type !== METRIC_TYPES.PERCENTILE_RANK) {
    return next(results);
  }

  const fakeResp = {
    aggregations: bucket,
  };

  (await getSplits(fakeResp, panel, series, meta, extractFields)).forEach((split) => {
    // table allows only one percentile rank in a series (the last one will be chosen in case of several)
    const lastRankValue = last(metric.values) ?? 0;
    const lastPercentileNumber = toPercentileNumber(lastRankValue);

    const data = split.timeseries.buckets.map((b) => [
      b.key,
      getAggValue(b, {
        ...metric,
        value: lastPercentileNumber,
      }),
    ]) as PanelDataArray[];

    results.push({
      data,
      id: split.id,
      label: `${split.label} (${lastRankValue ?? 0})`,
    });
  });

  return next(results);
};
@@ -1,39 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import { SeriesAgg } from './_series_agg';
import { last, first } from 'lodash';
import { calculateLabel } from '../../../../../common/calculate_label';

export function seriesAgg(resp, panel, series, meta, extractFields) {
  return (next) => async (results) => {
    if (series.aggregate_by && series.aggregate_function) {
      const targetSeries = [];
      // Filter out the seires with the matching metric and store them
      // in targetSeries
      results = results.filter((s) => {
        if (s.id.split(/:/)[0] === series.id) {
          targetSeries.push(s.data);
          return false;
        }
        return true;
      });
      const fn = SeriesAgg[series.aggregate_function];
      const data = fn(targetSeries);
      const fieldsForSeries = meta.index ? await extractFields({ id: meta.index }) : [];

      results.push({
        id: `${series.id}`,
        label:
          series.label || calculateLabel(last(series.metrics), series.metrics, fieldsForSeries),
        data: first(data),
      });
    }
    return next(results);
  };
}
@@ -0,0 +1,46 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import { last } from 'lodash';

import { calculateLabel } from '../../../../../common/calculate_label';

// @ts-expect-error not typed yet
import { SeriesAgg } from './_series_agg';

import type { TableResponseProcessorsFunction } from './types';
import type { PanelDataArray } from '../../../../../common/types/vis_data';

export const seriesAgg: TableResponseProcessorsFunction = ({ series, meta, extractFields }) => (
  next
) => async (results) => {
  if (series.aggregate_by && series.aggregate_function) {
    const targetSeries: PanelDataArray[][] = [];

    // Filter out the series with the matching metric and store them
    // in targetSeries
    results = results.filter((s) => {
      if (s.id && s.id.split(/:/)[0] === series.id) {
        targetSeries.push(s.data!);
        return false;
      }
      return true;
    });

    const fn = SeriesAgg[series.aggregate_function];
    const data = fn(targetSeries);
    const fieldsForSeries = meta.index ? await extractFields({ id: meta.index }) : [];

    results.push({
      id: `${series.id}`,
      label: series.label || calculateLabel(last(series.metrics)!, series.metrics, fieldsForSeries),
      data: data[0],
    });
  }
  return next(results);
};
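`_series_agg` is still untyped (hence the `@ts-expect-error`), so what `fn(targetSeries)` returns is opaque here. Conceptually, an aggregate function folds several `PanelDataArray[]` series into one; a sum-style fold could look roughly like this (an illustrative sketch of the idea only, not the actual `_series_agg` implementation):

import type { PanelDataArray } from '../../../../../common/types/vis_data';

// Illustrative only: add up the values of several series, bucket key by bucket key.
function sumSeries(targetSeries: PanelDataArray[][]): PanelDataArray[][] {
  const byKey = new Map<PanelDataArray[0], number>();
  for (const series of targetSeries) {
    for (const [key, value] of series) {
      byKey.set(key, (byKey.get(key) ?? 0) + Number(value ?? 0));
    }
  }
  // The caller takes data[0], so the combined series is returned wrapped in an array.
  return [[...byKey.entries()]];
}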
@@ -1,43 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import { getSplits, getLastMetric, mapEmptyToZero } from '../../helpers';
import { METRIC_TYPES } from '../../../../../common/enums';

export function stdMetric(bucket, panel, series, meta, extractFields) {
  return (next) => async (results) => {
    const metric = getLastMetric(series);

    if (metric.type === METRIC_TYPES.STD_DEVIATION && metric.mode === 'band') {
      return next(results);
    }

    if ([METRIC_TYPES.PERCENTILE_RANK, METRIC_TYPES.PERCENTILE].includes(metric.type)) {
      return next(results);
    }

    if (/_bucket$/.test(metric.type)) {
      return next(results);
    }

    const fakeResp = {
      aggregations: bucket,
    };

    (await getSplits(fakeResp, panel, series, meta, extractFields)).forEach((split) => {
      const data = mapEmptyToZero(metric, split.timeseries.buckets);
      results.push({
        id: split.id,
        label: split.label,
        data,
      });
    });

    return next(results);
  };
}
@@ -0,0 +1,50 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import { getSplits, getLastMetric, mapEmptyToZero } from '../../helpers';
import { METRIC_TYPES } from '../../../../../common/enums';

import type { TableResponseProcessorsFunction } from './types';

export const stdMetric: TableResponseProcessorsFunction = ({
  bucket,
  panel,
  series,
  meta,
  extractFields,
}) => (next) => async (results) => {
  const metric = getLastMetric(series);

  if (metric.type === METRIC_TYPES.STD_DEVIATION && metric.mode === 'band') {
    return next(results);
  }

  if (METRIC_TYPES.PERCENTILE_RANK === metric.type || METRIC_TYPES.PERCENTILE === metric.type) {
    return next(results);
  }

  if (/_bucket$/.test(metric.type)) {
    return next(results);
  }

  const fakeResp = {
    aggregations: bucket,
  };

  (await getSplits(fakeResp, panel, series, meta, extractFields)).forEach((split) => {
    const data = mapEmptyToZero(metric, split.timeseries.buckets);

    results.push({
      id: split.id,
      label: split.label,
      data,
    });
  });

  return next(results);
};
@@ -1,34 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import { getSplits } from '../../helpers/get_splits';
import { getLastMetric } from '../../helpers/get_last_metric';
import { getSiblingAggValue } from '../../helpers/get_sibling_agg_value';

export function stdSibling(bucket, panel, series, meta, extractFields) {
  return (next) => async (results) => {
    const metric = getLastMetric(series);

    if (!/_bucket$/.test(metric.type)) return next(results);
    if (metric.type === 'std_deviation_bucket' && metric.mode === 'band') return next(results);

    const fakeResp = { aggregations: bucket };
    (await getSplits(fakeResp, panel, series, meta, extractFields)).forEach((split) => {
      const data = split.timeseries.buckets.map((b) => {
        return [b.key, getSiblingAggValue(split, metric)];
      });
      results.push({
        id: split.id,
        label: split.label,
        data,
      });
    });

    return next(results);
  };
}
@@ -0,0 +1,40 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import { getSplits, getLastMetric, getSiblingAggValue } from '../../helpers';

import type { TableResponseProcessorsFunction } from './types';
import type { PanelDataArray } from '../../../../../common/types/vis_data';

export const stdSibling: TableResponseProcessorsFunction = ({
  bucket,
  panel,
  series,
  meta,
  extractFields,
}) => (next) => async (results) => {
  const metric = getLastMetric(series);

  if (!/_bucket$/.test(metric.type)) return next(results);
  if (metric.type === 'std_deviation_bucket' && metric.mode === 'band') return next(results);

  const fakeResp = { aggregations: bucket };
  (await getSplits(fakeResp, panel, series, meta, extractFields)).forEach((split) => {
    const data: PanelDataArray[] = split.timeseries.buckets.map((b) => {
      return [b.key, getSiblingAggValue(split, metric)];
    });

    results.push({
      id: split.id,
      label: split.label,
      data,
    });
  });

  return next(results);
};
@@ -0,0 +1,33 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */
import { createFieldsFetcher } from '../../../search_strategies/lib/fields_fetcher';

import type { ProcessorFunction } from '../../build_processor_function';
import type { TableSearchRequestMeta } from '../../request_processors/table/types';
import type { Panel, Series, PanelData } from '../../../../../common/types';

export interface TableResponseProcessorsParams {
  bucket: Record<string, unknown>;
  panel: Panel;
  series: Series;
  meta: TableSearchRequestMeta;
  extractFields: ReturnType<typeof createFieldsFetcher>;
}

export type TableSearchResponse = Array<
  Partial<PanelData> &
    Partial<{
      slope: number;
      last: number | string | null;
    }>
>;

export type TableResponseProcessorsFunction = ProcessorFunction<
  TableResponseProcessorsParams,
  TableSearchResponse
>;
@@ -1,16 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import { _legacyBuildProcessorFunction } from '../build_processor_function';
import { processors } from '../request_processors/table';

export async function buildRequestBody(...args) {
  const processor = _legacyBuildProcessorFunction(processors, ...args);
  const doc = await processor({});
  return doc;
}
@@ -0,0 +1,51 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import { buildProcessorFunction } from '../build_processor_function';
import {
  query,
  pivot,
  splitByTerms,
  splitByEverything,
  dateHistogram,
  metricBuckets,
  siblingBuckets,
  filterRatios,
  positiveRate,
  normalizeQuery,
} from '../request_processors/table';

import type {
  TableRequestProcessorsFunction,
  TableRequestProcessorsParams,
  TableSearchRequest,
} from '../request_processors/table/types';

export function buildTableRequest(params: TableRequestProcessorsParams) {
  const processor = buildProcessorFunction<
    TableRequestProcessorsFunction,
    TableRequestProcessorsParams,
    TableSearchRequest
  >(
    [
      query,
      pivot,
      splitByTerms,
      splitByEverything,
      dateHistogram,
      metricBuckets,
      siblingBuckets,
      filterRatios,
      positiveRate,
      normalizeQuery,
    ],
    params
  );

  return processor({});
}
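Compared with the legacy `buildRequestBody(...args)`, which forwarded positional arguments, the typed builder threads one params object through every processor in the list. Calling it is then just (a sketch; each field is one of the `TableRequestProcessorsParams` declared above):

// Assemble the Elasticsearch search request for a table panel.
const searchRequest = await buildTableRequest({
  req,
  panel,
  esQueryConfig,
  seriesIndex,
  capabilities,
  uiSettings,
  buildSeriesMetaParams,
});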
@@ -0,0 +1,34 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import { buildProcessorFunction } from '../build_processor_function';
import {
  percentile,
  percentileRank,
  stdMetric,
  stdSibling,
  math,
  seriesAgg,
  dropLastBucketFn,
} from '../response_processors/table';

import type {
  TableResponseProcessorsFunction,
  TableResponseProcessorsParams,
  TableSearchResponse,
} from '../response_processors/table/types';

export function buildTableResponse(params: TableResponseProcessorsParams) {
  const processor = buildProcessorFunction<
    TableResponseProcessorsFunction,
    TableResponseProcessorsParams,
    TableSearchResponse
  >([percentile, percentileRank, stdMetric, stdSibling, math, seriesAgg, dropLastBucketFn], params);

  return processor([]);
}
@@ -1,62 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import { _legacyBuildProcessorFunction } from '../build_processor_function';
import { processors } from '../response_processors/table';
import { getLastValue } from '../../../../common/last_value_utils';
import { first, get } from 'lodash';
import { overwrite } from '../helpers';
import { getActiveSeries } from '../helpers/get_active_series';

function trendSinceLastBucket(data) {
  if (data.length < 2) {
    return 0;
  }
  const currentBucket = data[data.length - 1];
  const prevBucket = data[data.length - 2];
  const trend = (currentBucket[1] - prevBucket[1]) / currentBucket[1];
  return Number.isNaN(trend) ? 0 : trend;
}

export function processBucket(panel, req, searchStrategy, capabilities, extractFields) {
  return async (bucket) => {
    const series = await Promise.all(
      getActiveSeries(panel).map(async (series) => {
        const timeseries = get(bucket, `${series.id}.timeseries`);
        const buckets = get(bucket, `${series.id}.buckets`);
        let meta = {};

        if (!timeseries && buckets) {
          meta = get(bucket, `${series.id}.meta`);
          const timeseries = {
            buckets: get(bucket, `${series.id}.buckets`),
          };
          overwrite(bucket, series.id, { meta, timeseries });
        }

        const processor = _legacyBuildProcessorFunction(
          processors,
          bucket,
          panel,
          series,
          meta,
          extractFields
        );
        const result = first(await processor([]));

        if (!result) return null;
        const data = get(result, 'data', []);
        result.slope = trendSinceLastBucket(data);
        result.last = getLastValue(data);
        return result;
      })
    );

    return { key: bucket.key, series };
  };
}
@@ -8,11 +8,14 @@

import { processBucket } from './process_bucket';

function createValueObject(key, value, seriesId) {
import type { Panel, Series } from '../../../../common/types';
import { createFieldsFetcher } from '../../search_strategies/lib/fields_fetcher';

function createValueObject(key: string | number, value: string | number, seriesId: string) {
  return { key_as_string: `${key}`, doc_count: value, key, [seriesId]: { value } };
}

function createBucketsObjects(size, sort, seriesId) {
function createBucketsObjects(size: number, sort: string, seriesId: string) {
  const values = Array(size)
    .fill(1)
    .map((_, i) => i + 1);

@@ -25,34 +28,39 @@ function createBucketsObjects(size, sort, seriesId) {
  return values.map((v, i) => createValueObject(i, v, seriesId));
}

function createPanel(series) {
function createPanel(series: string[]) {
  return {
    type: 'table',
    time_field: '',
    series: series.map((seriesId) => ({
    series: (series.map((seriesId) => ({
      id: seriesId,
      metrics: [{ id: seriesId, type: 'count' }],
      trend_arrows: 1,
    })),
  };
    })) as unknown) as Series[],
  } as Panel;
}

function createBuckets(series) {
function createBuckets(series: string[]) {
  return [
    { key: 'A', trend: 'asc', size: 10 },
    { key: 'B', trend: 'desc', size: 10 },
    { key: 'C', trend: 'flat', size: 10 },
    { key: 'D', trend: 'asc', size: 1, expectedTrend: 'flat' },
  ].map(({ key, trend, size, expectedTrend }) => {
    const baseObj = {
    const baseObj: {
      expectedTrend: string;
      key: string;
      [series: string]: any;
    } = {
      key,
      expectedTrend: expectedTrend || trend,
    };

    for (const seriesId of series) {
      baseObj[seriesId] = {
        meta: {
          timeField: 'timestamp',
          seriesId: seriesId,
          seriesId,
        },
        buckets: createBucketsObjects(size, trend, seriesId),
      };

@@ -61,7 +69,7 @@ function createBuckets(series) {
  });
}

function trendChecker(trend, slope) {
function trendChecker(trend: string, slope: number) {
  switch (trend) {
    case 'asc':
      return slope > 0;

@@ -75,8 +83,10 @@ function trendChecker(trend, slope) {
}

describe('processBucket(panel)', () => {
  const extractFields = jest.fn() as ReturnType<typeof createFieldsFetcher>;
  let panel: Panel;

  describe('single metric panel', () => {
    let panel;
    const SERIES_ID = 'series-id';

    beforeEach(() => {

@@ -84,17 +94,17 @@ describe('processBucket(panel)', () => {
    });

    test('return the correct trend direction', async () => {
      const bucketProcessor = processBucket(panel);
      const bucketProcessor = processBucket({ panel, extractFields });
      const buckets = createBuckets([SERIES_ID]);
      for (const bucket of buckets) {
        const result = await bucketProcessor(bucket);
        expect(result.key).toEqual(bucket.key);
        expect(trendChecker(bucket.expectedTrend, result.series[0].slope)).toBeTruthy();
        expect(trendChecker(bucket.expectedTrend, result.series[0]!.slope!)).toBeTruthy();
      }
    });

    test('properly handle 0 values for trend', async () => {
      const bucketProcessor = processBucket(panel);
      const bucketProcessor = processBucket({ panel, extractFields });
      const bucketforNaNResult = {
        key: 'NaNScenario',
        expectedTrend: 'flat',

@@ -112,11 +122,13 @@ describe('processBucket(panel)', () => {
      };
      const result = await bucketProcessor(bucketforNaNResult);
      expect(result.key).toEqual(bucketforNaNResult.key);
      expect(trendChecker(bucketforNaNResult.expectedTrend, result.series[0].slope)).toEqual(true);
      expect(trendChecker(bucketforNaNResult.expectedTrend, result.series[0]!.slope!)).toEqual(
        true
      );
    });

    test('have the side effect to create the timeseries property if missing on bucket', async () => {
      const bucketProcessor = processBucket(panel);
      const bucketProcessor = processBucket({ panel, extractFields });
      const buckets = createBuckets([SERIES_ID]);

      for (const bucket of buckets) {

@@ -129,7 +141,6 @@ describe('processBucket(panel)', () => {
    });

  describe('multiple metrics panel', () => {
    let panel;
    const SERIES = ['series-id-1', 'series-id-2'];

    beforeEach(() => {

@@ -137,14 +148,15 @@ describe('processBucket(panel)', () => {
    });

    test('return the correct trend direction', async () => {
      const bucketProcessor = processBucket(panel);
      const bucketProcessor = processBucket({ panel, extractFields });
      const buckets = createBuckets(SERIES);

      for (const bucket of buckets) {
        const result = await bucketProcessor(bucket);

        expect(result.key).toEqual(bucket.key);
        expect(trendChecker(bucket.expectedTrend, result.series[0].slope)).toBeTruthy();
        expect(trendChecker(bucket.expectedTrend, result.series[1].slope)).toBeTruthy();
        expect(trendChecker(bucket.expectedTrend, result.series[0]!.slope!)).toBeTruthy();
        expect(trendChecker(bucket.expectedTrend, result.series[1]!.slope!)).toBeTruthy();
      }
    });
  });
@@ -0,0 +1,72 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import { get } from 'lodash';
import { getLastValue } from '../../../../common/last_value_utils';
import { overwrite, getActiveSeries } from '../helpers';
import { buildTableResponse } from './build_response_body';
import { createFieldsFetcher } from '../../search_strategies/lib/fields_fetcher';

import type { Panel } from '../../../../common/types';
import type { TableSearchRequestMeta } from '../request_processors/table/types';
import { PanelDataArray } from '../../../../common/types/vis_data';

function trendSinceLastBucket(data: PanelDataArray[]) {
  if (data.length < 2) {
    return 0;
  }
  const currentBucket = data[data.length - 1];
  const prevBucket = data[data.length - 2];

  const trend = (Number(currentBucket[1]) - Number(prevBucket[1])) / Number(currentBucket[1]);
  return Number.isNaN(trend) ? 0 : trend;
}

interface ProcessTableBucketParams {
  panel: Panel;
  extractFields: ReturnType<typeof createFieldsFetcher>;
}

export function processBucket({ panel, extractFields }: ProcessTableBucketParams) {
  return async (bucket: Record<string, unknown>) => {
    const resultSeries = await Promise.all(
      getActiveSeries(panel).map(async (series) => {
        const timeseries = get(bucket, `${series.id}.timeseries`);
        const buckets = get(bucket, `${series.id}.buckets`);
        let meta: TableSearchRequestMeta = {};

        if (!timeseries && buckets) {
          meta = get(bucket, `${series.id}.meta`) as TableSearchRequestMeta;

          overwrite(bucket, series.id, {
            meta,
            timeseries: {
              buckets: get(bucket, `${series.id}.buckets`),
            },
          });
        }

        const [result] = await buildTableResponse({
          bucket,
          panel,
          series,
          meta,
          extractFields,
        });

        if (!result) return null;
        const data = result?.data ?? [];
        result.slope = trendSinceLastBucket(data);
        result.last = getLastValue(data);
        return result;
      })
    );

    return { key: bucket.key, series: resultSeries };
  };
}
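The slope that drives the table's trend arrow is simply the relative change between the last two buckets, now computed through `Number(...)` so string values coerce cleanly instead of breaking the arithmetic. A few worked cases (the data points are hypothetical):

// [timestamp, value] pairs; only the last two buckets matter.
trendSinceLastBucket([[1, 100], [2, 120]]); // (120 - 100) / 120 ≈ 0.167 -> upward trend
trendSinceLastBucket([[1, 0], [2, 0]]);     // 0 / 0 is NaN              -> reported as 0 (flat)
trendSinceLastBucket([[1, 50]]);            // fewer than two buckets    -> 0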
@@ -83,7 +83,10 @@ export class KibanaMetricsAdapter implements InfraMetricsAdapter {
        return {
          id: series.id,
          label: series.label,
          data: series.data.map((point) => ({ timestamp: point[0], value: point[1] })),
          data: series.data.map((point) => ({
            timestamp: point[0] as number,
            value: point[1] as number | null,
          })),
        };
      }),
    };