Mirror of https://github.com/elastic/kibana.git
Synced 2025-04-24 01:38:56 -04:00
[Metrics UI] Replace date_histogram with date_range aggregation in threshold alert (#100004)
* [Metrics UI] Replace date_histogram with date_range aggregation in threshold alert
* Remove console.log
* Fix rate aggregation and offset
This commit is contained in:
parent 5edf7e267a
commit 2f3e175417

3 changed files with 40 additions and 19 deletions
```diff
@@ -25,6 +25,7 @@ interface Aggregation {
     buckets: Array<{
       aggregatedValue: { value: number; values?: Array<{ key: number; value: number }> };
       doc_count: number;
+      to_as_string: string;
       key_as_string: string;
     }>;
   };
```
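The new `to_as_string` field matches what Elasticsearch returns for `date_range` buckets, which are labeled by their boundary timestamps rather than by the single `key_as_string` that `date_histogram` buckets carry. A rough sketch of the two bucket shapes, with illustrative values only:

```ts
// Illustrative bucket shapes; the values are made up.
// A date_histogram bucket is keyed by the interval's start time:
const histogramBucket = {
  key: 1577858400000,
  key_as_string: '2020-01-01T06:00:00.000Z',
  doc_count: 3,
};

// A date_range bucket instead reports both boundaries of its range:
const rangeBucket = {
  from: 1577858400000,
  from_as_string: '2020-01-01T06:00:00.000Z',
  to: 1577858460000,
  to_as_string: '2020-01-01T06:01:00.000Z',
  doc_count: 3,
};
```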
```diff
@@ -60,6 +61,7 @@ export const evaluateAlert = <Params extends EvaluatedAlertParams = EvaluatedAle
         filterQuery,
         timeframe
       );
+
       const { threshold, warningThreshold, comparator, warningComparator } = criterion;
       const pointsEvaluator = (points: any[] | typeof NaN | null, t?: number[], c?: Comparator) => {
         if (!t || !c) return [false];
```
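The `pointsEvaluator` visible in context above bails out with `[false]` when no threshold or comparator is supplied. As a point of reference, a comparator check over a series of points might look like the following minimal sketch; the `Comparator` values and the point shape here are assumptions for illustration, not the file's actual implementation:

```ts
// Hypothetical sketch of evaluating points against a threshold.
enum Comparator {
  GT = '>',
  LT = '<',
}

const evaluatePoints = (
  points: Array<{ key: string; value: number }> | null,
  t?: number[],
  c?: Comparator
): boolean[] => {
  if (!points || !t || !c) return [false]; // same guard as the code above
  return points.map((p) => (c === Comparator.GT ? p.value > t[0] : p.value < t[0]));
};
```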
```diff
@@ -179,18 +181,21 @@ const getValuesFromAggregations = (
     const { buckets } = aggregations.aggregatedIntervals;
     if (!buckets.length) return null; // No Data state
     if (aggType === Aggregators.COUNT) {
-      return buckets.map((bucket) => ({ key: bucket.key_as_string, value: bucket.doc_count }));
+      return buckets.map((bucket) => ({
+        key: bucket.to_as_string,
+        value: bucket.doc_count,
+      }));
     }
     if (aggType === Aggregators.P95 || aggType === Aggregators.P99) {
       return buckets.map((bucket) => {
         const values = bucket.aggregatedValue?.values || [];
         const firstValue = first(values);
         if (!firstValue) return null;
-        return { key: bucket.key_as_string, value: firstValue.value };
+        return { key: bucket.to_as_string, value: firstValue.value };
       });
     }
     return buckets.map((bucket) => ({
-      key: bucket.key_as_string,
+      key: bucket.key_as_string ?? bucket.to_as_string,
       value: bucket.aggregatedValue?.value ?? null,
     }));
   } catch (e) {
```
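As a quick illustration of the COUNT branch above, a `date_range` bucket now yields a `{ key, value }` pair keyed by the bucket's end timestamp (sample data, not taken from the test suite):

```ts
// Sample input/output for the COUNT branch (illustrative values).
const buckets = [
  { to_as_string: '2020-01-01T06:01:00.000Z', doc_count: 3 },
  { to_as_string: '2020-01-01T06:02:00.000Z', doc_count: 5 },
];
const values = buckets.map((bucket) => ({
  key: bucket.to_as_string,
  value: bucket.doc_count,
}));
// values === [
//   { key: '2020-01-01T06:01:00.000Z', value: 3 },
//   { key: '2020-01-01T06:02:00.000Z', value: 5 },
// ]
```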
```diff
@@ -37,11 +37,12 @@ export const getElasticsearchMetricQuery = (
   }
   const interval = `${timeSize}${timeUnit}`;
   const intervalAsSeconds = getIntervalInSeconds(interval);
+  const intervalAsMS = intervalAsSeconds * 1000;

   const to = roundTimestamp(timeframe ? timeframe.end : Date.now(), timeUnit);
   // We need enough data for 5 buckets worth of data. We also need
   // to convert the intervalAsSeconds to milliseconds.
-  const minimumFrom = to - intervalAsSeconds * 1000 * MINIMUM_BUCKETS;
+  const minimumFrom = to - intervalAsMS * MINIMUM_BUCKETS;

   const from = roundTimestamp(
     timeframe && timeframe.start <= minimumFrom ? timeframe.start : minimumFrom,
```
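The window math is easiest to see with concrete numbers. For a one-minute criterion and the five-bucket minimum implied by the comment above, a worked example (the `MINIMUM_BUCKETS = 5` value is assumed from that comment):

```ts
// Worked example of the minimum-window math above (values assumed).
const MINIMUM_BUCKETS = 5; // per the "5 buckets worth of data" comment
const intervalAsSeconds = 60; // getIntervalInSeconds('1m')
const intervalAsMS = intervalAsSeconds * 1000; // 60000
const to = 1577858400000;
const minimumFrom = to - intervalAsMS * MINIMUM_BUCKETS; // 1577858100000, i.e. to minus 5 minutes
```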
```diff
@@ -49,6 +50,7 @@ export const getElasticsearchMetricQuery = (
   );

   const offset = calculateDateHistogramOffset({ from, to, interval, field: timefield });
+  const offsetInMS = parseInt(offset, 10) * 1000;

   const aggregations =
     aggType === Aggregators.COUNT
```
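`calculateDateHistogramOffset` returns its offset as a duration string, so `parseInt` strips the unit suffix and the multiplication converts the value to milliseconds for the range math below. Assuming the offset comes back in seconds (e.g. `'-27s'`), the conversion works out as:

```ts
// Assuming a seconds-suffixed offset string such as '-27s':
const offset = '-27s';
const offsetInMS = parseInt(offset, 10) * 1000; // parseInt stops at 's': -27 * 1000 = -27000
```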
```diff
@@ -65,20 +67,34 @@ export const getElasticsearchMetricQuery = (
         },
       };

-  const baseAggs = {
-    aggregatedIntervals: {
-      date_histogram: {
-        field: timefield,
-        fixed_interval: interval,
-        offset,
-        extended_bounds: {
-          min: from,
-          max: to,
-        },
-      },
-      aggregations,
-    },
-  };
+  const baseAggs =
+    aggType === Aggregators.RATE
+      ? {
+          aggregatedIntervals: {
+            date_histogram: {
+              field: timefield,
+              fixed_interval: interval,
+              offset,
+              extended_bounds: {
+                min: from,
+                max: to,
+              },
+            },
+            aggregations,
+          },
+        }
+      : {
+          aggregatedIntervals: {
+            date_range: {
+              field: timefield,
+              ranges: Array.from(Array(Math.floor((to - from) / intervalAsMS)), (_, i) => ({
+                from: from + intervalAsMS * i + offsetInMS,
+                to: from + intervalAsMS * (i + 1) + offsetInMS,
+              })),
+            },
+            aggregations,
+          },
+        };

   const aggs = groupBy
     ? {
```
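To make the `date_range` branch concrete, here are the ranges it generates for a five-minute window at a one-minute interval with a zero offset (illustrative numbers):

```ts
// Illustrative: ranges for from = 0 ms, to = 300000 ms, interval = 60000 ms, offset = 0.
const from = 0;
const to = 300000;
const intervalAsMS = 60000;
const offsetInMS = 0;
const ranges = Array.from(Array(Math.floor((to - from) / intervalAsMS)), (_, i) => ({
  from: from + intervalAsMS * i + offsetInMS,
  to: from + intervalAsMS * (i + 1) + offsetInMS,
}));
// ranges === [
//   { from: 0, to: 60000 },
//   { from: 60000, to: 120000 },
//   { from: 120000, to: 180000 },
//   { from: 180000, to: 240000 },
//   { from: 240000, to: 300000 },
// ]
```

Unlike a `date_histogram`, whose buckets are aligned to the interval grid, these explicit ranges pin every bucket exactly to the alert's evaluation window.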
```diff
@@ -13,7 +13,7 @@ const bucketsA = [
   {
     doc_count: 3,
     aggregatedValue: { value: 1.0, values: [{ key: 95.0, value: 1.0 }] },
-    key_as_string: new Date(1577858400000).toISOString(),
+    to_as_string: new Date(1577858400000).toISOString(),
   },
 ];
```