Mirror of https://github.com/elastic/kibana.git (synced 2025-04-23 09:19:04 -04:00)
[Infrastructure UI] Add parameter Snapshot API to not query using histogram (#146573)
## Summary

closes #146517

This PR expands the behaviour of the existing `includeTimeseries` parameter: when it is set to `false`, the `composite` aggregation no longer uses a `date_histogram`. As a result, the API not only omits the `timeseries` object from the response, but also computes the aggregations entirely on the Elasticsearch side. When `includeTimeseries` is `true`, an extra `avg` and `max` calculation is still performed on the TypeScript side using the histogram buckets. The main motivation behind this change is to run the snapshot query without needing to return buckets, avoiding the max-bucket error.

### How to test

- Enable the system module on Metricbeat
- Start a local Kibana
- Start Metricbeat
- Explore `Infrastructure > Metrics`, `Infrastructure > Inventory` and `Infrastructure > Hosts`, and play with the filters

#### cURL

##### Run a query with a composite aggregation

###### With timeseries

```bash
curl -XPOST -f 'http://0.0.0.0:5601/ftw/api/metrics/snapshot' \
  -H 'kbn-xsrf:localhost' \
  -H 'Content-Type: application/json' \
  --data-raw '{"filterQuery":"{\"bool\":{\"must\":[],\"filter\":[],\"should\":[],\"must_not\":[]}}","metrics":[{"type":"memory"}],"groupBy":[],"nodeType":"host","sourceId":"default","accountId":"","region":"","timerange":{"from":1670859470896,"to":1670861030896,"interval":"1m","ignoreLookback":true},"includeTimeseries":true}' \
  -u elastic:changeme
```

###### Without timeseries

```bash
curl -XPOST -f 'http://0.0.0.0:5601/ftw/api/metrics/snapshot' \
  -H 'kbn-xsrf:localhost' \
  -H 'Content-Type: application/json' \
  --data-raw '{"filterQuery":"{\"bool\":{\"must\":[],\"filter\":[],\"should\":[],\"must_not\":[]}}","metrics":[{"type":"memory"}],"groupBy":[],"nodeType":"host","sourceId":"default","accountId":"","region":"","timerange":{"from":1670859470896,"to":1670861030896,"interval":"1m","ignoreLookback":true},"includeTimeseries":false}' \
  -u elastic:changeme
```

##### Run a query with just a date histogram

###### With timeseries

```bash
curl -XPOST -f 'http://0.0.0.0:5601/ftw/api/metrics/snapshot' \
  -H 'kbn-xsrf:localhost' \
  -H 'Content-Type: application/json' \
  --data-raw '{"filterQuery":"{\"bool\":{\"must\":[],\"filter\":[],\"should\":[],\"must_not\":[]}}","metrics":[{"type":"memory"}],"groupBy":null,"nodeType":"host","sourceId":"default","accountId":"","region":"","timerange":{"from":1670859470896,"to":1670861030896,"interval":"1m","ignoreLookback":true},"includeTimeseries":true}' \
  -u elastic:changeme
```

###### Without timeseries

```bash
curl -XPOST -f 'http://0.0.0.0:5601/ftw/api/metrics/snapshot' \
  -H 'kbn-xsrf:localhost' \
  -H 'Content-Type: application/json' \
  --data-raw '{"filterQuery":"{\"bool\":{\"must\":[],\"filter\":[],\"should\":[],\"must_not\":[]}}","metrics":[{"type":"memory"}],"groupBy":null,"nodeType":"host","sourceId":"default","accountId":"","region":"","timerange":{"from":1670859470896,"to":1670861030896,"interval":"1m","ignoreLookback":true},"includeTimeseries":false}' \
  -u elastic:changeme
```

### For maintainers

The `logRate` metric type **doesn't** work outside a date histogram, because it relies on a [cumulative sum](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-cumulative-sum-aggregation.html) pipeline aggregation. Requesting `includeTimeseries: false` together with a `composite` aggregation therefore returns a 400 error. I haven't found any caller that would make a request like the one below.

#### 400 error

```bash
curl -XPOST -f 'http://0.0.0.0:5601/ftw/api/metrics/snapshot' \
  -H 'kbn-xsrf:localhost' \
  -H 'Content-Type: application/json' \
  --data-raw '{"filterQuery":"{\"bool\":{\"must\":[],\"filter\":[],\"should\":[],\"must_not\":[]}}","metrics":[{"type":"logRate"}],"groupBy":[],"nodeType":"host","sourceId":"default","accountId":"","region":"","timerange":{"from":1670859470896,"to":1670861030896,"interval":"1m","ignoreLookback":true},"includeTimeseries":false}' \
  -u elastic:changeme
```

Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com>
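For reference, here is a minimal sketch of the two aggregation shapes the snapshot query can now send to Elasticsearch. The field names, group-by source and interval are illustrative placeholders; the real bodies are produced by `createCompositeAggregations` and `createAggregations` in the diff below.

```ts
// Illustrative only, not the literal builder output. With includeTimeseries,
// metric aggregations are nested under a date_histogram and the buckets are
// post-processed into avg/max values on the TypeScript side.
const aggsWithTimeseries = {
  groupings: {
    composite: {
      size: 9, // DEFAULT_LIMIT in the diff below
      sources: [{ groupBy0: { terms: { field: 'host.name' } } }],
    },
    aggs: {
      histogram: {
        date_histogram: { field: '@timestamp', fixed_interval: '1m' }, // interval illustrative
        aggregations: { metric_0: { avg: { field: 'system.memory.used.pct' } } },
      },
      metricsets: { terms: { field: 'metricset.name' } },
    },
  },
};

// With includeTimeseries: false, the same metric aggregations sit directly
// under the composite, so Elasticsearch computes the values itself and no
// histogram buckets are returned — which is what avoids the max-bucket error.
const aggsWithoutTimeseries = {
  groupings: {
    composite: {
      size: 9,
      sources: [{ groupBy0: { terms: { field: 'host.name' } } }],
    },
    aggs: {
      metric_0: { avg: { field: 'system.memory.used.pct' } },
      metricsets: { terms: { field: 'metricset.name' } },
    },
  },
};
```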
Parent: 959f8e9c6c
Commit: 4a3af9c97b
33 changed files with 908 additions and 522 deletions
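Most of the UI changes below migrate `useSnapshot` from a long positional-argument list to a single options object. A hypothetical call site under the new signature (placeholder values; the shape is the `UseSnapshotRequest` interface introduced in this diff):

```ts
// Hypothetical usage inside a React component (values are placeholders).
const { loading, nodes, reload, interval } = useSnapshot({
  filterQuery: null, // string | null | symbol | undefined
  metrics: [{ type: 'memory' }],
  groupBy: [],
  nodeType: 'host',
  sourceId: 'default',
  currentTime: Date.now(),
  accountId: '',
  region: '',
  sendRequestImmediately: true, // defaults to true
  includeTimeseries: false, // defaults to true; false also skips the date_histogram
});
```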
@@ -19,3 +19,4 @@ export { toNumberRt } from './src/to_number_rt';
export { toBooleanRt } from './src/to_boolean_rt';
export { toJsonSchema } from './src/to_json_schema';
export { nonEmptyStringRt } from './src/non_empty_string_rt';
export { createLiteralValueFromUndefinedRT } from './src/literal_value_from_undefined_rt';
@@ -1,13 +1,14 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/

import { either } from 'fp-ts';
import * as rt from 'io-ts';
import { createLiteralValueFromUndefinedRT } from './literal_value';
import { createLiteralValueFromUndefinedRT } from '.';

describe('LiteralValueFromUndefined runtime type', () => {
it('decodes undefined to a given literal value', () => {
@@ -1,8 +1,9 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/

import * as rt from 'io-ts';
@@ -5,6 +5,7 @@
* 2.0.
*/

import { createLiteralValueFromUndefinedRT } from '@kbn/io-ts-utils';
import * as rt from 'io-ts';
import { MetricsUIAggregationRT } from '../inventory_models/types';
import { afterKeyObjectRT } from './metrics_explorer';
@@ -27,22 +28,25 @@ export const MetricsAPIRequestRT = rt.intersection([
timerange: MetricsAPITimerangeRT,
indexPattern: rt.string,
metrics: rt.array(MetricsAPIMetricRT),
includeTimeseries: rt.union([rt.boolean, createLiteralValueFromUndefinedRT(true)]),
}),
rt.partial({
groupBy: rt.array(groupByRT),
modules: rt.array(rt.string),
afterKey: rt.union([rt.null, afterKeyObjectRT]),
limit: rt.union([rt.number, rt.null, rt.undefined]),
filters: rt.array(rt.object),
limit: rt.union([rt.number, rt.null]),
filters: rt.array(rt.UnknownRecord),
dropPartialBuckets: rt.boolean,
alignDataToEnd: rt.boolean,
}),
]);

export const MetricsAPIPageInfoRT = rt.type({
afterKey: rt.union([rt.null, afterKeyObjectRT, rt.undefined]),
interval: rt.number,
});
export const MetricsAPIPageInfoRT = rt.intersection([
rt.type({
afterKey: rt.union([rt.null, afterKeyObjectRT, rt.undefined]),
}),
rt.partial({ interval: rt.number }),
]);

export const MetricsAPIColumnTypeRT = rt.keyof({
date: null,
@@ -76,10 +80,13 @@ export const MetricsAPISeriesRT = rt.intersection([
}),
]);

export const MetricsAPIResponseSeriesRT = rt.intersection([
MetricsAPISeriesRT,
rt.partial({ metricsets: rt.array(rt.string) }),
]);

export const MetricsAPIResponseRT = rt.type({
series: rt.array(
rt.intersection([MetricsAPISeriesRT, rt.partial({ metricsets: rt.array(rt.string) })])
),
series: rt.array(MetricsAPIResponseSeriesRT),
info: MetricsAPIPageInfoRT,
});
@@ -5,6 +5,7 @@
* 2.0.
*/

import { createLiteralValueFromUndefinedRT } from '@kbn/io-ts-utils';
import * as rt from 'io-ts';
import { SnapshotMetricTypeRT, ItemTypeRT } from '../inventory_models/types';
import { MetricsAPISeriesRT } from './metrics_api';
@@ -48,10 +49,12 @@ export const SnapshotNodeRT = rt.type({
name: rt.string,
});

export const SnapshotNodeResponseRT = rt.type({
nodes: rt.array(SnapshotNodeRT),
interval: rt.string,
});
export const SnapshotNodeResponseRT = rt.intersection([
rt.type({
nodes: rt.array(SnapshotNodeRT),
}),
rt.partial({ interval: rt.string }),
]);

export const InfraTimerangeInputRT = rt.intersection([
rt.type({
@@ -111,12 +114,12 @@ export const SnapshotRequestRT = rt.intersection([
groupBy: rt.union([SnapshotGroupByRT, rt.null]),
nodeType: ItemTypeRT,
sourceId: rt.string,
includeTimeseries: rt.union([rt.boolean, createLiteralValueFromUndefinedRT(true)]),
}),
rt.partial({
accountId: rt.string,
region: rt.string,
filterQuery: rt.union([rt.string, rt.null]),
includeTimeseries: rt.boolean,
overrideCompositeSize: rt.number,
}),
]);
@@ -286,9 +286,15 @@ export const ESSumBucketAggRT = rt.type({
});

export const ESTopMetricsAggRT = rt.type({
top_metrics: rt.type({
metrics: rt.union([rt.array(rt.type({ field: rt.string })), rt.type({ field: rt.string })]),
}),
top_metrics: rt.intersection([
rt.type({
metrics: rt.union([rt.array(rt.type({ field: rt.string })), rt.type({ field: rt.string })]),
}),
rt.partial({
size: rt.number,
sort: rt.record(rt.string, rt.union([rt.literal('desc'), rt.literal('asc')])),
}),
]),
});

export const ESMaxPeriodFilterExistsAggRT = rt.type({
@@ -64,20 +64,20 @@ export const ExpressionChart: React.FC<Props> = ({
});

const options = useWaffleOptionsContext();
const { loading, nodes } = useSnapshot(
const { loading, nodes } = useSnapshot({
filterQuery,
expression.metric === 'custom'
? [buildCustomMetric(expression.customMetric)]
: [{ type: expression.metric }],
[],
metrics:
expression.metric === 'custom'
? [buildCustomMetric(expression.customMetric)]
: [{ type: expression.metric }],
groupBy: [],
nodeType,
sourceId,
0,
options.accountId,
options.region,
true,
timerange
);
currentTime: 0,
accountId: options.accountId,
region: options.region,
timerange,
});

const { uiSettings } = useKibanaContextForPlugin().services;
@@ -46,18 +46,18 @@ export const HostsTable = () => {
// Snapshot endpoint internally uses the indices stored in source.configuration.metricAlias.
// For the Unified Search, we create a data view, which for now will be built off of source.configuration.metricAlias too
// if we introduce data view selection, we'll have to change this hook and the endpoint to accept a new parameter for the indices
const { loading, nodes, reload } = useSnapshot(
esQuery && JSON.stringify(esQuery),
HOST_METRICS,
[],
'host',
const { loading, nodes, reload } = useSnapshot({
filterQuery: esQuery ? JSON.stringify(esQuery) : null,
metrics: HOST_METRICS,
groupBy: [],
nodeType: 'host',
sourceId,
dateRangeTimestamp.to,
'',
'',
true,
timeRange
);
currentTime: dateRangeTimestamp.to,
accountId: '',
region: '',
timerange: timeRange,
includeTimeseries: false,
});

const items = useHostTable(nodes);
const noData = items.length === 0;
@@ -119,31 +119,31 @@ const TabComponent = (props: TabProps) => {
buildCustomMetric('system.cpu.cores', 'cores', 'max'),
];

const { nodes, reload } = useSnapshot(
filter,
[...defaultMetrics, ...customMetrics],
[],
const { nodes, reload } = useSnapshot({
filterQuery: filter,
metrics: [...defaultMetrics, ...customMetrics],
groupBy: [],
nodeType,
sourceId,
currentTime,
accountId,
region,
false,
timeRange
);
sendRequestImmediately: false,
timerange: timeRange,
});

const { nodes: logRateNodes, reload: reloadLogRate } = useSnapshot(
filter,
[{ type: 'logRate' }],
[],
const { nodes: logRateNodes, reload: reloadLogRate } = useSnapshot({
filterQuery: filter,
metrics: [{ type: 'logRate' }],
groupBy: [],
nodeType,
sourceId,
currentTime,
accountId,
region,
false,
timeRange
);
sendRequestImmediately: false,
timerange: timeRange,
});

const getDomain = useCallback(
(timeseries: MetricsExplorerSeries, ms: MetricsExplorerOptionsMetric[]) => {
@@ -28,16 +28,22 @@ export const SnapshotContainer = ({ render }: Props) => {
const { metric, groupBy, nodeType, accountId, region } = useWaffleOptionsContext();
const { currentTime } = useWaffleTimeContext();
const { filterQueryAsJson } = useWaffleFiltersContext();
const { loading, nodes, reload, interval } = useSnapshot(
filterQueryAsJson,
[metric],
const {
loading,
nodes,
reload,
interval = '60s',
} = useSnapshot({
filterQuery: filterQueryAsJson,
metrics: [metric],
groupBy,
nodeType,
sourceId,
currentTime,
accountId,
region,
false
);
sendRequestImmediately: false,
});

return render({ loading, nodes, reload, interval });
};
@@ -16,7 +16,7 @@ jest.mock('../../../../../containers/metrics_source', () => ({
}));

jest.mock('../../hooks/use_snaphot');
import { useSnapshot } from '../../hooks/use_snaphot';
import { useSnapshot, UseSnapshotRequest } from '../../hooks/use_snaphot';
jest.mock('../../hooks/use_waffle_options');
import { useWaffleOptionsContext } from '../../hooks/use_waffle_options';
const mockedUseSnapshot = useSnapshot as jest.Mock<ReturnType<typeof useSnapshot>>;
@@ -103,16 +103,16 @@ describe('ConditionalToolTip', () => {
const tooltip = wrapper.find('[data-test-subj~="conditionalTooltipContent-host-01"]');
expect(tooltip.render()).toMatchSnapshot();

expect(mockedUseSnapshot).toBeCalledWith(
expectedQuery,
expectedMetrics,
[],
'host',
'default',
expect(mockedUseSnapshot).toBeCalledWith({
filterQuery: expectedQuery,
metrics: expectedMetrics,
groupBy: [],
nodeType: 'host',
sourceId: 'default',
currentTime,
'',
''
);
accountId: '',
region: '',
} as UseSnapshotRequest);
});
});
@@ -50,7 +50,16 @@ export const ConditionalToolTip = withTheme(({ theme, node, nodeType, currentTim
},
},
});
const { nodes } = useSnapshot(query, requestMetrics, [], nodeType, sourceId, currentTime, '', '');
const { nodes } = useSnapshot({
filterQuery: query,
metrics: requestMetrics,
groupBy: [],
nodeType,
sourceId,
currentTime,
accountId: '',
region: '',
});

const dataNode = first(nodes);
const metrics = (dataNode && dataNode.metrics) || [];
@@ -14,27 +14,25 @@ import { useHTTPRequest } from '../../../../hooks/use_http_request';
import {
SnapshotNodeResponseRT,
SnapshotNodeResponse,
SnapshotGroupBy,
SnapshotRequest,
InfraTimerangeInput,
} from '../../../../../common/http_api/snapshot_api';
import {
InventoryItemType,
SnapshotMetricType,
} from '../../../../../common/inventory_models/types';

export function useSnapshot(
filterQuery: string | null | symbol | undefined,
metrics: Array<{ type: SnapshotMetricType }>,
groupBy: SnapshotGroupBy,
nodeType: InventoryItemType,
sourceId: string,
currentTime: number,
accountId: string,
region: string,
sendRequestImmediatly = true,
timerange?: InfraTimerangeInput
) {
export interface UseSnapshotRequest
extends Omit<SnapshotRequest, 'filterQuery' | 'timerange' | 'includeTimeseries'> {
filterQuery: string | null | symbol | undefined;
currentTime: number;
sendRequestImmediately?: boolean;
includeTimeseries?: boolean;
timerange?: InfraTimerangeInput;
}
export function useSnapshot({
timerange,
currentTime,
sendRequestImmediately = true,
includeTimeseries = true,
...args
}: UseSnapshotRequest) {
const decodeResponse = (response: any) => {
return pipe(
SnapshotNodeResponseRT.decode(response),
@@ -42,37 +40,31 @@ export function useSnapshot(
);
};

timerange = timerange || {
interval: '1m',
to: currentTime,
from: currentTime - 1200 * 1000,
lookbackSize: 5,
const payload: Omit<SnapshotRequest, 'filterQuery'> = {
...args,
timerange: timerange ?? {
interval: '1m',
to: currentTime,
from: currentTime - 1200 * 1000,
lookbackSize: 5,
},
includeTimeseries,
};

const { error, loading, response, makeRequest } = useHTTPRequest<SnapshotNodeResponse>(
'/api/metrics/snapshot',
'POST',
JSON.stringify({
metrics,
groupBy,
nodeType,
timerange,
filterQuery,
sourceId,
accountId,
region,
includeTimeseries: true,
} as SnapshotRequest),
JSON.stringify(payload),
decodeResponse
);

useEffect(() => {
(async () => {
if (sendRequestImmediatly) {
if (sendRequestImmediately) {
await makeRequest();
}
})();
}, [makeRequest, sendRequestImmediatly]);
}, [makeRequest, sendRequestImmediately]);

return {
error: (error && error.message) || null,
@@ -5,27 +5,32 @@
* 2.0.
*/

import { set } from '@kbn/safer-lodash-set';
import { ThrowReporter } from 'io-ts/lib/ThrowReporter';
import { decodeOrThrow } from '../../../common/runtime_types';
import { TIMESTAMP_FIELD } from '../../../common/constants';
import { MetricsAPIRequest, MetricsAPIResponse, afterKeyObjectRT } from '../../../common/http_api';
import { MetricsAPIRequest, MetricsAPIResponse } from '../../../common/http_api';
import {
ESSearchClient,
GroupingResponseRT,
CompositeResponseRT,
MetricsESResponse,
HistogramResponseRT,
AggregationResponseRT,
AggregationResponse,
CompositeResponse,
HistogramBucketRT,
} from './types';
import { EMPTY_RESPONSE } from './constants';
import { createAggregations } from './lib/create_aggregations';
import { convertHistogramBucketsToTimeseries } from './lib/convert_histogram_buckets_to_timeseries';
import { createAggregations, createCompositeAggregations } from './lib/create_aggregations';
import { convertBucketsToMetricsApiSeries } from './lib/convert_buckets_to_metrics_series';
import { calculateBucketSize } from './lib/calculate_bucket_size';
import { calculatedInterval } from './lib/calculate_interval';

const DEFAULT_LIMIT = 9;

export const query = async (
search: ESSearchClient,
rawOptions: MetricsAPIRequest
): Promise<MetricsAPIResponse> => {
const interval = await calculatedInterval(search, rawOptions);

const options = {
...rawOptions,
timerange: {
@@ -53,25 +58,11 @@ export const query = async (
index: options.indexPattern,
body: {
size: 0,
query: { bool: { filter } },
aggs: { ...createAggregations(options) },
query: { bool: { filter: [...filter, ...(options.filters ?? [])] } },
aggs: hasGroupBy ? createCompositeAggregations(options) : createAggregations(options),
},
};

if (hasGroupBy) {
if (options.afterKey) {
if (afterKeyObjectRT.is(options.afterKey)) {
set(params, 'body.aggs.groupings.composite.after', options.afterKey);
} else {
set(params, 'body.aggs.groupings.composite.after', { groupBy0: options.afterKey });
}
}
}

if (options.filters) {
params.body.query.bool.filter = [...params.body.query.bool.filter, ...options.filters];
}

const response = await search<{}, MetricsESResponse>(params);

if (response.hits.total.value === 0) {
|
|||
|
||||
const { bucketSize } = calculateBucketSize({ ...options.timerange, interval });
|
||||
|
||||
if (hasGroupBy && GroupingResponseRT.is(response.aggregations)) {
|
||||
const { groupings } = response.aggregations;
|
||||
const { after_key: afterKey } = groupings;
|
||||
const limit = options.limit || 9;
|
||||
const returnAfterKey = afterKey && groupings.buckets.length === limit ? true : false;
|
||||
if (hasGroupBy) {
|
||||
const aggregations = decodeOrThrow(CompositeResponseRT)(response.aggregations);
|
||||
const { groupings } = aggregations;
|
||||
const limit = options.limit ?? DEFAULT_LIMIT;
|
||||
const returnAfterKey = !!groupings.after_key && groupings.buckets.length === limit;
|
||||
const afterKey = returnAfterKey ? groupings.after_key : null;
|
||||
|
||||
return {
|
||||
series: groupings.buckets.map((bucket) => {
|
||||
const keys = Object.values(bucket.key);
|
||||
const metricsetNames = bucket.metricsets.buckets.map((m) => m.key);
|
||||
const timeseries = convertHistogramBucketsToTimeseries(
|
||||
keys,
|
||||
options,
|
||||
bucket.histogram.buckets,
|
||||
bucketSize * 1000
|
||||
);
|
||||
return { ...timeseries, metricsets: metricsetNames };
|
||||
}),
|
||||
series: getSeriesFromCompositeAggregations(groupings, options, bucketSize * 1000),
|
||||
info: {
|
||||
afterKey: returnAfterKey ? afterKey : null,
|
||||
interval: bucketSize,
|
||||
afterKey,
|
||||
interval: rawOptions.includeTimeseries ? bucketSize : undefined,
|
||||
},
|
||||
};
|
||||
} else if (hasGroupBy) {
|
||||
ThrowReporter.report(GroupingResponseRT.decode(response.aggregations));
|
||||
}
|
||||
|
||||
if (HistogramResponseRT.is(response.aggregations)) {
|
||||
return {
|
||||
series: [
|
||||
convertHistogramBucketsToTimeseries(
|
||||
['*'],
|
||||
options,
|
||||
response.aggregations.histogram.buckets,
|
||||
bucketSize * 1000
|
||||
),
|
||||
],
|
||||
info: {
|
||||
afterKey: null,
|
||||
interval: bucketSize,
|
||||
},
|
||||
};
|
||||
} else {
|
||||
ThrowReporter.report(HistogramResponseRT.decode(response.aggregations));
|
||||
}
|
||||
|
||||
throw new Error('Elasticsearch responded with an unrecognized format.');
|
||||
const aggregations = decodeOrThrow(AggregationResponseRT)(response.aggregations);
|
||||
return {
|
||||
series: getSeriesFromHistogram(aggregations, options, bucketSize * 1000),
|
||||
info: {
|
||||
afterKey: null,
|
||||
interval: bucketSize,
|
||||
},
|
||||
};
|
||||
};
|
||||
|
||||
const getSeriesFromHistogram = (
|
||||
aggregations: AggregationResponse,
|
||||
options: MetricsAPIRequest,
|
||||
bucketSize: number
|
||||
): MetricsAPIResponse['series'] => {
|
||||
return [
|
||||
convertBucketsToMetricsApiSeries(['*'], options, aggregations.histogram.buckets, bucketSize),
|
||||
];
|
||||
};
|
||||
|
||||
const getSeriesFromCompositeAggregations = (
|
||||
groupings: CompositeResponse['groupings'],
|
||||
options: MetricsAPIRequest,
|
||||
bucketSize: number
|
||||
): MetricsAPIResponse['series'] => {
|
||||
return groupings.buckets.map((bucket) => {
|
||||
const keys = Object.values(bucket.key);
|
||||
const metricsetNames = bucket.metricsets.buckets.map((m) => m.key);
|
||||
const metrics = convertBucketsToMetricsApiSeries(
|
||||
keys,
|
||||
options,
|
||||
HistogramBucketRT.is(bucket) ? bucket.histogram.buckets : [bucket],
|
||||
bucketSize
|
||||
);
|
||||
return { ...metrics, metricsets: metricsetNames };
|
||||
});
|
||||
};
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`convertHistogramBucketsToTimeseies(keys, options, buckets) should drop the last bucket 1`] = `
|
||||
exports[`convertBucketsToMetricsApiSeries should drop the last bucket 1`] = `
|
||||
Object {
|
||||
"columns": Array [
|
||||
Object {
|
||||
|
@ -41,7 +41,7 @@ Object {
|
|||
}
|
||||
`;
|
||||
|
||||
exports[`convertHistogramBucketsToTimeseies(keys, options, buckets) should just work 1`] = `
|
||||
exports[`convertBucketsToMetricsApiSeries should just work 1`] = `
|
||||
Object {
|
||||
"columns": Array [
|
||||
Object {
|
||||
|
@ -86,7 +86,7 @@ Object {
|
|||
}
|
||||
`;
|
||||
|
||||
exports[`convertHistogramBucketsToTimeseies(keys, options, buckets) should return empty timeseries for empty metrics 1`] = `
|
||||
exports[`convertBucketsToMetricsApiSeries should return empty timeseries for empty metrics 1`] = `
|
||||
Object {
|
||||
"columns": Array [],
|
||||
"id": "example-0",
|
||||
|
@ -97,7 +97,7 @@ Object {
|
|||
}
|
||||
`;
|
||||
|
||||
exports[`convertHistogramBucketsToTimeseies(keys, options, buckets) should transform top_metric aggregations 1`] = `
|
||||
exports[`convertBucketsToMetricsApiSeries should transform top_metric aggregations 1`] = `
|
||||
Object {
|
||||
"columns": Array [
|
||||
Object {
|
||||
|
@ -137,7 +137,7 @@ Object {
|
|||
}
|
||||
`;
|
||||
|
||||
exports[`convertHistogramBucketsToTimeseies(keys, options, buckets) should work with keyed percentiles 1`] = `
|
||||
exports[`convertBucketsToMetricsApiSeries should work with keyed percentiles 1`] = `
|
||||
Object {
|
||||
"columns": Array [
|
||||
Object {
|
||||
|
@ -182,7 +182,7 @@ Object {
|
|||
}
|
||||
`;
|
||||
|
||||
exports[`convertHistogramBucketsToTimeseies(keys, options, buckets) should work with normalized_values 1`] = `
|
||||
exports[`convertBucketsToMetricsApiSeries should work with normalized_values 1`] = `
|
||||
Object {
|
||||
"columns": Array [
|
||||
Object {
|
||||
|
@ -227,7 +227,7 @@ Object {
|
|||
}
|
||||
`;
|
||||
|
||||
exports[`convertHistogramBucketsToTimeseies(keys, options, buckets) should work with percentiles 1`] = `
|
||||
exports[`convertBucketsToMetricsApiSeries should work with percentiles 1`] = `
|
||||
Object {
|
||||
"columns": Array [
|
||||
Object {
|
|
@@ -7,7 +7,7 @@

import { MetricsAPIRequest } from '../../../../common/http_api';
import moment from 'moment';
import { convertHistogramBucketsToTimeseries } from './convert_histogram_buckets_to_timeseries';
import { convertBucketsToMetricsApiSeries } from './convert_buckets_to_metrics_series';

const keys = ['example-0'];

@@ -22,6 +22,7 @@ const options: MetricsAPIRequest = {
metrics: [
{ id: 'metric_0', aggregations: { metric_0: { avg: { field: 'system.cpu.user.pct' } } } },
],
includeTimeseries: true,
};

const buckets = [
@@ -63,13 +64,13 @@ const buckets = [
},
];

describe('convertHistogramBucketsToTimeseies(keys, options, buckets)', () => {
describe('convertBucketsToMetricsApiSeries', () => {
it('should just work', () => {
expect(convertHistogramBucketsToTimeseries(keys, options, buckets, 60000)).toMatchSnapshot();
expect(convertBucketsToMetricsApiSeries(keys, options, buckets, 60000)).toMatchSnapshot();
});
it('should drop the last bucket', () => {
expect(
convertHistogramBucketsToTimeseries(
convertBucketsToMetricsApiSeries(
keys,
{ ...options, dropPartialBuckets: true },
buckets,
@@ -79,7 +80,7 @@ describe('convertHistogramBucketsToTimeseies(keys, options, buckets)', () => {
});
it('should return empty timeseries for empty metrics', () => {
expect(
convertHistogramBucketsToTimeseries(keys, { ...options, metrics: [] }, buckets, 60000)
convertBucketsToMetricsApiSeries(keys, { ...options, metrics: [] }, buckets, 60000)
).toMatchSnapshot();
});
it('should work with normalized_values', () => {
@@ -91,7 +92,7 @@ describe('convertHistogramBucketsToTimeseies(keys, options, buckets)', () => {
return bucket;
});
expect(
convertHistogramBucketsToTimeseries(keys, { ...options }, bucketsWithNormalizedValue, 60000)
convertBucketsToMetricsApiSeries(keys, { ...options }, bucketsWithNormalizedValue, 60000)
).toMatchSnapshot();
});
it('should work with percentiles', () => {
@@ -99,7 +100,7 @@ describe('convertHistogramBucketsToTimeseies(keys, options, buckets)', () => {
return { ...bucket, metric_0: { values: { '95.0': 3 } } };
});
expect(
convertHistogramBucketsToTimeseries(keys, { ...options }, bucketsWithPercentiles, 60000)
convertBucketsToMetricsApiSeries(keys, { ...options }, bucketsWithPercentiles, 60000)
).toMatchSnapshot();
});
it('should throw error with multiple percentiles', () => {
@@ -107,12 +108,7 @@ describe('convertHistogramBucketsToTimeseies(keys, options, buckets)', () => {
return { ...bucket, metric_0: { values: { '95.0': 3, '99.0': 4 } } };
});
expect(() =>
convertHistogramBucketsToTimeseries(
keys,
{ ...options },
bucketsWithMultiplePercentiles,
60000
)
convertBucketsToMetricsApiSeries(keys, { ...options }, bucketsWithMultiplePercentiles, 60000)
).toThrow();
});
it('should work with keyed percentiles', () => {
@@ -120,7 +116,7 @@ describe('convertHistogramBucketsToTimeseies(keys, options, buckets)', () => {
return { ...bucket, metric_0: { values: [{ key: '99.0', value: 4 }] } };
});
expect(
convertHistogramBucketsToTimeseries(keys, { ...options }, bucketsWithKeyedPercentiles, 60000)
convertBucketsToMetricsApiSeries(keys, { ...options }, bucketsWithKeyedPercentiles, 60000)
).toMatchSnapshot();
});
it('should throw error with multiple keyed percentiles', () => {
@@ -136,7 +132,7 @@ describe('convertHistogramBucketsToTimeseies(keys, options, buckets)', () => {
};
});
expect(() =>
convertHistogramBucketsToTimeseries(
convertBucketsToMetricsApiSeries(
keys,
{ ...options },
bucketsWithMultipleKeyedPercentiles,
@@ -193,7 +189,7 @@ describe('convertHistogramBucketsToTimeseies(keys, options, buckets)', () => {
];

expect(
convertHistogramBucketsToTimeseries(keys, topMetricOptions, bucketsWithTopAggregation, 60000)
convertBucketsToMetricsApiSeries(keys, topMetricOptions, bucketsWithTopAggregation, 60000)
).toMatchSnapshot();
});
});
@@ -14,7 +14,7 @@ import {
MetricsAPIRow,
} from '../../../../common/http_api/metrics_api';
import {
HistogramBucket,
Bucket,
BasicMetricValueRT,
NormalizedMetricValueRT,
PercentilesTypeRT,
@@ -68,9 +68,9 @@ const dropOutOfBoundsBuckets =
(from: number, to: number, bucketSizeInMillis: number) => (row: MetricsAPIRow) =>
row.timestamp >= from && row.timestamp + bucketSizeInMillis <= to;

const convertBucketsToRows = (
export const convertBucketsToRows = (
options: MetricsAPIRequest,
buckets: HistogramBucket[]
buckets: Bucket[]
): MetricsAPIRow[] => {
return buckets.map((bucket) => {
const ids = options.metrics.map((metric) => metric.id);
@@ -78,14 +78,15 @@ const convertBucketsToRows = (
const valueObject = get(bucket, [id]);
return { ...acc, [id]: ValueObjectTypeRT.is(valueObject) ? getValue(valueObject) : null };
}, {} as Record<string, number | null | object[]>);

return { timestamp: bucket.key as number, ...metrics };
});
};

export const convertHistogramBucketsToTimeseries = (
export const convertBucketsToMetricsApiSeries = (
keys: string[],
options: MetricsAPIRequest,
buckets: HistogramBucket[],
buckets: Bucket[],
bucketSizeInMillis: number
): MetricsAPISeries => {
const id = keys.join(':');
@@ -99,11 +100,14 @@ export const convertHistogramBucketsToTimeseries = (
type: 'number',
})) as MetricsAPIColumn[];
const allRows = convertBucketsToRows(options, buckets);
const rows = options.dropPartialBuckets
? allRows.filter(
dropOutOfBoundsBuckets(options.timerange.from, options.timerange.to, bucketSizeInMillis)
)
: allRows;

const rows =
options.dropPartialBuckets && options.includeTimeseries
? allRows.filter(
dropOutOfBoundsBuckets(options.timerange.from, options.timerange.to, bucketSizeInMillis)
)
: allRows;

return {
id,
keys,
@@ -5,7 +5,7 @@
* 2.0.
*/

import { createAggregations } from './create_aggregations';
import { createAggregations, createCompositeAggregations } from './create_aggregations';
import moment from 'moment';
import { MetricsAPIRequest } from '../../../../common/http_api';

@@ -20,16 +20,57 @@ const options: MetricsAPIRequest = {
metrics: [
{ id: 'metric_0', aggregations: { metric_0: { avg: { field: 'system.cpu.user.pct' } } } },
],
includeTimeseries: true,
};

describe('createAggregations(options)', () => {
it('should return groupings aggregation with groupBy', () => {
const optionsWithGroupBy = { ...options, groupBy: ['host.name'] };
expect(createAggregations(optionsWithGroupBy)).toMatchSnapshot();
const optionsWithGroupBy: MetricsAPIRequest = { ...options, groupBy: ['host.name'] };
expect(createCompositeAggregations(optionsWithGroupBy)).toMatchSnapshot();
});
it('should return groupings aggregation with afterKey', () => {
const optionsWithGroupBy: MetricsAPIRequest = {
...options,
groupBy: ['host.name'],
afterKey: { group0: 'host-0' },
};
expect(createCompositeAggregations(optionsWithGroupBy)).toEqual({
groupings: expect.objectContaining({
composite: expect.objectContaining({
after: { group0: 'host-0' },
}),
}),
});
});

it('should return groupings aggregation without date histogram', () => {
const optionsWithGroupBy: MetricsAPIRequest = {
...options,
groupBy: ['host.name'],
includeTimeseries: false,
};
expect(createCompositeAggregations(optionsWithGroupBy)).toEqual({
groupings: expect.objectContaining({
aggs: {
metric_0: {
avg: {
field: 'system.cpu.user.pct',
},
},
metricsets: {
terms: {
field: 'metricset.name',
},
},
},
}),
});
});

it('should return just histogram aggregation without groupBy', () => {
expect(createAggregations(options)).toMatchSnapshot();
});

it('should return add offset to histogram', () => {
const optionsWithAlignDataToEnd = {
...options,
@@ -5,15 +5,36 @@
* 2.0.
*/

import { AggregationOptionsByType } from '@kbn/es-types';

import Boom from '@hapi/boom';
import { afterKeyObjectRT } from '../../../../common/http_api';
import { TIMESTAMP_FIELD } from '../../../../common/constants';
import { MetricsAPIRequest } from '../../../../common/http_api/metrics_api';
import { calculateDateHistogramOffset } from './calculate_date_histogram_offset';
import { createMetricsAggregations } from './create_metrics_aggregations';
import { calculateBucketSize } from './calculate_bucket_size';

export const createAggregations = (options: MetricsAPIRequest) => {
const DEFAULT_LIMIT = 9;
const METRICSET_AGGS = {
metricsets: {
terms: {
field: 'metricset.name',
},
},
};

type MetricsAggregation = ReturnType<typeof createMetricsAggregations>;
interface HistogramAggregation {
histogram: {
date_histogram: AggregationOptionsByType['date_histogram'];
aggregations: MetricsAggregation;
};
}

const createMetricHistogramAggs = (options: MetricsAPIRequest): HistogramAggregation => {
const { intervalString } = calculateBucketSize(options.timerange);
const histogramAggregation = {
return {
histogram: {
date_histogram: {
field: TIMESTAMP_FIELD,
@@ -26,27 +47,52 @@ export const createAggregations = (options: MetricsAPIRequest) => {
},
aggregations: createMetricsAggregations(options),
},
metricsets: {
terms: {
field: 'metricset.name',
};
};

const getAfterKey = (options: MetricsAPIRequest) => {
if (!options.afterKey) {
return null;
}
if (afterKeyObjectRT.is(options.afterKey)) {
return options.afterKey;
} else {
return { groupBy0: options.afterKey };
}
};
export const createCompositeAggregations = (options: MetricsAPIRequest) => {
if (!Array.isArray(options.groupBy) || !options.groupBy.length) {
throw Boom.badRequest('groupBy must be informed.');
}

if (!options.includeTimeseries && !!options.metrics.find((p) => p.id === 'logRate')) {
throw Boom.badRequest('logRate metric is not supported without time series');
}

const after = getAfterKey(options);

return {
groupings: {
composite: {
size: options.limit ?? DEFAULT_LIMIT,
sources: options.groupBy.map((field, index) => ({
[`groupBy${index}`]: { terms: { field } },
})),
...(after ? { after } : {}),
},
aggs: {
...(options.includeTimeseries
? createMetricHistogramAggs(options)
: createMetricsAggregations(options)),
...METRICSET_AGGS,
},
},
};

if (Array.isArray(options.groupBy) && options.groupBy.length) {
const limit = options.limit || 9;
return {
groupings: {
composite: {
size: limit,
sources: options.groupBy.map((field, index) => ({
[`groupBy${index}`]: { terms: { field } },
})),
},
aggs: histogramAggregation,
},
};
}

return histogramAggregation;
};

export const createAggregations = (options: MetricsAPIRequest) => {
return {
...createMetricHistogramAggs(options),
...METRICSET_AGGS,
};
};
@@ -27,6 +27,7 @@ const options: MetricsAPIRequest = {
},
},
],
includeTimeseries: true,
};

describe('createMetricsAggregations(options)', () => {
@@ -60,37 +60,48 @@ export const TermsWithMetrics = rt.intersection([
}),
]);

export const HistogramBucketRT = rt.record(
export const BucketRT = rt.record(
rt.string,
rt.union([rt.number, rt.string, MetricValueTypeRT, TermsWithMetrics])
rt.union([
rt.number,
rt.string,
MetricValueTypeRT,
TermsWithMetrics,
rt.record(rt.string, rt.string),
])
);

export const HistogramResponseRT = rt.type({
histogram: rt.type({
buckets: rt.array(HistogramBucketRT),
}),
metricsets: rt.type({
buckets: rt.array(
rt.type({
key: rt.string,
doc_count: rt.number,
})
),
}),
export const MetricsetRT = rt.type({
buckets: rt.array(
rt.type({
key: rt.string,
doc_count: rt.number,
})
),
});

const GroupingBucketRT = rt.intersection([
export const HistogramRT = rt.type({
histogram: rt.type({
buckets: rt.array(BucketRT),
}),
metricsets: MetricsetRT,
});

export const MetricsBucketRT = rt.intersection([BucketRT, rt.type({ metricsets: MetricsetRT })]);
export const HistogramBucketRT = rt.intersection([
rt.type({
key: rt.record(rt.string, rt.string),
doc_count: rt.number,
}),
HistogramResponseRT,
HistogramRT,
]);

export const GroupingResponseRT = rt.type({
export const AggregationResponseRT = HistogramRT;

export const CompositeResponseRT = rt.type({
groupings: rt.intersection([
rt.type({
buckets: rt.array(GroupingBucketRT),
buckets: rt.array(rt.union([HistogramBucketRT, MetricsBucketRT])),
}),
rt.partial({
after_key: rt.record(rt.string, rt.string),
@@ -98,13 +109,11 @@ export const GroupingResponseRT = rt.type({
]),
});

export type Bucket = rt.TypeOf<typeof BucketRT>;
export type HistogramBucket = rt.TypeOf<typeof HistogramBucketRT>;

export type HistogramResponse = rt.TypeOf<typeof HistogramResponseRT>;

export type GroupingResponse = rt.TypeOf<typeof GroupingResponseRT>;

export type MetricsESResponse = HistogramResponse | GroupingResponse;
export type CompositeResponse = rt.TypeOf<typeof CompositeResponseRT>;
export type AggregationResponse = rt.TypeOf<typeof AggregationResponseRT>;
export type MetricsESResponse = AggregationResponse | CompositeResponse;

export interface LogQueryFields {
indexPattern: string;
@@ -31,6 +31,7 @@ const BASE_METRICS_UI_OPTIONS: MetricsAPIRequest = {
metrics: [
{ id: 'metric_0', aggregations: { metric_0: { avg: { field: 'system.cpu.user.pct' } } } },
],
includeTimeseries: true,
};

describe('convertRequestToMetricsAPIOptions', () => {
@@ -27,6 +27,7 @@ export const convertRequestToMetricsAPIOptions = (
limit,
metrics,
dropPartialBuckets: true,
includeTimeseries: true,
};

if (options.afterKey) {
@@ -43,7 +44,7 @@ export const convertRequestToMetricsAPIOptions = (
try {
const filterObject = JSON.parse(options.filterQuery);
if (isObject(filterObject)) {
metricsApiOptions.filters = [filterObject];
metricsApiOptions.filters = [filterObject as any];
}
} catch (err) {
metricsApiOptions.filters = [
@@ -5,21 +5,22 @@
* 2.0.
*/

import { MetricsAPIRequest } from '../../../../common/http_api';
import { queryTotalGroupings } from './query_total_groupings';

describe('queryTotalGroupings', () => {
const ESSearchClientMock = jest.fn().mockReturnValue({});
const defaultOptions = {
const defaultOptions: MetricsAPIRequest = {
timerange: {
from: 1615972672011,
interval: '>=10s',
to: 1615976272012,
field: '@timestamp',
},
indexPattern: 'testIndexPattern',
metrics: [],
dropLastBucket: true,
dropPartialBuckets: true,
groupBy: ['testField'],
includeTimeseries: true,
};

beforeEach(() => {
@@ -21,7 +21,7 @@ import { LogQueryFields } from '../../lib/metrics/types';
const escapeHatch = schema.object({}, { unknowns: 'allow' });

export const initSnapshotRoute = (libs: InfraBackendLibs) => {
const { framework, handleEsError } = libs;
const { framework } = libs;

framework.registerRoute(
{
@@ -64,7 +64,19 @@ export const initSnapshotRoute = (libs: InfraBackendLibs) => {
body: SnapshotNodeResponseRT.encode(snapshotResponse),
});
} catch (err) {
return handleEsError({ error: err, response });
if (Boom.isBoom(err)) {
return response.customError({
statusCode: err.output.statusCode,
body: { message: err.output.payload.message },
});
}

return response.customError({
statusCode: err.statusCode ?? err,
body: {
message: err.message ?? 'An unexpected error occurred',
},
});
}
}
);
@@ -15,7 +15,6 @@ import {
MetricsAPIRow,
} from '../../../../common/http_api';
import { META_KEY } from './constants';
import { InfraSource } from '../../../lib/sources';

export const isIPv4 = (subject: string) => /^(?:[0-9]{1,3}\.){3}[0-9]{1,3}$/.test(subject);

@@ -26,8 +25,7 @@ type RowWithMetadata = MetricsAPIRow & {
export const applyMetadataToLastPath = (
series: MetricsAPISeries,
node: SnapshotNode,
snapshotRequest: SnapshotRequest,
source: InfraSource
snapshotRequest: SnapshotRequest
): SnapshotNodePath[] => {
// First we need to find a row with metadata
const rowWithMeta = series.rows.find(
@@ -41,7 +41,6 @@ const transformAndQueryData = async ({
const snapshotResponse = transformMetricsApiResponseToSnapshotResponse(
metricsApiRequest,
snapshotRequest,
source,
metricsApiResponse
);
return copyMissingMetrics(snapshotResponse);
@@ -5,28 +5,55 @@
* 2.0.
*/

import {
InfraTimerangeInput,
MetricsAPIRequest,
SnapshotRequest,
} from '../../../../common/http_api';
import moment from 'moment';
import { transformMetricsApiResponseToSnapshotResponse } from './transform_metrics_ui_response';

jest.mock('./apply_metadata_to_last_path', () => ({
applyMetadataToLastPath: (series: any) => [{ label: series.id }],
}));

const now = 1630597319235;
const now = moment('2020-01-01T00:00:00Z').add(5, 'minute').valueOf();

const timerange: InfraTimerangeInput = {
from: moment('2020-01-01T00:00:00Z').valueOf(),
to: now,
interval: '1m',
};

const snapshotApiRequest: SnapshotRequest = {
metrics: [{ type: 'cpu' }],
includeTimeseries: false,
timerange,
groupBy: [],
nodeType: 'host',
sourceId: '',
};

const metricsApiRequest: MetricsAPIRequest = {
metrics: [
{
id: 'cpu',
aggregations: { cpu: { avg: { field: 'system.cpu.user.pct' } } },
},
],
includeTimeseries: false,
timerange,
indexPattern: 'metrics-*',
};

describe('transformMetricsApiResponseToSnapshotResponse', () => {
test('filters out nodes from APM which report no data', () => {
const result = transformMetricsApiResponseToSnapshotResponse(
{
// @ts-ignore
metrics: [{ id: 'cpu' }],
},
{
includeTimeseries: false,
nodeType: 'host',
},
{},
metricsApiRequest,
snapshotApiRequest,
{
info: {
afterKey: null,
interval: 60,
},
series: [
@@ -11,15 +11,13 @@ import {
MetricsAPIResponse,
SnapshotNodeResponse,
MetricsAPIRequest,
MetricsExplorerColumnType,
MetricsAPIRow,
SnapshotRequest,
SnapshotNodePath,
SnapshotNodeMetric,
SnapshotNode,
MetricsAPISeries,
SnapshotNodeMetric,
} from '../../../../common/http_api';
import { META_KEY } from './constants';
import { InfraSource } from '../../../lib/sources';
import { applyMetadataToLastPath } from './apply_metadata_to_last_path';

const getMetricValue = (row: MetricsAPIRow) => {
@@ -45,52 +43,70 @@ const getLastValue = (rows: MetricsAPIRow[]) => {
export const transformMetricsApiResponseToSnapshotResponse = (
options: MetricsAPIRequest,
snapshotRequest: SnapshotRequest,
source: InfraSource,
metricsApiResponse: MetricsAPIResponse
): SnapshotNodeResponse => {
const nodes = metricsApiResponse.series
.map((series) => {
const node = {
metrics: options.metrics
.filter((m) => m.id !== META_KEY)
.map((metric) => {
const name = metric.id as SnapshotMetricType;
const timeseries = {
id: name,
columns: [
{ name: 'timestamp', type: 'date' as MetricsExplorerColumnType },
{ name: 'metric_0', type: 'number' as MetricsExplorerColumnType },
],
rows: series.rows.map((row) => {
return { timestamp: row.timestamp, metric_0: get(row, metric.id, null) };
}),
};
const maxValue = calculateMax(timeseries.rows);
const avg = calculateAvg(timeseries.rows);
const value = getLastValue(timeseries.rows);
const nodeMetric: SnapshotNodeMetric = { name, max: maxValue, value, avg };
if (snapshotRequest.includeTimeseries) {
nodeMetric.timeseries = timeseries;
}
return nodeMetric;
}),
path:
series.keys?.map((key) => {
return { value: key, label: key } as SnapshotNodePath;
}) ?? [],
name: '',
metrics: getMetrics(options, snapshotRequest, series),
path: (series.keys ?? []).map((key) => {
return { value: key, label: key };
}),
};

const isNoData = node.metrics.every((m) => m.value === null);
const isAPMNode = series.metricsets?.includes('app');
if (isNoData && isAPMNode) return null;

const path = applyMetadataToLastPath(series, node, snapshotRequest, source);
const lastPath = last(path);
const name = lastPath?.label ?? 'N/A';
const path = applyMetadataToLastPath(series, node, snapshotRequest);
const name = last(path)?.label ?? 'N/A';

return { ...node, path, name };
})
.filter((n) => n !== null) as SnapshotNode[];
return { nodes, interval: `${metricsApiResponse.info.interval}s` };

return {
nodes,
interval:
metricsApiResponse.info.interval !== undefined
? `${metricsApiResponse.info.interval}s`
: undefined,
};
};

const getMetrics = (
options: MetricsAPIRequest,
snapshotRequest: SnapshotRequest,
series: MetricsAPIResponse['series'][number]
): SnapshotNodeMetric[] => {
return options.metrics
.filter((m) => m.id !== META_KEY)
.map((metric) => {
const name = metric.id as SnapshotMetricType;

const metrics = series.rows.map((row) => ({
timestamp: row.timestamp,
metric_0: get(row, metric.id, null),
}));

const timeseries = snapshotRequest.includeTimeseries
? ({
id: name,
columns: [
{ name: 'timestamp', type: 'date' },
{ name: 'metric_0', type: 'number' },
],
rows: [...metrics],
} as MetricsAPISeries)
: undefined;

return {
name,
value: getLastValue(metrics),
max: calculateMax(metrics),
avg: calculateAvg(metrics),
timeseries,
};
});
};
@@ -8,7 +8,7 @@
import { transformRequestToMetricsAPIRequest } from './transform_request_to_metrics_api_request';
import { ESSearchClient } from '../../../lib/metrics/types';
import { InfraSource } from '../../../lib/sources';
import { SnapshotRequest } from '../../../../common/http_api';
import { MetricsAPIRequest, SnapshotRequest } from '../../../../common/http_api';

jest.mock('./create_timerange_with_interval', () => {
return {
@@ -73,7 +73,7 @@ const snapshotRequest: SnapshotRequest = {
includeTimeseries: true,
};

const metricsApiRequest = {
const metricsApiRequest: MetricsAPIRequest = {
indexPattern: 'metrics-*,metricbeat-*',
timerange: { from: 1605705900000, to: 1605706200000, interval: '60s' },
metrics: [
@@ -111,4 +111,5 @@ const metricsApiRequest = {
alignDataToEnd: true,
dropPartialBuckets: true,
groupBy: ['kubernetes.pod.uid'],
includeTimeseries: true,
};
@@ -7,7 +7,7 @@

import { TIMESTAMP_FIELD } from '../../../../common/constants';
import { findInventoryFields, findInventoryModel } from '../../../../common/inventory_models';
import { MetricsAPIRequest, SnapshotRequest } from '../../../../common/http_api';
import { MetricsAPIMetric, MetricsAPIRequest, SnapshotRequest } from '../../../../common/http_api';
import { ESSearchClient } from '../../../lib/metrics/types';
import { InfraSource } from '../../../lib/sources';
import { createTimeRangeWithInterval } from './create_timerange_with_interval';
@@ -48,6 +48,7 @@ export const transformRequestToMetricsAPIRequest = async ({
: compositeSize,
alignDataToEnd: true,
dropPartialBuckets: true,
includeTimeseries: snapshotRequest.includeTimeseries,
};

const filters = [];
@@ -75,13 +76,24 @@ export const transformRequestToMetricsAPIRequest = async ({
metricsApiRequest.groupBy = [...groupBy, inventoryFields.id];
}

const metaAggregation = {
const topMetricMetrics = [{ field: inventoryFields.name }];
if (inventoryFields.ip) {
topMetricMetrics.push({ field: inventoryFields.ip });
}
if (inventoryFields.os) {
topMetricMetrics.push({ field: inventoryFields.os });
}
if (inventoryFields.cloudProvider) {
topMetricMetrics.push({ field: inventoryFields.cloudProvider });
}

const metaAggregation: MetricsAPIMetric = {
id: META_KEY,
aggregations: {
[META_KEY]: {
top_metrics: {
size: 1,
metrics: [{ field: inventoryFields.name }],
metrics: topMetricMetrics,
sort: {
[TIMESTAMP_FIELD]: 'desc',
},
@@ -90,20 +102,6 @@ export const transformRequestToMetricsAPIRequest = async ({
},
};

if (inventoryFields.ip) {
metaAggregation.aggregations[META_KEY].top_metrics.metrics.push({ field: inventoryFields.ip });
}

if (inventoryFields.os) {
metaAggregation.aggregations[META_KEY].top_metrics.metrics.push({ field: inventoryFields.os });
}

if (inventoryFields.cloudProvider) {
metaAggregation.aggregations[META_KEY].top_metrics.metrics.push({
field: inventoryFields.cloudProvider,
});
}

metricsApiRequest.metrics.push(metaAggregation);

if (filters.length) {
@ -7,9 +7,9 @@
|
|||
|
||||
export * from './ccs';
|
||||
export * from './cluster';
|
||||
export * from './literal_value';
|
||||
export * from './pagination';
|
||||
export * from './query_string_boolean';
|
||||
export * from './query_string_number';
|
||||
export * from './sorting';
|
||||
export * from './time_range';
|
||||
export { createLiteralValueFromUndefinedRT } from '@kbn/io-ts-utils';
|
||||
|
|
|
```diff
@@ -20,13 +20,14 @@ export default function ({ getService }: FtrProviderContext) {
   const esArchiver = getService('esArchiver');
   const supertest = getService('supertest');
   const fetchSnapshot = async (
-    body: SnapshotRequest
+    body: SnapshotRequest,
+    expectedStatusCode = 200
   ): Promise<SnapshotNodeResponse | undefined> => {
     const response = await supertest
       .post('/api/metrics/snapshot')
       .set('kbn-xsrf', 'xxx')
       .send(body)
-      .expect(200);
+      .expect(expectedStatusCode);
     return response.body;
   };

```
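The optional `expectedStatusCode` parameter keeps the happy-path call sites untouched while letting error cases assert on non-200 responses. Usage, with placeholder request bodies:

```ts
// Defaults to expecting HTTP 200.
const snapshot = await fetchSnapshot(snapshotRequestBody);

// The logRate test further down passes 400 explicitly and asserts on the error body.
const errorBody = await fetchSnapshot(logRateRequestBody, 400);
```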
```diff
@@ -37,7 +38,7 @@ export default function ({ getService }: FtrProviderContext) {
     after(() => esArchiver.unload('x-pack/test/functional/es_archives/infra/6.6.0/docker'));

     it('should basically work', async () => {
-      const resp = fetchSnapshot({
+      const snapshot = await fetchSnapshot({
         sourceId: 'default',
         timerange: {
           to: max,
@@ -47,34 +48,62 @@ export default function ({ getService }: FtrProviderContext) {
         metrics: [{ type: 'cpu' }],
         nodeType: 'container',
         groupBy: [],
+        includeTimeseries: true,
       });
-      return resp.then((data) => {
-        if (!resp) {
-          return;
-        }
-        const snapshot = data;
-        expect(snapshot).to.have.property('nodes');
-        if (snapshot) {
-          const { nodes } = snapshot;
-          expect(nodes.length).to.equal(5);
-          const firstNode = first(nodes) as any;
-          expect(firstNode).to.have.property('path');
-          expect(firstNode.path.length).to.equal(1);
-          expect(first(firstNode.path)).to.have.property(
-            'value',
-            '242fddb9d376bbf0e38025d81764847ee5ec0308adfa095918fd3266f9d06c6a'
-          );
-          expect(firstNode).to.have.property('metrics');
-          expect(firstNode.metrics).to.eql([
-            {
-              name: 'cpu',
-              value: 0,
-              max: 0,
-              avg: 0,
-            },
-          ]);
-        }
-      });
+
+      expect(snapshot).to.have.property('nodes');
+
+      if (snapshot) {
+        const { nodes } = snapshot;
+        expect(nodes.length).to.equal(5);
+        const firstNode = first(nodes) as any;
+        expect(firstNode).to.have.property('path');
+        expect(firstNode.path.length).to.equal(1);
+        expect(first(firstNode.path)).to.have.property(
+          'value',
+          '242fddb9d376bbf0e38025d81764847ee5ec0308adfa095918fd3266f9d06c6a'
+        );
+        expect(firstNode).to.have.property('metrics');
+        expect(firstNode.metrics).to.eql([
+          {
+            name: 'cpu',
+            value: 0,
+            max: 0,
+            avg: 0,
+            timeseries: {
+              columns: [
+                {
+                  name: 'timestamp',
+                  type: 'date',
+                },
+                {
+                  name: 'metric_0',
+                  type: 'number',
+                },
+              ],
+              id: 'cpu',
+              rows: [
+                {
+                  metric_0: 0,
+                  timestamp: 1547578849952,
+                },
+                {
+                  metric_0: 0,
+                  timestamp: 1547578909952,
+                },
+                {
+                  metric_0: 0,
+                  timestamp: 1547578969952,
+                },
+                {
+                  metric_0: 0,
+                  timestamp: 1547579029952,
+                },
+              ],
+            },
+          },
+        ]);
+      }
     });
@@ -88,7 +117,7 @@ export default function ({ getService }: FtrProviderContext) {
       );

     it("should use the id for the label when the name doesn't exist", async () => {
-      const resp = fetchSnapshot({
+      const snapshot = await fetchSnapshot({
         sourceId: 'default',
         timerange: {
           to: max,
@@ -98,29 +127,32 @@ export default function ({ getService }: FtrProviderContext) {
         metrics: [{ type: 'cpu' }],
         nodeType: 'pod',
         groupBy: [],
+        includeTimeseries: false,
       });
-      return resp.then((data) => {
-        const snapshot = data;
-        expect(snapshot).to.have.property('nodes');
-        if (snapshot) {
-          const { nodes } = snapshot;
-          expect(nodes.length).to.equal(65);
-          const firstNode = first(nodes) as any;
-          expect(firstNode).to.have.property('path');
-          expect(firstNode.path.length).to.equal(1);
-          expect(first(firstNode.path)).to.have.property(
-            'value',
-            '00597dd7-a348-11e9-9a96-42010a84004d'
-          );
-          expect(first(firstNode.path)).to.have.property(
-            'label',
-            '00597dd7-a348-11e9-9a96-42010a84004d'
-          );
-        }
-      });
+
+      if (!snapshot) {
+        return;
+      }
+
+      expect(snapshot).to.have.property('nodes');
+      if (snapshot) {
+        const { nodes } = snapshot;
+        expect(nodes.length).to.equal(65);
+        const firstNode = first(nodes) as any;
+        expect(firstNode).to.have.property('path');
+        expect(firstNode.path.length).to.equal(1);
+        expect(first(firstNode.path)).to.have.property(
+          'value',
+          '00597dd7-a348-11e9-9a96-42010a84004d'
+        );
+        expect(first(firstNode.path)).to.have.property(
+          'label',
+          '00597dd7-a348-11e9-9a96-42010a84004d'
+        );
+      }
     });

     it('should have an id and label', async () => {
-      const resp = fetchSnapshot({
+      const snapshot = await fetchSnapshot({
         sourceId: 'default',
         timerange: {
           to: max,
```
```diff
@@ -130,25 +162,129 @@ export default function ({ getService }: FtrProviderContext) {
         metrics: [{ type: 'cpu' }],
         nodeType: 'container',
         groupBy: [],
+        includeTimeseries: false,
       });
-      return resp.then((data) => {
-        const snapshot = data;
-        expect(snapshot).to.have.property('nodes');
-        if (snapshot) {
-          const { nodes } = snapshot;
-          expect(nodes.length).to.equal(135);
-          const firstNode = first(nodes) as any;
-          expect(firstNode).to.have.property('path');
-          expect(firstNode.path.length).to.equal(1);
-          expect(first(firstNode.path)).to.have.property(
-            'value',
-            '01078c21eef4194b0b96253c7c6c32796aba66e3f3f37e26ac97d1dff3e2e91a'
-          );
-          expect(first(firstNode.path)).to.have.property(
-            'label',
-            'k8s_prometheus-to-sd-exporter_fluentd-gcp-v3.2.0-wcmm4_kube-system_b214d17a-9ae0-11e9-9a96-42010a84004d_0'
-          );
-        }
+
+      if (!snapshot) {
+        return;
+      }
+
+      expect(snapshot).to.have.property('nodes');
+
+      const { nodes } = snapshot;
+      expect(nodes.length).to.equal(135);
+      if (snapshot) {
+        const firstNode = first(nodes) as any;
+        expect(firstNode).to.have.property('path');
+        expect(firstNode.path.length).to.equal(1);
+        expect(first(firstNode.path)).to.have.property(
+          'value',
+          '01078c21eef4194b0b96253c7c6c32796aba66e3f3f37e26ac97d1dff3e2e91a'
+        );
+        expect(first(firstNode.path)).to.have.property(
+          'label',
+          'k8s_prometheus-to-sd-exporter_fluentd-gcp-v3.2.0-wcmm4_kube-system_b214d17a-9ae0-11e9-9a96-42010a84004d_0'
+        );
+      }
     });
+
+    it('should not return timeseries data - with groupBy', async () => {
+      const snapshot = await fetchSnapshot({
+        sourceId: 'default',
+        timerange: {
+          to: max,
+          from: min,
+          interval: '1m',
+        },
+        metrics: [{ type: 'cpu' }],
+        nodeType: 'host',
+        groupBy: [{ field: 'host.name' }],
+        includeTimeseries: false,
+      });
+
+      const expected = {
+        name: 'cpu',
+        value: 0.44708333333333333,
+        max: 0.44708333333333333,
+        avg: 0.44708333333333333,
+      };
+
+      expect(snapshot).to.have.property('nodes');
+
+      if (snapshot) {
+        const { nodes } = snapshot;
+        expect(nodes.length).to.equal(3);
+        const firstNode = nodes[0] as any;
+        expect(firstNode).to.have.property('path');
+        expect(firstNode.path.length).to.equal(2);
+        expect(firstNode.path[0]).to.have.property(
+          'value',
+          'gke-observability-8--observability-8--bc1afd95-f0zc'
+        );
+        expect(firstNode.path[1]).to.have.property(
+          'value',
+          'gke-observability-8--observability-8--bc1afd95-f0zc'
+        );
+        expect(firstNode).to.have.property('metrics');
+        expect(firstNode.metrics).to.eql([expected]);
+      }
+    });
+
+    it('should not return timeseries data - without groupBy', async () => {
+      const snapshot = await fetchSnapshot({
+        sourceId: 'default',
+        timerange: {
+          to: max,
+          from: min,
+          interval: '1m',
+        },
+        metrics: [{ type: 'cpu' }],
+        nodeType: 'host',
+        groupBy: null,
+        includeTimeseries: false,
+      });
+
+      const expected = {
+        name: 'cpu',
+        value: null,
+        max: 0.47105555555555556,
+        avg: 0.0672936507936508,
+      };
+
+      expect(snapshot).to.have.property('nodes');
+      if (snapshot) {
+        const { nodes } = snapshot;
+        expect(nodes.length).to.equal(1);
+        const firstNode = nodes[0] as any;
+        expect(firstNode).to.have.property('path');
+        expect(firstNode.path.length).to.equal(1);
+        expect(firstNode.path[0]).to.have.property('value', '*');
+        expect(firstNode).to.have.property('metrics');
+        expect(firstNode.metrics).to.eql([expected]);
+      }
+    });
+
+    it('should fail to fetch logRate with no timeseries data', async () => {
+      const resp = await fetchSnapshot(
+        {
+          sourceId: 'default',
+          timerange: {
+            to: max,
+            from: min,
+            interval: '1m',
+          },
+          metrics: [{ type: 'logRate' }],
+          nodeType: 'host',
+          groupBy: [{ field: 'host.name' }],
+          includeTimeseries: false,
+        },
+        400
+      );
+
+      expect(resp).to.eql({
+        statusCode: 400,
+        error: 'Bad Request',
+        message: 'logRate metric is not supported without time series',
+      });
+    });
   });
```
```diff
@@ -159,7 +295,7 @@ export default function ({ getService }: FtrProviderContext) {
     after(() => esArchiver.unload('x-pack/test/functional/es_archives/infra/7.0.0/hosts'));

     it('should basically work', async () => {
-      const resp = fetchSnapshot({
+      const snapshot = await fetchSnapshot({
         sourceId: 'default',
         timerange: {
           to: max,
@@ -169,33 +305,63 @@ export default function ({ getService }: FtrProviderContext) {
         metrics: [{ type: 'cpu' }],
         nodeType: 'host',
         groupBy: [],
+        includeTimeseries: true,
       });
-      return resp.then((data) => {
-        const snapshot = data;
-        expect(snapshot).to.have.property('nodes');
-        if (snapshot) {
-          const { nodes } = snapshot;
-          expect(nodes.length).to.equal(1);
-          const firstNode = first(nodes) as any;
-          expect(firstNode).to.have.property('path');
-          expect(firstNode.path.length).to.equal(1);
-          expect(first(firstNode.path)).to.have.property('value', 'demo-stack-mysql-01');
-          expect(first(firstNode.path)).to.have.property('label', 'demo-stack-mysql-01');
-          expect(firstNode).to.have.property('metrics');
-          expect(firstNode.metrics).to.eql([
-            {
-              name: 'cpu',
-              value: 0.0032,
-              max: 0.0038333333333333336,
-              avg: 0.003341666666666667,
-            },
-          ]);
-        }
-      });
+
+      expect(snapshot).to.have.property('nodes');
+      if (snapshot) {
+        const { nodes } = snapshot;
+        expect(nodes.length).to.equal(1);
+        const firstNode = first(nodes) as any;
+        expect(firstNode).to.have.property('path');
+        expect(firstNode.path.length).to.equal(1);
+        expect(first(firstNode.path)).to.have.property('value', 'demo-stack-mysql-01');
+        expect(first(firstNode.path)).to.have.property('label', 'demo-stack-mysql-01');
+        expect(firstNode).to.have.property('metrics');
+        expect(firstNode.metrics).to.eql([
+          {
+            name: 'cpu',
+            value: 0.0032,
+            max: 0.0038333333333333336,
+            avg: 0.003341666666666667,
+            timeseries: {
+              columns: [
+                {
+                  name: 'timestamp',
+                  type: 'date',
+                },
+                {
+                  name: 'metric_0',
+                  type: 'number',
+                },
+              ],
+              id: 'cpu',
+              rows: [
+                {
+                  metric_0: 0.003166666666666667,
+                  timestamp: 1547571590967,
+                },
+                {
+                  metric_0: 0.003166666666666667,
+                  timestamp: 1547571650967,
+                },
+                {
+                  metric_0: 0.0038333333333333336,
+                  timestamp: 1547571710967,
+                },
+                {
+                  metric_0: 0.0032,
+                  timestamp: 1547571770967,
+                },
+              ],
+            },
+          },
+        ]);
+      }
     });

     it('should allow for overrides for interval and ignoring lookback', async () => {
-      const resp = fetchSnapshot({
+      const snapshot = await fetchSnapshot({
         sourceId: 'default',
         timerange: {
           to: max,
```
```diff
@@ -209,29 +375,27 @@ export default function ({ getService }: FtrProviderContext) {
         groupBy: [],
+        includeTimeseries: true,
       });
-      return resp.then((data) => {
-        const snapshot = data;
-        expect(snapshot).to.have.property('nodes');
-        if (snapshot) {
-          const { nodes } = snapshot;
-          expect(nodes.length).to.equal(1);
-          const firstNode = first(nodes) as any;
-          expect(firstNode).to.have.property('path');
-          expect(firstNode.path.length).to.equal(1);
-          expect(first(firstNode.path)).to.have.property('value', 'demo-stack-mysql-01');
-          expect(first(firstNode.path)).to.have.property('label', 'demo-stack-mysql-01');
-          expect(firstNode).to.have.property('metrics');
-          expect(firstNode.metrics[0]).to.have.property('timeseries');
-          expect(firstNode.metrics[0].timeseries?.rows.length).to.equal(56);
-          const rows = firstNode.metrics[0].timeseries?.rows;
-          const rowInterval = (rows?.[1]?.timestamp || 0) - (rows?.[0]?.timestamp || 0);
-          expect(rowInterval).to.equal(10000);
-        }
-      });
+
+      expect(snapshot).to.have.property('nodes');
+      if (snapshot) {
+        const { nodes } = snapshot;
+        expect(nodes.length).to.equal(1);
+        const firstNode = first(nodes) as any;
+        expect(firstNode).to.have.property('path');
+        expect(firstNode.path.length).to.equal(1);
+        expect(first(firstNode.path)).to.have.property('value', 'demo-stack-mysql-01');
+        expect(first(firstNode.path)).to.have.property('label', 'demo-stack-mysql-01');
+        expect(firstNode).to.have.property('metrics');
+        expect(firstNode.metrics[0]).to.have.property('timeseries');
+        expect(firstNode.metrics[0].timeseries?.rows.length).to.equal(56);
+        const rows = firstNode.metrics[0].timeseries?.rows;
+        const rowInterval = (rows?.[1]?.timestamp || 0) - (rows?.[0]?.timestamp || 0);
+        expect(rowInterval).to.equal(10000);
+      }
     });

     it('should allow for overrides for lookback', async () => {
-      const resp = fetchSnapshot({
+      const snapshot = await fetchSnapshot({
         sourceId: 'default',
         timerange: {
           to: max,
@@ -244,26 +408,24 @@ export default function ({ getService }: FtrProviderContext) {
         groupBy: [],
+        includeTimeseries: true,
       });
-      return resp.then((data) => {
-        const snapshot = data;
-        expect(snapshot).to.have.property('nodes');
-        if (snapshot) {
-          const { nodes } = snapshot;
-          expect(nodes.length).to.equal(1);
-          const firstNode = first(nodes) as any;
-          expect(firstNode).to.have.property('path');
-          expect(firstNode.path.length).to.equal(1);
-          expect(first(firstNode.path)).to.have.property('value', 'demo-stack-mysql-01');
-          expect(first(firstNode.path)).to.have.property('label', 'demo-stack-mysql-01');
-          expect(firstNode).to.have.property('metrics');
-          expect(firstNode.metrics[0]).to.have.property('timeseries');
-          expect(firstNode.metrics[0].timeseries?.rows.length).to.equal(5);
-        }
-      });
+
+      expect(snapshot).to.have.property('nodes');
+      if (snapshot) {
+        const { nodes } = snapshot;
+        expect(nodes.length).to.equal(1);
+        const firstNode = first(nodes) as any;
+        expect(firstNode).to.have.property('path');
+        expect(firstNode.path.length).to.equal(1);
+        expect(first(firstNode.path)).to.have.property('value', 'demo-stack-mysql-01');
+        expect(first(firstNode.path)).to.have.property('label', 'demo-stack-mysql-01');
+        expect(firstNode).to.have.property('metrics');
+        expect(firstNode.metrics[0]).to.have.property('timeseries');
+        expect(firstNode.metrics[0].timeseries?.rows.length).to.equal(5);
+      }
     });

     it('should work with custom metrics', async () => {
-      const data = await fetchSnapshot({
+      const snapshot = await fetchSnapshot({
         sourceId: 'default',
         timerange: {
           to: max,
```
```diff
@@ -280,10 +442,9 @@ export default function ({ getService }: FtrProviderContext) {
         ] as SnapshotMetricInput[],
         nodeType: 'host',
         groupBy: [],
+        includeTimeseries: true,
       });
-
-      const snapshot = data;
       expect(snapshot).to.have.property('nodes');
       if (snapshot) {
         const { nodes } = snapshot;
         expect(nodes.length).to.equal(1);
@@ -299,13 +460,44 @@ export default function ({ getService }: FtrProviderContext) {
             value: 0.0016,
             max: 0.0018333333333333333,
             avg: 0.00165,
+            timeseries: {
+              columns: [
+                {
+                  name: 'timestamp',
+                  type: 'date',
+                },
+                {
+                  name: 'metric_0',
+                  type: 'number',
+                },
+              ],
+              id: 'custom_0',
+              rows: [
+                {
+                  metric_0: 0.0016666666666666668,
+                  timestamp: 1547571590967,
+                },
+                {
+                  metric_0: 0.0015000000000000002,
+                  timestamp: 1547571650967,
+                },
+                {
+                  metric_0: 0.0018333333333333333,
+                  timestamp: 1547571710967,
+                },
+                {
+                  metric_0: 0.0016,
+                  timestamp: 1547571770967,
+                },
+              ],
+            },
           },
         ]);
       }
     });

     it('should basically work with 1 grouping', async () => {
-      const resp = fetchSnapshot({
+      const snapshot = await fetchSnapshot({
         sourceId: 'default',
         timerange: {
           to: max,
```
```diff
@@ -315,24 +507,23 @@ export default function ({ getService }: FtrProviderContext) {
         metrics: [{ type: 'cpu' }],
         nodeType: 'host',
         groupBy: [{ field: 'cloud.availability_zone' }],
+        includeTimeseries: false,
       });
-      return resp.then((data) => {
-        const snapshot = data;
-        expect(snapshot).to.have.property('nodes');
-        if (snapshot) {
-          const { nodes } = snapshot;
-          expect(nodes.length).to.equal(1);
-          const firstNode = first(nodes) as any;
-          expect(firstNode).to.have.property('path');
-          expect(firstNode.path.length).to.equal(2);
-          expect(first(firstNode.path)).to.have.property('value', 'virtualbox');
-          expect(last(firstNode.path)).to.have.property('value', 'demo-stack-mysql-01');
-        }
-      });
+
+      expect(snapshot).to.have.property('nodes');
+      if (snapshot) {
+        const { nodes } = snapshot;
+        expect(nodes.length).to.equal(1);
+        const firstNode = first(nodes) as any;
+        expect(firstNode).to.have.property('path');
+        expect(firstNode.path.length).to.equal(2);
+        expect(first(firstNode.path)).to.have.property('value', 'virtualbox');
+        expect(last(firstNode.path)).to.have.property('value', 'demo-stack-mysql-01');
+      }
     });

     it('should basically work with 2 groupings', async () => {
-      const resp = fetchSnapshot({
+      const snapshot = await fetchSnapshot({
         sourceId: 'default',
         timerange: {
           to: max,
@@ -342,26 +533,24 @@ export default function ({ getService }: FtrProviderContext) {
         metrics: [{ type: 'cpu' }],
         nodeType: 'host',
         groupBy: [{ field: 'cloud.provider' }, { field: 'cloud.availability_zone' }],
+        includeTimeseries: false,
       });

-      return resp.then((data) => {
-        const snapshot = data;
-        expect(snapshot).to.have.property('nodes');
-        if (snapshot) {
-          const { nodes } = snapshot;
-          expect(nodes.length).to.equal(1);
-          const firstNode = first(nodes) as any;
-          expect(firstNode).to.have.property('path');
-          expect(firstNode.path.length).to.equal(3);
-          expect(first(firstNode.path)).to.have.property('value', 'vagrant');
-          expect(firstNode.path[1]).to.have.property('value', 'virtualbox');
-          expect(last(firstNode.path)).to.have.property('value', 'demo-stack-mysql-01');
-        }
-      });
+      expect(snapshot).to.have.property('nodes');
+      if (snapshot) {
+        const { nodes } = snapshot;
+        expect(nodes.length).to.equal(1);
+        const firstNode = first(nodes) as any;
+        expect(firstNode).to.have.property('path');
+        expect(firstNode.path.length).to.equal(3);
+        expect(first(firstNode.path)).to.have.property('value', 'vagrant');
+        expect(firstNode.path[1]).to.have.property('value', 'virtualbox');
+        expect(last(firstNode.path)).to.have.property('value', 'demo-stack-mysql-01');
+      }
     });

     it('should show metrics for all nodes when grouping by service type', async () => {
-      const resp = fetchSnapshot({
+      const snapshot = await fetchSnapshot({
         sourceId: 'default',
         timerange: {
           to: max,
```
```diff
@@ -371,43 +560,66 @@ export default function ({ getService }: FtrProviderContext) {
         metrics: [{ type: 'cpu' }],
         nodeType: 'host',
         groupBy: [{ field: 'service.type' }],
+        includeTimeseries: true,
       });
-      return resp.then((data) => {
-        const snapshot = data;
-        expect(snapshot).to.have.property('nodes');
-        if (snapshot) {
-          const { nodes } = snapshot;
-          expect(nodes.length).to.equal(2);
-          const firstNode = nodes[0] as any;
-          expect(firstNode).to.have.property('path');
-          expect(firstNode.path.length).to.equal(2);
-          expect(firstNode.path[0]).to.have.property('value', 'mysql');
-          expect(firstNode.path[1]).to.have.property('value', 'demo-stack-mysql-01');
-          expect(firstNode).to.have.property('metrics');
-          expect(firstNode.metrics).to.eql([
-            {
-              name: 'cpu',
-              value: 0.0032,
-              max: 0.0038333333333333336,
-              avg: 0.003341666666666667,
-            },
-          ]);
-          const secondNode = nodes[1] as any;
-          expect(secondNode).to.have.property('path');
-          expect(secondNode.path.length).to.equal(2);
-          expect(secondNode.path[0]).to.have.property('value', 'system');
-          expect(secondNode.path[1]).to.have.property('value', 'demo-stack-mysql-01');
-          expect(secondNode).to.have.property('metrics');
-          expect(secondNode.metrics).to.eql([
-            {
-              name: 'cpu',
-              value: 0.0032,
-              max: 0.0038333333333333336,
-              avg: 0.003341666666666667,
-            },
-          ]);
-        }
-      });
+
+      const expected = {
+        name: 'cpu',
+        value: 0.0032,
+        max: 0.0038333333333333336,
+        avg: 0.003341666666666667,
+        timeseries: {
+          columns: [
+            {
+              name: 'timestamp',
+              type: 'date',
+            },
+            {
+              name: 'metric_0',
+              type: 'number',
+            },
+          ],
+          id: 'cpu',
+          rows: [
+            {
+              metric_0: 0.003166666666666667,
+              timestamp: 1547571590967,
+            },
+            {
+              metric_0: 0.003166666666666667,
+              timestamp: 1547571650967,
+            },
+            {
+              metric_0: 0.0038333333333333336,
+              timestamp: 1547571710967,
+            },
+            {
+              metric_0: 0.0032,
+              timestamp: 1547571770967,
+            },
+          ],
+        },
+      };
+
+      expect(snapshot).to.have.property('nodes');
+      if (snapshot) {
+        const { nodes } = snapshot;
+        expect(nodes.length).to.equal(2);
+        const firstNode = nodes[0] as any;
+        expect(firstNode).to.have.property('path');
+        expect(firstNode.path.length).to.equal(2);
+        expect(firstNode.path[0]).to.have.property('value', 'mysql');
+        expect(firstNode.path[1]).to.have.property('value', 'demo-stack-mysql-01');
+        expect(firstNode).to.have.property('metrics');
+        expect(firstNode.metrics).to.eql([expected]);
+        const secondNode = nodes[1] as any;
+        expect(secondNode).to.have.property('path');
+        expect(secondNode.path.length).to.equal(2);
+        expect(secondNode.path[0]).to.have.property('value', 'system');
+        expect(secondNode.path[1]).to.have.property('value', 'demo-stack-mysql-01');
+        expect(secondNode).to.have.property('metrics');
+        expect(secondNode.metrics).to.eql([expected]);
+      }
     });
   });
 });
```