mirror of
https://github.com/elastic/kibana.git
synced 2025-04-24 09:48:58 -04:00
* Port graphql query for snapshot to hapijs * fix type * Remove graphql from node details. * Use boom for error handling. * Remove unused imports * Remove dead code. Merge types * Fix tests * Remove unused import * Rename types * Go all in on io-ts. Cleanup types * Fix typecheck
This commit is contained in:
parent
0e2c3bcecc
commit
8586a2f0c8
28 changed files with 946 additions and 843 deletions
|
@ -0,0 +1,58 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import * as rt from 'io-ts';
|
||||
import { InventoryMetricRT, ItemTypeRT } from '../inventory_models/types';
|
||||
import { InfraWrappableRequest } from '../../server/lib/adapters/framework';
|
||||
import { InfraTimerangeInputRT } from './snapshot_api';
|
||||
|
||||
const NodeDetailsDataPointRT = rt.intersection([
|
||||
rt.type({
|
||||
timestamp: rt.number,
|
||||
}),
|
||||
rt.partial({
|
||||
value: rt.union([rt.number, rt.null]),
|
||||
}),
|
||||
]);
|
||||
|
||||
const NodeDetailsDataSeries = rt.type({
|
||||
id: rt.string,
|
||||
label: rt.string,
|
||||
data: rt.array(NodeDetailsDataPointRT),
|
||||
});
|
||||
|
||||
export const NodeDetailsMetricDataRT = rt.intersection([
|
||||
rt.partial({
|
||||
id: rt.union([InventoryMetricRT, rt.null]),
|
||||
}),
|
||||
rt.type({
|
||||
series: rt.array(NodeDetailsDataSeries),
|
||||
}),
|
||||
]);
|
||||
|
||||
export const NodeDetailsMetricDataResponseRT = rt.type({
|
||||
metrics: rt.array(NodeDetailsMetricDataRT),
|
||||
});
|
||||
|
||||
export const NodeDetailsRequestRT = rt.intersection([
|
||||
rt.type({
|
||||
nodeType: ItemTypeRT,
|
||||
nodeId: rt.string,
|
||||
metrics: rt.array(InventoryMetricRT),
|
||||
timerange: InfraTimerangeInputRT,
|
||||
sourceId: rt.string,
|
||||
}),
|
||||
rt.partial({
|
||||
cloudId: rt.union([rt.string, rt.null]),
|
||||
}),
|
||||
]);
|
||||
|
||||
// export type NodeDetailsRequest = InfraWrappableRequest<NodesArgs & SourceArgs>;
|
||||
|
||||
export type NodeDetailsRequest = rt.TypeOf<typeof NodeDetailsRequestRT>;
|
||||
export type NodeDetailsWrappedRequest = InfraWrappableRequest<NodeDetailsRequest>;
|
||||
|
||||
export type NodeDetailsMetricDataResponse = rt.TypeOf<typeof NodeDetailsMetricDataResponseRT>;
|
69
x-pack/legacy/plugins/infra/common/http_api/snapshot_api.ts
Normal file
69
x-pack/legacy/plugins/infra/common/http_api/snapshot_api.ts
Normal file
|
@ -0,0 +1,69 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import * as rt from 'io-ts';
|
||||
import { InfraWrappableRequest } from '../../server/lib/adapters/framework';
|
||||
import { SnapshotMetricTypeRT, ItemTypeRT } from '../inventory_models/types';
|
||||
|
||||
export const SnapshotNodePathRT = rt.intersection([
|
||||
rt.type({
|
||||
value: rt.string,
|
||||
label: rt.string,
|
||||
}),
|
||||
rt.partial({
|
||||
ip: rt.union([rt.string, rt.null]),
|
||||
}),
|
||||
]);
|
||||
|
||||
const SnapshotNodeMetricOptionalRT = rt.partial({
|
||||
value: rt.union([rt.number, rt.null]),
|
||||
average: rt.union([rt.number, rt.null]),
|
||||
max: rt.union([rt.number, rt.null]),
|
||||
});
|
||||
|
||||
const SnapshotNodeMetricRequiredRT = rt.type({
|
||||
name: SnapshotMetricTypeRT,
|
||||
});
|
||||
|
||||
export const SnapshotNodeRT = rt.type({
|
||||
metric: rt.intersection([SnapshotNodeMetricRequiredRT, SnapshotNodeMetricOptionalRT]),
|
||||
path: rt.array(SnapshotNodePathRT),
|
||||
});
|
||||
|
||||
export const SnapshotNodeResponseRT = rt.type({
|
||||
nodes: rt.array(SnapshotNodeRT),
|
||||
});
|
||||
|
||||
export const InfraTimerangeInputRT = rt.type({
|
||||
interval: rt.string,
|
||||
to: rt.number,
|
||||
from: rt.number,
|
||||
});
|
||||
|
||||
export const SnapshotRequestRT = rt.intersection([
|
||||
rt.type({
|
||||
timerange: InfraTimerangeInputRT,
|
||||
metric: rt.type({
|
||||
type: SnapshotMetricTypeRT,
|
||||
}),
|
||||
groupBy: rt.array(
|
||||
rt.partial({
|
||||
label: rt.union([rt.string, rt.null]),
|
||||
field: rt.union([rt.string, rt.null]),
|
||||
})
|
||||
),
|
||||
nodeType: ItemTypeRT,
|
||||
sourceId: rt.string,
|
||||
}),
|
||||
rt.partial({
|
||||
filterQuery: rt.union([rt.string, rt.null]),
|
||||
}),
|
||||
]);
|
||||
|
||||
export type SnapshotRequest = rt.TypeOf<typeof SnapshotRequestRT>;
|
||||
export type SnapshotWrappedRequest = InfraWrappableRequest<SnapshotRequest>;
|
||||
export type SnapshotNode = rt.TypeOf<typeof SnapshotNodeRT>;
|
||||
export type SnapshotNodeResponse = rt.TypeOf<typeof SnapshotNodeResponseRT>;
|
|
@ -6,33 +6,61 @@
|
|||
|
||||
import React from 'react';
|
||||
import { InfraWaffleMapOptions, InfraWaffleMapBounds } from '../../lib/lib';
|
||||
import { InfraNodeType, InfraSnapshotNode, InfraTimerangeInput } from '../../graphql/types';
|
||||
import {
|
||||
InfraNodeType,
|
||||
InfraTimerangeInput,
|
||||
InfraSnapshotMetricInput,
|
||||
InfraSnapshotGroupbyInput,
|
||||
} from '../../graphql/types';
|
||||
import { KueryFilterQuery } from '../../store/local/waffle_filter';
|
||||
|
||||
import { NodesOverview } from '../nodes_overview';
|
||||
import { Toolbar } from './toolbars/toolbar';
|
||||
import { PageContent } from '../page';
|
||||
import { useSnapshot } from '../../containers/waffle/use_snaphot';
|
||||
|
||||
export interface LayoutProps {
|
||||
options: InfraWaffleMapOptions;
|
||||
nodeType: InfraNodeType;
|
||||
nodes: InfraSnapshotNode[];
|
||||
loading: boolean;
|
||||
reload: () => void;
|
||||
onDrilldown: (filter: KueryFilterQuery) => void;
|
||||
timeRange: InfraTimerangeInput;
|
||||
onViewChange: (view: string) => void;
|
||||
view: string;
|
||||
boundsOverride: InfraWaffleMapBounds;
|
||||
autoBounds: boolean;
|
||||
|
||||
filterQuery: string | null | undefined;
|
||||
metric: InfraSnapshotMetricInput;
|
||||
groupBy: InfraSnapshotGroupbyInput[];
|
||||
sourceId: string;
|
||||
}
|
||||
|
||||
export const Layout = (props: LayoutProps) => {
|
||||
const { loading, nodes, reload } = useSnapshot(
|
||||
props.filterQuery,
|
||||
props.metric,
|
||||
props.groupBy,
|
||||
props.nodeType,
|
||||
props.sourceId,
|
||||
props.timeRange
|
||||
);
|
||||
return (
|
||||
<>
|
||||
<Toolbar nodeType={props.nodeType} />
|
||||
<PageContent>
|
||||
<NodesOverview {...props} />
|
||||
<NodesOverview
|
||||
nodes={nodes}
|
||||
options={props.options}
|
||||
nodeType={props.nodeType}
|
||||
loading={loading}
|
||||
reload={reload}
|
||||
onDrilldown={props.onDrilldown}
|
||||
timeRange={props.timeRange}
|
||||
onViewChange={props.onViewChange}
|
||||
view={props.view}
|
||||
autoBounds={props.autoBounds}
|
||||
boundsOverride={props.boundsOverride}
|
||||
/>
|
||||
</PageContent>
|
||||
</>
|
||||
);
|
||||
|
|
|
@ -25,11 +25,12 @@ import { InfraLoadingPanel } from '../loading';
|
|||
import { Map } from '../waffle/map';
|
||||
import { ViewSwitcher } from '../waffle/view_switcher';
|
||||
import { TableView } from './table';
|
||||
import { SnapshotNode } from '../../../common/http_api/snapshot_api';
|
||||
|
||||
interface Props {
|
||||
options: InfraWaffleMapOptions;
|
||||
nodeType: InfraNodeType;
|
||||
nodes: InfraSnapshotNode[];
|
||||
nodes: SnapshotNode[];
|
||||
loading: boolean;
|
||||
reload: () => void;
|
||||
onDrilldown: (filter: KueryFilterQuery) => void;
|
||||
|
|
|
@ -0,0 +1,53 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
import { fold } from 'fp-ts/lib/Either';
|
||||
import { identity } from 'fp-ts/lib/function';
|
||||
import { pipe } from 'fp-ts/lib/pipeable';
|
||||
import { InfraNodeType, InfraTimerangeInput } from '../../graphql/types';
|
||||
import { throwErrors, createPlainError } from '../../../common/runtime_types';
|
||||
import { useHTTPRequest } from '../../hooks/use_http_request';
|
||||
import {
|
||||
NodeDetailsMetricDataResponseRT,
|
||||
NodeDetailsMetricDataResponse,
|
||||
} from '../../../common/http_api/node_details_api';
|
||||
import { InventoryMetric } from '../../../common/inventory_models/types';
|
||||
|
||||
export function useNodeDetails(
|
||||
metrics: InventoryMetric[],
|
||||
nodeId: string,
|
||||
nodeType: InfraNodeType,
|
||||
sourceId: string,
|
||||
timerange: InfraTimerangeInput,
|
||||
cloudId: string
|
||||
) {
|
||||
const decodeResponse = (response: any) => {
|
||||
return pipe(
|
||||
NodeDetailsMetricDataResponseRT.decode(response),
|
||||
fold(throwErrors(createPlainError), identity)
|
||||
);
|
||||
};
|
||||
|
||||
const { error, loading, response, makeRequest } = useHTTPRequest<NodeDetailsMetricDataResponse>(
|
||||
'/api/metrics/node_details',
|
||||
'POST',
|
||||
JSON.stringify({
|
||||
metrics,
|
||||
nodeId,
|
||||
nodeType,
|
||||
timerange,
|
||||
cloudId,
|
||||
sourceId,
|
||||
decodeResponse,
|
||||
})
|
||||
);
|
||||
|
||||
return {
|
||||
error,
|
||||
loading,
|
||||
metrics: response ? response.metrics : [],
|
||||
makeRequest,
|
||||
};
|
||||
}
|
|
@ -5,4 +5,3 @@
|
|||
*/
|
||||
|
||||
export * from './with_waffle_filters';
|
||||
export * from './with_waffle_nodes';
|
||||
|
|
|
@ -0,0 +1,65 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { useEffect } from 'react';
|
||||
import { fold } from 'fp-ts/lib/Either';
|
||||
import { identity } from 'fp-ts/lib/function';
|
||||
import { pipe } from 'fp-ts/lib/pipeable';
|
||||
import {
|
||||
InfraNodeType,
|
||||
InfraSnapshotMetricInput,
|
||||
InfraSnapshotGroupbyInput,
|
||||
InfraTimerangeInput,
|
||||
} from '../../graphql/types';
|
||||
import { throwErrors, createPlainError } from '../../../common/runtime_types';
|
||||
import { useHTTPRequest } from '../../hooks/use_http_request';
|
||||
import {
|
||||
SnapshotNodeResponseRT,
|
||||
SnapshotNodeResponse,
|
||||
} from '../../../common/http_api/snapshot_api';
|
||||
|
||||
export function useSnapshot(
|
||||
filterQuery: string | null | undefined,
|
||||
metric: InfraSnapshotMetricInput,
|
||||
groupBy: InfraSnapshotGroupbyInput[],
|
||||
nodeType: InfraNodeType,
|
||||
sourceId: string,
|
||||
timerange: InfraTimerangeInput
|
||||
) {
|
||||
const decodeResponse = (response: any) => {
|
||||
return pipe(
|
||||
SnapshotNodeResponseRT.decode(response),
|
||||
fold(throwErrors(createPlainError), identity)
|
||||
);
|
||||
};
|
||||
|
||||
const { error, loading, response, makeRequest } = useHTTPRequest<SnapshotNodeResponse>(
|
||||
'/api/metrics/snapshot',
|
||||
'POST',
|
||||
JSON.stringify({
|
||||
metric,
|
||||
groupBy,
|
||||
nodeType,
|
||||
timerange,
|
||||
filterQuery,
|
||||
sourceId,
|
||||
decodeResponse,
|
||||
})
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
(async () => {
|
||||
await makeRequest();
|
||||
})();
|
||||
}, [makeRequest]);
|
||||
|
||||
return {
|
||||
error: (error && error.message) || null,
|
||||
loading,
|
||||
nodes: response ? response.nodes : [],
|
||||
reload: makeRequest,
|
||||
};
|
||||
}
|
|
@ -1,69 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import React from 'react';
|
||||
import { Query } from 'react-apollo';
|
||||
|
||||
import {
|
||||
InfraSnapshotMetricInput,
|
||||
InfraSnapshotNode,
|
||||
InfraNodeType,
|
||||
InfraSnapshotGroupbyInput,
|
||||
InfraTimerangeInput,
|
||||
WaffleNodesQuery,
|
||||
} from '../../graphql/types';
|
||||
import { waffleNodesQuery } from './waffle_nodes.gql_query';
|
||||
|
||||
interface WithWaffleNodesArgs {
|
||||
nodes: InfraSnapshotNode[];
|
||||
loading: boolean;
|
||||
refetch: () => void;
|
||||
}
|
||||
|
||||
interface WithWaffleNodesProps {
|
||||
children: (args: WithWaffleNodesArgs) => React.ReactNode;
|
||||
filterQuery: string | null | undefined;
|
||||
metric: InfraSnapshotMetricInput;
|
||||
groupBy: InfraSnapshotGroupbyInput[];
|
||||
nodeType: InfraNodeType;
|
||||
sourceId: string;
|
||||
timerange: InfraTimerangeInput;
|
||||
}
|
||||
|
||||
export const WithWaffleNodes = ({
|
||||
children,
|
||||
filterQuery,
|
||||
metric,
|
||||
groupBy,
|
||||
nodeType,
|
||||
sourceId,
|
||||
timerange,
|
||||
}: WithWaffleNodesProps) => (
|
||||
<Query<WaffleNodesQuery.Query, WaffleNodesQuery.Variables>
|
||||
query={waffleNodesQuery}
|
||||
fetchPolicy="network-only"
|
||||
notifyOnNetworkStatusChange
|
||||
variables={{
|
||||
sourceId,
|
||||
metric,
|
||||
groupBy: [...groupBy],
|
||||
type: nodeType,
|
||||
timerange,
|
||||
filterQuery,
|
||||
}}
|
||||
>
|
||||
{({ data, loading, refetch, error }) =>
|
||||
children({
|
||||
loading,
|
||||
nodes:
|
||||
!error && data && data.source && data.source.snapshot && data.source.snapshot.nodes
|
||||
? data.source.snapshot.nodes
|
||||
: [],
|
||||
refetch,
|
||||
})
|
||||
}
|
||||
</Query>
|
||||
);
|
|
@ -1,3 +1,5 @@
|
|||
import { SnapshotMetricType } from '../../common/inventory_models/types';
|
||||
|
||||
/* tslint:disable */
|
||||
|
||||
// ====================================================
|
||||
|
@ -291,7 +293,7 @@ export interface InfraSnapshotNodePath {
|
|||
}
|
||||
|
||||
export interface InfraSnapshotNodeMetric {
|
||||
name: InfraSnapshotMetricType;
|
||||
name: SnapshotMetricType;
|
||||
|
||||
value?: number | null;
|
||||
|
||||
|
|
|
@ -7,7 +7,6 @@
|
|||
import React from 'react';
|
||||
|
||||
import { WithWaffleFilter } from '../../../containers/waffle/with_waffle_filters';
|
||||
import { WithWaffleNodes } from '../../../containers/waffle/with_waffle_nodes';
|
||||
import { WithWaffleOptions } from '../../../containers/waffle/with_waffle_options';
|
||||
import { WithWaffleTime } from '../../../containers/waffle/with_waffle_time';
|
||||
import { WithOptions } from '../../../containers/with_options';
|
||||
|
@ -33,35 +32,25 @@ export const SnapshotPageContent: React.FC = () => (
|
|||
autoBounds,
|
||||
boundsOverride,
|
||||
}) => (
|
||||
<WithWaffleNodes
|
||||
<Layout
|
||||
filterQuery={filterQueryAsJson}
|
||||
metric={metric}
|
||||
groupBy={groupBy}
|
||||
nodeType={nodeType}
|
||||
sourceId={sourceId}
|
||||
timerange={currentTimeRange}
|
||||
>
|
||||
{({ nodes, loading, refetch }) => (
|
||||
<Layout
|
||||
nodes={nodes}
|
||||
loading={nodes.length > 0 && isAutoReloading ? false : loading}
|
||||
nodeType={nodeType}
|
||||
options={{
|
||||
...wafflemap,
|
||||
metric,
|
||||
fields: configuration && configuration.fields,
|
||||
groupBy,
|
||||
}}
|
||||
reload={refetch}
|
||||
onDrilldown={applyFilterQuery}
|
||||
timeRange={currentTimeRange}
|
||||
view={view}
|
||||
onViewChange={changeView}
|
||||
autoBounds={autoBounds}
|
||||
boundsOverride={boundsOverride}
|
||||
/>
|
||||
)}
|
||||
</WithWaffleNodes>
|
||||
timeRange={currentTimeRange}
|
||||
options={{
|
||||
...wafflemap,
|
||||
metric,
|
||||
fields: configuration && configuration.fields,
|
||||
groupBy,
|
||||
}}
|
||||
onDrilldown={applyFilterQuery}
|
||||
view={view}
|
||||
onViewChange={changeView}
|
||||
autoBounds={autoBounds}
|
||||
boundsOverride={boundsOverride}
|
||||
/>
|
||||
)}
|
||||
</WithWaffleOptions>
|
||||
)}
|
||||
|
|
|
@ -0,0 +1,149 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import React, { useCallback, useEffect } from 'react';
|
||||
import {
|
||||
EuiPage,
|
||||
EuiPageBody,
|
||||
EuiPageHeader,
|
||||
EuiPageHeaderSection,
|
||||
EuiHideFor,
|
||||
EuiTitle,
|
||||
EuiPageContent,
|
||||
} from '@elastic/eui';
|
||||
import { InventoryMetric } from '../../../../common/inventory_models/types';
|
||||
import { useNodeDetails } from '../../../containers/node_details/use_node_details';
|
||||
import { InfraNodeType, InfraTimerangeInput } from '../../../graphql/types';
|
||||
import { MetricsSideNav } from './side_nav';
|
||||
import { AutoSizer } from '../../../components/auto_sizer';
|
||||
import { MetricsTimeControls } from './time_controls';
|
||||
import { NodeDetails } from './node_details';
|
||||
import { SideNavContext, NavItem } from '../lib/side_nav_context';
|
||||
import { PageBody } from './page_body';
|
||||
import euiStyled from '../../../../../../common/eui_styled_components';
|
||||
import { MetricsTimeInput } from '../containers/with_metrics_time';
|
||||
import { InfraMetadata } from '../../../../common/http_api/metadata_api';
|
||||
import { PageError } from './page_error';
|
||||
|
||||
interface Props {
|
||||
name: string;
|
||||
requiredMetrics: InventoryMetric[];
|
||||
nodeId: string;
|
||||
cloudId: string;
|
||||
nodeType: InfraNodeType;
|
||||
sourceId: string;
|
||||
timeRange: MetricsTimeInput;
|
||||
parsedTimeRange: InfraTimerangeInput;
|
||||
metadataLoading: boolean;
|
||||
isAutoReloading: boolean;
|
||||
refreshInterval: number;
|
||||
sideNav: NavItem[];
|
||||
metadata: InfraMetadata | null;
|
||||
addNavItem(item: NavItem): void;
|
||||
setRefreshInterval(refreshInterval: number): void;
|
||||
setAutoReload(isAutoReloading: boolean): void;
|
||||
triggerRefresh(): void;
|
||||
setTimeRange(timeRange: MetricsTimeInput): void;
|
||||
}
|
||||
export const NodeDetailsPage = (props: Props) => {
|
||||
if (!props.metadata) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const { parsedTimeRange } = props;
|
||||
const { metrics, loading, makeRequest, error } = useNodeDetails(
|
||||
props.requiredMetrics,
|
||||
props.nodeId,
|
||||
props.nodeType,
|
||||
props.sourceId,
|
||||
props.parsedTimeRange,
|
||||
props.cloudId
|
||||
);
|
||||
|
||||
const refetch = useCallback(() => {
|
||||
makeRequest();
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
makeRequest();
|
||||
}, [parsedTimeRange]);
|
||||
|
||||
if (error) {
|
||||
return <PageError error={error} name={props.name} />;
|
||||
}
|
||||
|
||||
return (
|
||||
<EuiPage style={{ flex: '1 0 auto' }}>
|
||||
<MetricsSideNav loading={props.metadataLoading} name={props.name} items={props.sideNav} />
|
||||
<AutoSizer content={false} bounds detectAnyWindowResize>
|
||||
{({ bounds: { width = 0 } }) => {
|
||||
const w = width ? `${width}px` : `100%`;
|
||||
return (
|
||||
<MetricsDetailsPageColumn>
|
||||
<EuiPageBody style={{ width: w }}>
|
||||
<EuiPageHeader style={{ flex: '0 0 auto' }}>
|
||||
<EuiPageHeaderSection style={{ width: '100%' }}>
|
||||
<MetricsTitleTimeRangeContainer>
|
||||
<EuiHideFor sizes={['xs', 's']}>
|
||||
<EuiTitle size="m">
|
||||
<h1>{props.name}</h1>
|
||||
</EuiTitle>
|
||||
</EuiHideFor>
|
||||
<MetricsTimeControls
|
||||
currentTimeRange={props.timeRange}
|
||||
isLiveStreaming={props.isAutoReloading}
|
||||
refreshInterval={props.refreshInterval}
|
||||
setRefreshInterval={props.setRefreshInterval}
|
||||
onChangeTimeRange={props.setTimeRange}
|
||||
setAutoReload={props.setAutoReload}
|
||||
onRefresh={props.triggerRefresh}
|
||||
/>
|
||||
</MetricsTitleTimeRangeContainer>
|
||||
</EuiPageHeaderSection>
|
||||
</EuiPageHeader>
|
||||
<NodeDetails metadata={props.metadata} />
|
||||
<EuiPageContentWithRelative>
|
||||
<SideNavContext.Provider
|
||||
value={{
|
||||
items: props.sideNav,
|
||||
addNavItem: props.addNavItem,
|
||||
}}
|
||||
>
|
||||
<PageBody
|
||||
loading={metrics.length > 0 && props.isAutoReloading ? false : loading}
|
||||
refetch={refetch}
|
||||
type={props.nodeType}
|
||||
metrics={metrics}
|
||||
onChangeRangeTime={props.setTimeRange}
|
||||
isLiveStreaming={props.isAutoReloading}
|
||||
stopLiveStreaming={() => props.setAutoReload(false)}
|
||||
/>
|
||||
</SideNavContext.Provider>
|
||||
</EuiPageContentWithRelative>
|
||||
</EuiPageBody>
|
||||
</MetricsDetailsPageColumn>
|
||||
);
|
||||
}}
|
||||
</AutoSizer>
|
||||
</EuiPage>
|
||||
);
|
||||
};
|
||||
|
||||
const EuiPageContentWithRelative = euiStyled(EuiPageContent)`
|
||||
position: relative;
|
||||
`;
|
||||
|
||||
const MetricsDetailsPageColumn = euiStyled.div`
|
||||
flex: 1 0 0%;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
`;
|
||||
|
||||
const MetricsTitleTimeRangeContainer = euiStyled.div`
|
||||
display: flex;
|
||||
flex-flow: row wrap;
|
||||
justify-content: space-between;
|
||||
`;
|
|
@ -4,25 +4,21 @@
|
|||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { GraphQLFormattedError } from 'graphql';
|
||||
// import { GraphQLFormattedError } from 'graphql';
|
||||
import React from 'react';
|
||||
import { i18n } from '@kbn/i18n';
|
||||
import { ApolloError } from 'apollo-client';
|
||||
import { KFetchError } from 'ui/kfetch/kfetch_error';
|
||||
import { InvalidNodeError } from './invalid_node';
|
||||
import { InfraMetricsErrorCodes } from '../../../../common/errors';
|
||||
// import { InfraMetricsErrorCodes } from '../../../../common/errors';
|
||||
import { DocumentTitle } from '../../../components/document_title';
|
||||
import { ErrorPageBody } from '../../error';
|
||||
|
||||
interface Props {
|
||||
name: string;
|
||||
error: ApolloError;
|
||||
error: KFetchError;
|
||||
}
|
||||
|
||||
export const PageError = ({ error, name }: Props) => {
|
||||
const invalidNodeError = error.graphQLErrors.some(
|
||||
(err: GraphQLFormattedError) => err.code === InfraMetricsErrorCodes.invalid_node
|
||||
);
|
||||
|
||||
return (
|
||||
<>
|
||||
<DocumentTitle
|
||||
|
@ -35,11 +31,13 @@ export const PageError = ({ error, name }: Props) => {
|
|||
})
|
||||
}
|
||||
/>
|
||||
{invalidNodeError ? (
|
||||
<InvalidNodeError nodeName={name} />
|
||||
) : (
|
||||
<ErrorPageBody message={error.message} />
|
||||
)}
|
||||
{
|
||||
(error.body.statusCode = 404 ? (
|
||||
<InvalidNodeError nodeName={name} />
|
||||
) : (
|
||||
<ErrorPageBody message={error.message} />
|
||||
))
|
||||
}
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
*/
|
||||
|
||||
import createContainer from 'constate';
|
||||
import React, { useContext, useState, useMemo, useCallback } from 'react';
|
||||
import React, { useContext, useState, useCallback } from 'react';
|
||||
import { isNumber } from 'lodash';
|
||||
import moment from 'moment';
|
||||
import dateMath from '@elastic/datemath';
|
||||
|
@ -32,34 +32,44 @@ interface MetricsTimeState {
|
|||
triggerRefresh: () => void;
|
||||
}
|
||||
|
||||
const parseRange = (range: MetricsTimeInput) => {
|
||||
const parsedFrom = dateMath.parse(range.from);
|
||||
const parsedTo = dateMath.parse(range.to, { roundUp: true });
|
||||
return {
|
||||
...range,
|
||||
from:
|
||||
(parsedFrom && parsedFrom.valueOf()) ||
|
||||
moment()
|
||||
.subtract(1, 'hour')
|
||||
.valueOf(),
|
||||
to: (parsedTo && parsedTo.valueOf()) || moment().valueOf(),
|
||||
};
|
||||
};
|
||||
|
||||
export const useMetricsTime = () => {
|
||||
const [isAutoReloading, setAutoReload] = useState(false);
|
||||
const [refreshInterval, setRefreshInterval] = useState(5000);
|
||||
const [lastRefresh, setLastRefresh] = useState<number>(moment().valueOf());
|
||||
const [timeRange, setTimeRange] = useState({
|
||||
const defaultRange = {
|
||||
from: 'now-1h',
|
||||
to: 'now',
|
||||
interval: '>=1m',
|
||||
});
|
||||
};
|
||||
const [isAutoReloading, setAutoReload] = useState(false);
|
||||
const [refreshInterval, setRefreshInterval] = useState(5000);
|
||||
const [lastRefresh, setLastRefresh] = useState<number>(moment().valueOf());
|
||||
const [timeRange, setTimeRange] = useState(defaultRange);
|
||||
|
||||
const parsedFrom = dateMath.parse(timeRange.from);
|
||||
const parsedTo = dateMath.parse(timeRange.to, { roundUp: true });
|
||||
const parsedTimeRange = useMemo(
|
||||
() => ({
|
||||
...timeRange,
|
||||
from:
|
||||
(parsedFrom && parsedFrom.valueOf()) ||
|
||||
moment()
|
||||
.subtract(1, 'hour')
|
||||
.valueOf(),
|
||||
to: (parsedTo && parsedTo.valueOf()) || moment().valueOf(),
|
||||
}),
|
||||
[parsedFrom, parsedTo, lastRefresh]
|
||||
const [parsedTimeRange, setParsedTimeRange] = useState(parseRange(defaultRange));
|
||||
|
||||
const updateTimeRange = useCallback(
|
||||
(range: MetricsTimeInput) => {
|
||||
setTimeRange(range);
|
||||
setParsedTimeRange(parseRange(range));
|
||||
},
|
||||
[setParsedTimeRange]
|
||||
);
|
||||
|
||||
return {
|
||||
timeRange,
|
||||
setTimeRange,
|
||||
setTimeRange: updateTimeRange,
|
||||
parsedTimeRange,
|
||||
refreshInterval,
|
||||
setRefreshInterval,
|
||||
|
@ -129,7 +139,13 @@ export const WithMetricsTimeUrlState = () => (
|
|||
}}
|
||||
onInitialize={initialUrlState => {
|
||||
if (initialUrlState && initialUrlState.time) {
|
||||
setTimeRange(initialUrlState.time);
|
||||
if (
|
||||
timeRange.from !== initialUrlState.time.from ||
|
||||
timeRange.to !== initialUrlState.time.to ||
|
||||
timeRange.interval !== initialUrlState.time.interval
|
||||
) {
|
||||
setTimeRange(initialUrlState.time);
|
||||
}
|
||||
}
|
||||
if (initialUrlState && initialUrlState.autoReload) {
|
||||
setAutoReload(true);
|
||||
|
|
|
@ -3,49 +3,29 @@
|
|||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import {
|
||||
EuiHideFor,
|
||||
EuiPage,
|
||||
EuiPageBody,
|
||||
EuiPageContent,
|
||||
EuiPageHeader,
|
||||
EuiPageHeaderSection,
|
||||
EuiTitle,
|
||||
} from '@elastic/eui';
|
||||
import { i18n } from '@kbn/i18n';
|
||||
import React, { useContext, useState } from 'react';
|
||||
import { UICapabilities } from 'ui/capabilities';
|
||||
import { injectUICapabilities } from 'ui/capabilities/react';
|
||||
import euiStyled, { EuiTheme, withTheme } from '../../../../../common/eui_styled_components';
|
||||
import { AutoSizer } from '../../components/auto_sizer';
|
||||
import { DocumentTitle } from '../../components/document_title';
|
||||
import { Header } from '../../components/header';
|
||||
import { MetricsSideNav } from './components/side_nav';
|
||||
import { MetricsTimeControls } from './components/time_controls';
|
||||
import { ColumnarPage, PageContent } from '../../components/page';
|
||||
import { WithMetrics } from './containers/with_metrics';
|
||||
import { WithMetricsTime, WithMetricsTimeUrlState } from './containers/with_metrics_time';
|
||||
import { InfraNodeType } from '../../graphql/types';
|
||||
import { withMetricPageProviders } from './page_providers';
|
||||
import { useMetadata } from '../../containers/metadata/use_metadata';
|
||||
import { Source } from '../../containers/source';
|
||||
import { InfraLoadingPanel } from '../../components/loading';
|
||||
import { NodeDetails } from './components/node_details';
|
||||
import { findInventoryModel } from '../../../common/inventory_models';
|
||||
import { PageError } from './components/page_error';
|
||||
import { NavItem, SideNavContext } from './lib/side_nav_context';
|
||||
import { PageBody } from './components/page_body';
|
||||
import { NavItem } from './lib/side_nav_context';
|
||||
import { NodeDetailsPage } from './components/node_details_page';
|
||||
|
||||
const DetailPageContent = euiStyled(PageContent)`
|
||||
overflow: auto;
|
||||
background-color: ${props => props.theme.eui.euiColorLightestShade};
|
||||
`;
|
||||
|
||||
const EuiPageContentWithRelative = euiStyled(EuiPageContent)`
|
||||
position: relative;
|
||||
`;
|
||||
|
||||
interface Props {
|
||||
theme: EuiTheme;
|
||||
match: {
|
||||
|
@ -132,72 +112,26 @@ export const MetricDetail = withMetricPageProviders(
|
|||
})}
|
||||
/>
|
||||
<DetailPageContent data-test-subj="infraMetricsPage">
|
||||
<WithMetrics
|
||||
<NodeDetailsPage
|
||||
name={name}
|
||||
requiredMetrics={filteredRequiredMetrics}
|
||||
sourceId={sourceId}
|
||||
timerange={parsedTimeRange}
|
||||
timeRange={timeRange}
|
||||
parsedTimeRange={parsedTimeRange}
|
||||
nodeType={nodeType}
|
||||
nodeId={nodeId}
|
||||
cloudId={cloudId}
|
||||
>
|
||||
{({ metrics, error, loading, refetch }) => {
|
||||
if (error) {
|
||||
return <PageError error={error} name={name} />;
|
||||
}
|
||||
return (
|
||||
<EuiPage style={{ flex: '1 0 auto' }}>
|
||||
<MetricsSideNav loading={metadataLoading} name={name} items={sideNav} />
|
||||
<AutoSizer content={false} bounds detectAnyWindowResize>
|
||||
{({ measureRef, bounds: { width = 0 } }) => {
|
||||
const w = width ? `${width}px` : `100%`;
|
||||
return (
|
||||
<MetricsDetailsPageColumn ref={measureRef}>
|
||||
<EuiPageBody style={{ width: w }}>
|
||||
<EuiPageHeader style={{ flex: '0 0 auto' }}>
|
||||
<EuiPageHeaderSection style={{ width: '100%' }}>
|
||||
<MetricsTitleTimeRangeContainer>
|
||||
<EuiHideFor sizes={['xs', 's']}>
|
||||
<EuiTitle size="m">
|
||||
<h1>{name}</h1>
|
||||
</EuiTitle>
|
||||
</EuiHideFor>
|
||||
<MetricsTimeControls
|
||||
currentTimeRange={timeRange}
|
||||
isLiveStreaming={isAutoReloading}
|
||||
refreshInterval={refreshInterval}
|
||||
setRefreshInterval={setRefreshInterval}
|
||||
onChangeTimeRange={setTimeRange}
|
||||
setAutoReload={setAutoReload}
|
||||
onRefresh={triggerRefresh}
|
||||
/>
|
||||
</MetricsTitleTimeRangeContainer>
|
||||
</EuiPageHeaderSection>
|
||||
</EuiPageHeader>
|
||||
<NodeDetails metadata={metadata} />
|
||||
<EuiPageContentWithRelative>
|
||||
<SideNavContext.Provider value={{ items: sideNav, addNavItem }}>
|
||||
<PageBody
|
||||
loading={
|
||||
metrics.length > 0 && isAutoReloading ? false : loading
|
||||
}
|
||||
refetch={refetch}
|
||||
type={nodeType}
|
||||
metrics={metrics}
|
||||
onChangeRangeTime={setTimeRange}
|
||||
isLiveStreaming={isAutoReloading}
|
||||
stopLiveStreaming={() => setAutoReload(false)}
|
||||
/>
|
||||
</SideNavContext.Provider>
|
||||
</EuiPageContentWithRelative>
|
||||
</EuiPageBody>
|
||||
</MetricsDetailsPageColumn>
|
||||
);
|
||||
}}
|
||||
</AutoSizer>
|
||||
</EuiPage>
|
||||
);
|
||||
}}
|
||||
</WithMetrics>
|
||||
metadataLoading={metadataLoading}
|
||||
isAutoReloading={isAutoReloading}
|
||||
refreshInterval={refreshInterval}
|
||||
sideNav={sideNav}
|
||||
metadata={metadata}
|
||||
addNavItem={addNavItem}
|
||||
setRefreshInterval={setRefreshInterval}
|
||||
setAutoReload={setAutoReload}
|
||||
triggerRefresh={triggerRefresh}
|
||||
setTimeRange={setTimeRange}
|
||||
/>
|
||||
</DetailPageContent>
|
||||
</ColumnarPage>
|
||||
)}
|
||||
|
@ -206,15 +140,3 @@ export const MetricDetail = withMetricPageProviders(
|
|||
})
|
||||
)
|
||||
);
|
||||
|
||||
const MetricsDetailsPageColumn = euiStyled.div`
|
||||
flex: 1 0 0%;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
`;
|
||||
|
||||
const MetricsTitleTimeRangeContainer = euiStyled.div`
|
||||
display: flex;
|
||||
flex-flow: row wrap;
|
||||
justify-content: space-between;
|
||||
`;
|
||||
|
|
|
@ -7,8 +7,6 @@
|
|||
import { rootSchema } from '../../common/graphql/root/schema.gql';
|
||||
import { sharedSchema } from '../../common/graphql/shared/schema.gql';
|
||||
import { logEntriesSchema } from './log_entries/schema.gql';
|
||||
import { metricsSchema } from './metrics/schema.gql';
|
||||
import { snapshotSchema } from './snapshot/schema.gql';
|
||||
import { sourceStatusSchema } from './source_status/schema.gql';
|
||||
import { sourcesSchema } from './sources/schema.gql';
|
||||
|
||||
|
@ -16,8 +14,6 @@ export const schemas = [
|
|||
rootSchema,
|
||||
sharedSchema,
|
||||
logEntriesSchema,
|
||||
snapshotSchema,
|
||||
sourcesSchema,
|
||||
sourceStatusSchema,
|
||||
metricsSchema,
|
||||
];
|
||||
|
|
|
@ -1,8 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
export { createMetricResolvers } from './resolvers';
|
||||
export { metricsSchema } from './schema.gql';
|
|
@ -1,55 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { failure } from 'io-ts/lib/PathReporter';
|
||||
|
||||
import { pipe } from 'fp-ts/lib/pipeable';
|
||||
import { fold } from 'fp-ts/lib/Either';
|
||||
import { identity } from 'rxjs';
|
||||
import { InfraSourceResolvers } from '../../graphql/types';
|
||||
import { InfraMetricsDomain } from '../../lib/domains/metrics_domain';
|
||||
import { SourceConfigurationRuntimeType } from '../../lib/sources';
|
||||
import { UsageCollector } from '../../usage/usage_collector';
|
||||
import { ChildResolverOf, InfraResolverOf } from '../../utils/typed_resolvers';
|
||||
import { QuerySourceResolver } from '../sources/resolvers';
|
||||
|
||||
type InfraSourceMetricsResolver = ChildResolverOf<
|
||||
InfraResolverOf<InfraSourceResolvers.MetricsResolver>,
|
||||
QuerySourceResolver
|
||||
>;
|
||||
|
||||
interface ResolverDeps {
|
||||
metrics: InfraMetricsDomain;
|
||||
}
|
||||
|
||||
export const createMetricResolvers = (
|
||||
libs: ResolverDeps
|
||||
): {
|
||||
InfraSource: {
|
||||
metrics: InfraSourceMetricsResolver;
|
||||
};
|
||||
} => ({
|
||||
InfraSource: {
|
||||
async metrics(source, args, { req }) {
|
||||
const sourceConfiguration = pipe(
|
||||
SourceConfigurationRuntimeType.decode(source.configuration),
|
||||
fold(errors => {
|
||||
throw new Error(failure(errors).join('\n'));
|
||||
}, identity)
|
||||
);
|
||||
|
||||
UsageCollector.countNode(args.nodeType);
|
||||
const options = {
|
||||
nodeIds: args.nodeIds,
|
||||
nodeType: args.nodeType,
|
||||
timerange: args.timerange,
|
||||
metrics: args.metrics,
|
||||
sourceConfiguration,
|
||||
};
|
||||
return libs.metrics.getMetrics(req, options);
|
||||
},
|
||||
},
|
||||
});
|
|
@ -1,80 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import gql from 'graphql-tag';
|
||||
|
||||
export const metricsSchema: any = gql`
|
||||
enum InfraMetric {
|
||||
hostSystemOverview
|
||||
hostCpuUsage
|
||||
hostFilesystem
|
||||
hostK8sOverview
|
||||
hostK8sCpuCap
|
||||
hostK8sDiskCap
|
||||
hostK8sMemoryCap
|
||||
hostK8sPodCap
|
||||
hostLoad
|
||||
hostMemoryUsage
|
||||
hostNetworkTraffic
|
||||
hostDockerOverview
|
||||
hostDockerInfo
|
||||
hostDockerTop5ByCpu
|
||||
hostDockerTop5ByMemory
|
||||
podOverview
|
||||
podCpuUsage
|
||||
podMemoryUsage
|
||||
podLogUsage
|
||||
podNetworkTraffic
|
||||
containerOverview
|
||||
containerCpuKernel
|
||||
containerCpuUsage
|
||||
containerDiskIOOps
|
||||
containerDiskIOBytes
|
||||
containerMemory
|
||||
containerNetworkTraffic
|
||||
nginxHits
|
||||
nginxRequestRate
|
||||
nginxActiveConnections
|
||||
nginxRequestsPerConnection
|
||||
awsOverview
|
||||
awsCpuUtilization
|
||||
awsNetworkBytes
|
||||
awsNetworkPackets
|
||||
awsDiskioBytes
|
||||
awsDiskioOps
|
||||
custom
|
||||
}
|
||||
|
||||
type InfraMetricData {
|
||||
id: InfraMetric
|
||||
series: [InfraDataSeries!]!
|
||||
}
|
||||
|
||||
type InfraDataSeries {
|
||||
id: ID!
|
||||
label: String!
|
||||
data: [InfraDataPoint!]!
|
||||
}
|
||||
|
||||
type InfraDataPoint {
|
||||
timestamp: Float!
|
||||
value: Float
|
||||
}
|
||||
|
||||
input InfraNodeIdsInput {
|
||||
nodeId: ID!
|
||||
cloudId: ID
|
||||
}
|
||||
|
||||
extend type InfraSource {
|
||||
metrics(
|
||||
nodeIds: InfraNodeIdsInput!
|
||||
nodeType: InfraNodeType!
|
||||
timerange: InfraTimerangeInput!
|
||||
metrics: [InfraMetric!]!
|
||||
): [InfraMetricData!]!
|
||||
}
|
||||
`;
|
|
@ -1,8 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
export { createSnapshotResolvers } from './resolvers';
|
||||
export { snapshotSchema } from './schema.gql';
|
|
@ -1,70 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { InfraSnapshotResponseResolvers, InfraSourceResolvers } from '../../graphql/types';
|
||||
import { InfraSnapshotRequestOptions } from '../../lib/snapshot';
|
||||
import { InfraSnapshot } from '../../lib/snapshot';
|
||||
import { UsageCollector } from '../../usage/usage_collector';
|
||||
import { parseFilterQuery } from '../../utils/serialized_query';
|
||||
import { ChildResolverOf, InfraResolverOf, ResultOf } from '../../utils/typed_resolvers';
|
||||
import { QuerySourceResolver } from '../sources/resolvers';
|
||||
|
||||
type InfraSourceSnapshotResolver = ChildResolverOf<
|
||||
InfraResolverOf<
|
||||
InfraSourceResolvers.SnapshotResolver<
|
||||
{
|
||||
source: ResultOf<QuerySourceResolver>;
|
||||
} & InfraSourceResolvers.SnapshotArgs
|
||||
>
|
||||
>,
|
||||
QuerySourceResolver
|
||||
>;
|
||||
|
||||
type InfraNodesResolver = ChildResolverOf<
|
||||
InfraResolverOf<InfraSnapshotResponseResolvers.NodesResolver>,
|
||||
InfraSourceSnapshotResolver
|
||||
>;
|
||||
|
||||
interface SnapshotResolversDeps {
|
||||
snapshot: InfraSnapshot;
|
||||
}
|
||||
|
||||
export const createSnapshotResolvers = (
|
||||
libs: SnapshotResolversDeps
|
||||
): {
|
||||
InfraSource: {
|
||||
snapshot: InfraSourceSnapshotResolver;
|
||||
};
|
||||
InfraSnapshotResponse: {
|
||||
nodes: InfraNodesResolver;
|
||||
};
|
||||
} => ({
|
||||
InfraSource: {
|
||||
async snapshot(source, args) {
|
||||
return {
|
||||
source,
|
||||
timerange: args.timerange,
|
||||
filterQuery: args.filterQuery,
|
||||
};
|
||||
},
|
||||
},
|
||||
InfraSnapshotResponse: {
|
||||
async nodes(snapshotResponse, args, { req }) {
|
||||
const { source, timerange, filterQuery } = snapshotResponse;
|
||||
UsageCollector.countNode(args.type);
|
||||
const options: InfraSnapshotRequestOptions = {
|
||||
filterQuery: parseFilterQuery(filterQuery),
|
||||
nodeType: args.type,
|
||||
groupBy: args.groupBy,
|
||||
sourceConfiguration: source.configuration,
|
||||
metric: args.metric,
|
||||
timerange,
|
||||
};
|
||||
|
||||
return await libs.snapshot.getNodes(req, options);
|
||||
},
|
||||
},
|
||||
});
|
|
@ -1,72 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import gql from 'graphql-tag';
|
||||
|
||||
export const snapshotSchema: any = gql`
|
||||
type InfraSnapshotNodeMetric {
|
||||
name: InfraSnapshotMetricType!
|
||||
value: Float
|
||||
avg: Float
|
||||
max: Float
|
||||
}
|
||||
|
||||
type InfraSnapshotNodePath {
|
||||
value: String!
|
||||
label: String!
|
||||
ip: String
|
||||
}
|
||||
|
||||
type InfraSnapshotNode {
|
||||
path: [InfraSnapshotNodePath!]!
|
||||
metric: InfraSnapshotNodeMetric!
|
||||
}
|
||||
|
||||
input InfraTimerangeInput {
|
||||
"The interval string to use for last bucket. The format is '{value}{unit}'. For example '5m' would return the metrics for the last 5 minutes of the timespan."
|
||||
interval: String!
|
||||
"The end of the timerange"
|
||||
to: Float!
|
||||
"The beginning of the timerange"
|
||||
from: Float!
|
||||
}
|
||||
|
||||
enum InfraSnapshotMetricType {
|
||||
count
|
||||
cpu
|
||||
load
|
||||
memory
|
||||
tx
|
||||
rx
|
||||
logRate
|
||||
}
|
||||
|
||||
input InfraSnapshotMetricInput {
|
||||
"The type of metric"
|
||||
type: InfraSnapshotMetricType!
|
||||
}
|
||||
|
||||
input InfraSnapshotGroupbyInput {
|
||||
"The label to use in the results for the group by for the terms group by"
|
||||
label: String
|
||||
"The field to group by from a terms aggregation, this is ignored by the filter type"
|
||||
field: String
|
||||
}
|
||||
|
||||
type InfraSnapshotResponse {
|
||||
"Nodes of type host, container or pod grouped by 0, 1 or 2 terms"
|
||||
nodes(
|
||||
type: InfraNodeType!
|
||||
groupBy: [InfraSnapshotGroupbyInput!]!
|
||||
metric: InfraSnapshotMetricInput!
|
||||
): [InfraSnapshotNode!]!
|
||||
}
|
||||
|
||||
extend type InfraSource {
|
||||
"A snapshot of nodes"
|
||||
snapshot(timerange: InfraTimerangeInput!, filterQuery: String): InfraSnapshotResponse
|
||||
}
|
||||
`;
|
|
@ -8,8 +8,6 @@ import { IResolvers, makeExecutableSchema } from 'graphql-tools';
|
|||
import { initIpToHostName } from './routes/ip_to_hostname';
|
||||
import { schemas } from './graphql';
|
||||
import { createLogEntriesResolvers } from './graphql/log_entries';
|
||||
import { createMetricResolvers } from './graphql/metrics/resolvers';
|
||||
import { createSnapshotResolvers } from './graphql/snapshot';
|
||||
import { createSourceStatusResolvers } from './graphql/source_status';
|
||||
import { createSourcesResolvers } from './graphql/sources';
|
||||
import { InfraBackendLibs } from './lib/infra_types';
|
||||
|
@ -19,15 +17,15 @@ import {
|
|||
} from './routes/log_analysis';
|
||||
import { initMetricExplorerRoute } from './routes/metrics_explorer';
|
||||
import { initMetadataRoute } from './routes/metadata';
|
||||
import { initSnapshotRoute } from './routes/snapshot';
|
||||
import { initNodeDetailsRoute } from './routes/node_details';
|
||||
|
||||
export const initInfraServer = (libs: InfraBackendLibs) => {
|
||||
const schema = makeExecutableSchema({
|
||||
resolvers: [
|
||||
createLogEntriesResolvers(libs) as IResolvers,
|
||||
createSnapshotResolvers(libs) as IResolvers,
|
||||
createSourcesResolvers(libs) as IResolvers,
|
||||
createSourceStatusResolvers(libs) as IResolvers,
|
||||
createMetricResolvers(libs) as IResolvers,
|
||||
],
|
||||
typeDefs: schemas,
|
||||
});
|
||||
|
@ -36,6 +34,8 @@ export const initInfraServer = (libs: InfraBackendLibs) => {
|
|||
|
||||
initIpToHostName(libs);
|
||||
initLogAnalysisGetLogEntryRateRoute(libs);
|
||||
initSnapshotRoute(libs);
|
||||
initNodeDetailsRoute(libs);
|
||||
initIndexPatternsValidateRoute(libs);
|
||||
initMetricExplorerRoute(libs);
|
||||
initMetadataRoute(libs);
|
||||
|
|
|
@ -7,11 +7,11 @@
|
|||
import { i18n } from '@kbn/i18n';
|
||||
import { flatten, get } from 'lodash';
|
||||
|
||||
import Boom from 'boom';
|
||||
import { InfraMetric, InfraMetricData, InfraNodeType } from '../../../graphql/types';
|
||||
import { InfraBackendFrameworkAdapter, InfraFrameworkRequest } from '../framework';
|
||||
import { InfraMetricsAdapter, InfraMetricsRequestOptions } from './adapter_types';
|
||||
import { checkValidNode } from './lib/check_valid_node';
|
||||
import { InvalidNodeError } from './lib/errors';
|
||||
import { metrics } from '../../../../common/inventory_models';
|
||||
import { TSVBMetricModelCreator } from '../../../../common/inventory_models/types';
|
||||
import { calculateMetricInterval } from '../../../utils/calculate_metric_interval';
|
||||
|
@ -39,7 +39,7 @@ export class KibanaMetricsAdapter implements InfraMetricsAdapter {
|
|||
|
||||
const validNode = await checkValidNode(search, indexPattern, nodeField, options.nodeIds.nodeId);
|
||||
if (!validNode) {
|
||||
throw new InvalidNodeError(
|
||||
throw Boom.notFound(
|
||||
i18n.translate('xpack.infra.kibanaMetrics.nodeDoesNotExistErrorMessage', {
|
||||
defaultMessage: '{nodeId} does not exist.',
|
||||
values: {
|
||||
|
@ -97,7 +97,7 @@ export class KibanaMetricsAdapter implements InfraMetricsAdapter {
|
|||
) {
|
||||
const createTSVBModel = get(metrics, ['tsvb', metricId]) as TSVBMetricModelCreator | undefined;
|
||||
if (!createTSVBModel) {
|
||||
throw new Error(
|
||||
throw Boom.badRequest(
|
||||
i18n.translate('xpack.infra.metrics.missingTSVBModelError', {
|
||||
defaultMessage: 'The TSVB model for {metricId} does not exist for {nodeType}',
|
||||
values: {
|
||||
|
@ -135,7 +135,7 @@ export class KibanaMetricsAdapter implements InfraMetricsAdapter {
|
|||
}
|
||||
|
||||
if (model.id_type === 'cloud' && !options.nodeIds.cloudId) {
|
||||
throw new InvalidNodeError(
|
||||
throw Boom.badRequest(
|
||||
i18n.translate('xpack.infra.kibanaMetrics.cloudIdMissingErrorMessage', {
|
||||
defaultMessage:
|
||||
'Model for {metricId} requires a cloudId, but none was given for {nodeId}.',
|
||||
|
|
|
@ -0,0 +1,56 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
import Boom from 'boom';
|
||||
import { boomify } from 'boom';
|
||||
import { pipe } from 'fp-ts/lib/pipeable';
|
||||
import { fold } from 'fp-ts/lib/Either';
|
||||
import { identity } from 'fp-ts/lib/function';
|
||||
import { InfraBackendLibs } from '../../lib/infra_types';
|
||||
import { UsageCollector } from '../../usage/usage_collector';
|
||||
import { InfraMetricsRequestOptions } from '../../lib/adapters/metrics';
|
||||
import { InfraNodeType, InfraMetric } from '../../graphql/types';
|
||||
import {
|
||||
NodeDetailsWrappedRequest,
|
||||
NodeDetailsRequestRT,
|
||||
NodeDetailsMetricDataResponse,
|
||||
} from '../../../common/http_api/node_details_api';
|
||||
import { throwErrors } from '../../../common/runtime_types';
|
||||
|
||||
export const initNodeDetailsRoute = (libs: InfraBackendLibs) => {
|
||||
const { framework } = libs;
|
||||
|
||||
framework.registerRoute<NodeDetailsWrappedRequest, Promise<NodeDetailsMetricDataResponse>>({
|
||||
method: 'POST',
|
||||
path: '/api/metrics/node_details',
|
||||
handler: async req => {
|
||||
const { nodeId, cloudId, nodeType, metrics, timerange, sourceId } = pipe(
|
||||
NodeDetailsRequestRT.decode(req.payload),
|
||||
fold(throwErrors(Boom.badRequest), identity)
|
||||
);
|
||||
try {
|
||||
const source = await libs.sources.getSourceConfiguration(req, sourceId);
|
||||
|
||||
UsageCollector.countNode(nodeType);
|
||||
const options: InfraMetricsRequestOptions = {
|
||||
nodeIds: {
|
||||
nodeId,
|
||||
cloudId,
|
||||
},
|
||||
nodeType: nodeType as InfraNodeType,
|
||||
sourceConfiguration: source.configuration,
|
||||
metrics: metrics as InfraMetric[],
|
||||
timerange,
|
||||
};
|
||||
|
||||
return {
|
||||
metrics: await libs.metrics.getMetrics(req, options),
|
||||
};
|
||||
} catch (e) {
|
||||
throw boomify(e);
|
||||
}
|
||||
},
|
||||
});
|
||||
};
|
48
x-pack/legacy/plugins/infra/server/routes/snapshot/index.ts
Normal file
48
x-pack/legacy/plugins/infra/server/routes/snapshot/index.ts
Normal file
|
@ -0,0 +1,48 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
import Boom from 'boom';
|
||||
import { pipe } from 'fp-ts/lib/pipeable';
|
||||
import { fold } from 'fp-ts/lib/Either';
|
||||
import { identity } from 'fp-ts/lib/function';
|
||||
import { InfraBackendLibs } from '../../lib/infra_types';
|
||||
import { InfraSnapshotRequestOptions } from '../../lib/snapshot';
|
||||
import { UsageCollector } from '../../usage/usage_collector';
|
||||
import { parseFilterQuery } from '../../utils/serialized_query';
|
||||
import { InfraNodeType, InfraSnapshotMetricInput } from '../../../public/graphql/types';
|
||||
import {
|
||||
SnapshotRequestRT,
|
||||
SnapshotWrappedRequest,
|
||||
SnapshotNodeResponse,
|
||||
} from '../../../common/http_api/snapshot_api';
|
||||
import { throwErrors } from '../../../common/runtime_types';
|
||||
|
||||
export const initSnapshotRoute = (libs: InfraBackendLibs) => {
|
||||
const { framework } = libs;
|
||||
|
||||
framework.registerRoute<SnapshotWrappedRequest, Promise<SnapshotNodeResponse>>({
|
||||
method: 'POST',
|
||||
path: '/api/metrics/snapshot',
|
||||
handler: async req => {
|
||||
const { filterQuery, nodeType, groupBy, sourceId, metric, timerange } = pipe(
|
||||
SnapshotRequestRT.decode(req.payload),
|
||||
fold(throwErrors(Boom.badRequest), identity)
|
||||
);
|
||||
const source = await libs.sources.getSourceConfiguration(req, sourceId);
|
||||
UsageCollector.countNode(nodeType);
|
||||
const options: InfraSnapshotRequestOptions = {
|
||||
filterQuery: parseFilterQuery(filterQuery),
|
||||
// TODO: Use common infra metric and replace graphql type
|
||||
nodeType: nodeType as InfraNodeType,
|
||||
groupBy,
|
||||
sourceConfiguration: source.configuration,
|
||||
// TODO: Use common infra metric and replace graphql type
|
||||
metric: metric as InfraSnapshotMetricInput,
|
||||
timerange,
|
||||
};
|
||||
return { nodes: await libs.snapshot.getNodes(req, options) };
|
||||
},
|
||||
});
|
||||
};
|
|
@ -6,6 +6,8 @@
|
|||
|
||||
import { UsageCollectionSetup } from 'src/plugins/usage_collection/server';
|
||||
import { InfraNodeType } from '../graphql/types';
|
||||
import { InventoryItemType } from '../../common/inventory_models/types';
|
||||
|
||||
const KIBANA_REPORTING_TYPE = 'infraops';
|
||||
|
||||
interface InfraopsSum {
|
||||
|
@ -31,7 +33,7 @@ export class UsageCollector {
|
|||
});
|
||||
}
|
||||
|
||||
public static countNode(nodeType: InfraNodeType) {
|
||||
public static countNode(nodeType: InventoryItemType) {
|
||||
const bucket = this.getBucket();
|
||||
this.maybeInitializeBucket(bucket);
|
||||
|
||||
|
|
|
@ -7,72 +7,94 @@
|
|||
import expect from '@kbn/expect';
|
||||
import { first, last } from 'lodash';
|
||||
|
||||
import { metricsQuery } from '../../../../legacy/plugins/infra/public/pages/metrics/containers/metrics.gql_query';
|
||||
import { MetricsQuery } from '../../../../legacy/plugins/infra/public/graphql/types';
|
||||
import { InventoryMetric } from '../../../../legacy/plugins/infra/common/inventory_models/types';
|
||||
import {
|
||||
InfraNodeType,
|
||||
InfraTimerangeInput,
|
||||
} from '../../../../legacy/plugins/infra/public/graphql/types';
|
||||
import { FtrProviderContext } from '../../ftr_provider_context';
|
||||
|
||||
import { DATES } from './constants';
|
||||
|
||||
import { NodeDetailsMetricDataResponse } from '../../../../legacy/plugins/infra/common/http_api/node_details_api';
|
||||
const { min, max } = DATES['7.0.0'].hosts;
|
||||
|
||||
interface NodeDetailsRequest {
|
||||
metrics: InventoryMetric[];
|
||||
nodeId: string;
|
||||
nodeType: InfraNodeType;
|
||||
sourceId: string;
|
||||
timerange: InfraTimerangeInput;
|
||||
cloudId?: string;
|
||||
}
|
||||
|
||||
export default function({ getService }: FtrProviderContext) {
|
||||
const esArchiver = getService('esArchiver');
|
||||
const client = getService('infraOpsGraphQLClient');
|
||||
const supertest = getService('supertest');
|
||||
|
||||
describe('metrics', () => {
|
||||
before(() => esArchiver.load('infra/7.0.0/hosts'));
|
||||
after(() => esArchiver.unload('infra/7.0.0/hosts'));
|
||||
|
||||
const fetchNodeDetails = async (
|
||||
body: NodeDetailsRequest
|
||||
): Promise<NodeDetailsMetricDataResponse | undefined> => {
|
||||
const response = await supertest
|
||||
.post('/api/metrics/node_details')
|
||||
.set('kbn-xsrf', 'xxx')
|
||||
.send(body)
|
||||
.expect(200);
|
||||
return response.body;
|
||||
};
|
||||
|
||||
it('should basically work', () => {
|
||||
return client
|
||||
.query<MetricsQuery.Query>({
|
||||
query: metricsQuery,
|
||||
variables: {
|
||||
sourceId: 'default',
|
||||
metrics: ['hostCpuUsage'],
|
||||
timerange: {
|
||||
to: max,
|
||||
from: min,
|
||||
interval: '>=1m',
|
||||
},
|
||||
nodeId: 'demo-stack-mysql-01',
|
||||
nodeType: 'host',
|
||||
},
|
||||
})
|
||||
.then(resp => {
|
||||
const { metrics } = resp.data.source;
|
||||
expect(metrics.length).to.equal(1);
|
||||
const metric = first(metrics);
|
||||
expect(metric).to.have.property('id', 'hostCpuUsage');
|
||||
expect(metric).to.have.property('series');
|
||||
const series = first(metric.series);
|
||||
expect(series).to.have.property('id', 'user');
|
||||
expect(series).to.have.property('data');
|
||||
const datapoint = last(series.data);
|
||||
expect(datapoint).to.have.property('timestamp', 1547571720000);
|
||||
expect(datapoint).to.have.property('value', 0.0018333333333333333);
|
||||
});
|
||||
const data = fetchNodeDetails({
|
||||
sourceId: 'default',
|
||||
metrics: ['hostCpuUsage'],
|
||||
timerange: {
|
||||
to: max,
|
||||
from: min,
|
||||
interval: '>=1m',
|
||||
},
|
||||
nodeId: 'demo-stack-mysql-01',
|
||||
nodeType: 'host' as InfraNodeType,
|
||||
});
|
||||
return data.then(resp => {
|
||||
if (!resp) {
|
||||
return;
|
||||
}
|
||||
expect(resp.metrics.length).to.equal(1);
|
||||
const metric = first(resp.metrics);
|
||||
expect(metric).to.have.property('id', 'hostCpuUsage');
|
||||
expect(metric).to.have.property('series');
|
||||
const series = first(metric.series);
|
||||
expect(series).to.have.property('id', 'user');
|
||||
expect(series).to.have.property('data');
|
||||
const datapoint = last(series.data);
|
||||
expect(datapoint).to.have.property('timestamp', 1547571720000);
|
||||
expect(datapoint).to.have.property('value', 0.0018333333333333333);
|
||||
});
|
||||
});
|
||||
|
||||
it('should support multiple metrics', () => {
|
||||
return client
|
||||
.query<MetricsQuery.Query>({
|
||||
query: metricsQuery,
|
||||
variables: {
|
||||
sourceId: 'default',
|
||||
metrics: ['hostCpuUsage', 'hostLoad'],
|
||||
timerange: {
|
||||
to: max,
|
||||
from: min,
|
||||
interval: '>=1m',
|
||||
},
|
||||
nodeId: 'demo-stack-mysql-01',
|
||||
nodeType: 'host',
|
||||
},
|
||||
})
|
||||
.then(resp => {
|
||||
const { metrics } = resp.data.source;
|
||||
expect(metrics.length).to.equal(2);
|
||||
});
|
||||
const data = fetchNodeDetails({
|
||||
sourceId: 'default',
|
||||
metrics: ['hostCpuUsage', 'hostLoad'],
|
||||
timerange: {
|
||||
to: max,
|
||||
from: min,
|
||||
interval: '>=1m',
|
||||
},
|
||||
nodeId: 'demo-stack-mysql-01',
|
||||
nodeType: 'host' as InfraNodeType,
|
||||
});
|
||||
return data.then(resp => {
|
||||
if (!resp) {
|
||||
return;
|
||||
}
|
||||
|
||||
expect(resp.metrics.length).to.equal(2);
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
|
|
@ -7,15 +7,38 @@
|
|||
import expect from '@kbn/expect';
|
||||
import { first, last } from 'lodash';
|
||||
|
||||
import { waffleNodesQuery } from '../../../../legacy/plugins/infra/public/containers/waffle/waffle_nodes.gql_query';
|
||||
import { WaffleNodesQuery } from '../../../../legacy/plugins/infra/public/graphql/types';
|
||||
import {
|
||||
InfraSnapshotMetricInput,
|
||||
InfraNodeType,
|
||||
InfraTimerangeInput,
|
||||
InfraSnapshotGroupbyInput,
|
||||
} from '../../../../legacy/plugins/infra/server/graphql/types';
|
||||
import { FtrProviderContext } from '../../ftr_provider_context';
|
||||
|
||||
import { SnapshotNodeResponse } from '../../../../legacy/plugins/infra/common/http_api/snapshot_api';
|
||||
import { DATES } from './constants';
|
||||
|
||||
interface SnapshotRequest {
|
||||
filterQuery?: string | null;
|
||||
metric: InfraSnapshotMetricInput;
|
||||
groupBy: InfraSnapshotGroupbyInput[];
|
||||
nodeType: InfraNodeType;
|
||||
sourceId: string;
|
||||
timerange: InfraTimerangeInput;
|
||||
}
|
||||
|
||||
export default function({ getService }: FtrProviderContext) {
|
||||
const esArchiver = getService('esArchiver');
|
||||
const client = getService('infraOpsGraphQLClient');
|
||||
const supertest = getService('supertest');
|
||||
const fetchSnapshot = async (
|
||||
body: SnapshotRequest
|
||||
): Promise<SnapshotNodeResponse | undefined> => {
|
||||
const response = await supertest
|
||||
.post('/api/metrics/snapshot')
|
||||
.set('kbn-xsrf', 'xxx')
|
||||
.send(body)
|
||||
.expect(200);
|
||||
return response.body;
|
||||
};
|
||||
|
||||
describe('waffle nodes', () => {
|
||||
describe('6.6.0', () => {
|
||||
|
@ -24,48 +47,43 @@ export default function({ getService }: FtrProviderContext) {
|
|||
after(() => esArchiver.unload('infra/6.6.0/docker'));
|
||||
|
||||
it('should basically work', () => {
|
||||
return client
|
||||
.query<WaffleNodesQuery.Query>({
|
||||
query: waffleNodesQuery,
|
||||
variables: {
|
||||
sourceId: 'default',
|
||||
timerange: {
|
||||
to: max,
|
||||
from: min,
|
||||
interval: '1m',
|
||||
},
|
||||
metric: { type: 'cpu' },
|
||||
type: 'container',
|
||||
groupBy: [],
|
||||
},
|
||||
})
|
||||
.then(resp => {
|
||||
const { snapshot } = resp.data.source;
|
||||
expect(snapshot).to.have.property('nodes');
|
||||
if (snapshot) {
|
||||
const { nodes } = snapshot;
|
||||
expect(nodes.length).to.equal(5);
|
||||
const firstNode = first(nodes);
|
||||
expect(firstNode).to.have.property('path');
|
||||
expect(firstNode.path.length).to.equal(1);
|
||||
expect(first(firstNode.path)).to.have.property(
|
||||
'value',
|
||||
'242fddb9d376bbf0e38025d81764847ee5ec0308adfa095918fd3266f9d06c6a'
|
||||
);
|
||||
expect(first(firstNode.path)).to.have.property(
|
||||
'label',
|
||||
'docker-autodiscovery_nginx_1'
|
||||
);
|
||||
expect(firstNode).to.have.property('metric');
|
||||
expect(firstNode.metric).to.eql({
|
||||
name: 'cpu',
|
||||
value: 0,
|
||||
max: 0,
|
||||
avg: 0,
|
||||
__typename: 'InfraSnapshotNodeMetric',
|
||||
});
|
||||
}
|
||||
});
|
||||
const resp = fetchSnapshot({
|
||||
sourceId: 'default',
|
||||
timerange: {
|
||||
to: max,
|
||||
from: min,
|
||||
interval: '1m',
|
||||
},
|
||||
metric: { type: 'cpu' } as InfraSnapshotMetricInput,
|
||||
nodeType: 'container' as InfraNodeType,
|
||||
groupBy: [],
|
||||
});
|
||||
return resp.then(data => {
|
||||
if (!resp) {
|
||||
return;
|
||||
}
|
||||
const snapshot = data;
|
||||
expect(snapshot).to.have.property('nodes');
|
||||
if (snapshot) {
|
||||
const { nodes } = snapshot;
|
||||
expect(nodes.length).to.equal(5);
|
||||
const firstNode = first(nodes);
|
||||
expect(firstNode).to.have.property('path');
|
||||
expect(firstNode.path.length).to.equal(1);
|
||||
expect(first(firstNode.path)).to.have.property(
|
||||
'value',
|
||||
'242fddb9d376bbf0e38025d81764847ee5ec0308adfa095918fd3266f9d06c6a'
|
||||
);
|
||||
expect(first(firstNode.path)).to.have.property('label', 'docker-autodiscovery_nginx_1');
|
||||
expect(firstNode).to.have.property('metric');
|
||||
expect(firstNode.metric).to.eql({
|
||||
name: 'cpu',
|
||||
value: 0,
|
||||
max: 0,
|
||||
avg: 0,
|
||||
});
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -75,76 +93,68 @@ export default function({ getService }: FtrProviderContext) {
|
|||
after(() => esArchiver.unload('infra/8.0.0/logs_and_metrics'));
|
||||
|
||||
it("should use the id for the label when the name doesn't exist", () => {
|
||||
return client
|
||||
.query<WaffleNodesQuery.Query>({
|
||||
query: waffleNodesQuery,
|
||||
variables: {
|
||||
sourceId: 'default',
|
||||
timerange: {
|
||||
to: max,
|
||||
from: min,
|
||||
interval: '1m',
|
||||
},
|
||||
metric: { type: 'cpu' },
|
||||
type: 'pod',
|
||||
groupBy: [],
|
||||
},
|
||||
})
|
||||
.then(resp => {
|
||||
const { snapshot } = resp.data.source;
|
||||
expect(snapshot).to.have.property('nodes');
|
||||
if (snapshot) {
|
||||
const { nodes } = snapshot;
|
||||
expect(nodes.length).to.equal(65);
|
||||
const firstNode = first(nodes);
|
||||
expect(firstNode).to.have.property('path');
|
||||
expect(firstNode.path.length).to.equal(1);
|
||||
expect(first(firstNode.path)).to.have.property(
|
||||
'value',
|
||||
'00597dd7-a348-11e9-9a96-42010a84004d'
|
||||
);
|
||||
expect(first(firstNode.path)).to.have.property(
|
||||
'label',
|
||||
'00597dd7-a348-11e9-9a96-42010a84004d'
|
||||
);
|
||||
}
|
||||
});
|
||||
const resp = fetchSnapshot({
|
||||
sourceId: 'default',
|
||||
timerange: {
|
||||
to: max,
|
||||
from: min,
|
||||
interval: '1m',
|
||||
},
|
||||
metric: { type: 'cpu' } as InfraSnapshotMetricInput,
|
||||
nodeType: 'pod' as InfraNodeType,
|
||||
groupBy: [],
|
||||
});
|
||||
return resp.then(data => {
|
||||
const snapshot = data;
|
||||
expect(snapshot).to.have.property('nodes');
|
||||
if (snapshot) {
|
||||
const { nodes } = snapshot;
|
||||
expect(nodes.length).to.equal(65);
|
||||
const firstNode = first(nodes);
|
||||
expect(firstNode).to.have.property('path');
|
||||
expect(firstNode.path.length).to.equal(1);
|
||||
expect(first(firstNode.path)).to.have.property(
|
||||
'value',
|
||||
'00597dd7-a348-11e9-9a96-42010a84004d'
|
||||
);
|
||||
expect(first(firstNode.path)).to.have.property(
|
||||
'label',
|
||||
'00597dd7-a348-11e9-9a96-42010a84004d'
|
||||
);
|
||||
}
|
||||
});
|
||||
});
|
||||
it('should have an id and label', () => {
|
||||
return client
|
||||
.query<WaffleNodesQuery.Query>({
|
||||
query: waffleNodesQuery,
|
||||
variables: {
|
||||
sourceId: 'default',
|
||||
timerange: {
|
||||
to: max,
|
||||
from: min,
|
||||
interval: '1m',
|
||||
},
|
||||
metric: { type: 'cpu' },
|
||||
type: 'container',
|
||||
groupBy: [],
|
||||
},
|
||||
})
|
||||
.then(resp => {
|
||||
const { snapshot } = resp.data.source;
|
||||
expect(snapshot).to.have.property('nodes');
|
||||
if (snapshot) {
|
||||
const { nodes } = snapshot;
|
||||
expect(nodes.length).to.equal(136);
|
||||
const firstNode = first(nodes);
|
||||
expect(firstNode).to.have.property('path');
|
||||
expect(firstNode.path.length).to.equal(1);
|
||||
expect(first(firstNode.path)).to.have.property(
|
||||
'value',
|
||||
'01078c21eef4194b0b96253c7c6c32796aba66e3f3f37e26ac97d1dff3e2e91a'
|
||||
);
|
||||
expect(first(firstNode.path)).to.have.property(
|
||||
'label',
|
||||
'k8s_prometheus-to-sd-exporter_fluentd-gcp-v3.2.0-wcmm4_kube-system_b214d17a-9ae0-11e9-9a96-42010a84004d_0'
|
||||
);
|
||||
}
|
||||
});
|
||||
const resp = fetchSnapshot({
|
||||
sourceId: 'default',
|
||||
timerange: {
|
||||
to: max,
|
||||
from: min,
|
||||
interval: '1m',
|
||||
},
|
||||
metric: { type: 'cpu' } as InfraSnapshotMetricInput,
|
||||
nodeType: 'container' as InfraNodeType,
|
||||
groupBy: [],
|
||||
});
|
||||
return resp.then(data => {
|
||||
const snapshot = data;
|
||||
expect(snapshot).to.have.property('nodes');
|
||||
if (snapshot) {
|
||||
const { nodes } = snapshot;
|
||||
expect(nodes.length).to.equal(136);
|
||||
const firstNode = first(nodes);
|
||||
expect(firstNode).to.have.property('path');
|
||||
expect(firstNode.path.length).to.equal(1);
|
||||
expect(first(firstNode.path)).to.have.property(
|
||||
'value',
|
||||
'01078c21eef4194b0b96253c7c6c32796aba66e3f3f37e26ac97d1dff3e2e91a'
|
||||
);
|
||||
expect(first(firstNode.path)).to.have.property(
|
||||
'label',
|
||||
'k8s_prometheus-to-sd-exporter_fluentd-gcp-v3.2.0-wcmm4_kube-system_b214d17a-9ae0-11e9-9a96-42010a84004d_0'
|
||||
);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -154,157 +164,139 @@ export default function({ getService }: FtrProviderContext) {
|
|||
// Teardown: unload the hosts ES archive loaded for this suite.
after(() => esArchiver.unload('infra/7.0.0/hosts'));
|
||||
|
||||
it('should basically work', () => {
|
||||
return client
|
||||
.query<WaffleNodesQuery.Query>({
|
||||
query: waffleNodesQuery,
|
||||
variables: {
|
||||
sourceId: 'default',
|
||||
timerange: {
|
||||
to: max,
|
||||
from: min,
|
||||
interval: '1m',
|
||||
},
|
||||
metric: { type: 'cpu' },
|
||||
type: 'host',
|
||||
groupBy: [],
|
||||
},
|
||||
})
|
||||
.then(resp => {
|
||||
const { snapshot } = resp.data.source;
|
||||
expect(snapshot).to.have.property('nodes');
|
||||
if (snapshot) {
|
||||
const { nodes } = snapshot;
|
||||
expect(nodes.length).to.equal(1);
|
||||
const firstNode = first(nodes);
|
||||
expect(firstNode).to.have.property('path');
|
||||
expect(firstNode.path.length).to.equal(1);
|
||||
expect(first(firstNode.path)).to.have.property('value', 'demo-stack-mysql-01');
|
||||
expect(first(firstNode.path)).to.have.property('label', 'demo-stack-mysql-01');
|
||||
expect(firstNode).to.have.property('metric');
|
||||
expect(firstNode.metric).to.eql({
|
||||
name: 'cpu',
|
||||
value: 0.003666666666666667,
|
||||
avg: 0.00809090909090909,
|
||||
max: 0.057833333333333334,
|
||||
__typename: 'InfraSnapshotNodeMetric',
|
||||
});
|
||||
}
|
||||
});
|
||||
const resp = fetchSnapshot({
|
||||
sourceId: 'default',
|
||||
timerange: {
|
||||
to: max,
|
||||
from: min,
|
||||
interval: '1m',
|
||||
},
|
||||
metric: { type: 'cpu' } as InfraSnapshotMetricInput,
|
||||
nodeType: 'host' as InfraNodeType,
|
||||
groupBy: [],
|
||||
});
|
||||
return resp.then(data => {
|
||||
const snapshot = data;
|
||||
expect(snapshot).to.have.property('nodes');
|
||||
if (snapshot) {
|
||||
const { nodes } = snapshot;
|
||||
expect(nodes.length).to.equal(1);
|
||||
const firstNode = first(nodes);
|
||||
expect(firstNode).to.have.property('path');
|
||||
expect(firstNode.path.length).to.equal(1);
|
||||
expect(first(firstNode.path)).to.have.property('value', 'demo-stack-mysql-01');
|
||||
expect(first(firstNode.path)).to.have.property('label', 'demo-stack-mysql-01');
|
||||
expect(firstNode).to.have.property('metric');
|
||||
expect(firstNode.metric).to.eql({
|
||||
name: 'cpu',
|
||||
value: 0.003666666666666667,
|
||||
avg: 0.00809090909090909,
|
||||
max: 0.057833333333333334,
|
||||
});
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
it('should basically work with 1 grouping', () => {
|
||||
return client
|
||||
.query<WaffleNodesQuery.Query>({
|
||||
query: waffleNodesQuery,
|
||||
variables: {
|
||||
sourceId: 'default',
|
||||
timerange: {
|
||||
to: max,
|
||||
from: min,
|
||||
interval: '1m',
|
||||
},
|
||||
metric: { type: 'cpu' },
|
||||
type: 'host',
|
||||
groupBy: [{ field: 'cloud.availability_zone' }],
|
||||
},
|
||||
})
|
||||
.then(resp => {
|
||||
const { snapshot } = resp.data.source;
|
||||
expect(snapshot).to.have.property('nodes');
|
||||
if (snapshot) {
|
||||
const { nodes } = snapshot;
|
||||
expect(nodes.length).to.equal(1);
|
||||
const firstNode = first(nodes);
|
||||
expect(firstNode).to.have.property('path');
|
||||
expect(firstNode.path.length).to.equal(2);
|
||||
expect(first(firstNode.path)).to.have.property('value', 'virtualbox');
|
||||
expect(last(firstNode.path)).to.have.property('value', 'demo-stack-mysql-01');
|
||||
}
|
||||
});
|
||||
const resp = fetchSnapshot({
|
||||
sourceId: 'default',
|
||||
timerange: {
|
||||
to: max,
|
||||
from: min,
|
||||
interval: '1m',
|
||||
},
|
||||
metric: { type: 'cpu' } as InfraSnapshotMetricInput,
|
||||
nodeType: 'host' as InfraNodeType,
|
||||
groupBy: [{ field: 'cloud.availability_zone' }],
|
||||
});
|
||||
return resp.then(data => {
|
||||
const snapshot = data;
|
||||
expect(snapshot).to.have.property('nodes');
|
||||
if (snapshot) {
|
||||
const { nodes } = snapshot;
|
||||
expect(nodes.length).to.equal(1);
|
||||
const firstNode = first(nodes);
|
||||
expect(firstNode).to.have.property('path');
|
||||
expect(firstNode.path.length).to.equal(2);
|
||||
expect(first(firstNode.path)).to.have.property('value', 'virtualbox');
|
||||
expect(last(firstNode.path)).to.have.property('value', 'demo-stack-mysql-01');
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
it('should basically work with 2 groupings', () => {
|
||||
return client
|
||||
.query<WaffleNodesQuery.Query>({
|
||||
query: waffleNodesQuery,
|
||||
variables: {
|
||||
sourceId: 'default',
|
||||
timerange: {
|
||||
to: max,
|
||||
from: min,
|
||||
interval: '1m',
|
||||
},
|
||||
metric: { type: 'cpu' },
|
||||
type: 'host',
|
||||
groupBy: [{ field: 'cloud.provider' }, { field: 'cloud.availability_zone' }],
|
||||
},
|
||||
})
|
||||
.then(resp => {
|
||||
const { snapshot } = resp.data.source;
|
||||
expect(snapshot).to.have.property('nodes');
|
||||
if (snapshot) {
|
||||
const { nodes } = snapshot;
|
||||
expect(nodes.length).to.equal(1);
|
||||
const firstNode = first(nodes);
|
||||
expect(firstNode).to.have.property('path');
|
||||
expect(firstNode.path.length).to.equal(3);
|
||||
expect(first(firstNode.path)).to.have.property('value', 'vagrant');
|
||||
expect(firstNode.path[1]).to.have.property('value', 'virtualbox');
|
||||
expect(last(firstNode.path)).to.have.property('value', 'demo-stack-mysql-01');
|
||||
}
|
||||
});
|
||||
const resp = fetchSnapshot({
|
||||
sourceId: 'default',
|
||||
timerange: {
|
||||
to: max,
|
||||
from: min,
|
||||
interval: '1m',
|
||||
},
|
||||
metric: { type: 'cpu' } as InfraSnapshotMetricInput,
|
||||
nodeType: 'host' as InfraNodeType,
|
||||
groupBy: [{ field: 'cloud.provider' }, { field: 'cloud.availability_zone' }],
|
||||
});
|
||||
|
||||
return resp.then(data => {
|
||||
const snapshot = data;
|
||||
expect(snapshot).to.have.property('nodes');
|
||||
if (snapshot) {
|
||||
const { nodes } = snapshot;
|
||||
expect(nodes.length).to.equal(1);
|
||||
const firstNode = first(nodes);
|
||||
expect(firstNode).to.have.property('path');
|
||||
expect(firstNode.path.length).to.equal(3);
|
||||
expect(first(firstNode.path)).to.have.property('value', 'vagrant');
|
||||
expect(firstNode.path[1]).to.have.property('value', 'virtualbox');
|
||||
expect(last(firstNode.path)).to.have.property('value', 'demo-stack-mysql-01');
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
it('should show metrics for all nodes when grouping by service type', () => {
|
||||
return client
|
||||
.query<WaffleNodesQuery.Query>({
|
||||
query: waffleNodesQuery,
|
||||
variables: {
|
||||
sourceId: 'default',
|
||||
timerange: {
|
||||
to: max,
|
||||
from: min,
|
||||
interval: '1m',
|
||||
},
|
||||
metric: { type: 'cpu' },
|
||||
type: 'host',
|
||||
groupBy: [{ field: 'service.type' }],
|
||||
},
|
||||
})
|
||||
.then(resp => {
|
||||
const { snapshot } = resp.data.source;
|
||||
expect(snapshot).to.have.property('nodes');
|
||||
if (snapshot) {
|
||||
const { nodes } = snapshot;
|
||||
expect(nodes.length).to.equal(2);
|
||||
const firstNode = nodes[0];
|
||||
expect(firstNode).to.have.property('path');
|
||||
expect(firstNode.path.length).to.equal(2);
|
||||
expect(firstNode.path[0]).to.have.property('value', 'mysql');
|
||||
expect(firstNode.path[1]).to.have.property('value', 'demo-stack-mysql-01');
|
||||
expect(firstNode).to.have.property('metric');
|
||||
expect(firstNode.metric).to.eql({
|
||||
name: 'cpu',
|
||||
value: 0.003666666666666667,
|
||||
avg: 0.00809090909090909,
|
||||
max: 0.057833333333333334,
|
||||
__typename: 'InfraSnapshotNodeMetric',
|
||||
});
|
||||
const secondNode = nodes[1];
|
||||
expect(secondNode).to.have.property('path');
|
||||
expect(secondNode.path.length).to.equal(2);
|
||||
expect(secondNode.path[0]).to.have.property('value', 'system');
|
||||
expect(secondNode.path[1]).to.have.property('value', 'demo-stack-mysql-01');
|
||||
expect(secondNode).to.have.property('metric');
|
||||
expect(secondNode.metric).to.eql({
|
||||
name: 'cpu',
|
||||
value: 0.003666666666666667,
|
||||
avg: 0.00809090909090909,
|
||||
max: 0.057833333333333334,
|
||||
__typename: 'InfraSnapshotNodeMetric',
|
||||
});
|
||||
}
|
||||
});
|
||||
const resp = fetchSnapshot({
|
||||
sourceId: 'default',
|
||||
timerange: {
|
||||
to: max,
|
||||
from: min,
|
||||
interval: '1m',
|
||||
},
|
||||
metric: { type: 'cpu' } as InfraSnapshotMetricInput,
|
||||
nodeType: 'host' as InfraNodeType,
|
||||
groupBy: [{ field: 'service.type' }],
|
||||
});
|
||||
return resp.then(data => {
|
||||
const snapshot = data;
|
||||
expect(snapshot).to.have.property('nodes');
|
||||
if (snapshot) {
|
||||
const { nodes } = snapshot;
|
||||
expect(nodes.length).to.equal(2);
|
||||
const firstNode = nodes[0];
|
||||
expect(firstNode).to.have.property('path');
|
||||
expect(firstNode.path.length).to.equal(2);
|
||||
expect(firstNode.path[0]).to.have.property('value', 'mysql');
|
||||
expect(firstNode.path[1]).to.have.property('value', 'demo-stack-mysql-01');
|
||||
expect(firstNode).to.have.property('metric');
|
||||
expect(firstNode.metric).to.eql({
|
||||
name: 'cpu',
|
||||
value: 0.003666666666666667,
|
||||
avg: 0.00809090909090909,
|
||||
max: 0.057833333333333334,
|
||||
});
|
||||
const secondNode = nodes[1];
|
||||
expect(secondNode).to.have.property('path');
|
||||
expect(secondNode.path.length).to.equal(2);
|
||||
expect(secondNode.path[0]).to.have.property('value', 'system');
|
||||
expect(secondNode.path[1]).to.have.property('value', 'demo-stack-mysql-01');
|
||||
expect(secondNode).to.have.property('metric');
|
||||
expect(secondNode.metric).to.eql({
|
||||
name: 'cpu',
|
||||
value: 0.003666666666666667,
|
||||
avg: 0.00809090909090909,
|
||||
max: 0.057833333333333334,
|
||||
});
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue