Mirror of https://github.com/elastic/kibana.git (synced 2025-04-24 09:48:58 -04:00)
[Infra UI] Add cloud metrics and cloud/host info to metadata endpoint (#41836)

* [Infra UI] Add cloud metrics and cloud/host info to metadata endpoint
* Adding cloud metrics
* Correcting the pod host/cloud info
* Adding tests to metadata for new cloud/host info
* Fixing test to include machine.type
* Adding aws test data
* Refactor metadata container into hook
* Functionally complete
* updating tests
* Removing Metadata GraphQL endpoint and supporting files
* Moving types under common/http_api and prefixing with Infra
* adding filter for aws.ec2 dataset
* move away from fetch to useHTTPRequest
* Add decode function to useHTTPRequest; rename data to response
* Changing from Typescript types to IO-TS types and adding checks at client and server
* Fixing field type
Parent: ec83196b55
Commit: d32d59bd21
40 changed files with 18106 additions and 939 deletions
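This squash commit replaces the GraphQL metadataByNode query with a plain REST endpoint, POST /api/infra/metadata, whose request and response are validated with io-ts on both client and server. Example (not part of the diff; the route and payload fields come from this commit, the concrete values are illustrative) of calling the endpoint directly with kfetch, the client the new hook wraps:

import { kfetch } from 'ui/kfetch';

// Fetch metadata for a single node; the useMetadata hook added below wraps this call.
async function fetchHostMetadata() {
  return await kfetch({
    method: 'POST',
    pathname: '/api/infra/metadata',
    body: JSON.stringify({
      nodeId: 'i-0123456789abcdef0', // host, pod, or container id (illustrative)
      nodeType: 'host', // one of 'host' | 'pod' | 'container'
      sourceId: 'default',
    }),
  });
}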
|
@ -28,8 +28,6 @@ export interface InfraSource {
|
|||
configuration: InfraSourceConfiguration;
|
||||
/** The status of the source */
|
||||
status: InfraSourceStatus;
|
||||
/** A hierarchy of metadata entries by node */
|
||||
metadataByNode: InfraNodeMetadata;
|
||||
/** A consecutive span of log entries surrounding a point in time */
|
||||
logEntriesAround: InfraLogEntryInterval;
|
||||
/** A consecutive span of log entries within an interval */
|
||||
|
@ -132,20 +130,6 @@ export interface InfraIndexField {
|
|||
/** Whether the field's values can be aggregated */
|
||||
aggregatable: boolean;
|
||||
}
|
||||
/** One metadata entry for a node. */
|
||||
export interface InfraNodeMetadata {
|
||||
id: string;
|
||||
|
||||
name: string;
|
||||
|
||||
features: InfraNodeFeature[];
|
||||
}
|
||||
|
||||
export interface InfraNodeFeature {
|
||||
name: string;
|
||||
|
||||
source: string;
|
||||
}
|
||||
/** A consecutive sequence of log entries */
|
||||
export interface InfraLogEntryInterval {
|
||||
/** The key corresponding to the start of the interval covered by the entries */
|
||||
|
@ -424,11 +408,6 @@ export interface SourceQueryArgs {
|
|||
/** The id of the source */
|
||||
id: string;
|
||||
}
|
||||
export interface MetadataByNodeInfraSourceArgs {
|
||||
nodeId: string;
|
||||
|
||||
nodeType: InfraNodeType;
|
||||
}
|
||||
export interface LogEntriesAroundInfraSourceArgs {
|
||||
/** The sort key that corresponds to the point in time */
|
||||
key: InfraTimeKeyInput;
|
||||
|
@ -722,44 +701,6 @@ export namespace LogSummary {
|
|||
};
|
||||
}
|
||||
|
||||
export namespace MetadataQuery {
|
||||
export type Variables = {
|
||||
sourceId: string;
|
||||
nodeId: string;
|
||||
nodeType: InfraNodeType;
|
||||
};
|
||||
|
||||
export type Query = {
|
||||
__typename?: 'Query';
|
||||
|
||||
source: Source;
|
||||
};
|
||||
|
||||
export type Source = {
|
||||
__typename?: 'InfraSource';
|
||||
|
||||
id: string;
|
||||
|
||||
metadataByNode: MetadataByNode;
|
||||
};
|
||||
|
||||
export type MetadataByNode = {
|
||||
__typename?: 'InfraNodeMetadata';
|
||||
|
||||
name: string;
|
||||
|
||||
features: Features[];
|
||||
};
|
||||
|
||||
export type Features = {
|
||||
__typename?: 'InfraNodeFeature';
|
||||
|
||||
name: string;
|
||||
|
||||
source: string;
|
||||
};
|
||||
}
|
||||
|
||||
export namespace MetricsQuery {
|
||||
export type Variables = {
|
||||
sourceId: string;
|
||||
|
|
|
@ -6,3 +6,5 @@

export * from './search_results_api';
export * from './search_summary_api';
export * from './metadata_api';
export * from './timed_api';
x-pack/legacy/plugins/infra/common/http_api/metadata_api.ts (new file, 100 lines)
@ -0,0 +1,100 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import * as rt from 'io-ts';
import { InfraWrappableRequest } from '../../server/lib/adapters/framework';

export const InfraMetadataNodeTypeRT = rt.keyof({
  host: null,
  pod: null,
  container: null,
});

export const InfraMetadataRequestRT = rt.type({
  nodeId: rt.string,
  nodeType: InfraMetadataNodeTypeRT,
  sourceId: rt.string,
});

export const InfraMetadataFeatureRT = rt.type({
  name: rt.string,
  source: rt.string,
});

export const InfraMetadataOSRT = rt.partial({
  codename: rt.string,
  family: rt.string,
  kernel: rt.string,
  name: rt.string,
  platform: rt.string,
  version: rt.string,
});

export const InfraMetadataHostRT = rt.partial({
  name: rt.string,
  os: InfraMetadataOSRT,
  architecture: rt.string,
  containerized: rt.boolean,
});

export const InfraMetadataInstanceRT = rt.partial({
  id: rt.string,
  name: rt.string,
});

export const InfraMetadataProjectRT = rt.partial({
  id: rt.string,
});

export const InfraMetadataMachineRT = rt.partial({
  interface: rt.string,
});

export const InfraMetadataCloudRT = rt.partial({
  instance: InfraMetadataInstanceRT,
  provider: rt.string,
  availability_zone: rt.string,
  project: InfraMetadataProjectRT,
  machine: InfraMetadataMachineRT,
});

export const InfraMetadataInfoRT = rt.partial({
  cloud: InfraMetadataCloudRT,
  host: InfraMetadataHostRT,
});

const InfraMetadataRequiredRT = rt.type({
  name: rt.string,
  features: rt.array(InfraMetadataFeatureRT),
});

const InfraMetadataOptionalRT = rt.partial({
  info: InfraMetadataInfoRT,
});

export const InfraMetadataRT = rt.intersection([InfraMetadataRequiredRT, InfraMetadataOptionalRT]);

export type InfraMetadata = rt.TypeOf<typeof InfraMetadataRT>;

export type InfraMetadataRequest = rt.TypeOf<typeof InfraMetadataRequestRT>;

export type InfraMetadataWrappedRequest = InfraWrappableRequest<InfraMetadataRequest>;

export type InfraMetadataFeature = rt.TypeOf<typeof InfraMetadataFeatureRT>;

export type InfraMetadataInfo = rt.TypeOf<typeof InfraMetadataInfoRT>;

export type InfraMetadataCloud = rt.TypeOf<typeof InfraMetadataCloudRT>;

export type InfraMetadataInstance = rt.TypeOf<typeof InfraMetadataInstanceRT>;

export type InfraMetadataProject = rt.TypeOf<typeof InfraMetadataProjectRT>;

export type InfraMetadataMachine = rt.TypeOf<typeof InfraMetadataMachineRT>;

export type InfraMetadataHost = rt.TypeOf<typeof InfraMetadataHostRT>;

export type InfraMetadataOS = rt.TypeOf<typeof InfraMetadataOSRT>;
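
Example (not part of the diff): the request codec only accepts the three node types declared in InfraMetadataNodeTypeRT; the values below are illustrative.

import { InfraMetadataRequestRT } from './metadata_api';

// A well-formed request body decodes to a Right.
const ok = InfraMetadataRequestRT.decode({
  nodeId: 'nginx-demo-7d46f5d8b7-abcde',
  nodeType: 'pod',
  sourceId: 'default',
});
console.log(ok.isRight()); // true

// 'vm' is not a member of InfraMetadataNodeTypeRT, so this decodes to a Left of errors.
const bad = InfraMetadataRequestRT.decode({ nodeId: 'x', nodeType: 'vm', sourceId: 'default' });
console.log(bad.isLeft()); // true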
x-pack/legacy/plugins/infra/common/runtime_types.ts (new file, 14 lines)
@ -0,0 +1,14 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { Errors } from 'io-ts';
import { failure } from 'io-ts/lib/PathReporter';

export const createPlainError = (message: string) => new Error(message);

export const throwErrors = (createError: (message: string) => Error) => (errors: Errors) => {
  throw createError(failure(errors).join('\n'));
};
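
Example (not part of the diff): how these helpers combine with an io-ts codec. The Point codec and payload are made up; the pattern is the one used by useMetadata and the metadata route.

import * as rt from 'io-ts';
import { createPlainError, throwErrors } from './runtime_types';

const PointRT = rt.type({ x: rt.number, y: rt.number });

// Throws a plain Error listing every validation failure when the payload does not match
// the codec; otherwise returns the decoded, typed value.
const point = PointRT.decode(JSON.parse('{"x": 1, "y": 2}')).getOrElseL(
  throwErrors(createPlainError)
);
console.log(point.x + point.y); // 3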
@ -7,7 +7,7 @@
|
|||
import { EuiButton, EuiComboBox, EuiForm, EuiFormRow } from '@elastic/eui';
|
||||
import { InjectedIntl, injectI18n } from '@kbn/i18n/react';
|
||||
import React from 'react';
|
||||
import { InfraIndexField } from '../../../server/graphql/types';
|
||||
import { InfraIndexField } from '../../graphql/types';
|
||||
interface Props {
|
||||
onSubmit: (field: string) => void;
|
||||
fields: InfraIndexField[];
|
||||
|
|
|
@ -15,7 +15,7 @@ import {
|
|||
} from '@elastic/eui';
|
||||
import { FormattedMessage, InjectedIntl, injectI18n } from '@kbn/i18n/react';
|
||||
import React from 'react';
|
||||
import { InfraIndexField, InfraNodeType, InfraSnapshotGroupbyInput } from '../../graphql/types';
|
||||
import { InfraNodeType, InfraSnapshotGroupbyInput, InfraIndexField } from '../../graphql/types';
|
||||
import { InfraGroupByOptions } from '../../lib/lib';
|
||||
import { CustomFieldPanel } from './custom_field_panel';
|
||||
import { fieldToName } from './lib/field_to_display_name';
|
||||
|
|
|
@ -0,0 +1,38 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
import _ from 'lodash'; // needed for _.intersection below
import { InfraMetadataFeature } from '../../../../common/http_api/metadata_api';
import { InfraMetricLayout } from '../../../pages/metrics/layouts/types';

export const getFilteredLayouts = (
  layouts: InfraMetricLayout[],
  metadata: Array<InfraMetadataFeature | null> | undefined
): InfraMetricLayout[] => {
  if (!metadata) {
    return layouts;
  }

  const metricMetadata: Array<string | null> = metadata
    .filter(data => data && data.source === 'metrics')
    .map(data => data && data.name);

  // After filtering out sections that can't be displayed, a layout may end up empty and can be removed.
  const filteredLayouts = layouts
    .map(layout => getFilteredLayout(layout, metricMetadata))
    .filter(layout => layout.sections.length > 0);
  return filteredLayouts;
};

export const getFilteredLayout = (
  layout: InfraMetricLayout,
  metricMetadata: Array<string | null>
): InfraMetricLayout => {
  // A section is only displayed if at least one of its requirements is met.
  // All others are filtered out.
  const filteredSections = layout.sections.filter(
    section => _.intersection(section.requires, metricMetadata).length > 0
  );
  return { ...layout, sections: filteredSections };
};
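
Example (not part of the diff): the layouts below are simplified stand-ins, since the real InfraMetricLayout type carries more fields than shown; only the id/sections/requires fields used by this helper are spelled out.

import { InfraMetadataFeature } from '../../../../common/http_api/metadata_api';
import { InfraMetricLayout } from '../../../pages/metrics/layouts/types';
import { getFilteredLayouts } from './get_filtered_layouts';

// Simplified layouts; the cast is only for illustration.
const layouts = ([
  { id: 'hostOverview', sections: [{ id: 'cpu', requires: ['system.cpu'] }] },
  { id: 'awsEC2', sections: [{ id: 'cpuUtil', requires: ['aws.ec2'] }] },
] as unknown) as InfraMetricLayout[];

const features: InfraMetadataFeature[] = [{ name: 'system.cpu', source: 'metrics' }];

// Only layouts with at least one section whose requirements appear among the reported
// metric features survive; here the aws.ec2 layout is dropped.
const visible = getFilteredLayouts(layouts, features);
console.log(visible.length); // 1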
@ -1,22 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import gql from 'graphql-tag';
|
||||
|
||||
export const metadataQuery = gql`
|
||||
query MetadataQuery($sourceId: ID!, $nodeId: String!, $nodeType: InfraNodeType!) {
|
||||
source(id: $sourceId) {
|
||||
id
|
||||
metadataByNode(nodeId: $nodeId, nodeType: $nodeType) {
|
||||
name
|
||||
features {
|
||||
name
|
||||
source
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`;
|
|
@ -0,0 +1,48 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { useEffect } from 'react';
import { InfraNodeType } from '../../graphql/types';
import { InfraMetricLayout } from '../../pages/metrics/layouts/types';
import { InfraMetadata, InfraMetadataRT } from '../../../common/http_api/metadata_api';
import { getFilteredLayouts } from './lib/get_filtered_layouts';
import { useHTTPRequest } from '../../hooks/use_http_request';
import { throwErrors, createPlainError } from '../../../common/runtime_types';

export function useMetadata(
  nodeId: string,
  nodeType: InfraNodeType,
  layouts: InfraMetricLayout[],
  sourceId: string
) {
  const decodeResponse = (response: any) => {
    return InfraMetadataRT.decode(response).getOrElseL(throwErrors(createPlainError));
  };

  const { error, loading, response, makeRequest } = useHTTPRequest<InfraMetadata>(
    '/api/infra/metadata',
    'POST',
    JSON.stringify({
      nodeId,
      nodeType,
      sourceId,
    }),
    // The decoder is the fourth argument of useHTTPRequest, so the raw response is
    // validated against InfraMetadataRT before it is exposed to callers.
    decodeResponse
  );

  useEffect(() => {
    (async () => {
      await makeRequest();
    })();
  }, [makeRequest]);

  return {
    name: (response && response.name) || '',
    filteredLayouts: (response && getFilteredLayouts(layouts, response.features)) || [],
    error: (error && error.message) || null,
    loading,
  };
}
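
Example (not part of the diff): a sketch of a component consuming the hook. The component, node id, and layouts are illustrative; the hook signature is the one added above.

import React from 'react';
import { InfraNodeType } from '../../graphql/types';
import { InfraMetricLayout } from '../../pages/metrics/layouts/types';
import { useMetadata } from './use_metadata';

// Illustrative component: loads metadata for a host and shows its resolved name.
export const NodeTitle: React.FC<{ nodeId: string; layouts: InfraMetricLayout[] }> = ({
  nodeId,
  layouts,
}) => {
  const { name, filteredLayouts, error, loading } = useMetadata(
    nodeId,
    InfraNodeType.host,
    layouts,
    'default' // sourceId
  );

  if (loading) return <span>Loading…</span>;
  if (error) return <span>{error}</span>;
  return <span>{`${name} (${filteredLayouts.length} layouts)`}</span>;
};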
@ -1,90 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import _ from 'lodash';
|
||||
import React from 'react';
|
||||
import { Query } from 'react-apollo';
|
||||
|
||||
import { InfraNodeType, MetadataQuery } from '../../graphql/types';
|
||||
import { InfraMetricLayout } from '../../pages/metrics/layouts/types';
|
||||
import { metadataQuery } from './metadata.gql_query';
|
||||
|
||||
interface WithMetadataProps {
|
||||
children: (args: WithMetadataArgs) => React.ReactNode;
|
||||
layouts: InfraMetricLayout[];
|
||||
nodeType: InfraNodeType;
|
||||
nodeId: string;
|
||||
sourceId: string;
|
||||
}
|
||||
|
||||
interface WithMetadataArgs {
|
||||
name: string;
|
||||
filteredLayouts: InfraMetricLayout[];
|
||||
error?: string | undefined;
|
||||
loading: boolean;
|
||||
}
|
||||
|
||||
export const WithMetadata = ({
|
||||
children,
|
||||
layouts,
|
||||
nodeType,
|
||||
nodeId,
|
||||
sourceId,
|
||||
}: WithMetadataProps) => {
|
||||
return (
|
||||
<Query<MetadataQuery.Query, MetadataQuery.Variables>
|
||||
query={metadataQuery}
|
||||
fetchPolicy="no-cache"
|
||||
variables={{
|
||||
sourceId,
|
||||
nodeType,
|
||||
nodeId,
|
||||
}}
|
||||
>
|
||||
{({ data, error, loading }) => {
|
||||
const metadata = data && data.source && data.source.metadataByNode;
|
||||
const filteredLayouts = (metadata && getFilteredLayouts(layouts, metadata.features)) || [];
|
||||
return children({
|
||||
name: (metadata && metadata.name) || '',
|
||||
filteredLayouts,
|
||||
error: error && error.message,
|
||||
loading,
|
||||
});
|
||||
}}
|
||||
</Query>
|
||||
);
|
||||
};
|
||||
|
||||
const getFilteredLayouts = (
|
||||
layouts: InfraMetricLayout[],
|
||||
metadata: Array<MetadataQuery.Features | null> | undefined
|
||||
): InfraMetricLayout[] => {
|
||||
if (!metadata) {
|
||||
return layouts;
|
||||
}
|
||||
|
||||
const metricMetadata: Array<string | null> = metadata
|
||||
.filter(data => data && data.source === 'metrics')
|
||||
.map(data => data && data.name);
|
||||
|
||||
// After filtering out sections that can't be displayed, a layout may end up empty and can be removed.
|
||||
const filteredLayouts = layouts
|
||||
.map(layout => getFilteredLayout(layout, metricMetadata))
|
||||
.filter(layout => layout.sections.length > 0);
|
||||
return filteredLayouts;
|
||||
};
|
||||
|
||||
const getFilteredLayout = (
|
||||
layout: InfraMetricLayout,
|
||||
metricMetadata: Array<string | null>
|
||||
): InfraMetricLayout => {
|
||||
// A section is only displayed if at least one of its requirements is met
|
||||
// All others are filtered out.
|
||||
const filteredSections = layout.sections.filter(
|
||||
section => _.intersection(section.requires, metricMetadata).length > 0
|
||||
);
|
||||
return { ...layout, sections: filteredSections };
|
||||
};
|
|
@ -137,39 +137,6 @@
|
|||
"isDeprecated": false,
|
||||
"deprecationReason": null
|
||||
},
|
||||
{
|
||||
"name": "metadataByNode",
|
||||
"description": "A hierarchy of metadata entries by node",
|
||||
"args": [
|
||||
{
|
||||
"name": "nodeId",
|
||||
"description": "",
|
||||
"type": {
|
||||
"kind": "NON_NULL",
|
||||
"name": null,
|
||||
"ofType": { "kind": "SCALAR", "name": "String", "ofType": null }
|
||||
},
|
||||
"defaultValue": null
|
||||
},
|
||||
{
|
||||
"name": "nodeType",
|
||||
"description": "",
|
||||
"type": {
|
||||
"kind": "NON_NULL",
|
||||
"name": null,
|
||||
"ofType": { "kind": "ENUM", "name": "InfraNodeType", "ofType": null }
|
||||
},
|
||||
"defaultValue": null
|
||||
}
|
||||
],
|
||||
"type": {
|
||||
"kind": "NON_NULL",
|
||||
"name": null,
|
||||
"ofType": { "kind": "OBJECT", "name": "InfraNodeMetadata", "ofType": null }
|
||||
},
|
||||
"isDeprecated": false,
|
||||
"deprecationReason": null
|
||||
},
|
||||
{
|
||||
"name": "logEntriesAround",
|
||||
"description": "A consecutive span of log entries surrounding a point in time",
|
||||
|
@ -1086,115 +1053,6 @@
|
|||
"enumValues": null,
|
||||
"possibleTypes": null
|
||||
},
|
||||
{
|
||||
"kind": "ENUM",
|
||||
"name": "InfraNodeType",
|
||||
"description": "",
|
||||
"fields": null,
|
||||
"inputFields": null,
|
||||
"interfaces": null,
|
||||
"enumValues": [
|
||||
{ "name": "pod", "description": "", "isDeprecated": false, "deprecationReason": null },
|
||||
{
|
||||
"name": "container",
|
||||
"description": "",
|
||||
"isDeprecated": false,
|
||||
"deprecationReason": null
|
||||
},
|
||||
{ "name": "host", "description": "", "isDeprecated": false, "deprecationReason": null }
|
||||
],
|
||||
"possibleTypes": null
|
||||
},
|
||||
{
|
||||
"kind": "OBJECT",
|
||||
"name": "InfraNodeMetadata",
|
||||
"description": "One metadata entry for a node.",
|
||||
"fields": [
|
||||
{
|
||||
"name": "id",
|
||||
"description": "",
|
||||
"args": [],
|
||||
"type": {
|
||||
"kind": "NON_NULL",
|
||||
"name": null,
|
||||
"ofType": { "kind": "SCALAR", "name": "ID", "ofType": null }
|
||||
},
|
||||
"isDeprecated": false,
|
||||
"deprecationReason": null
|
||||
},
|
||||
{
|
||||
"name": "name",
|
||||
"description": "",
|
||||
"args": [],
|
||||
"type": {
|
||||
"kind": "NON_NULL",
|
||||
"name": null,
|
||||
"ofType": { "kind": "SCALAR", "name": "String", "ofType": null }
|
||||
},
|
||||
"isDeprecated": false,
|
||||
"deprecationReason": null
|
||||
},
|
||||
{
|
||||
"name": "features",
|
||||
"description": "",
|
||||
"args": [],
|
||||
"type": {
|
||||
"kind": "NON_NULL",
|
||||
"name": null,
|
||||
"ofType": {
|
||||
"kind": "LIST",
|
||||
"name": null,
|
||||
"ofType": {
|
||||
"kind": "NON_NULL",
|
||||
"name": null,
|
||||
"ofType": { "kind": "OBJECT", "name": "InfraNodeFeature", "ofType": null }
|
||||
}
|
||||
}
|
||||
},
|
||||
"isDeprecated": false,
|
||||
"deprecationReason": null
|
||||
}
|
||||
],
|
||||
"inputFields": null,
|
||||
"interfaces": [],
|
||||
"enumValues": null,
|
||||
"possibleTypes": null
|
||||
},
|
||||
{
|
||||
"kind": "OBJECT",
|
||||
"name": "InfraNodeFeature",
|
||||
"description": "",
|
||||
"fields": [
|
||||
{
|
||||
"name": "name",
|
||||
"description": "",
|
||||
"args": [],
|
||||
"type": {
|
||||
"kind": "NON_NULL",
|
||||
"name": null,
|
||||
"ofType": { "kind": "SCALAR", "name": "String", "ofType": null }
|
||||
},
|
||||
"isDeprecated": false,
|
||||
"deprecationReason": null
|
||||
},
|
||||
{
|
||||
"name": "source",
|
||||
"description": "",
|
||||
"args": [],
|
||||
"type": {
|
||||
"kind": "NON_NULL",
|
||||
"name": null,
|
||||
"ofType": { "kind": "SCALAR", "name": "String", "ofType": null }
|
||||
},
|
||||
"isDeprecated": false,
|
||||
"deprecationReason": null
|
||||
}
|
||||
],
|
||||
"inputFields": null,
|
||||
"interfaces": [],
|
||||
"enumValues": null,
|
||||
"possibleTypes": null
|
||||
},
|
||||
{
|
||||
"kind": "INPUT_OBJECT",
|
||||
"name": "InfraTimeKeyInput",
|
||||
|
@ -2039,6 +1897,25 @@
|
|||
"enumValues": null,
|
||||
"possibleTypes": null
|
||||
},
|
||||
{
|
||||
"kind": "ENUM",
|
||||
"name": "InfraNodeType",
|
||||
"description": "",
|
||||
"fields": null,
|
||||
"inputFields": null,
|
||||
"interfaces": null,
|
||||
"enumValues": [
|
||||
{ "name": "pod", "description": "", "isDeprecated": false, "deprecationReason": null },
|
||||
{
|
||||
"name": "container",
|
||||
"description": "",
|
||||
"isDeprecated": false,
|
||||
"deprecationReason": null
|
||||
},
|
||||
{ "name": "host", "description": "", "isDeprecated": false, "deprecationReason": null }
|
||||
],
|
||||
"possibleTypes": null
|
||||
},
|
||||
{
|
||||
"kind": "INPUT_OBJECT",
|
||||
"name": "InfraSnapshotGroupbyInput",
|
||||
|
|
|
@ -28,8 +28,6 @@ export interface InfraSource {
|
|||
configuration: InfraSourceConfiguration;
|
||||
/** The status of the source */
|
||||
status: InfraSourceStatus;
|
||||
/** A hierarchy of metadata entries by node */
|
||||
metadataByNode: InfraNodeMetadata;
|
||||
/** A consecutive span of log entries surrounding a point in time */
|
||||
logEntriesAround: InfraLogEntryInterval;
|
||||
/** A consecutive span of log entries within an interval */
|
||||
|
@ -132,20 +130,6 @@ export interface InfraIndexField {
|
|||
/** Whether the field's values can be aggregated */
|
||||
aggregatable: boolean;
|
||||
}
|
||||
/** One metadata entry for a node. */
|
||||
export interface InfraNodeMetadata {
|
||||
id: string;
|
||||
|
||||
name: string;
|
||||
|
||||
features: InfraNodeFeature[];
|
||||
}
|
||||
|
||||
export interface InfraNodeFeature {
|
||||
name: string;
|
||||
|
||||
source: string;
|
||||
}
|
||||
/** A consecutive sequence of log entries */
|
||||
export interface InfraLogEntryInterval {
|
||||
/** The key corresponding to the start of the interval covered by the entries */
|
||||
|
@ -424,11 +408,6 @@ export interface SourceQueryArgs {
|
|||
/** The id of the source */
|
||||
id: string;
|
||||
}
|
||||
export interface MetadataByNodeInfraSourceArgs {
|
||||
nodeId: string;
|
||||
|
||||
nodeType: InfraNodeType;
|
||||
}
|
||||
export interface LogEntriesAroundInfraSourceArgs {
|
||||
/** The sort key that corresponds to the point in time */
|
||||
key: InfraTimeKeyInput;
|
||||
|
@ -722,44 +701,6 @@ export namespace LogSummary {
|
|||
};
|
||||
}
|
||||
|
||||
export namespace MetadataQuery {
|
||||
export type Variables = {
|
||||
sourceId: string;
|
||||
nodeId: string;
|
||||
nodeType: InfraNodeType;
|
||||
};
|
||||
|
||||
export type Query = {
|
||||
__typename?: 'Query';
|
||||
|
||||
source: Source;
|
||||
};
|
||||
|
||||
export type Source = {
|
||||
__typename?: 'InfraSource';
|
||||
|
||||
id: string;
|
||||
|
||||
metadataByNode: MetadataByNode;
|
||||
};
|
||||
|
||||
export type MetadataByNode = {
|
||||
__typename?: 'InfraNodeMetadata';
|
||||
|
||||
name: string;
|
||||
|
||||
features: Features[];
|
||||
};
|
||||
|
||||
export type Features = {
|
||||
__typename?: 'InfraNodeFeature';
|
||||
|
||||
name: string;
|
||||
|
||||
source: string;
|
||||
};
|
||||
}
|
||||
|
||||
export namespace MetricsQuery {
|
||||
export type Variables = {
|
||||
sourceId: string;
|
||||
|
|
|
@ -0,0 +1,69 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import React, { useMemo, useState } from 'react';
import { kfetch } from 'ui/kfetch';
import { toastNotifications } from 'ui/notify';
import { i18n } from '@kbn/i18n';
import { idx } from '@kbn/elastic-idx/target';
import { KFetchError } from 'ui/kfetch/kfetch_error';
import { useTrackedPromise } from '../utils/use_tracked_promise';
export function useHTTPRequest<Response>(
  pathname: string,
  method: 'GET' | 'POST' | 'PUT' | 'DELETE' | 'HEAD',
  body?: string,
  decode: (response: any) => Response = response => response
) {
  const [response, setResponse] = useState<Response | null>(null);
  const [error, setError] = useState<KFetchError | null>(null);
  const [request, makeRequest] = useTrackedPromise(
    {
      cancelPreviousOn: 'resolution',
      createPromise: () =>
        kfetch({
          method,
          pathname,
          body,
        }),
      onResolve: resp => setResponse(decode(resp)),
      onReject: (e: unknown) => {
        const err = e as KFetchError;
        setError(err);
        toastNotifications.addWarning({
          title: i18n.translate('xpack.infra.useHTTPRequest.error.title', {
            defaultMessage: `Error while fetching resource`,
          }),
          text: (
            <div>
              <h5>
                {i18n.translate('xpack.infra.useHTTPRequest.error.status', {
                  defaultMessage: `Error`,
                })}
              </h5>
              {idx(err.res, r => r.statusText)} ({idx(err.res, r => r.status)})
              <h5>
                {i18n.translate('xpack.infra.useHTTPRequest.error.url', {
                  defaultMessage: `URL`,
                })}
              </h5>
              {idx(err.res, r => r.url)}
            </div>
          ),
        });
      },
    },
    [pathname, body, method]
  );

  const loading = useMemo(() => request.state === 'pending', [request.state]);

  return {
    response,
    error,
    loading,
    makeRequest,
  };
}
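
Example (not part of the diff): using the hook directly with an io-ts decoder as the fourth argument. The GreetingRT codec and the route are made up; the decode pattern mirrors useMetadata.

import * as rt from 'io-ts';
import { useEffect } from 'react';
import { useHTTPRequest } from './use_http_request';
import { throwErrors, createPlainError } from '../../common/runtime_types';

const GreetingRT = rt.type({ message: rt.string });
type Greeting = rt.TypeOf<typeof GreetingRT>;

export function useGreeting() {
  // The fourth argument validates the raw kfetch response before it is stored as state.
  const { response, error, loading, makeRequest } = useHTTPRequest<Greeting>(
    '/api/example/greeting', // illustrative route
    'GET',
    undefined,
    raw => GreetingRT.decode(raw).getOrElseL(throwErrors(createPlainError))
  );

  useEffect(() => {
    makeRequest();
  }, [makeRequest]);

  return { greeting: response, error, loading };
}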
@ -15,7 +15,7 @@ import {
|
|||
} from '@elastic/eui';
|
||||
import { InjectedIntl, injectI18n } from '@kbn/i18n/react';
|
||||
import { GraphQLFormattedError } from 'graphql';
|
||||
import React from 'react';
|
||||
import React, { useCallback, useContext } from 'react';
|
||||
import { UICapabilities } from 'ui/capabilities';
|
||||
import { injectUICapabilities } from 'ui/capabilities/react';
|
||||
import euiStyled, { EuiTheme, withTheme } from '../../../../../common/eui_styled_components';
|
||||
|
@ -29,18 +29,18 @@ import { MetricsSideNav } from '../../components/metrics/side_nav';
|
|||
import { MetricsTimeControls } from '../../components/metrics/time_controls';
|
||||
import { ColumnarPage, PageContent } from '../../components/page';
|
||||
import { SourceConfigurationFlyout } from '../../components/source_configuration';
|
||||
import { WithMetadata } from '../../containers/metadata/with_metadata';
|
||||
import { WithMetrics } from '../../containers/metrics/with_metrics';
|
||||
import {
|
||||
WithMetricsTime,
|
||||
WithMetricsTimeUrlState,
|
||||
} from '../../containers/metrics/with_metrics_time';
|
||||
import { WithSource } from '../../containers/with_source';
|
||||
import { InfraNodeType, InfraTimerangeInput } from '../../graphql/types';
|
||||
import { Error, ErrorPageBody } from '../error';
|
||||
import { layoutCreators } from './layouts';
|
||||
import { InfraMetricLayoutSection } from './layouts/types';
|
||||
import { MetricDetailPageProviders } from './page_providers';
|
||||
import { withMetricPageProviders } from './page_providers';
|
||||
import { useMetadata } from '../../containers/metadata/use_metadata';
|
||||
import { Source } from '../../containers/source';
|
||||
|
||||
const DetailPageContent = euiStyled(PageContent)`
|
||||
overflow: auto;
|
||||
|
@ -63,206 +63,189 @@ interface Props {
|
|||
uiCapabilities: UICapabilities;
|
||||
}
|
||||
|
||||
export const MetricDetail = injectUICapabilities(
|
||||
withTheme(
|
||||
injectI18n(
|
||||
class extends React.PureComponent<Props> {
|
||||
public static displayName = 'MetricDetailPage';
|
||||
|
||||
public render() {
|
||||
const { intl, uiCapabilities } = this.props;
|
||||
const nodeId = this.props.match.params.node;
|
||||
const nodeType = this.props.match.params.type as InfraNodeType;
|
||||
const layoutCreator = layoutCreators[nodeType];
|
||||
if (!layoutCreator) {
|
||||
return (
|
||||
<Error
|
||||
message={intl.formatMessage(
|
||||
{
|
||||
id: 'xpack.infra.metricDetailPage.invalidNodeTypeErrorMessage',
|
||||
defaultMessage: '{nodeType} is not a valid node type',
|
||||
},
|
||||
{
|
||||
nodeType: `"${nodeType}"`,
|
||||
}
|
||||
)}
|
||||
/>
|
||||
);
|
||||
}
|
||||
const layouts = layoutCreator(this.props.theme);
|
||||
|
||||
export const MetricDetail = withMetricPageProviders(
|
||||
injectUICapabilities(
|
||||
withTheme(
|
||||
injectI18n(({ intl, uiCapabilities, match, theme }: Props) => {
|
||||
const nodeId = match.params.node;
|
||||
const nodeType = match.params.type as InfraNodeType;
|
||||
const layoutCreator = layoutCreators[nodeType];
|
||||
if (!layoutCreator) {
|
||||
return (
|
||||
<MetricDetailPageProviders>
|
||||
<WithSource>
|
||||
{({ sourceId }) => (
|
||||
<WithMetricsTime>
|
||||
{({
|
||||
timeRange,
|
||||
setTimeRange,
|
||||
refreshInterval,
|
||||
setRefreshInterval,
|
||||
isAutoReloading,
|
||||
setAutoReload,
|
||||
}) => (
|
||||
<WithMetadata
|
||||
layouts={layouts}
|
||||
sourceId={sourceId}
|
||||
nodeType={nodeType}
|
||||
nodeId={nodeId}
|
||||
>
|
||||
{({ name, filteredLayouts, loading: metadataLoading }) => {
|
||||
const breadcrumbs = [
|
||||
{
|
||||
href: '#/',
|
||||
text: intl.formatMessage({
|
||||
id: 'xpack.infra.header.infrastructureTitle',
|
||||
defaultMessage: 'Infrastructure',
|
||||
}),
|
||||
},
|
||||
{ text: name },
|
||||
];
|
||||
return (
|
||||
<ColumnarPage>
|
||||
<Header
|
||||
breadcrumbs={breadcrumbs}
|
||||
readOnlyBadge={!uiCapabilities.infrastructure.save}
|
||||
/>
|
||||
<SourceConfigurationFlyout
|
||||
shouldAllowEdit={
|
||||
uiCapabilities.infrastructure.configureSource as boolean
|
||||
}
|
||||
/>
|
||||
<WithMetricsTimeUrlState />
|
||||
<DocumentTitle
|
||||
title={intl.formatMessage(
|
||||
{
|
||||
id: 'xpack.infra.metricDetailPage.documentTitle',
|
||||
defaultMessage: 'Infrastructure | Metrics | {name}',
|
||||
},
|
||||
{
|
||||
name,
|
||||
}
|
||||
)}
|
||||
/>
|
||||
<DetailPageContent data-test-subj="infraMetricsPage">
|
||||
<WithMetrics
|
||||
layouts={filteredLayouts}
|
||||
sourceId={sourceId}
|
||||
timerange={timeRange as InfraTimerangeInput}
|
||||
nodeType={nodeType}
|
||||
nodeId={nodeId}
|
||||
>
|
||||
{({ metrics, error, loading, refetch }) => {
|
||||
if (error) {
|
||||
const invalidNodeError = error.graphQLErrors.some(
|
||||
(err: GraphQLFormattedError) =>
|
||||
err.code === InfraMetricsErrorCodes.invalid_node
|
||||
);
|
||||
|
||||
return (
|
||||
<>
|
||||
<DocumentTitle
|
||||
title={(previousTitle: string) =>
|
||||
intl.formatMessage(
|
||||
{
|
||||
id:
|
||||
'xpack.infra.metricDetailPage.documentTitleError',
|
||||
defaultMessage: '{previousTitle} | Uh oh',
|
||||
},
|
||||
{
|
||||
previousTitle,
|
||||
}
|
||||
)
|
||||
}
|
||||
/>
|
||||
{invalidNodeError ? (
|
||||
<InvalidNodeError nodeName={name} />
|
||||
) : (
|
||||
<ErrorPageBody message={error.message} />
|
||||
)}
|
||||
</>
|
||||
);
|
||||
}
|
||||
return (
|
||||
<EuiPage style={{ flex: '1 0 auto' }}>
|
||||
<MetricsSideNav
|
||||
layouts={filteredLayouts}
|
||||
loading={metadataLoading}
|
||||
nodeName={name}
|
||||
handleClick={this.handleClick}
|
||||
/>
|
||||
<AutoSizer content={false} bounds detectAnyWindowResize>
|
||||
{({ measureRef, bounds: { width = 0 } }) => {
|
||||
return (
|
||||
<MetricsDetailsPageColumn innerRef={measureRef}>
|
||||
<EuiPageBody style={{ width: `${width}px` }}>
|
||||
<EuiPageHeader style={{ flex: '0 0 auto' }}>
|
||||
<EuiPageHeaderSection style={{ width: '100%' }}>
|
||||
<MetricsTitleTimeRangeContainer>
|
||||
<EuiHideFor sizes={['xs', 's']}>
|
||||
<EuiTitle size="m">
|
||||
<h1>{name}</h1>
|
||||
</EuiTitle>
|
||||
</EuiHideFor>
|
||||
<MetricsTimeControls
|
||||
currentTimeRange={timeRange}
|
||||
isLiveStreaming={isAutoReloading}
|
||||
refreshInterval={refreshInterval}
|
||||
setRefreshInterval={setRefreshInterval}
|
||||
onChangeTimeRange={setTimeRange}
|
||||
setAutoReload={setAutoReload}
|
||||
/>
|
||||
</MetricsTitleTimeRangeContainer>
|
||||
</EuiPageHeaderSection>
|
||||
</EuiPageHeader>
|
||||
|
||||
<EuiPageContentWithRelative>
|
||||
<Metrics
|
||||
label={name}
|
||||
nodeId={nodeId}
|
||||
layouts={filteredLayouts}
|
||||
metrics={metrics}
|
||||
loading={
|
||||
metrics.length > 0 && isAutoReloading
|
||||
? false
|
||||
: loading
|
||||
}
|
||||
refetch={refetch}
|
||||
onChangeRangeTime={setTimeRange}
|
||||
isLiveStreaming={isAutoReloading}
|
||||
stopLiveStreaming={() => setAutoReload(false)}
|
||||
/>
|
||||
</EuiPageContentWithRelative>
|
||||
</EuiPageBody>
|
||||
</MetricsDetailsPageColumn>
|
||||
);
|
||||
}}
|
||||
</AutoSizer>
|
||||
</EuiPage>
|
||||
);
|
||||
}}
|
||||
</WithMetrics>
|
||||
</DetailPageContent>
|
||||
</ColumnarPage>
|
||||
);
|
||||
}}
|
||||
</WithMetadata>
|
||||
)}
|
||||
</WithMetricsTime>
|
||||
)}
|
||||
</WithSource>
|
||||
</MetricDetailPageProviders>
|
||||
<Error
|
||||
message={intl.formatMessage(
|
||||
{
|
||||
id: 'xpack.infra.metricDetailPage.invalidNodeTypeErrorMessage',
|
||||
defaultMessage: '{nodeType} is not a valid node type',
|
||||
},
|
||||
{
|
||||
nodeType: `"${nodeType}"`,
|
||||
}
|
||||
)}
|
||||
/>
|
||||
);
|
||||
}
|
||||
const { sourceId } = useContext(Source.Context);
|
||||
const layouts = layoutCreator(theme);
|
||||
const { name, filteredLayouts, loading: metadataLoading } = useMetadata(
|
||||
nodeId,
|
||||
nodeType,
|
||||
layouts,
|
||||
sourceId
|
||||
);
|
||||
const breadcrumbs = [
|
||||
{
|
||||
href: '#/',
|
||||
text: intl.formatMessage({
|
||||
id: 'xpack.infra.header.infrastructureTitle',
|
||||
defaultMessage: 'Infrastructure',
|
||||
}),
|
||||
},
|
||||
{ text: name },
|
||||
];
|
||||
|
||||
private handleClick = (section: InfraMetricLayoutSection) => () => {
|
||||
const id = section.linkToId || section.id;
|
||||
const el = document.getElementById(id);
|
||||
if (el) {
|
||||
el.scrollIntoView();
|
||||
}
|
||||
};
|
||||
}
|
||||
const handleClick = useCallback(
|
||||
(section: InfraMetricLayoutSection) => () => {
|
||||
const id = section.linkToId || section.id;
|
||||
const el = document.getElementById(id);
|
||||
if (el) {
|
||||
el.scrollIntoView();
|
||||
}
|
||||
},
|
||||
[]
|
||||
);
|
||||
|
||||
return (
|
||||
<WithMetricsTime>
|
||||
{({
|
||||
timeRange,
|
||||
setTimeRange,
|
||||
refreshInterval,
|
||||
setRefreshInterval,
|
||||
isAutoReloading,
|
||||
setAutoReload,
|
||||
}) => (
|
||||
<ColumnarPage>
|
||||
<Header
|
||||
breadcrumbs={breadcrumbs}
|
||||
readOnlyBadge={!uiCapabilities.infrastructure.save}
|
||||
/>
|
||||
<SourceConfigurationFlyout
|
||||
shouldAllowEdit={uiCapabilities.infrastructure.configureSource as boolean}
|
||||
/>
|
||||
<WithMetricsTimeUrlState />
|
||||
<DocumentTitle
|
||||
title={intl.formatMessage(
|
||||
{
|
||||
id: 'xpack.infra.metricDetailPage.documentTitle',
|
||||
defaultMessage: 'Infrastructure | Metrics | {name}',
|
||||
},
|
||||
{
|
||||
name,
|
||||
}
|
||||
)}
|
||||
/>
|
||||
<DetailPageContent data-test-subj="infraMetricsPage">
|
||||
<WithMetrics
|
||||
layouts={filteredLayouts}
|
||||
sourceId={sourceId}
|
||||
timerange={timeRange as InfraTimerangeInput}
|
||||
nodeType={nodeType}
|
||||
nodeId={nodeId}
|
||||
>
|
||||
{({ metrics, error, loading, refetch }) => {
|
||||
if (error) {
|
||||
const invalidNodeError = error.graphQLErrors.some(
|
||||
(err: GraphQLFormattedError) =>
|
||||
err.code === InfraMetricsErrorCodes.invalid_node
|
||||
);
|
||||
|
||||
return (
|
||||
<>
|
||||
<DocumentTitle
|
||||
title={(previousTitle: string) =>
|
||||
intl.formatMessage(
|
||||
{
|
||||
id: 'xpack.infra.metricDetailPage.documentTitleError',
|
||||
defaultMessage: '{previousTitle} | Uh oh',
|
||||
},
|
||||
{
|
||||
previousTitle,
|
||||
}
|
||||
)
|
||||
}
|
||||
/>
|
||||
{invalidNodeError ? (
|
||||
<InvalidNodeError nodeName={name} />
|
||||
) : (
|
||||
<ErrorPageBody message={error.message} />
|
||||
)}
|
||||
</>
|
||||
);
|
||||
}
|
||||
return (
|
||||
<EuiPage style={{ flex: '1 0 auto' }}>
|
||||
<MetricsSideNav
|
||||
layouts={filteredLayouts}
|
||||
loading={metadataLoading}
|
||||
nodeName={name}
|
||||
handleClick={handleClick}
|
||||
/>
|
||||
<AutoSizer content={false} bounds detectAnyWindowResize>
|
||||
{({ measureRef, bounds: { width = 0 } }) => {
|
||||
return (
|
||||
<MetricsDetailsPageColumn innerRef={measureRef}>
|
||||
<EuiPageBody style={{ width: `${width}px` }}>
|
||||
<EuiPageHeader style={{ flex: '0 0 auto' }}>
|
||||
<EuiPageHeaderSection style={{ width: '100%' }}>
|
||||
<MetricsTitleTimeRangeContainer>
|
||||
<EuiHideFor sizes={['xs', 's']}>
|
||||
<EuiTitle size="m">
|
||||
<h1>{name}</h1>
|
||||
</EuiTitle>
|
||||
</EuiHideFor>
|
||||
<MetricsTimeControls
|
||||
currentTimeRange={timeRange}
|
||||
isLiveStreaming={isAutoReloading}
|
||||
refreshInterval={refreshInterval}
|
||||
setRefreshInterval={setRefreshInterval}
|
||||
onChangeTimeRange={setTimeRange}
|
||||
setAutoReload={setAutoReload}
|
||||
/>
|
||||
</MetricsTitleTimeRangeContainer>
|
||||
</EuiPageHeaderSection>
|
||||
</EuiPageHeader>
|
||||
|
||||
<EuiPageContentWithRelative>
|
||||
<Metrics
|
||||
label={name}
|
||||
nodeId={nodeId}
|
||||
layouts={filteredLayouts}
|
||||
metrics={metrics}
|
||||
loading={
|
||||
metrics.length > 0 && isAutoReloading ? false : loading
|
||||
}
|
||||
refetch={refetch}
|
||||
onChangeRangeTime={setTimeRange}
|
||||
isLiveStreaming={isAutoReloading}
|
||||
stopLiveStreaming={() => setAutoReload(false)}
|
||||
/>
|
||||
</EuiPageContentWithRelative>
|
||||
</EuiPageBody>
|
||||
</MetricsDetailsPageColumn>
|
||||
);
|
||||
}}
|
||||
</AutoSizer>
|
||||
</EuiPage>
|
||||
);
|
||||
}}
|
||||
</WithMetrics>
|
||||
</DetailPageContent>
|
||||
</ColumnarPage>
|
||||
)}
|
||||
</WithMetricsTime>
|
||||
);
|
||||
})
|
||||
)
|
||||
)
|
||||
);
|
||||
|
|
|
@ -10,10 +10,14 @@ import { SourceConfigurationFlyoutState } from '../../components/source_configur
|
|||
import { MetricsTimeContainer } from '../../containers/metrics/with_metrics_time';
|
||||
import { Source } from '../../containers/source';
|
||||
|
||||
export const MetricDetailPageProviders: React.FunctionComponent = ({ children }) => (
|
||||
export const withMetricPageProviders = <T extends object>(Component: React.ComponentType<T>) => (
|
||||
props: T
|
||||
) => (
|
||||
<Source.Provider sourceId="default">
|
||||
<SourceConfigurationFlyoutState.Provider>
|
||||
<MetricsTimeContainer.Provider>{children}</MetricsTimeContainer.Provider>
|
||||
<MetricsTimeContainer.Provider>
|
||||
<Component {...props} />
|
||||
</MetricsTimeContainer.Provider>
|
||||
</SourceConfigurationFlyoutState.Provider>
|
||||
</Source.Provider>
|
||||
);
|
||||
|
|
|
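
The page_providers change above turns the MetricDetailPageProviders wrapper component into a withMetricPageProviders higher-order component, so a page picks up the Source, SourceConfigurationFlyoutState, and MetricsTimeContainer providers at export time. Example (not part of the diff; the page component is illustrative):

import React from 'react';
import { withMetricPageProviders } from './page_providers';

// Illustrative page; MetricDetail is wrapped the same way in this commit.
const ExamplePage: React.FC = () => <div>metrics</div>;

export const WrappedExamplePage = withMetricPageProviders(ExamplePage);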
@ -7,7 +7,6 @@
|
|||
import { rootSchema } from '../../common/graphql/root/schema.gql';
|
||||
import { sharedSchema } from '../../common/graphql/shared/schema.gql';
|
||||
import { logEntriesSchema } from './log_entries/schema.gql';
|
||||
import { metadataSchema } from './metadata/schema.gql';
|
||||
import { metricsSchema } from './metrics/schema.gql';
|
||||
import { snapshotSchema } from './snapshot/schema.gql';
|
||||
import { sourceStatusSchema } from './source_status/schema.gql';
|
||||
|
@ -16,7 +15,6 @@ import { sourcesSchema } from './sources/schema.gql';
|
|||
export const schemas = [
|
||||
rootSchema,
|
||||
sharedSchema,
|
||||
metadataSchema,
|
||||
logEntriesSchema,
|
||||
snapshotSchema,
|
||||
sourcesSchema,
|
||||
|
|
|
@ -1,8 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
export { createMetadataResolvers } from './resolvers';
|
||||
export { metadataSchema } from './schema.gql';
|
|
@ -1,30 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { InfraSourceResolvers } from '../../graphql/types';
|
||||
import { InfraMetadataDomain } from '../../lib/domains/metadata_domain';
|
||||
import { ChildResolverOf, InfraResolverOf } from '../../utils/typed_resolvers';
|
||||
import { QuerySourceResolver } from '../sources/resolvers';
|
||||
|
||||
type InfraSourceMetadataByNodeResolver = ChildResolverOf<
|
||||
InfraResolverOf<InfraSourceResolvers.MetadataByNodeResolver>,
|
||||
QuerySourceResolver
|
||||
>;
|
||||
|
||||
export const createMetadataResolvers = (libs: {
|
||||
metadata: InfraMetadataDomain;
|
||||
}): {
|
||||
InfraSource: {
|
||||
metadataByNode: InfraSourceMetadataByNodeResolver;
|
||||
};
|
||||
} => ({
|
||||
InfraSource: {
|
||||
async metadataByNode(source, args, { req }) {
|
||||
const result = await libs.metadata.getMetadata(req, source.id, args.nodeId, args.nodeType);
|
||||
return result;
|
||||
},
|
||||
},
|
||||
});
|
|
@ -1,26 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import gql from 'graphql-tag';
|
||||
|
||||
export const metadataSchema = gql`
|
||||
"One metadata entry for a node."
|
||||
type InfraNodeMetadata {
|
||||
id: ID!
|
||||
name: String!
|
||||
features: [InfraNodeFeature!]!
|
||||
}
|
||||
|
||||
type InfraNodeFeature {
|
||||
name: String!
|
||||
source: String!
|
||||
}
|
||||
|
||||
extend type InfraSource {
|
||||
"A hierarchy of metadata entries by node"
|
||||
metadataByNode(nodeId: String!, nodeType: InfraNodeType!): InfraNodeMetadata!
|
||||
}
|
||||
`;
|
|
@ -56,8 +56,6 @@ export interface InfraSource {
|
|||
configuration: InfraSourceConfiguration;
|
||||
/** The status of the source */
|
||||
status: InfraSourceStatus;
|
||||
/** A hierarchy of metadata entries by node */
|
||||
metadataByNode: InfraNodeMetadata;
|
||||
/** A consecutive span of log entries surrounding a point in time */
|
||||
logEntriesAround: InfraLogEntryInterval;
|
||||
/** A consecutive span of log entries within an interval */
|
||||
|
@ -160,20 +158,6 @@ export interface InfraIndexField {
|
|||
/** Whether the field's values can be aggregated */
|
||||
aggregatable: boolean;
|
||||
}
|
||||
/** One metadata entry for a node. */
|
||||
export interface InfraNodeMetadata {
|
||||
id: string;
|
||||
|
||||
name: string;
|
||||
|
||||
features: InfraNodeFeature[];
|
||||
}
|
||||
|
||||
export interface InfraNodeFeature {
|
||||
name: string;
|
||||
|
||||
source: string;
|
||||
}
|
||||
/** A consecutive sequence of log entries */
|
||||
export interface InfraLogEntryInterval {
|
||||
/** The key corresponding to the start of the interval covered by the entries */
|
||||
|
@ -452,11 +436,6 @@ export interface SourceQueryArgs {
|
|||
/** The id of the source */
|
||||
id: string;
|
||||
}
|
||||
export interface MetadataByNodeInfraSourceArgs {
|
||||
nodeId: string;
|
||||
|
||||
nodeType: InfraNodeType;
|
||||
}
|
||||
export interface LogEntriesAroundInfraSourceArgs {
|
||||
/** The sort key that corresponds to the point in time */
|
||||
key: InfraTimeKeyInput;
|
||||
|
@ -663,8 +642,6 @@ export namespace InfraSourceResolvers {
|
|||
configuration?: ConfigurationResolver<InfraSourceConfiguration, TypeParent, Context>;
|
||||
/** The status of the source */
|
||||
status?: StatusResolver<InfraSourceStatus, TypeParent, Context>;
|
||||
/** A hierarchy of metadata entries by node */
|
||||
metadataByNode?: MetadataByNodeResolver<InfraNodeMetadata, TypeParent, Context>;
|
||||
/** A consecutive span of log entries surrounding a point in time */
|
||||
logEntriesAround?: LogEntriesAroundResolver<InfraLogEntryInterval, TypeParent, Context>;
|
||||
/** A consecutive span of log entries within an interval */
|
||||
|
@ -711,17 +688,6 @@ export namespace InfraSourceResolvers {
|
|||
Parent = InfraSource,
|
||||
Context = InfraContext
|
||||
> = Resolver<R, Parent, Context>;
|
||||
export type MetadataByNodeResolver<
|
||||
R = InfraNodeMetadata,
|
||||
Parent = InfraSource,
|
||||
Context = InfraContext
|
||||
> = Resolver<R, Parent, Context, MetadataByNodeArgs>;
|
||||
export interface MetadataByNodeArgs {
|
||||
nodeId: string;
|
||||
|
||||
nodeType: InfraNodeType;
|
||||
}
|
||||
|
||||
export type LogEntriesAroundResolver<
|
||||
R = InfraLogEntryInterval,
|
||||
Parent = InfraSource,
|
||||
|
@ -1106,51 +1072,6 @@ export namespace InfraIndexFieldResolvers {
|
|||
Context = InfraContext
|
||||
> = Resolver<R, Parent, Context>;
|
||||
}
|
||||
/** One metadata entry for a node. */
|
||||
export namespace InfraNodeMetadataResolvers {
|
||||
export interface Resolvers<Context = InfraContext, TypeParent = InfraNodeMetadata> {
|
||||
id?: IdResolver<string, TypeParent, Context>;
|
||||
|
||||
name?: NameResolver<string, TypeParent, Context>;
|
||||
|
||||
features?: FeaturesResolver<InfraNodeFeature[], TypeParent, Context>;
|
||||
}
|
||||
|
||||
export type IdResolver<R = string, Parent = InfraNodeMetadata, Context = InfraContext> = Resolver<
|
||||
R,
|
||||
Parent,
|
||||
Context
|
||||
>;
|
||||
export type NameResolver<
|
||||
R = string,
|
||||
Parent = InfraNodeMetadata,
|
||||
Context = InfraContext
|
||||
> = Resolver<R, Parent, Context>;
|
||||
export type FeaturesResolver<
|
||||
R = InfraNodeFeature[],
|
||||
Parent = InfraNodeMetadata,
|
||||
Context = InfraContext
|
||||
> = Resolver<R, Parent, Context>;
|
||||
}
|
||||
|
||||
export namespace InfraNodeFeatureResolvers {
|
||||
export interface Resolvers<Context = InfraContext, TypeParent = InfraNodeFeature> {
|
||||
name?: NameResolver<string, TypeParent, Context>;
|
||||
|
||||
source?: SourceResolver<string, TypeParent, Context>;
|
||||
}
|
||||
|
||||
export type NameResolver<
|
||||
R = string,
|
||||
Parent = InfraNodeFeature,
|
||||
Context = InfraContext
|
||||
> = Resolver<R, Parent, Context>;
|
||||
export type SourceResolver<
|
||||
R = string,
|
||||
Parent = InfraNodeFeature,
|
||||
Context = InfraContext
|
||||
> = Resolver<R, Parent, Context>;
|
||||
}
|
||||
/** A consecutive sequence of log entries */
|
||||
export namespace InfraLogEntryIntervalResolvers {
|
||||
export interface Resolvers<Context = InfraContext, TypeParent = InfraLogEntryInterval> {
|
||||
|
|
|
@ -8,7 +8,6 @@ import { IResolvers, makeExecutableSchema } from 'graphql-tools';
|
|||
import { initIpToHostName } from './routes/ip_to_hostname';
|
||||
import { schemas } from './graphql';
|
||||
import { createLogEntriesResolvers } from './graphql/log_entries';
|
||||
import { createMetadataResolvers } from './graphql/metadata';
|
||||
import { createMetricResolvers } from './graphql/metrics/resolvers';
|
||||
import { createSnapshotResolvers } from './graphql/snapshot';
|
||||
import { createSourceStatusResolvers } from './graphql/source_status';
|
||||
|
@ -16,11 +15,11 @@ import { createSourcesResolvers } from './graphql/sources';
|
|||
import { InfraBackendLibs } from './lib/infra_types';
|
||||
import { initLegacyLoggingRoutes } from './logging_legacy';
|
||||
import { initMetricExplorerRoute } from './routes/metrics_explorer';
|
||||
import { initMetadataRoute } from './routes/metadata';
|
||||
|
||||
export const initInfraServer = (libs: InfraBackendLibs) => {
|
||||
const schema = makeExecutableSchema({
|
||||
resolvers: [
|
||||
createMetadataResolvers(libs) as IResolvers,
|
||||
createLogEntriesResolvers(libs) as IResolvers,
|
||||
createSnapshotResolvers(libs) as IResolvers,
|
||||
createSourcesResolvers(libs) as IResolvers,
|
||||
|
@ -35,4 +34,5 @@ export const initInfraServer = (libs: InfraBackendLibs) => {
|
|||
initLegacyLoggingRoutes(libs.framework);
|
||||
initIpToHostName(libs);
|
||||
initMetricExplorerRoute(libs);
|
||||
initMetadataRoute(libs);
|
||||
};
|
||||
|
|
|
@ -1,23 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { InfraSourceConfiguration } from '../../sources';
|
||||
import { InfraFrameworkRequest, InfraMetadataAggregationBucket } from '../framework';
|
||||
|
||||
export interface InfraMetricsAdapterResponse {
|
||||
id: string;
|
||||
name?: string;
|
||||
buckets: InfraMetadataAggregationBucket[];
|
||||
}
|
||||
|
||||
export interface InfraMetadataAdapter {
|
||||
getMetricMetadata(
|
||||
req: InfraFrameworkRequest,
|
||||
sourceConfiguration: InfraSourceConfiguration,
|
||||
nodeId: string,
|
||||
nodeType: string
|
||||
): Promise<InfraMetricsAdapterResponse>;
|
||||
}
|
|
@ -1,87 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { get } from 'lodash';
|
||||
import { InfraSourceConfiguration } from '../../sources';
|
||||
import {
|
||||
InfraBackendFrameworkAdapter,
|
||||
InfraFrameworkRequest,
|
||||
InfraMetadataAggregationResponse,
|
||||
} from '../framework';
|
||||
import { InfraMetadataAdapter, InfraMetricsAdapterResponse } from './adapter_types';
|
||||
import { NAME_FIELDS } from '../../constants';
|
||||
|
||||
export class ElasticsearchMetadataAdapter implements InfraMetadataAdapter {
|
||||
private framework: InfraBackendFrameworkAdapter;
|
||||
constructor(framework: InfraBackendFrameworkAdapter) {
|
||||
this.framework = framework;
|
||||
}
|
||||
|
||||
public async getMetricMetadata(
|
||||
req: InfraFrameworkRequest,
|
||||
sourceConfiguration: InfraSourceConfiguration,
|
||||
nodeId: string,
|
||||
nodeType: 'host' | 'container' | 'pod'
|
||||
): Promise<InfraMetricsAdapterResponse> {
|
||||
const idFieldName = getIdFieldName(sourceConfiguration, nodeType);
|
||||
const metricQuery = {
|
||||
allowNoIndices: true,
|
||||
ignoreUnavailable: true,
|
||||
index: sourceConfiguration.metricAlias,
|
||||
body: {
|
||||
query: {
|
||||
bool: {
|
||||
filter: {
|
||||
term: { [idFieldName]: nodeId },
|
||||
},
|
||||
},
|
||||
},
|
||||
size: 0,
|
||||
aggs: {
|
||||
nodeName: {
|
||||
terms: {
|
||||
field: NAME_FIELDS[nodeType],
|
||||
size: 1,
|
||||
},
|
||||
},
|
||||
metrics: {
|
||||
terms: {
|
||||
field: 'event.dataset',
|
||||
size: 1000,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const response = await this.framework.callWithRequest<
|
||||
any,
|
||||
{ metrics?: InfraMetadataAggregationResponse; nodeName?: InfraMetadataAggregationResponse }
|
||||
>(req, 'search', metricQuery);
|
||||
|
||||
const buckets =
|
||||
response.aggregations && response.aggregations.metrics
|
||||
? response.aggregations.metrics.buckets
|
||||
: [];
|
||||
|
||||
return {
|
||||
id: nodeId,
|
||||
name: get(response, ['aggregations', 'nodeName', 'buckets', 0, 'key'], nodeId),
|
||||
buckets,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
const getIdFieldName = (sourceConfiguration: InfraSourceConfiguration, nodeType: string) => {
|
||||
switch (nodeType) {
|
||||
case 'host':
|
||||
return sourceConfiguration.fields.host;
|
||||
case 'container':
|
||||
return sourceConfiguration.fields.container;
|
||||
default:
|
||||
return sourceConfiguration.fields.pod;
|
||||
}
|
||||
};
|
|
@ -1,7 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
export * from './adapter_types';
|
|
@ -10,12 +10,10 @@ import { InfraKibanaConfigurationAdapter } from '../adapters/configuration/kiban
|
|||
import { FrameworkFieldsAdapter } from '../adapters/fields/framework_fields_adapter';
|
||||
import { InfraKibanaBackendFrameworkAdapter } from '../adapters/framework/kibana_framework_adapter';
|
||||
import { InfraKibanaLogEntriesAdapter } from '../adapters/log_entries/kibana_log_entries_adapter';
|
||||
import { ElasticsearchMetadataAdapter } from '../adapters/metadata/elasticsearch_metadata_adapter';
|
||||
import { KibanaMetricsAdapter } from '../adapters/metrics/kibana_metrics_adapter';
|
||||
import { InfraElasticsearchSourceStatusAdapter } from '../adapters/source_status';
|
||||
import { InfraFieldsDomain } from '../domains/fields_domain';
|
||||
import { InfraLogEntriesDomain } from '../domains/log_entries_domain';
|
||||
import { InfraMetadataDomain } from '../domains/metadata_domain';
|
||||
import { InfraMetricsDomain } from '../domains/metrics_domain';
|
||||
import { InfraBackendLibs, InfraDomainLibs } from '../infra_types';
|
||||
import { InfraSnapshot } from '../snapshot';
|
||||
|
@ -35,9 +33,6 @@ export function compose(server: Server): InfraBackendLibs {
|
|||
const snapshot = new InfraSnapshot({ sources, framework });
|
||||
|
||||
const domainLibs: InfraDomainLibs = {
|
||||
metadata: new InfraMetadataDomain(new ElasticsearchMetadataAdapter(framework), {
|
||||
sources,
|
||||
}),
|
||||
fields: new InfraFieldsDomain(new FrameworkFieldsAdapter(framework), {
|
||||
sources,
|
||||
}),
|
||||
|
|
|
@ -20,3 +20,5 @@ export const IP_FIELDS = {
|
|||
[InfraNodeType.pod]: 'kubernetes.pod.ip',
|
||||
[InfraNodeType.container]: 'container.ip_address',
|
||||
};
|
||||
|
||||
export const CLOUD_METRICS_MODULES = ['aws'];
|
||||
|
|
|
@ -1,7 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
export * from './metadata_domain';
|
|
@ -1,45 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { InfraFrameworkRequest, InfraMetadataAggregationBucket } from '../../adapters/framework';
|
||||
import { InfraMetadataAdapter } from '../../adapters/metadata';
|
||||
import { InfraSources } from '../../sources';
|
||||
|
||||
export class InfraMetadataDomain {
|
||||
constructor(
|
||||
private readonly adapter: InfraMetadataAdapter,
|
||||
private readonly libs: { sources: InfraSources }
|
||||
) {}
|
||||
|
||||
public async getMetadata(
|
||||
req: InfraFrameworkRequest,
|
||||
sourceId: string,
|
||||
nodeId: string,
|
||||
nodeType: string
|
||||
) {
|
||||
const { configuration } = await this.libs.sources.getSourceConfiguration(req, sourceId);
|
||||
const metricsPromise = this.adapter.getMetricMetadata(req, configuration, nodeId, nodeType);
|
||||
|
||||
const metrics = await metricsPromise;
|
||||
|
||||
const metricMetadata = pickMetadata(metrics.buckets).map(entry => {
|
||||
return { name: entry, source: 'metrics' };
|
||||
});
|
||||
|
||||
const id = metrics.id;
|
||||
const name = metrics.name || id;
|
||||
return { id, name, features: metricMetadata };
|
||||
}
|
||||
}
|
||||
|
||||
const pickMetadata = (buckets: InfraMetadataAggregationBucket[]): string[] => {
|
||||
if (buckets) {
|
||||
const metadata = buckets.map(bucket => bucket.key);
|
||||
return metadata;
|
||||
} else {
|
||||
return [];
|
||||
}
|
||||
};
|
|
@ -9,14 +9,12 @@ import { InfraConfigurationAdapter } from './adapters/configuration';
|
|||
import { InfraBackendFrameworkAdapter, InfraFrameworkRequest } from './adapters/framework';
|
||||
import { InfraFieldsDomain } from './domains/fields_domain';
|
||||
import { InfraLogEntriesDomain } from './domains/log_entries_domain';
|
||||
import { InfraMetadataDomain } from './domains/metadata_domain';
|
||||
import { InfraMetricsDomain } from './domains/metrics_domain';
|
||||
import { InfraSnapshot } from './snapshot';
|
||||
import { InfraSourceStatus } from './source_status';
|
||||
import { InfraSources } from './sources';
|
||||
|
||||
export interface InfraDomainLibs {
|
||||
metadata: InfraMetadataDomain;
|
||||
fields: InfraFieldsDomain;
|
||||
logEntries: InfraLogEntriesDomain;
|
||||
metrics: InfraMetricsDomain;
|
||||
|
|
x-pack/legacy/plugins/infra/server/routes/metadata/index.ts (new file, 75 lines)
@@ -0,0 +1,75 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import Boom, { boomify } from 'boom';
import { get } from 'lodash';
import {
  InfraMetadata,
  InfraMetadataWrappedRequest,
  InfraMetadataFeature,
  InfraMetadataRequestRT,
  InfraMetadataRT,
} from '../../../common/http_api/metadata_api';
import { InfraBackendLibs } from '../../lib/infra_types';
import { getMetricMetadata } from './lib/get_metric_metadata';
import { pickFeatureName } from './lib/pick_feature_name';
import { getCloudMetricsMetadata } from './lib/get_cloud_metric_metadata';
import { getNodeInfo } from './lib/get_node_info';
import { throwErrors } from '../../../common/runtime_types';

export const initMetadataRoute = (libs: InfraBackendLibs) => {
  const { framework } = libs;

  framework.registerRoute<InfraMetadataWrappedRequest, Promise<InfraMetadata>>({
    method: 'POST',
    path: '/api/infra/metadata',
    handler: async req => {
      try {
        const { nodeId, nodeType, sourceId } = InfraMetadataRequestRT.decode(
          req.payload
        ).getOrElseL(throwErrors(Boom.badRequest));

        const { configuration } = await libs.sources.getSourceConfiguration(req, sourceId);
        const metricsMetadata = await getMetricMetadata(
          framework,
          req,
          configuration,
          nodeId,
          nodeType
        );
        const metricFeatures = pickFeatureName(metricsMetadata.buckets).map(
          nameToFeature('metrics')
        );

        const info = await getNodeInfo(framework, req, configuration, nodeId, nodeType);
        const cloudInstanceId = get<string>(info, 'cloud.instance.id');

        const cloudMetricsMetadata = cloudInstanceId
          ? await getCloudMetricsMetadata(framework, req, configuration, cloudInstanceId)
          : { buckets: [] };
        const cloudMetricsFeatures = pickFeatureName(cloudMetricsMetadata.buckets).map(
          nameToFeature('metrics')
        );

        const id = metricsMetadata.id;
        const name = metricsMetadata.name || id;
        return InfraMetadataRT.decode({
          id,
          name,
          features: [...metricFeatures, ...cloudMetricsFeatures],
          info,
        }).getOrElseL(throwErrors(Boom.badImplementation));
      } catch (error) {
        throw boomify(error);
      }
    },
  });
};

const nameToFeature = (source: string) => (name: string): InfraMetadataFeature => ({
  name,
  source,
});
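For reference, a minimal sketch (not part of the commit) of how a client could call the new endpoint. The request body fields and the kbn-xsrf header mirror the route and the functional test further down; the fetchInfraMetadata helper name, the base URL, and the use of the global fetch API are illustrative assumptions.

// Sketch only: POSTing to the new /api/infra/metadata route from a client.
// Assumes an environment with a global `fetch` (browser or Node 18+).
interface InfraMetadataRequestBody {
  sourceId: string;
  nodeId: string;
  nodeType: 'host' | 'pod' | 'container';
}

async function fetchInfraMetadata(baseUrl: string, body: InfraMetadataRequestBody) {
  const response = await fetch(`${baseUrl}/api/infra/metadata`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'kbn-xsrf': 'xxx', // Kibana requires this header on non-GET requests
    },
    body: JSON.stringify(body),
  });
  if (!response.ok) {
    throw new Error(`Metadata request failed: ${response.status}`);
  }
  // Expected shape per InfraMetadataRT: { id, name, features: [{ name, source }], info }
  return response.json();
}

// Example usage against a local Kibana (host/node values are illustrative):
// fetchInfraMetadata('http://localhost:5601', {
//   sourceId: 'default',
//   nodeId: 'demo-stack-mysql-01',
//   nodeType: 'host',
// }).then(console.log);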
@@ -0,0 +1,62 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import {
  InfraBackendFrameworkAdapter,
  InfraFrameworkRequest,
  InfraMetadataAggregationBucket,
  InfraMetadataAggregationResponse,
} from '../../../lib/adapters/framework';
import { InfraSourceConfiguration } from '../../../lib/sources';
import { CLOUD_METRICS_MODULES } from '../../../lib/constants';

export interface InfraCloudMetricsAdapterResponse {
  buckets: InfraMetadataAggregationBucket[];
}

export const getCloudMetricsMetadata = async (
  framework: InfraBackendFrameworkAdapter,
  req: InfraFrameworkRequest,
  sourceConfiguration: InfraSourceConfiguration,
  instanceId: string
): Promise<InfraCloudMetricsAdapterResponse> => {
  const metricQuery = {
    allowNoIndices: true,
    ignoreUnavailable: true,
    index: sourceConfiguration.metricAlias,
    body: {
      query: {
        bool: {
          filter: [{ match: { 'cloud.instance.id': instanceId } }],
          should: CLOUD_METRICS_MODULES.map(module => ({ match: { 'event.module': module } })),
        },
      },
      size: 0,
      aggs: {
        metrics: {
          terms: {
            field: 'event.dataset',
            size: 1000,
          },
        },
      },
    },
  };

  const response = await framework.callWithRequest<
    {},
    {
      metrics?: InfraMetadataAggregationResponse;
    }
  >(req, 'search', metricQuery);

  const buckets =
    response.aggregations && response.aggregations.metrics
      ? response.aggregations.metrics.buckets
      : [];

  return { buckets };
};
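For orientation, a small sketch (not part of the commit) of the response shape this terms aggregation produces and how the route turns buckets into features. The field names come from the query above; the dataset values and doc counts are made up for the example.

// Illustrative only: the aggregation response shape the code above expects.
const exampleAggregations = {
  metrics: {
    buckets: [
      { key: 'aws.ec2', doc_count: 120 },
      { key: 'aws.rds', doc_count: 40 },
    ],
  },
};

// In the route, each bucket key becomes a feature entry such as
// { name: 'aws.ec2', source: 'metrics' } via pickFeatureName + nameToFeature.
const exampleFeatures = exampleAggregations.metrics.buckets.map(bucket => ({
  name: bucket.key,
  source: 'metrics',
}));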
@@ -0,0 +1,18 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { InfraSourceConfiguration } from '../../../lib/sources';

export const getIdFieldName = (sourceConfiguration: InfraSourceConfiguration, nodeType: string) => {
  switch (nodeType) {
    case 'host':
      return sourceConfiguration.fields.host;
    case 'container':
      return sourceConfiguration.fields.container;
    default:
      return sourceConfiguration.fields.pod;
  }
};
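As a point of reference, a sketch of how this helper resolves against a source configuration. The field values shown are the usual Infra UI defaults and are an assumption here, since a source configuration can override them; a real InfraSourceConfiguration also carries indices, timestamp field, and other settings.

// Hypothetical source configuration using the common defaults.
const exampleSourceConfiguration = {
  fields: {
    host: 'host.name',
    pod: 'kubernetes.pod.uid',
    container: 'container.id',
  },
};

// getIdFieldName(exampleSourceConfiguration, 'host')      -> 'host.name'
// getIdFieldName(exampleSourceConfiguration, 'container') -> 'container.id'
// getIdFieldName(exampleSourceConfiguration, 'pod')       -> 'kubernetes.pod.uid' (default branch)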
@@ -0,0 +1,84 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { get } from 'lodash';
import {
  InfraFrameworkRequest,
  InfraMetadataAggregationBucket,
  InfraBackendFrameworkAdapter,
  InfraMetadataAggregationResponse,
} from '../../../lib/adapters/framework';
import { InfraSourceConfiguration } from '../../../lib/sources';
import { getIdFieldName } from './get_id_field_name';
import { NAME_FIELDS } from '../../../lib/constants';

export interface InfraMetricsAdapterResponse {
  id: string;
  name?: string;
  buckets: InfraMetadataAggregationBucket[];
}

export const getMetricMetadata = async (
  framework: InfraBackendFrameworkAdapter,
  req: InfraFrameworkRequest,
  sourceConfiguration: InfraSourceConfiguration,
  nodeId: string,
  nodeType: 'host' | 'pod' | 'container'
): Promise<InfraMetricsAdapterResponse> => {
  const idFieldName = getIdFieldName(sourceConfiguration, nodeType);

  const metricQuery = {
    allowNoIndices: true,
    ignoreUnavailable: true,
    index: sourceConfiguration.metricAlias,
    body: {
      query: {
        bool: {
          must_not: [{ match: { 'event.dataset': 'aws.ec2' } }],
          filter: [
            {
              match: { [idFieldName]: nodeId },
            },
          ],
        },
      },
      size: 0,
      aggs: {
        nodeName: {
          terms: {
            field: NAME_FIELDS[nodeType],
            size: 1,
          },
        },
        metrics: {
          terms: {
            field: 'event.dataset',
            size: 1000,
          },
        },
      },
    },
  };

  const response = await framework.callWithRequest<
    {},
    {
      metrics?: InfraMetadataAggregationResponse;
      nodeName?: InfraMetadataAggregationResponse;
    }
  >(req, 'search', metricQuery);

  const buckets =
    response.aggregations && response.aggregations.metrics
      ? response.aggregations.metrics.buckets
      : [];

  return {
    id: nodeId,
    name: get(response, ['aggregations', 'nodeName', 'buckets', 0, 'key'], nodeId),
    buckets,
  };
};
@@ -0,0 +1,76 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { first } from 'lodash';
import {
  InfraFrameworkRequest,
  InfraBackendFrameworkAdapter,
} from '../../../lib/adapters/framework';
import { InfraSourceConfiguration } from '../../../lib/sources';
import { InfraNodeType } from '../../../graphql/types';
import { InfraMetadataInfo } from '../../../../common/http_api/metadata_api';
import { getPodNodeName } from './get_pod_node_name';
import { CLOUD_METRICS_MODULES } from '../../../lib/constants';
import { getIdFieldName } from './get_id_field_name';

export const getNodeInfo = async (
  framework: InfraBackendFrameworkAdapter,
  req: InfraFrameworkRequest,
  sourceConfiguration: InfraSourceConfiguration,
  nodeId: string,
  nodeType: 'host' | 'pod' | 'container'
): Promise<InfraMetadataInfo> => {
  // If the nodeType is a Kubernetes pod then we need to get the node info
  // from a host record instead of a pod. This is due to the fact that any host
  // can report pod details and we can't rely on the host/cloud information associated
  // with the kubernetes.pod.uid. We need to first look up the `kubernetes.node.name`
  // and then use that to look up the host's node information.
  if (nodeType === InfraNodeType.pod) {
    const kubernetesNodeName = await getPodNodeName(
      framework,
      req,
      sourceConfiguration,
      nodeId,
      nodeType
    );
    if (kubernetesNodeName) {
      return getNodeInfo(
        framework,
        req,
        sourceConfiguration,
        kubernetesNodeName,
        InfraNodeType.host
      );
    }
    return {};
  }
  const params = {
    allowNoIndices: true,
    ignoreUnavailable: true,
    terminateAfter: 1,
    index: sourceConfiguration.metricAlias,
    body: {
      size: 1,
      _source: ['host.*', 'cloud.*'],
      query: {
        bool: {
          must_not: CLOUD_METRICS_MODULES.map(module => ({ match: { 'event.module': module } })),
          filter: [{ match: { [getIdFieldName(sourceConfiguration, nodeType)]: nodeId } }],
        },
      },
    },
  };
  const response = await framework.callWithRequest<{ _source: InfraMetadataInfo }, {}>(
    req,
    'search',
    params
  );
  const firstHit = first(response.hits.hits);
  if (firstHit) {
    return firstHit._source;
  }
  return {};
};
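For orientation, the info object returned here is simply the host.* and cloud.* portion of the first matching metrics document. An illustrative value, taken from the GCP host exercised by the functional test further down, looks roughly like this (sketch only, not part of the commit):

// Illustrative only: the shape of the `info` object returned by getNodeInfo,
// mirroring the GCP host document asserted on in the functional test below.
const exampleNodeInfo = {
  cloud: {
    provider: 'gcp',
    availability_zone: 'europe-west1-c',
    instance: {
      id: '6200309808276807579',
      name: 'gke-observability-8--observability-8--bc1afd95-f0zc',
    },
    machine: { type: 'n1-standard-4' },
    project: { id: 'elastic-observability' },
  },
  host: {
    name: 'gke-observability-8--observability-8--bc1afd95-f0zc',
    hostname: 'gke-observability-8--observability-8--bc1afd95-f0zc',
    architecture: 'x86_64',
    containerized: false,
    os: {
      family: 'redhat',
      name: 'CentOS Linux',
      platform: 'centos',
      version: '7 (Core)',
      kernel: '4.14.127+',
      codename: 'Core',
    },
  },
};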
@@ -0,0 +1,48 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { first, get } from 'lodash';
import {
  InfraFrameworkRequest,
  InfraBackendFrameworkAdapter,
} from '../../../lib/adapters/framework';
import { InfraSourceConfiguration } from '../../../lib/sources';
import { getIdFieldName } from './get_id_field_name';

export const getPodNodeName = async (
  framework: InfraBackendFrameworkAdapter,
  req: InfraFrameworkRequest,
  sourceConfiguration: InfraSourceConfiguration,
  nodeId: string,
  nodeType: 'host' | 'pod' | 'container'
): Promise<string | undefined> => {
  const params = {
    allowNoIndices: true,
    ignoreUnavailable: true,
    terminateAfter: 1,
    index: sourceConfiguration.metricAlias,
    body: {
      size: 1,
      _source: ['kubernetes.node.name'],
      query: {
        bool: {
          filter: [
            { match: { [getIdFieldName(sourceConfiguration, nodeType)]: nodeId } },
            { exists: { field: `kubernetes.node.name` } },
          ],
        },
      },
    },
  };
  const response = await framework.callWithRequest<
    { _source: { kubernetes: { node: { name: string } } } },
    {}
  >(req, 'search', params);
  const firstHit = first(response.hits.hits);
  if (firstHit) {
    return get(firstHit, '_source.kubernetes.node.name');
  }
};
@@ -0,0 +1,16 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { InfraMetadataAggregationBucket } from '../../../lib/adapters/framework';

export const pickFeatureName = (buckets: InfraMetadataAggregationBucket[]): string[] => {
  if (buckets) {
    const metadata = buckets.map(bucket => bucket.key);
    return metadata;
  } else {
    return [];
  }
};
@@ -22,5 +22,9 @@ export const DATES = {
      min: 1562786660845,
      max: 1562786716965,
    },
    logs_and_metrics_with_aws: {
      min: 1564083185000,
      max: 1564083493080,
    },
  },
};
@@ -5,39 +5,42 @@
 */

import expect from '@kbn/expect';

import { metadataQuery } from '../../../../legacy/plugins/infra/public/containers/metadata/metadata.gql_query';
import { MetadataQuery } from '../../../../legacy/plugins/infra/public/graphql/types';
import { InfraNodeType } from '../../../../legacy/plugins/infra/server/graphql/types';
import {
  InfraMetadata,
  InfraMetadataRequest,
} from '../../../../legacy/plugins/infra/common/http_api/metadata_api';
import { KbnTestProvider } from './types';

const metadataTests: KbnTestProvider = ({ getService }) => {
  const esArchiver = getService('esArchiver');
  const client = getService('infraOpsGraphQLClient');
  const supertest = getService('supertest');
  const fetchMetadata = async (body: InfraMetadataRequest): Promise<InfraMetadata | undefined> => {
    const response = await supertest
      .post('/api/infra/metadata')
      .set('kbn-xsrf', 'xxx')
      .send(body)
      .expect(200);
    return response.body;
  };

  describe('metadata', () => {
    describe('7.0.0', () => {
      before(() => esArchiver.load('infra/7.0.0/hosts'));
      after(() => esArchiver.unload('infra/7.0.0/hosts'));

      it('hosts', () => {
        return client
          .query<MetadataQuery.Query>({
            query: metadataQuery,
            variables: {
              sourceId: 'default',
              nodeId: 'demo-stack-mysql-01',
              nodeType: 'host',
            },
          })
          .then(resp => {
            const metadata = resp.data.source.metadataByNode;
            if (metadata) {
              expect(metadata.features.length).to.be(12);
              expect(metadata.name).to.equal('demo-stack-mysql-01');
            } else {
              throw new Error('Metadata should never be empty');
            }
          });
      it('hosts', async () => {
        const metadata = await fetchMetadata({
          sourceId: 'default',
          nodeId: 'demo-stack-mysql-01',
          nodeType: InfraNodeType.host,
        });
        if (metadata) {
          expect(metadata.features.length).to.be(12);
          expect(metadata.name).to.equal('demo-stack-mysql-01');
        } else {
          throw new Error('Metadata should never be empty');
        }
      });
    });

@@ -45,25 +48,196 @@ const metadataTests: KbnTestProvider = ({ getService }) => {
      before(() => esArchiver.load('infra/6.6.0/docker'));
      after(() => esArchiver.unload('infra/6.6.0/docker'));

      it('docker', () => {
        return client
          .query<MetadataQuery.Query>({
            query: metadataQuery,
            variables: {
              sourceId: 'default',
              nodeId: '631f36a845514442b93c3fdd2dc91bcd8feb680b8ac5832c7fb8fdc167bb938e',
              nodeType: 'container',
      it('docker', async () => {
        const metadata = await fetchMetadata({
          sourceId: 'default',
          nodeId: '631f36a845514442b93c3fdd2dc91bcd8feb680b8ac5832c7fb8fdc167bb938e',
          nodeType: InfraNodeType.container,
        });
        if (metadata) {
          expect(metadata.features.length).to.be(10);
          expect(metadata.name).to.equal('docker-autodiscovery_elasticsearch_1');
        } else {
          throw new Error('Metadata should never be empty');
        }
      });
    });

    describe('8.0.0', () => {
      const archiveName = 'infra/8.0.0/logs_and_metrics_with_aws';
      before(() => esArchiver.load(archiveName));
      after(() => esArchiver.unload(archiveName));

      it('host', async () => {
        const metadata = await fetchMetadata({
          sourceId: 'default',
          nodeId: 'gke-observability-8--observability-8--bc1afd95-f0zc',
          nodeType: InfraNodeType.host,
        });
        if (metadata) {
          expect(metadata.features.length).to.be(58);
          expect(metadata.name).to.equal('gke-observability-8--observability-8--bc1afd95-f0zc');
          expect(metadata.info).to.eql({
            cloud: {
              availability_zone: 'europe-west1-c',
              instance: {
                name: 'gke-observability-8--observability-8--bc1afd95-f0zc',
                id: '6200309808276807579',
              },
              provider: 'gcp',
              machine: { type: 'n1-standard-4' },
              project: { id: 'elastic-observability' },
            },
            host: {
              hostname: 'gke-observability-8--observability-8--bc1afd95-f0zc',
              os: {
                kernel: '4.14.127+',
                codename: 'Core',
                name: 'CentOS Linux',
                family: 'redhat',
                version: '7 (Core)',
                platform: 'centos',
              },
              containerized: false,
              name: 'gke-observability-8--observability-8--bc1afd95-f0zc',
              architecture: 'x86_64',
            },
          })
          .then(resp => {
            const metadata = resp.data.source.metadataByNode;
            if (metadata) {
              expect(metadata.features.length).to.be(10);
              expect(metadata.name).to.equal('docker-autodiscovery_elasticsearch_1');
            } else {
              throw new Error('Metadata should never be empty');
            }
          });
        } else {
          throw new Error('Metadata should never be empty');
        }
      });

      it('host with aws', async () => {
        const metadata = await fetchMetadata({
          sourceId: 'default',
          nodeId: 'ip-172-31-47-9.us-east-2.compute.internal',
          nodeType: InfraNodeType.host,
        });
        if (metadata) {
          expect(metadata.features.length).to.be(19);
          expect(metadata.features.some(f => f.name === 'aws.ec2')).to.be(true);
          expect(metadata.name).to.equal('ip-172-31-47-9.us-east-2.compute.internal');
          expect(metadata.info).to.eql({
            cloud: {
              availability_zone: 'us-east-2c',
              image: { id: 'ami-0d8f6eb4f641ef691' },
              instance: { id: 'i-011454f72559c510b' },
              provider: 'aws',
              machine: { type: 't2.micro' },
              region: 'us-east-2',
              account: { id: '015351775590' },
            },
            host: {
              hostname: 'ip-172-31-47-9.us-east-2.compute.internal',
              os: {
                kernel: '4.14.123-111.109.amzn2.x86_64',
                codename: 'Karoo',
                name: 'Amazon Linux',
                family: 'redhat',
                version: '2',
                platform: 'amzn',
              },
              containerized: false,
              name: 'ip-172-31-47-9.us-east-2.compute.internal',
              id: 'ded64cbff86f478990a3dfbb63a8d238',
              architecture: 'x86_64',
            },
          });
        } else {
          throw new Error('Metadata should never be empty');
        }
      });

      it('pod', async () => {
        const metadata = await fetchMetadata({
          sourceId: 'default',
          nodeId: '14887487-99f8-11e9-9a96-42010a84004d',
          nodeType: InfraNodeType.pod,
        });
        if (metadata) {
          expect(metadata.features.length).to.be(29);
          // With this data set the `kubernetes.pod.name` fields have been removed.
          expect(metadata.name).to.equal('fluentd-gcp-v3.2.0-np7vw');
          expect(metadata.info).to.eql({
            cloud: {
              instance: {
                id: '6613144177892233360',
                name: 'gke-observability-8--observability-8--bc1afd95-ngmh',
              },
              provider: 'gcp',
              availability_zone: 'europe-west1-c',
              machine: {
                type: 'n1-standard-4',
              },
              project: {
                id: 'elastic-observability',
              },
            },
            host: {
              hostname: 'gke-observability-8--observability-8--bc1afd95-ngmh',
              name: 'gke-observability-8--observability-8--bc1afd95-ngmh',
              os: {
                codename: 'Core',
                family: 'redhat',
                kernel: '4.14.127+',
                name: 'CentOS Linux',
                platform: 'centos',
                version: '7 (Core)',
              },
              architecture: 'x86_64',
              containerized: false,
            },
          });
        } else {
          throw new Error('Metadata should never be empty');
        }
      });

      it('container', async () => {
        const metadata = await fetchMetadata({
          sourceId: 'default',
          nodeId: 'c74b04834c6d7cc1800c3afbe31d0c8c0c267f06e9eb45c2b0c2df3e6cee40c5',
          nodeType: InfraNodeType.container,
        });
        if (metadata) {
          expect(metadata.features.length).to.be(26);
          expect(metadata.name).to.equal(
            'k8s_prometheus-to-sd-exporter_fluentd-gcp-v3.2.0-w68r5_kube-system_26950cde-9aed-11e9-9a96-42010a84004d_0'
          );
          expect(metadata.info).to.eql({
            cloud: {
              instance: {
                id: '4039094952262994102',
                name: 'gke-observability-8--observability-8--bc1afd95-nhhw',
              },
              provider: 'gcp',
              availability_zone: 'europe-west1-c',
              machine: {
                type: 'n1-standard-4',
              },
              project: {
                id: 'elastic-observability',
              },
            },
            host: {
              hostname: 'gke-observability-8--observability-8--bc1afd95-nhhw',
              name: 'gke-observability-8--observability-8--bc1afd95-nhhw',
              os: {
                codename: 'Core',
                family: 'redhat',
                kernel: '4.14.127+',
                name: 'CentOS Linux',
                platform: 'centos',
                version: '7 (Core)',
              },
              architecture: 'x86_64',
              containerized: false,
            },
          });
        } else {
          throw new Error('Metadata should never be empty');
        }
      });
    });
  });
Binary file not shown.
File diff suppressed because it is too large.