[Metrics UI][Logs UI] Completely remove GraphQL and Apollo (#89036)

This commit is contained in:
Chris Cowan 2021-02-02 16:56:40 -07:00 committed by GitHub
parent 9246f398ea
commit cf846bdddf
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
71 changed files with 406 additions and 7706 deletions

View file

@ -1,7 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
export { rootSchema } from './schema.gql';

View file

@ -1,18 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import gql from 'graphql-tag';
export const rootSchema = gql`
schema {
query: Query
mutation: Mutation
}
type Query
type Mutation
`;

View file

@ -1,81 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import gql from 'graphql-tag';
export const sharedFragments = {
InfraTimeKey: gql`
fragment InfraTimeKeyFields on InfraTimeKey {
time
tiebreaker
}
`,
InfraSourceFields: gql`
fragment InfraSourceFields on InfraSource {
id
version
updatedAt
origin
}
`,
InfraLogEntryFields: gql`
fragment InfraLogEntryFields on InfraLogEntry {
gid
key {
time
tiebreaker
}
columns {
... on InfraLogEntryTimestampColumn {
columnId
timestamp
}
... on InfraLogEntryMessageColumn {
columnId
message {
... on InfraLogMessageFieldSegment {
field
value
}
... on InfraLogMessageConstantSegment {
constant
}
}
}
... on InfraLogEntryFieldColumn {
columnId
field
value
}
}
}
`,
InfraLogEntryHighlightFields: gql`
fragment InfraLogEntryHighlightFields on InfraLogEntry {
gid
key {
time
tiebreaker
}
columns {
... on InfraLogEntryMessageColumn {
columnId
message {
... on InfraLogMessageFieldSegment {
field
highlights
}
}
}
... on InfraLogEntryFieldColumn {
columnId
field
highlights
}
}
}
`,
};

View file

@ -1,8 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
export { sharedFragments } from './fragments.gql_query';
export { sharedSchema } from './schema.gql';

View file

@ -1,38 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import gql from 'graphql-tag';
export const sharedSchema = gql`
"A representation of the log entry's position in the event stream"
type InfraTimeKey {
"The timestamp of the event that the log entry corresponds to"
time: Float!
"The tiebreaker that disambiguates events with the same timestamp"
tiebreaker: Float!
}
input InfraTimeKeyInput {
time: Float!
tiebreaker: Float!
}
enum InfraIndexType {
ANY
LOGS
METRICS
}
enum InfraNodeType {
pod
container
host
awsEC2
awsS3
awsRDS
awsSQS
}
`;

View file

@ -1,780 +0,0 @@
/* tslint:disable */
// ====================================================
// START: Typescript template
// ====================================================
// ====================================================
// Types
// ====================================================
export interface Query {
/** Get an infrastructure data source by id.The resolution order for the source configuration attributes is as followswith the first defined value winning:1. The attributes of the saved object with the given 'id'.2. The attributes defined in the static Kibana configuration key'xpack.infra.sources.default'.3. The hard-coded default values.As a consequence, querying a source that doesn't exist doesn't error out,but returns the configured or hardcoded defaults. */
source: InfraSource;
/** Get a list of all infrastructure data sources */
allSources: InfraSource[];
}
/** A source of infrastructure data */
export interface InfraSource {
/** The id of the source */
id: string;
/** The version number the source configuration was last persisted with */
version?: string | null;
/** The timestamp the source configuration was last persisted at */
updatedAt?: number | null;
/** The origin of the source (one of 'fallback', 'internal', 'stored') */
origin: string;
/** The raw configuration of the source */
configuration: InfraSourceConfiguration;
/** The status of the source */
status: InfraSourceStatus;
/** A snapshot of nodes */
snapshot?: InfraSnapshotResponse | null;
metrics: InfraMetricData[];
}
/** A set of configuration options for an infrastructure data source */
export interface InfraSourceConfiguration {
/** The name of the data source */
name: string;
/** A description of the data source */
description: string;
/** The alias to read metric data from */
metricAlias: string;
/** The alias to read log data from */
logAlias: string;
/** The field mapping to use for this source */
fields: InfraSourceFields;
/** The columns to use for log display */
logColumns: InfraSourceLogColumn[];
}
/** A mapping of semantic fields to their document counterparts */
export interface InfraSourceFields {
/** The field to identify a container by */
container: string;
/** The fields to identify a host by */
host: string;
/** The fields to use as the log message */
message: string[];
/** The field to identify a pod by */
pod: string;
/** The field to use as a tiebreaker for log events that have identical timestamps */
tiebreaker: string;
/** The field to use as a timestamp for metrics and logs */
timestamp: string;
}
/** The built-in timestamp log column */
export interface InfraSourceTimestampLogColumn {
timestampColumn: InfraSourceTimestampLogColumnAttributes;
}
export interface InfraSourceTimestampLogColumnAttributes {
/** A unique id for the column */
id: string;
}
/** The built-in message log column */
export interface InfraSourceMessageLogColumn {
messageColumn: InfraSourceMessageLogColumnAttributes;
}
export interface InfraSourceMessageLogColumnAttributes {
/** A unique id for the column */
id: string;
}
/** A log column containing a field value */
export interface InfraSourceFieldLogColumn {
fieldColumn: InfraSourceFieldLogColumnAttributes;
}
export interface InfraSourceFieldLogColumnAttributes {
/** A unique id for the column */
id: string;
/** The field name this column refers to */
field: string;
}
/** The status of an infrastructure data source */
export interface InfraSourceStatus {
/** Whether the configured metric alias exists */
metricAliasExists: boolean;
/** Whether the configured log alias exists */
logAliasExists: boolean;
/** Whether the configured alias or wildcard pattern resolve to any metric indices */
metricIndicesExist: boolean;
/** Whether the configured alias or wildcard pattern resolve to any log indices */
logIndicesExist: boolean;
/** The list of indices in the metric alias */
metricIndices: string[];
/** The list of indices in the log alias */
logIndices: string[];
/** The list of fields defined in the index mappings */
indexFields: InfraIndexField[];
}
/** A descriptor of a field in an index */
export interface InfraIndexField {
/** The name of the field */
name: string;
/** The type of the field's values as recognized by Kibana */
type: string;
/** Whether the field's values can be efficiently searched for */
searchable: boolean;
/** Whether the field's values can be aggregated */
aggregatable: boolean;
/** Whether the field should be displayed based on event.module and a ECS allowed list */
displayable: boolean;
}
export interface InfraSnapshotResponse {
/** Nodes of type host, container or pod grouped by 0, 1 or 2 terms */
nodes: InfraSnapshotNode[];
}
export interface InfraSnapshotNode {
path: InfraSnapshotNodePath[];
metric: InfraSnapshotNodeMetric;
}
export interface InfraSnapshotNodePath {
value: string;
label: string;
ip?: string | null;
}
export interface InfraSnapshotNodeMetric {
name: InfraSnapshotMetricType;
value?: number | null;
avg?: number | null;
max?: number | null;
}
export interface InfraMetricData {
id?: InfraMetric | null;
series: InfraDataSeries[];
}
export interface InfraDataSeries {
id: string;
label: string;
data: InfraDataPoint[];
}
export interface InfraDataPoint {
timestamp: number;
value?: number | null;
}
export interface Mutation {
/** Create a new source of infrastructure data */
createSource: UpdateSourceResult;
/** Modify an existing source */
updateSource: UpdateSourceResult;
/** Delete a source of infrastructure data */
deleteSource: DeleteSourceResult;
}
/** The result of a successful source update */
export interface UpdateSourceResult {
/** The source that was updated */
source: InfraSource;
}
/** The result of a source deletion operations */
export interface DeleteSourceResult {
/** The id of the source that was deleted */
id: string;
}
// ====================================================
// InputTypes
// ====================================================
export interface InfraTimerangeInput {
/** The interval string to use for last bucket. The format is '{value}{unit}'. For example '5m' would return the metrics for the last 5 minutes of the timespan. */
interval: string;
/** The end of the timerange */
to: number;
/** The beginning of the timerange */
from: number;
}
export interface InfraSnapshotGroupbyInput {
/** The label to use in the results for the group by for the terms group by */
label?: string | null;
/** The field to group by from a terms aggregation, this is ignored by the filter type */
field?: string | null;
}
export interface InfraSnapshotMetricInput {
/** The type of metric */
type: InfraSnapshotMetricType;
}
export interface InfraNodeIdsInput {
nodeId: string;
cloudId?: string | null;
}
/** The properties to update the source with */
export interface UpdateSourceInput {
/** The name of the data source */
name?: string | null;
/** A description of the data source */
description?: string | null;
/** The alias to read metric data from */
metricAlias?: string | null;
/** The alias to read log data from */
logAlias?: string | null;
/** The field mapping to use for this source */
fields?: UpdateSourceFieldsInput | null;
/** Default view for inventory */
inventoryDefaultView?: string | null;
/** Default view for Metrics Explorer */
metricsExplorerDefaultView?: string | null;
/** The log columns to display for this source */
logColumns?: UpdateSourceLogColumnInput[] | null;
}
/** The mapping of semantic fields of the source to be created */
export interface UpdateSourceFieldsInput {
/** The field to identify a container by */
container?: string | null;
/** The fields to identify a host by */
host?: string | null;
/** The field to identify a pod by */
pod?: string | null;
/** The field to use as a tiebreaker for log events that have identical timestamps */
tiebreaker?: string | null;
/** The field to use as a timestamp for metrics and logs */
timestamp?: string | null;
}
/** One of the log column types to display for this source */
export interface UpdateSourceLogColumnInput {
/** A custom field log column */
fieldColumn?: UpdateSourceFieldLogColumnInput | null;
/** A built-in message log column */
messageColumn?: UpdateSourceMessageLogColumnInput | null;
/** A built-in timestamp log column */
timestampColumn?: UpdateSourceTimestampLogColumnInput | null;
}
export interface UpdateSourceFieldLogColumnInput {
id: string;
field: string;
}
export interface UpdateSourceMessageLogColumnInput {
id: string;
}
export interface UpdateSourceTimestampLogColumnInput {
id: string;
}
// ====================================================
// Arguments
// ====================================================
export interface SourceQueryArgs {
/** The id of the source */
id: string;
}
export interface SnapshotInfraSourceArgs {
timerange: InfraTimerangeInput;
filterQuery?: string | null;
}
export interface MetricsInfraSourceArgs {
nodeIds: InfraNodeIdsInput;
nodeType: InfraNodeType;
timerange: InfraTimerangeInput;
metrics: InfraMetric[];
}
export interface IndexFieldsInfraSourceStatusArgs {
indexType?: InfraIndexType | null;
}
export interface NodesInfraSnapshotResponseArgs {
type: InfraNodeType;
groupBy: InfraSnapshotGroupbyInput[];
metric: InfraSnapshotMetricInput;
}
export interface CreateSourceMutationArgs {
/** The id of the source */
id: string;
sourceProperties: UpdateSourceInput;
}
export interface UpdateSourceMutationArgs {
/** The id of the source */
id: string;
/** The properties to update the source with */
sourceProperties: UpdateSourceInput;
}
export interface DeleteSourceMutationArgs {
/** The id of the source */
id: string;
}
// ====================================================
// Enums
// ====================================================
export enum InfraIndexType {
ANY = 'ANY',
LOGS = 'LOGS',
METRICS = 'METRICS',
}
export enum InfraNodeType {
pod = 'pod',
container = 'container',
host = 'host',
awsEC2 = 'awsEC2',
awsS3 = 'awsS3',
awsRDS = 'awsRDS',
awsSQS = 'awsSQS',
}
export enum InfraSnapshotMetricType {
count = 'count',
cpu = 'cpu',
load = 'load',
memory = 'memory',
tx = 'tx',
rx = 'rx',
logRate = 'logRate',
diskIOReadBytes = 'diskIOReadBytes',
diskIOWriteBytes = 'diskIOWriteBytes',
s3TotalRequests = 's3TotalRequests',
s3NumberOfObjects = 's3NumberOfObjects',
s3BucketSize = 's3BucketSize',
s3DownloadBytes = 's3DownloadBytes',
s3UploadBytes = 's3UploadBytes',
rdsConnections = 'rdsConnections',
rdsQueriesExecuted = 'rdsQueriesExecuted',
rdsActiveTransactions = 'rdsActiveTransactions',
rdsLatency = 'rdsLatency',
sqsMessagesVisible = 'sqsMessagesVisible',
sqsMessagesDelayed = 'sqsMessagesDelayed',
sqsMessagesSent = 'sqsMessagesSent',
sqsMessagesEmpty = 'sqsMessagesEmpty',
sqsOldestMessage = 'sqsOldestMessage',
}
export enum InfraMetric {
hostSystemOverview = 'hostSystemOverview',
hostCpuUsage = 'hostCpuUsage',
hostFilesystem = 'hostFilesystem',
hostK8sOverview = 'hostK8sOverview',
hostK8sCpuCap = 'hostK8sCpuCap',
hostK8sDiskCap = 'hostK8sDiskCap',
hostK8sMemoryCap = 'hostK8sMemoryCap',
hostK8sPodCap = 'hostK8sPodCap',
hostLoad = 'hostLoad',
hostMemoryUsage = 'hostMemoryUsage',
hostNetworkTraffic = 'hostNetworkTraffic',
hostDockerOverview = 'hostDockerOverview',
hostDockerInfo = 'hostDockerInfo',
hostDockerTop5ByCpu = 'hostDockerTop5ByCpu',
hostDockerTop5ByMemory = 'hostDockerTop5ByMemory',
podOverview = 'podOverview',
podCpuUsage = 'podCpuUsage',
podMemoryUsage = 'podMemoryUsage',
podLogUsage = 'podLogUsage',
podNetworkTraffic = 'podNetworkTraffic',
containerOverview = 'containerOverview',
containerCpuKernel = 'containerCpuKernel',
containerCpuUsage = 'containerCpuUsage',
containerDiskIOOps = 'containerDiskIOOps',
containerDiskIOBytes = 'containerDiskIOBytes',
containerMemory = 'containerMemory',
containerNetworkTraffic = 'containerNetworkTraffic',
nginxHits = 'nginxHits',
nginxRequestRate = 'nginxRequestRate',
nginxActiveConnections = 'nginxActiveConnections',
nginxRequestsPerConnection = 'nginxRequestsPerConnection',
awsOverview = 'awsOverview',
awsCpuUtilization = 'awsCpuUtilization',
awsNetworkBytes = 'awsNetworkBytes',
awsNetworkPackets = 'awsNetworkPackets',
awsDiskioBytes = 'awsDiskioBytes',
awsDiskioOps = 'awsDiskioOps',
awsEC2CpuUtilization = 'awsEC2CpuUtilization',
awsEC2DiskIOBytes = 'awsEC2DiskIOBytes',
awsEC2NetworkTraffic = 'awsEC2NetworkTraffic',
awsS3TotalRequests = 'awsS3TotalRequests',
awsS3NumberOfObjects = 'awsS3NumberOfObjects',
awsS3BucketSize = 'awsS3BucketSize',
awsS3DownloadBytes = 'awsS3DownloadBytes',
awsS3UploadBytes = 'awsS3UploadBytes',
awsRDSCpuTotal = 'awsRDSCpuTotal',
awsRDSConnections = 'awsRDSConnections',
awsRDSQueriesExecuted = 'awsRDSQueriesExecuted',
awsRDSActiveTransactions = 'awsRDSActiveTransactions',
awsRDSLatency = 'awsRDSLatency',
awsSQSMessagesVisible = 'awsSQSMessagesVisible',
awsSQSMessagesDelayed = 'awsSQSMessagesDelayed',
awsSQSMessagesSent = 'awsSQSMessagesSent',
awsSQSMessagesEmpty = 'awsSQSMessagesEmpty',
awsSQSOldestMessage = 'awsSQSOldestMessage',
custom = 'custom',
}
// ====================================================
// Unions
// ====================================================
/** All known log column types */
export type InfraSourceLogColumn =
| InfraSourceTimestampLogColumn
| InfraSourceMessageLogColumn
| InfraSourceFieldLogColumn;
// ====================================================
// END: Typescript template
// ====================================================
// ====================================================
// Documents
// ====================================================
export namespace MetricsQuery {
export type Variables = {
sourceId: string;
timerange: InfraTimerangeInput;
metrics: InfraMetric[];
nodeId: string;
cloudId?: string | null;
nodeType: InfraNodeType;
};
export type Query = {
__typename?: 'Query';
source: Source;
};
export type Source = {
__typename?: 'InfraSource';
id: string;
metrics: Metrics[];
};
export type Metrics = {
__typename?: 'InfraMetricData';
id?: InfraMetric | null;
series: Series[];
};
export type Series = {
__typename?: 'InfraDataSeries';
id: string;
label: string;
data: Data[];
};
export type Data = {
__typename?: 'InfraDataPoint';
timestamp: number;
value?: number | null;
};
}
export namespace CreateSourceConfigurationMutation {
export type Variables = {
sourceId: string;
sourceProperties: UpdateSourceInput;
};
export type Mutation = {
__typename?: 'Mutation';
createSource: CreateSource;
};
export type CreateSource = {
__typename?: 'UpdateSourceResult';
source: Source;
};
export type Source = {
__typename?: 'InfraSource';
configuration: Configuration;
status: Status;
} & InfraSourceFields.Fragment;
export type Configuration = SourceConfigurationFields.Fragment;
export type Status = SourceStatusFields.Fragment;
}
export namespace SourceQuery {
export type Variables = {
sourceId?: string | null;
};
export type Query = {
__typename?: 'Query';
source: Source;
};
export type Source = {
__typename?: 'InfraSource';
configuration: Configuration;
status: Status;
} & InfraSourceFields.Fragment;
export type Configuration = SourceConfigurationFields.Fragment;
export type Status = SourceStatusFields.Fragment;
}
export namespace UpdateSourceMutation {
export type Variables = {
sourceId?: string | null;
sourceProperties: UpdateSourceInput;
};
export type Mutation = {
__typename?: 'Mutation';
updateSource: UpdateSource;
};
export type UpdateSource = {
__typename?: 'UpdateSourceResult';
source: Source;
};
export type Source = {
__typename?: 'InfraSource';
configuration: Configuration;
status: Status;
} & InfraSourceFields.Fragment;
export type Configuration = SourceConfigurationFields.Fragment;
export type Status = SourceStatusFields.Fragment;
}
export namespace WaffleNodesQuery {
export type Variables = {
sourceId: string;
timerange: InfraTimerangeInput;
filterQuery?: string | null;
metric: InfraSnapshotMetricInput;
groupBy: InfraSnapshotGroupbyInput[];
type: InfraNodeType;
};
export type Query = {
__typename?: 'Query';
source: Source;
};
export type Source = {
__typename?: 'InfraSource';
id: string;
snapshot?: Snapshot | null;
};
export type Snapshot = {
__typename?: 'InfraSnapshotResponse';
nodes: Nodes[];
};
export type Nodes = {
__typename?: 'InfraSnapshotNode';
path: Path[];
metric: Metric;
};
export type Path = {
__typename?: 'InfraSnapshotNodePath';
value: string;
label: string;
ip?: string | null;
};
export type Metric = {
__typename?: 'InfraSnapshotNodeMetric';
name: InfraSnapshotMetricType;
value?: number | null;
avg?: number | null;
max?: number | null;
};
}
export namespace SourceConfigurationFields {
export type Fragment = {
__typename?: 'InfraSourceConfiguration';
name: string;
description: string;
logAlias: string;
metricAlias: string;
fields: Fields;
logColumns: LogColumns[];
inventoryDefaultView: string;
metricsExplorerDefaultView: string;
};
export type Fields = {
__typename?: 'InfraSourceFields';
container: string;
host: string;
message: string[];
pod: string;
tiebreaker: string;
timestamp: string;
};
export type LogColumns =
| InfraSourceTimestampLogColumnInlineFragment
| InfraSourceMessageLogColumnInlineFragment
| InfraSourceFieldLogColumnInlineFragment;
export type InfraSourceTimestampLogColumnInlineFragment = {
__typename?: 'InfraSourceTimestampLogColumn';
timestampColumn: TimestampColumn;
};
export type TimestampColumn = {
__typename?: 'InfraSourceTimestampLogColumnAttributes';
id: string;
};
export type InfraSourceMessageLogColumnInlineFragment = {
__typename?: 'InfraSourceMessageLogColumn';
messageColumn: MessageColumn;
};
export type MessageColumn = {
__typename?: 'InfraSourceMessageLogColumnAttributes';
id: string;
};
export type InfraSourceFieldLogColumnInlineFragment = {
__typename?: 'InfraSourceFieldLogColumn';
fieldColumn: FieldColumn;
};
export type FieldColumn = {
__typename?: 'InfraSourceFieldLogColumnAttributes';
id: string;
field: string;
};
}
export namespace SourceStatusFields {
export type Fragment = {
__typename?: 'InfraSourceStatus';
indexFields: IndexFields[];
logIndicesExist: boolean;
metricIndicesExist: boolean;
};
export type IndexFields = {
__typename?: 'InfraIndexField';
name: string;
type: string;
searchable: boolean;
aggregatable: boolean;
displayable: boolean;
};
}
export namespace InfraTimeKeyFields {
export type Fragment = {
__typename?: 'InfraTimeKey';
time: number;
tiebreaker: number;
};
}
export namespace InfraSourceFields {
export type Fragment = {
__typename?: 'InfraSource';
id: string;
version?: string | null;
updatedAt?: number | null;
origin: string;
};
}

View file

@ -17,18 +17,20 @@ const NodeDetailsDataPointRT = rt.intersection([
}),
]);
const NodeDetailsDataSeries = rt.type({
const NodeDetailsDataSeriesRT = rt.type({
id: rt.string,
label: rt.string,
data: rt.array(NodeDetailsDataPointRT),
});
export type NodeDetailsDataSeries = rt.TypeOf<typeof NodeDetailsDataSeriesRT>;
export const NodeDetailsMetricDataRT = rt.intersection([
rt.partial({
id: rt.union([InventoryMetricRT, rt.null]),
}),
rt.type({
series: rt.array(NodeDetailsDataSeries),
series: rt.array(NodeDetailsDataSeriesRT),
}),
]);

View file

@ -39,18 +39,30 @@ const SavedSourceConfigurationFieldsRuntimeType = rt.partial({
timestamp: rt.string,
});
export type InfraSavedSourceConfigurationFields = rt.TypeOf<
typeof SavedSourceConfigurationFieldColumnRuntimeType
>;
export const SavedSourceConfigurationTimestampColumnRuntimeType = rt.type({
timestampColumn: rt.type({
id: rt.string,
}),
});
export type InfraSourceConfigurationTimestampColumn = rt.TypeOf<
typeof SavedSourceConfigurationTimestampColumnRuntimeType
>;
export const SavedSourceConfigurationMessageColumnRuntimeType = rt.type({
messageColumn: rt.type({
id: rt.string,
}),
});
export type InfraSourceConfigurationMessageColumn = rt.TypeOf<
typeof SavedSourceConfigurationMessageColumnRuntimeType
>;
export const SavedSourceConfigurationFieldColumnRuntimeType = rt.type({
fieldColumn: rt.type({
id: rt.string,
@ -64,6 +76,10 @@ export const SavedSourceConfigurationColumnRuntimeType = rt.union([
SavedSourceConfigurationFieldColumnRuntimeType,
]);
export type InfraSavedSourceConfigurationColumn = rt.TypeOf<
typeof SavedSourceConfigurationColumnRuntimeType
>;
export const SavedSourceConfigurationRuntimeType = rt.partial({
name: rt.string,
description: rt.string,
@ -136,12 +152,30 @@ const SourceConfigurationFieldsRuntimeType = rt.type({
...StaticSourceConfigurationFieldsRuntimeType.props,
});
export type InfraSourceConfigurationFields = rt.TypeOf<typeof SourceConfigurationFieldsRuntimeType>;
export const SourceConfigurationRuntimeType = rt.type({
...SavedSourceConfigurationRuntimeType.props,
fields: SourceConfigurationFieldsRuntimeType,
logColumns: rt.array(SavedSourceConfigurationColumnRuntimeType),
});
const SourceStatusFieldRuntimeType = rt.type({
name: rt.string,
type: rt.string,
searchable: rt.boolean,
aggregatable: rt.boolean,
displayable: rt.boolean,
});
export type InfraSourceIndexField = rt.TypeOf<typeof SourceStatusFieldRuntimeType>;
const SourceStatusRuntimeType = rt.type({
logIndicesExist: rt.boolean,
metricIndicesExist: rt.boolean,
indexFields: rt.array(SourceStatusFieldRuntimeType),
});
export const SourceRuntimeType = rt.intersection([
rt.type({
id: rt.string,
@ -155,31 +189,19 @@ export const SourceRuntimeType = rt.intersection([
rt.partial({
version: rt.string,
updatedAt: rt.number,
status: SourceStatusRuntimeType,
}),
]);
export interface InfraSourceStatus extends rt.TypeOf<typeof SourceStatusRuntimeType> {}
export interface InfraSourceConfiguration
extends rt.TypeOf<typeof SourceConfigurationRuntimeType> {}
export interface InfraSource extends rt.TypeOf<typeof SourceRuntimeType> {}
const SourceStatusFieldRuntimeType = rt.type({
name: rt.string,
type: rt.string,
searchable: rt.boolean,
aggregatable: rt.boolean,
displayable: rt.boolean,
});
const SourceStatusRuntimeType = rt.type({
logIndicesExist: rt.boolean,
metricIndicesExist: rt.boolean,
indexFields: rt.array(SourceStatusFieldRuntimeType),
});
export const SourceResponseRuntimeType = rt.type({
source: SourceRuntimeType,
status: SourceStatusRuntimeType,
});
export type SourceResponse = rt.TypeOf<typeof SourceResponseRuntimeType>;

View file

@ -14,7 +14,6 @@ export type LogEntryTime = TimeKey;
/**
* message parts
*/
export const logMessageConstantPartRT = rt.type({
constant: rt.string,
});

View file

@ -7,7 +7,7 @@ In this arch, we use 3 main terms to describe the code:
- **Libs / Domain Libs** - Business logic & data formatting (though complex formatting might call utils)
- **Adapters** - code that directly calls 3rd party APIs and data sources, exposing clean easy to stub APIs
- **Composition Files** - composes adapters into libs based on where the code is running
- **Implementation layer** - The API such as rest endpoints or graphql schema on the server, and the state management / UI on the client
- **Implementation layer** - The API such as rest endpoints on the server, and the state management / UI on the client
## Arch Visual Example
@ -85,7 +85,7 @@ An example structure might be...
| | | | |-- kibana_angular // if an adapter has more than one file...
| | | | | |-- index.html
| | | | | |-- index.ts
| | | | |
| | | | |
| | | | |-- ui_harness.ts
| | | |
| | |-- domains

View file

@ -26,7 +26,7 @@ However, components that tweak EUI should go into `/public/components/eui/${comp
If using an EUI component that has not yet been typed, types should be placed into `/types/eui.d.ts`
## Containers (Also: [see GraphQL docs](docs/graphql.md))
## Containers
- HOC's based on Apollo.
- One folder per data type e.g. `host`. Folder name should be singular.

View file

@ -1,53 +0,0 @@
# GraphQL In Infra UI
- The combined graphql schema collected from both the `public` and `server` directories is exported to `common/all.gql_schema.ts` for the purpose of automatic type generation only.
## Server
- Under `/server/graphql` there are files for each domain of data's graph schema and resolvers.
- Each file has 2 exports `${domain}Schema` e.g. `fieldsSchema`, and `create${domain}Resolvers` e.g. `createFieldResolvers`
- `/server/infra_server.ts` imports all schema and resolvers and passing the full schema to the server
- Resolvers should be used to call composed libs, rather than directly performing any meaningful amount of data processing.
- Resolvers should, however, only pass the required data into libs; that is to say all args for example would not be passed into a lib unless all were needed.
## Client
- Under `/public/containers/${domain}/` there is a file for each container. Each file has two exports, the query name e.g. `AllHosts` and the apollo HOC in the pattern of `with${queryName}` e.g. `withAllHosts`. This is done for two reasons:
1. It makes the code uniform, thus easier to reason about later.
2. If reformatting the data using a transform, it lets us re-type the data clearly.
- Containers should use the apollo props callback to pass ONLY the props and data needed to children. e.g.
```ts
import { Hosts, Pods, HostsAndPods } from '../../common/types';
// used to generate the `HostsAndPods` type imported above
export const hostsAndPods = gql`
# ...
`;
type HostsAndPodsProps = {
hosts: Hosts;
pods: Pods;
}
export const withHostsAndPods = graphql<
{},
HostsAndPods.Query,
HostsAndPods.Variables,
HostsAndPodsProps
>(hostsAndPods, {
props: ({ data, ownProps }) => ({
hosts: hostForMap(data && data.hosts ? data.hosts : []),
pods: podsFromHosts(data && data.hosts ? data.hosts : [])
...ownProps,
}),
});
```
as `ownProps` are the props passed to the wrapped component, they should just be forwarded.
## Types
- The command `yarn build-graphql-types` derives the schema, query and mutation types and stores them in `common/types.ts` for use on both the client and server.

View file

@ -4,7 +4,6 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { ApolloClient } from 'apollo-client';
import { AppMountParameters, CoreStart } from 'kibana/public';
import React, { useMemo } from 'react';
import {
@ -15,32 +14,28 @@ import { EuiThemeProvider } from '../../../../../src/plugins/kibana_react/common
import { TriggersAndActionsUIPublicPluginStart } from '../../../triggers_actions_ui/public';
import { createKibanaContextForPlugin } from '../hooks/use_kibana';
import { InfraClientStartDeps } from '../types';
import { ApolloClientContext } from '../utils/apollo_context';
import { HeaderActionMenuProvider } from '../utils/header_action_menu_provider';
import { NavigationWarningPromptProvider } from '../utils/navigation_warning_prompt';
import { TriggersActionsProvider } from '../utils/triggers_actions_context';
import { Storage } from '../../../../../src/plugins/kibana_utils/public';
export const CommonInfraProviders: React.FC<{
apolloClient: ApolloClient<{}>;
appName: string;
storage: Storage;
triggersActionsUI: TriggersAndActionsUIPublicPluginStart;
setHeaderActionMenu: AppMountParameters['setHeaderActionMenu'];
}> = ({ apolloClient, children, triggersActionsUI, setHeaderActionMenu, appName, storage }) => {
}> = ({ children, triggersActionsUI, setHeaderActionMenu, appName, storage }) => {
const [darkMode] = useUiSetting$<boolean>('theme:darkMode');
return (
<TriggersActionsProvider triggersActionsUI={triggersActionsUI}>
<ApolloClientContext.Provider value={apolloClient}>
<EuiThemeProvider darkMode={darkMode}>
<DataUIProviders appName={appName} storage={storage}>
<HeaderActionMenuProvider setHeaderActionMenu={setHeaderActionMenu}>
<NavigationWarningPromptProvider>{children}</NavigationWarningPromptProvider>
</HeaderActionMenuProvider>
</DataUIProviders>
</EuiThemeProvider>
</ApolloClientContext.Provider>
<EuiThemeProvider darkMode={darkMode}>
<DataUIProviders appName={appName} storage={storage}>
<HeaderActionMenuProvider setHeaderActionMenu={setHeaderActionMenu}>
<NavigationWarningPromptProvider>{children}</NavigationWarningPromptProvider>
</HeaderActionMenuProvider>
</DataUIProviders>
</EuiThemeProvider>
</TriggersActionsProvider>
);
};

View file

@ -4,7 +4,6 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { ApolloClient } from 'apollo-client';
import { History } from 'history';
import { CoreStart } from 'kibana/public';
import React from 'react';
@ -17,7 +16,6 @@ import { NotFoundPage } from '../pages/404';
import { LinkToLogsPage } from '../pages/link_to/link_to_logs';
import { LogsPage } from '../pages/logs';
import { InfraClientStartDeps } from '../types';
import { createApolloClient } from '../utils/apollo_client';
import { CommonInfraProviders, CoreProviders } from './common_providers';
import { prepareMountElement } from './common_styles';
@ -26,14 +24,12 @@ export const renderApp = (
plugins: InfraClientStartDeps,
{ element, history, setHeaderActionMenu }: AppMountParameters
) => {
const apolloClient = createApolloClient(core.http.fetch);
const storage = new Storage(window.localStorage);
prepareMountElement(element);
ReactDOM.render(
<LogsApp
apolloClient={apolloClient}
core={core}
storage={storage}
history={history}
@ -49,19 +45,17 @@ export const renderApp = (
};
const LogsApp: React.FC<{
apolloClient: ApolloClient<{}>;
core: CoreStart;
history: History<unknown>;
plugins: InfraClientStartDeps;
setHeaderActionMenu: AppMountParameters['setHeaderActionMenu'];
storage: Storage;
}> = ({ apolloClient, core, history, plugins, setHeaderActionMenu, storage }) => {
}> = ({ core, history, plugins, setHeaderActionMenu, storage }) => {
const uiCapabilities = core.application.capabilities;
return (
<CoreProviders core={core} plugins={plugins}>
<CommonInfraProviders
apolloClient={apolloClient}
appName="Logs UI"
setHeaderActionMenu={setHeaderActionMenu}
storage={storage}

View file

@ -4,7 +4,6 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { ApolloClient } from 'apollo-client';
import { History } from 'history';
import { CoreStart } from 'kibana/public';
import React from 'react';
@ -18,7 +17,6 @@ import { LinkToMetricsPage } from '../pages/link_to/link_to_metrics';
import { InfrastructurePage } from '../pages/metrics';
import { MetricDetail } from '../pages/metrics/metric_detail';
import { InfraClientStartDeps } from '../types';
import { createApolloClient } from '../utils/apollo_client';
import { RedirectWithQueryParams } from '../utils/redirect_with_query_params';
import { CommonInfraProviders, CoreProviders } from './common_providers';
import { prepareMountElement } from './common_styles';
@ -28,14 +26,12 @@ export const renderApp = (
plugins: InfraClientStartDeps,
{ element, history, setHeaderActionMenu }: AppMountParameters
) => {
const apolloClient = createApolloClient(core.http.fetch);
const storage = new Storage(window.localStorage);
prepareMountElement(element);
ReactDOM.render(
<MetricsApp
apolloClient={apolloClient}
core={core}
history={history}
plugins={plugins}
@ -51,19 +47,17 @@ export const renderApp = (
};
const MetricsApp: React.FC<{
apolloClient: ApolloClient<{}>;
core: CoreStart;
history: History<unknown>;
plugins: InfraClientStartDeps;
setHeaderActionMenu: AppMountParameters['setHeaderActionMenu'];
storage: Storage;
}> = ({ apolloClient, core, history, plugins, setHeaderActionMenu, storage }) => {
}> = ({ core, history, plugins, setHeaderActionMenu, storage }) => {
const uiCapabilities = core.application.capabilities;
return (
<CoreProviders core={core} plugins={plugins}>
<CommonInfraProviders
apolloClient={apolloClient}
appName="Metrics UI"
setHeaderActionMenu={setHeaderActionMenu}
storage={storage}

View file

@ -5,12 +5,12 @@
*/
import { useCallback, useMemo } from 'react';
import { InfraSourceConfiguration } from '../../../common/http_api/source_api';
import { useIndicesConfigurationFormState } from './indices_configuration_form_state';
import { useLogColumnsConfigurationFormState } from './log_columns_configuration_form_state';
import { SourceConfiguration } from '../../utils/source_configuration';
export const useSourceConfigurationFormState = (configuration?: SourceConfiguration) => {
export const useSourceConfigurationFormState = (configuration?: InfraSourceConfiguration) => {
const indicesConfigurationFormState = useIndicesConfigurationFormState({
initialFormState: useMemo(
() =>

View file

@ -1,36 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import gql from 'graphql-tag';
import { sharedFragments } from '../../../common/graphql/shared';
import {
sourceConfigurationFieldsFragment,
sourceStatusFieldsFragment,
} from './source_fields_fragment.gql_query';
export const createSourceMutation = gql`
mutation CreateSourceConfigurationMutation(
$sourceId: ID!
$sourceProperties: UpdateSourceInput!
) {
createSource(id: $sourceId, sourceProperties: $sourceProperties) {
source {
...InfraSourceFields
configuration {
...SourceConfigurationFields
}
status {
...SourceStatusFields
}
}
}
}
${sharedFragments.InfraSourceFields}
${sourceConfigurationFieldsFragment}
${sourceStatusFieldsFragment}
`;

View file

@ -1,31 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import gql from 'graphql-tag';
import { sharedFragments } from '../../../common/graphql/shared';
import {
sourceConfigurationFieldsFragment,
sourceStatusFieldsFragment,
} from './source_fields_fragment.gql_query';
export const sourceQuery = gql`
query SourceQuery($sourceId: ID = "default") {
source(id: $sourceId) {
...InfraSourceFields
configuration {
...SourceConfigurationFields
}
status {
...SourceStatusFields
}
}
}
${sharedFragments.InfraSourceFields}
${sourceConfigurationFieldsFragment}
${sourceStatusFieldsFragment}
`;

View file

@ -8,20 +8,17 @@ import createContainer from 'constate';
import { useEffect, useMemo, useState } from 'react';
import {
CreateSourceConfigurationMutation,
SourceQuery,
UpdateSourceInput,
UpdateSourceMutation,
} from '../../graphql/types';
import { DependencyError, useApolloClient } from '../../utils/apollo_context';
InfraSavedSourceConfiguration,
InfraSource,
SourceResponse,
} from '../../../common/http_api/source_api';
import { useTrackedPromise } from '../../utils/use_tracked_promise';
import { createSourceMutation } from './create_source.gql_query';
import { sourceQuery } from './query_source.gql_query';
import { updateSourceMutation } from './update_source.gql_query';
import { useKibana } from '../../../../../../src/plugins/kibana_react/public';
type Source = SourceQuery.Query['source'];
export const pickIndexPattern = (source: Source | undefined, type: 'logs' | 'metrics' | 'both') => {
export const pickIndexPattern = (
source: InfraSource | undefined,
type: 'logs' | 'metrics' | 'both'
) => {
if (!source) {
return 'unknown-index';
}
@ -34,96 +31,79 @@ export const pickIndexPattern = (source: Source | undefined, type: 'logs' | 'met
return `${source.configuration.logAlias},${source.configuration.metricAlias}`;
};
const DEPENDENCY_ERROR_MESSAGE = 'Failed to load source: No fetch client available.';
export const useSource = ({ sourceId }: { sourceId: string }) => {
const apolloClient = useApolloClient();
const [source, setSource] = useState<Source | undefined>(undefined);
const kibana = useKibana();
const fetchService = kibana.services.http?.fetch;
const API_URL = `/api/metrics/source/${sourceId}`;
const [source, setSource] = useState<InfraSource | undefined>(undefined);
const [loadSourceRequest, loadSource] = useTrackedPromise(
{
cancelPreviousOn: 'resolution',
createPromise: async () => {
if (!apolloClient) {
throw new DependencyError('Failed to load source: No apollo client available.');
if (!fetchService) {
throw new Error(DEPENDENCY_ERROR_MESSAGE);
}
return await apolloClient.query<SourceQuery.Query, SourceQuery.Variables>({
fetchPolicy: 'no-cache',
query: sourceQuery,
variables: {
sourceId,
},
return await fetchService<SourceResponse>(`${API_URL}/metrics`, {
method: 'GET',
});
},
onResolve: (response) => {
setSource(response.data.source);
setSource(response.source);
},
},
[apolloClient, sourceId]
[fetchService, sourceId]
);
const [createSourceConfigurationRequest, createSourceConfiguration] = useTrackedPromise(
{
createPromise: async (sourceProperties: UpdateSourceInput) => {
if (!apolloClient) {
throw new DependencyError(
'Failed to create source configuration: No apollo client available.'
);
createPromise: async (sourceProperties: InfraSavedSourceConfiguration) => {
if (!fetchService) {
throw new Error(DEPENDENCY_ERROR_MESSAGE);
}
return await apolloClient.mutate<
CreateSourceConfigurationMutation.Mutation,
CreateSourceConfigurationMutation.Variables
>({
mutation: createSourceMutation,
fetchPolicy: 'no-cache',
variables: {
sourceId,
sourceProperties,
},
return await fetchService<SourceResponse>(API_URL, {
method: 'PATCH',
body: JSON.stringify(sourceProperties),
});
},
onResolve: (response) => {
if (response.data) {
setSource(response.data.createSource.source);
if (response) {
setSource(response.source);
}
},
},
[apolloClient, sourceId]
[fetchService, sourceId]
);
const [updateSourceConfigurationRequest, updateSourceConfiguration] = useTrackedPromise(
{
createPromise: async (sourceProperties: UpdateSourceInput) => {
if (!apolloClient) {
throw new DependencyError(
'Failed to update source configuration: No apollo client available.'
);
createPromise: async (sourceProperties: InfraSavedSourceConfiguration) => {
if (!fetchService) {
throw new Error(DEPENDENCY_ERROR_MESSAGE);
}
return await apolloClient.mutate<
UpdateSourceMutation.Mutation,
UpdateSourceMutation.Variables
>({
mutation: updateSourceMutation,
fetchPolicy: 'no-cache',
variables: {
sourceId,
sourceProperties,
},
return await fetchService<SourceResponse>(API_URL, {
method: 'PATCH',
body: JSON.stringify(sourceProperties),
});
},
onResolve: (response) => {
if (response.data) {
setSource(response.data.updateSource.source);
if (response) {
setSource(response.source);
}
},
},
[apolloClient, sourceId]
[fetchService, sourceId]
);
const createDerivedIndexPattern = (type: 'logs' | 'metrics' | 'both') => {
return {
fields: source ? source.status.indexFields : [],
fields: source?.status ? source.status.indexFields : [],
title: pickIndexPattern(source, type),
};
};

View file

@ -1,58 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import gql from 'graphql-tag';
export const sourceConfigurationFieldsFragment = gql`
fragment SourceConfigurationFields on InfraSourceConfiguration {
name
description
logAlias
metricAlias
inventoryDefaultView
metricsExplorerDefaultView
fields {
container
host
message
pod
tiebreaker
timestamp
}
logColumns {
... on InfraSourceTimestampLogColumn {
timestampColumn {
id
}
}
... on InfraSourceMessageLogColumn {
messageColumn {
id
}
}
... on InfraSourceFieldLogColumn {
fieldColumn {
id
field
}
}
}
}
`;
export const sourceStatusFieldsFragment = gql`
fragment SourceStatusFields on InfraSourceStatus {
indexFields {
name
type
searchable
aggregatable
displayable
}
logIndicesExist
metricIndicesExist
}
`;

View file

@ -1,33 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import gql from 'graphql-tag';
import { sharedFragments } from '../../../common/graphql/shared';
import {
sourceConfigurationFieldsFragment,
sourceStatusFieldsFragment,
} from './source_fields_fragment.gql_query';
export const updateSourceMutation = gql`
mutation UpdateSourceMutation($sourceId: ID = "default", $sourceProperties: UpdateSourceInput!) {
updateSource(id: $sourceId, sourceProperties: $sourceProperties) {
source {
...InfraSourceFields
configuration {
...SourceConfigurationFields
}
status {
...SourceStatusFields
}
}
}
}
${sharedFragments.InfraSourceFields}
${sourceConfigurationFieldsFragment}
${sourceStatusFieldsFragment}
`;

View file

@ -72,7 +72,7 @@ export const useSourceViaHttp = ({
const createDerivedIndexPattern = useCallback(
(indexType: 'logs' | 'metrics' | 'both' = type) => {
return {
fields: response?.source ? response.status.indexFields : [],
fields: response?.source.status ? response.source.status.indexFields : [],
title: pickIndexPattern(response?.source, indexType),
};
},
@ -80,7 +80,7 @@ export const useSourceViaHttp = ({
);
const source = useMemo(() => {
return response ? { ...response.source, status: response.status } : null;
return response ? response.source : null;
}, [response]);
return {

View file

@ -7,14 +7,17 @@
import React, { useContext } from 'react';
import { IIndexPattern } from 'src/plugins/data/public';
import { SourceQuery, UpdateSourceInput } from '../../graphql/types';
import {
InfraSavedSourceConfiguration,
InfraSourceConfiguration,
} from '../../../common/http_api/source_api';
import { RendererFunction } from '../../utils/typed_react';
import { Source } from '../source';
interface WithSourceProps {
children: RendererFunction<{
configuration?: SourceQuery.Query['source']['configuration'];
create: (sourceProperties: UpdateSourceInput) => Promise<any> | undefined;
configuration?: InfraSourceConfiguration;
create: (sourceProperties: InfraSavedSourceConfiguration) => Promise<any> | undefined;
createDerivedIndexPattern: (type: 'logs' | 'metrics' | 'both') => IIndexPattern;
exists?: boolean;
hasFailed: boolean;
@ -25,7 +28,7 @@ interface WithSourceProps {
metricAlias?: string;
metricIndicesExist?: boolean;
sourceId: string;
update: (sourceProperties: UpdateSourceInput) => Promise<any> | undefined;
update: (sourceProperties: InfraSavedSourceConfiguration) => Promise<any> | undefined;
version?: string;
}>;
}

File diff suppressed because it is too large Load diff

View file

@ -1,786 +0,0 @@
import { SnapshotMetricType } from '../../common/inventory_models/types';
/* tslint:disable */
// ====================================================
// START: Typescript template
// ====================================================
// ====================================================
// Types
// ====================================================
export interface Query {
/** Get an infrastructure data source by id.The resolution order for the source configuration attributes is as followswith the first defined value winning:1. The attributes of the saved object with the given 'id'.2. The attributes defined in the static Kibana configuration key'xpack.infra.sources.default'.3. The hard-coded default values.As a consequence, querying a source that doesn't exist doesn't error out,but returns the configured or hardcoded defaults. */
source: InfraSource;
/** Get a list of all infrastructure data sources */
allSources: InfraSource[];
}
/** A source of infrastructure data */
export interface InfraSource {
/** The id of the source */
id: string;
/** The version number the source configuration was last persisted with */
version?: string | null;
/** The timestamp the source configuration was last persisted at */
updatedAt?: number | null;
/** The origin of the source (one of 'fallback', 'internal', 'stored') */
origin: string;
/** The raw configuration of the source */
configuration: InfraSourceConfiguration;
/** The status of the source */
status: InfraSourceStatus;
/** A snapshot of nodes */
snapshot?: InfraSnapshotResponse | null;
metrics: InfraMetricData[];
}
/** A set of configuration options for an infrastructure data source */
export interface InfraSourceConfiguration {
/** The name of the data source */
name: string;
/** A description of the data source */
description: string;
/** The alias to read metric data from */
metricAlias: string;
/** The alias to read log data from */
logAlias: string;
/** The field mapping to use for this source */
fields: InfraSourceFields;
/** Default view for inventory */
inventoryDefaultView: string;
/** Default view for Metrics Explorer */
metricsExplorerDefaultView?: string | null;
/** The columns to use for log display */
logColumns: InfraSourceLogColumn[];
}
/** A mapping of semantic fields to their document counterparts */
export interface InfraSourceFields {
/** The field to identify a container by */
container: string;
/** The fields to identify a host by */
host: string;
/** The fields to use as the log message */
message: string[];
/** The field to identify a pod by */
pod: string;
/** The field to use as a tiebreaker for log events that have identical timestamps */
tiebreaker: string;
/** The field to use as a timestamp for metrics and logs */
timestamp: string;
}
/** The built-in timestamp log column */
export interface InfraSourceTimestampLogColumn {
timestampColumn: InfraSourceTimestampLogColumnAttributes;
}
export interface InfraSourceTimestampLogColumnAttributes {
/** A unique id for the column */
id: string;
}
/** The built-in message log column */
export interface InfraSourceMessageLogColumn {
messageColumn: InfraSourceMessageLogColumnAttributes;
}
export interface InfraSourceMessageLogColumnAttributes {
/** A unique id for the column */
id: string;
}
/** A log column containing a field value */
export interface InfraSourceFieldLogColumn {
fieldColumn: InfraSourceFieldLogColumnAttributes;
}
export interface InfraSourceFieldLogColumnAttributes {
/** A unique id for the column */
id: string;
/** The field name this column refers to */
field: string;
}
/** The status of an infrastructure data source */
export interface InfraSourceStatus {
/** Whether the configured metric alias exists */
metricAliasExists: boolean;
/** Whether the configured log alias exists */
logAliasExists: boolean;
/** Whether the configured alias or wildcard pattern resolve to any metric indices */
metricIndicesExist: boolean;
/** Whether the configured alias or wildcard pattern resolve to any log indices */
logIndicesExist: boolean;
/** The list of indices in the metric alias */
metricIndices: string[];
/** The list of indices in the log alias */
logIndices: string[];
/** The list of fields defined in the index mappings */
indexFields: InfraIndexField[];
}
/** A descriptor of a field in an index */
export interface InfraIndexField {
/** The name of the field */
name: string;
/** The type of the field's values as recognized by Kibana */
type: string;
/** Whether the field's values can be efficiently searched for */
searchable: boolean;
/** Whether the field's values can be aggregated */
aggregatable: boolean;
/** Whether the field should be displayed based on event.module and a ECS allowed list */
displayable: boolean;
}
export interface InfraSnapshotResponse {
/** Nodes of type host, container or pod grouped by 0, 1 or 2 terms */
nodes: InfraSnapshotNode[];
}
export interface InfraSnapshotNode {
path: InfraSnapshotNodePath[];
metric: InfraSnapshotNodeMetric;
}
export interface InfraSnapshotNodePath {
value: string;
label: string;
ip?: string | null;
}
export interface InfraSnapshotNodeMetric {
name: SnapshotMetricType;
value?: number | null;
avg?: number | null;
max?: number | null;
}
export interface InfraMetricData {
id?: InfraMetric | null;
series: InfraDataSeries[];
}
export interface InfraDataSeries {
id: string;
label: string;
data: InfraDataPoint[];
}
export interface InfraDataPoint {
timestamp: number;
value?: number | null;
}
export interface Mutation {
/** Create a new source of infrastructure data */
createSource: UpdateSourceResult;
/** Modify an existing source */
updateSource: UpdateSourceResult;
/** Delete a source of infrastructure data */
deleteSource: DeleteSourceResult;
}
/** The result of a successful source update */
export interface UpdateSourceResult {
/** The source that was updated */
source: InfraSource;
}
/** The result of a source deletion operations */
export interface DeleteSourceResult {
/** The id of the source that was deleted */
id: string;
}
// ====================================================
// InputTypes
// ====================================================
export interface InfraTimerangeInput {
/** The interval string to use for last bucket. The format is '{value}{unit}'. For example '5m' would return the metrics for the last 5 minutes of the timespan. */
interval: string;
/** The end of the timerange */
to: number;
/** The beginning of the timerange */
from: number;
}
export interface InfraSnapshotGroupbyInput {
/** The label to use in the results for the group by for the terms group by */
label?: string | null;
/** The field to group by from a terms aggregation, this is ignored by the filter type */
field?: string | null;
}
export interface InfraSnapshotMetricInput {
/** The type of metric */
type: InfraSnapshotMetricType;
}
export interface InfraNodeIdsInput {
nodeId: string;
cloudId?: string | null;
}
/** The properties to update the source with */
export interface UpdateSourceInput {
/** The name of the data source */
name?: string | null;
/** A description of the data source */
description?: string | null;
/** The alias to read metric data from */
metricAlias?: string | null;
/** The alias to read log data from */
logAlias?: string | null;
/** The field mapping to use for this source */
fields?: UpdateSourceFieldsInput | null;
/** Name of default inventory view */
inventoryDefaultView?: string | null;
/** Default view for Metrics Explorer */
metricsExplorerDefaultView?: string | null;
/** The log columns to display for this source */
logColumns?: UpdateSourceLogColumnInput[] | null;
}
/** The mapping of semantic fields of the source to be created */
export interface UpdateSourceFieldsInput {
/** The field to identify a container by */
container?: string | null;
/** The fields to identify a host by */
host?: string | null;
/** The field to identify a pod by */
pod?: string | null;
/** The field to use as a tiebreaker for log events that have identical timestamps */
tiebreaker?: string | null;
/** The field to use as a timestamp for metrics and logs */
timestamp?: string | null;
}
/** One of the log column types to display for this source */
export interface UpdateSourceLogColumnInput {
/** A custom field log column */
fieldColumn?: UpdateSourceFieldLogColumnInput | null;
/** A built-in message log column */
messageColumn?: UpdateSourceMessageLogColumnInput | null;
/** A built-in timestamp log column */
timestampColumn?: UpdateSourceTimestampLogColumnInput | null;
}
export interface UpdateSourceFieldLogColumnInput {
id: string;
field: string;
}
export interface UpdateSourceMessageLogColumnInput {
id: string;
}
export interface UpdateSourceTimestampLogColumnInput {
id: string;
}
// ====================================================
// Arguments
// ====================================================
export interface SourceQueryArgs {
/** The id of the source */
id: string;
}
export interface SnapshotInfraSourceArgs {
timerange: InfraTimerangeInput;
filterQuery?: string | null;
}
export interface MetricsInfraSourceArgs {
nodeIds: InfraNodeIdsInput;
nodeType: InfraNodeType;
timerange: InfraTimerangeInput;
metrics: InfraMetric[];
}
export interface IndexFieldsInfraSourceStatusArgs {
indexType?: InfraIndexType | null;
}
export interface NodesInfraSnapshotResponseArgs {
type: InfraNodeType;
groupBy: InfraSnapshotGroupbyInput[];
metric: InfraSnapshotMetricInput;
}
export interface CreateSourceMutationArgs {
/** The id of the source */
id: string;
sourceProperties: UpdateSourceInput;
}
export interface UpdateSourceMutationArgs {
/** The id of the source */
id: string;
/** The properties to update the source with */
sourceProperties: UpdateSourceInput;
}
export interface DeleteSourceMutationArgs {
/** The id of the source */
id: string;
}
// ====================================================
// Enums
// ====================================================
export enum InfraIndexType {
ANY = 'ANY',
LOGS = 'LOGS',
METRICS = 'METRICS',
}
export enum InfraNodeType {
pod = 'pod',
container = 'container',
host = 'host',
awsEC2 = 'awsEC2',
awsS3 = 'awsS3',
awsRDS = 'awsRDS',
awsSQS = 'awsSQS',
}
export enum InfraSnapshotMetricType {
count = 'count',
cpu = 'cpu',
load = 'load',
memory = 'memory',
tx = 'tx',
rx = 'rx',
logRate = 'logRate',
diskIOReadBytes = 'diskIOReadBytes',
diskIOWriteBytes = 'diskIOWriteBytes',
s3TotalRequests = 's3TotalRequests',
s3NumberOfObjects = 's3NumberOfObjects',
s3BucketSize = 's3BucketSize',
s3DownloadBytes = 's3DownloadBytes',
s3UploadBytes = 's3UploadBytes',
rdsConnections = 'rdsConnections',
rdsQueriesExecuted = 'rdsQueriesExecuted',
rdsActiveTransactions = 'rdsActiveTransactions',
rdsLatency = 'rdsLatency',
sqsMessagesVisible = 'sqsMessagesVisible',
sqsMessagesDelayed = 'sqsMessagesDelayed',
sqsMessagesSent = 'sqsMessagesSent',
sqsMessagesEmpty = 'sqsMessagesEmpty',
sqsOldestMessage = 'sqsOldestMessage',
}
export enum InfraMetric {
hostSystemOverview = 'hostSystemOverview',
hostCpuUsage = 'hostCpuUsage',
hostFilesystem = 'hostFilesystem',
hostK8sOverview = 'hostK8sOverview',
hostK8sCpuCap = 'hostK8sCpuCap',
hostK8sDiskCap = 'hostK8sDiskCap',
hostK8sMemoryCap = 'hostK8sMemoryCap',
hostK8sPodCap = 'hostK8sPodCap',
hostLoad = 'hostLoad',
hostMemoryUsage = 'hostMemoryUsage',
hostNetworkTraffic = 'hostNetworkTraffic',
hostDockerOverview = 'hostDockerOverview',
hostDockerInfo = 'hostDockerInfo',
hostDockerTop5ByCpu = 'hostDockerTop5ByCpu',
hostDockerTop5ByMemory = 'hostDockerTop5ByMemory',
podOverview = 'podOverview',
podCpuUsage = 'podCpuUsage',
podMemoryUsage = 'podMemoryUsage',
podLogUsage = 'podLogUsage',
podNetworkTraffic = 'podNetworkTraffic',
containerOverview = 'containerOverview',
containerCpuKernel = 'containerCpuKernel',
containerCpuUsage = 'containerCpuUsage',
containerDiskIOOps = 'containerDiskIOOps',
containerDiskIOBytes = 'containerDiskIOBytes',
containerMemory = 'containerMemory',
containerNetworkTraffic = 'containerNetworkTraffic',
nginxHits = 'nginxHits',
nginxRequestRate = 'nginxRequestRate',
nginxActiveConnections = 'nginxActiveConnections',
nginxRequestsPerConnection = 'nginxRequestsPerConnection',
awsOverview = 'awsOverview',
awsCpuUtilization = 'awsCpuUtilization',
awsNetworkBytes = 'awsNetworkBytes',
awsNetworkPackets = 'awsNetworkPackets',
awsDiskioBytes = 'awsDiskioBytes',
awsDiskioOps = 'awsDiskioOps',
awsEC2CpuUtilization = 'awsEC2CpuUtilization',
awsEC2DiskIOBytes = 'awsEC2DiskIOBytes',
awsEC2NetworkTraffic = 'awsEC2NetworkTraffic',
awsS3TotalRequests = 'awsS3TotalRequests',
awsS3NumberOfObjects = 'awsS3NumberOfObjects',
awsS3BucketSize = 'awsS3BucketSize',
awsS3DownloadBytes = 'awsS3DownloadBytes',
awsS3UploadBytes = 'awsS3UploadBytes',
awsRDSCpuTotal = 'awsRDSCpuTotal',
awsRDSConnections = 'awsRDSConnections',
awsRDSQueriesExecuted = 'awsRDSQueriesExecuted',
awsRDSActiveTransactions = 'awsRDSActiveTransactions',
awsRDSLatency = 'awsRDSLatency',
awsSQSMessagesVisible = 'awsSQSMessagesVisible',
awsSQSMessagesDelayed = 'awsSQSMessagesDelayed',
awsSQSMessagesSent = 'awsSQSMessagesSent',
awsSQSMessagesEmpty = 'awsSQSMessagesEmpty',
awsSQSOldestMessage = 'awsSQSOldestMessage',
custom = 'custom',
}
// ====================================================
// Unions
// ====================================================
/** All known log column types */
export type InfraSourceLogColumn =
| InfraSourceTimestampLogColumn
| InfraSourceMessageLogColumn
| InfraSourceFieldLogColumn;
// ====================================================
// END: Typescript template
// ====================================================
// ====================================================
// Documents
// ====================================================
export namespace MetricsQuery {
export type Variables = {
sourceId: string;
timerange: InfraTimerangeInput;
metrics: InfraMetric[];
nodeId: string;
cloudId?: string | null;
nodeType: InfraNodeType;
};
export type Query = {
__typename?: 'Query';
source: Source;
};
export type Source = {
__typename?: 'InfraSource';
id: string;
metrics: Metrics[];
};
export type Metrics = {
__typename?: 'InfraMetricData';
id?: InfraMetric | null;
series: Series[];
};
export type Series = {
__typename?: 'InfraDataSeries';
id: string;
label: string;
data: Data[];
};
export type Data = {
__typename?: 'InfraDataPoint';
timestamp: number;
value?: number | null;
};
}
export namespace CreateSourceConfigurationMutation {
export type Variables = {
sourceId: string;
sourceProperties: UpdateSourceInput;
};
export type Mutation = {
__typename?: 'Mutation';
createSource: CreateSource;
};
export type CreateSource = {
__typename?: 'UpdateSourceResult';
source: Source;
};
export type Source = {
__typename?: 'InfraSource';
configuration: Configuration;
status: Status;
} & InfraSourceFields.Fragment;
export type Configuration = SourceConfigurationFields.Fragment;
export type Status = SourceStatusFields.Fragment;
}
export namespace SourceQuery {
export type Variables = {
sourceId?: string | null;
};
export type Query = {
__typename?: 'Query';
source: Source;
};
export type Source = {
__typename?: 'InfraSource';
configuration: Configuration;
status: Status;
} & InfraSourceFields.Fragment;
export type Configuration = SourceConfigurationFields.Fragment;
export type Status = SourceStatusFields.Fragment;
}
export namespace UpdateSourceMutation {
export type Variables = {
sourceId?: string | null;
sourceProperties: UpdateSourceInput;
};
export type Mutation = {
__typename?: 'Mutation';
updateSource: UpdateSource;
};
export type UpdateSource = {
__typename?: 'UpdateSourceResult';
source: Source;
};
export type Source = {
__typename?: 'InfraSource';
configuration: Configuration;
status: Status;
} & InfraSourceFields.Fragment;
export type Configuration = SourceConfigurationFields.Fragment;
export type Status = SourceStatusFields.Fragment;
}
export namespace WaffleNodesQuery {
export type Variables = {
sourceId: string;
timerange: InfraTimerangeInput;
filterQuery?: string | null;
metric: InfraSnapshotMetricInput;
groupBy: InfraSnapshotGroupbyInput[];
type: InfraNodeType;
};
export type Query = {
__typename?: 'Query';
source: Source;
};
export type Source = {
__typename?: 'InfraSource';
id: string;
snapshot?: Snapshot | null;
};
export type Snapshot = {
__typename?: 'InfraSnapshotResponse';
nodes: Nodes[];
};
export type Nodes = {
__typename?: 'InfraSnapshotNode';
path: Path[];
metric: Metric;
};
export type Path = {
__typename?: 'InfraSnapshotNodePath';
value: string;
label: string;
ip?: string | null;
};
export type Metric = {
__typename?: 'InfraSnapshotNodeMetric';
name: InfraSnapshotMetricType;
value?: number | null;
avg?: number | null;
max?: number | null;
};
}
export namespace SourceConfigurationFields {
export type Fragment = {
__typename?: 'InfraSourceConfiguration';
name: string;
description: string;
logAlias: string;
metricAlias: string;
fields: Fields;
inventoryDefaultView: string;
metricsExplorerDefaultView: string;
logColumns: LogColumns[];
};
export type Fields = {
__typename?: 'InfraSourceFields';
container: string;
host: string;
message: string[];
pod: string;
tiebreaker: string;
timestamp: string;
};
export type LogColumns =
| InfraSourceTimestampLogColumnInlineFragment
| InfraSourceMessageLogColumnInlineFragment
| InfraSourceFieldLogColumnInlineFragment;
export type InfraSourceTimestampLogColumnInlineFragment = {
__typename?: 'InfraSourceTimestampLogColumn';
timestampColumn: TimestampColumn;
};
export type TimestampColumn = {
__typename?: 'InfraSourceTimestampLogColumnAttributes';
id: string;
};
export type InfraSourceMessageLogColumnInlineFragment = {
__typename?: 'InfraSourceMessageLogColumn';
messageColumn: MessageColumn;
};
export type MessageColumn = {
__typename?: 'InfraSourceMessageLogColumnAttributes';
id: string;
};
export type InfraSourceFieldLogColumnInlineFragment = {
__typename?: 'InfraSourceFieldLogColumn';
fieldColumn: FieldColumn;
};
export type FieldColumn = {
__typename?: 'InfraSourceFieldLogColumnAttributes';
id: string;
field: string;
};
}
export namespace SourceStatusFields {
export type Fragment = {
__typename?: 'InfraSourceStatus';
indexFields: IndexFields[];
logIndicesExist: boolean;
metricIndicesExist: boolean;
};
export type IndexFields = {
__typename?: 'InfraIndexField';
name: string;
type: string;
searchable: boolean;
aggregatable: boolean;
displayable: boolean;
};
}
export namespace InfraTimeKeyFields {
export type Fragment = {
__typename?: 'InfraTimeKey';
time: number;
tiebreaker: number;
};
}
/**
 * Fragment type for the identifying fields of an infra source (`InfraSource`).
 * `version`/`updatedAt` are nullable: absent for non-persisted (fallback)
 * configurations — TODO confirm against the server-side source model.
 */
export namespace InfraSourceFields {
  export type Fragment = {
    __typename?: 'InfraSource';
    id: string;
    version?: string | null;
    updatedAt?: number | null;
    origin: string;
  };
}

View file

@ -13,7 +13,7 @@ import {
SnapshotNodeMetric,
SnapshotNodePath,
} from '../../common/http_api/snapshot_api';
import { SourceQuery } from '../graphql/types';
import { InfraSourceConfigurationFields } from '../../common/http_api/source_api';
import { WaffleSortOption } from '../pages/metrics/inventory_view/hooks/use_waffle_options';
export interface InfraWaffleMapNode {
@ -123,7 +123,7 @@ export enum InfraWaffleMapRuleOperator {
}
export interface InfraWaffleMapOptions {
fields?: SourceQuery.Query['source']['configuration']['fields'] | null;
fields?: InfraSourceConfigurationFields | null;
formatter: InfraFormatterType;
formatTemplate: string;
metric: SnapshotMetricInput;

View file

@ -11,6 +11,7 @@ import { Route, RouteComponentProps, Switch } from 'react-router-dom';
import { EuiErrorBoundary, EuiFlexItem, EuiFlexGroup, EuiButtonEmpty } from '@elastic/eui';
import { IIndexPattern } from 'src/plugins/data/common';
import { InfraSourceConfiguration } from '../../../common/http_api/source_api';
import { DocumentTitle } from '../../components/document_title';
import { HelpCenterContent } from '../../components/help_center_content';
import { RoutedTabs } from '../../components/navigation/routed_tabs';
@ -36,7 +37,6 @@ import { WaffleFiltersProvider } from './inventory_view/hooks/use_waffle_filters
import { InventoryAlertDropdown } from '../../alerting/inventory/components/alert_dropdown';
import { MetricsAlertDropdown } from '../../alerting/metric_threshold/components/alert_dropdown';
import { SavedView } from '../../containers/saved_view/saved_view';
import { SourceConfigurationFields } from '../../graphql/types';
import { AlertPrefillProvider } from '../../alerting/use_alert_prefill';
import { InfraMLCapabilitiesProvider } from '../../containers/ml/infra_ml_capabilities';
import { AnomalyDetectionFlyout } from './inventory_view/components/ml/anomaly_detection/anomoly_detection_flyout';
@ -189,7 +189,7 @@ export const InfrastructurePage = ({ match }: RouteComponentProps) => {
};
const PageContent = (props: {
configuration: SourceConfigurationFields.Fragment;
configuration: InfraSourceConfiguration;
createDerivedIndexPattern: (type: 'logs' | 'metrics' | 'both') => IIndexPattern;
}) => {
const { createDerivedIndexPattern, configuration } = props;

View file

@ -8,14 +8,16 @@ import { ReactText } from 'react';
import Color from 'color';
import { get, first, last, min, max } from 'lodash';
import { createFormatter } from '../../../../../common/formatters';
import { InfraDataSeries } from '../../../../graphql/types';
import {
InventoryVisTypeRT,
InventoryFormatterType,
InventoryVisType,
} from '../../../../../common/inventory_models/types';
import { SeriesOverrides } from '../types';
import { NodeDetailsMetricData } from '../../../../../common/http_api/node_details_api';
import {
NodeDetailsDataSeries,
NodeDetailsMetricData,
} from '../../../../../common/http_api/node_details_api';
/**
* Returns a formatter
@ -28,7 +30,7 @@ export const getFormatter = (
/**
 * Does a series have fewer than two data points?
 */
export const seriesHasLessThen2DataPoints = (series: InfraDataSeries): boolean => {
export const seriesHasLessThen2DataPoints = (series: NodeDetailsDataSeries): boolean => {
return series.data.length < 2;
};

View file

@ -4,12 +4,10 @@
* you may not use this file except in compliance with the Elastic License.
*/
// import { GraphQLFormattedError } from 'graphql';
import React from 'react';
import { i18n } from '@kbn/i18n';
import { IHttpFetchError } from 'src/core/public';
import { InvalidNodeError } from './invalid_node';
// import { InfraMetricsErrorCodes } from '../../../../common/errors';
import { DocumentTitle } from '../../../../components/document_title';
import { ErrorPageBody } from '../../../error';

View file

@ -13,14 +13,14 @@ import {
BarSeriesStyle,
AreaSeriesStyle,
} from '@elastic/charts';
import { InfraDataSeries } from '../../../../graphql/types';
import { NodeDetailsDataSeries } from '../../../../../common/http_api/node_details_api';
import { InventoryVisType } from '../../../../../common/inventory_models/types';
interface Props {
id: string;
name: string;
color: string | null;
series: InfraDataSeries;
series: NodeDetailsDataSeries;
type: InventoryVisType;
stack: boolean | undefined;
}
@ -59,7 +59,7 @@ export const AreaChart = ({ id, color, series, name, type, stack }: Props) => {
);
};
export const BarChart = ({ id, color, series, name, type, stack }: Props) => {
export const BarChart = ({ id, color, series, name, stack }: Props) => {
const style: RecursivePartial<BarSeriesStyle> = {
rectBorder: {
stroke: color || void 0,

View file

@ -18,6 +18,7 @@ import {
} from '@elastic/charts';
import { first, last } from 'lodash';
import moment from 'moment';
import { InfraSourceConfiguration } from '../../../../../common/http_api/source_api';
import { MetricsExplorerSeries } from '../../../../../common/http_api/metrics_explorer';
import {
MetricsExplorerOptions,
@ -29,7 +30,6 @@ import { euiStyled } from '../../../../../../../../src/plugins/kibana_react/comm
import { createFormatterForMetric } from './helpers/create_formatter_for_metric';
import { MetricExplorerSeriesChart } from './series_chart';
import { MetricsExplorerChartContextMenu } from './chart_context_menu';
import { SourceQuery } from '../../../../graphql/types';
import { MetricsExplorerEmptyChart } from './empty_chart';
import { MetricsExplorerNoMetrics } from './no_metrics';
import { getChartTheme } from './helpers/get_chart_theme';
@ -46,7 +46,7 @@ interface Props {
options: MetricsExplorerOptions;
chartOptions: MetricsExplorerChartOptions;
series: MetricsExplorerSeries;
source: SourceQuery.Query['source']['configuration'] | undefined;
source: InfraSourceConfiguration | undefined;
timeRange: MetricsExplorerTimeOptions;
onTimeChange: (start: string, end: string) => void;
}

View file

@ -14,6 +14,7 @@ import {
} from '@elastic/eui';
import DateMath from '@elastic/datemath';
import { Capabilities } from 'src/core/public';
import { InfraSourceConfiguration } from '../../../../../common/http_api/source_api';
import { AlertFlyout } from '../../../../alerting/metric_threshold/components/alert_flyout';
import { MetricsExplorerSeries } from '../../../../../common/http_api/metrics_explorer';
import {
@ -23,7 +24,6 @@ import {
} from '../hooks/use_metrics_explorer_options';
import { createTSVBLink } from './helpers/create_tsvb_link';
import { getNodeDetailUrl } from '../../../link_to/redirect_to_node_detail';
import { SourceConfiguration } from '../../../../utils/source_configuration';
import { InventoryItemType } from '../../../../../common/inventory_models/types';
import { useLinkProps } from '../../../../hooks/use_link_props';
@ -31,14 +31,14 @@ export interface Props {
options: MetricsExplorerOptions;
onFilter?: (query: string) => void;
series: MetricsExplorerSeries;
source?: SourceConfiguration;
source?: InfraSourceConfiguration;
timeRange: MetricsExplorerTimeOptions;
uiCapabilities?: Capabilities;
chartOptions: MetricsExplorerChartOptions;
}
const fieldToNodeType = (
source: SourceConfiguration,
source: InfraSourceConfiguration,
groupBy: string | string[]
): InventoryItemType | undefined => {
const fields = Array.isArray(groupBy) ? groupBy : [groupBy];

View file

@ -8,6 +8,7 @@ import { EuiButton, EuiFlexGrid, EuiFlexItem, EuiText, EuiHorizontalRule } from
import { i18n } from '@kbn/i18n';
import { FormattedMessage } from '@kbn/i18n/react';
import React from 'react';
import { InfraSourceConfiguration } from '../../../../../common/http_api/source_api';
import { MetricsExplorerResponse } from '../../../../../common/http_api/metrics_explorer';
import {
MetricsExplorerOptions,
@ -17,7 +18,6 @@ import {
import { InfraLoadingPanel } from '../../../../components/loading';
import { NoData } from '../../../../components/empty_states/no_data';
import { MetricsExplorerChart } from './chart';
import { SourceQuery } from '../../../../graphql/types';
type StringOrNull = string | null;
@ -30,7 +30,7 @@ interface Props {
onFilter: (filter: string) => void;
onTimeChange: (start: string, end: string) => void;
data: MetricsExplorerResponse | null;
source: SourceQuery.Query['source']['configuration'] | undefined;
source: InfraSourceConfiguration | undefined;
timeRange: MetricsExplorerTimeOptions;
}
export const MetricsExplorerCharts = ({

View file

@ -7,6 +7,7 @@
import { encode } from 'rison-node';
import uuid from 'uuid';
import { set } from '@elastic/safer-lodash-set';
import { InfraSourceConfiguration } from '../../../../../../common/http_api/source_api';
import { colorTransformer, Color } from '../../../../../../common/color_palette';
import { MetricsExplorerSeries } from '../../../../../../common/http_api/metrics_explorer';
import {
@ -19,15 +20,14 @@ import {
} from '../../hooks/use_metrics_explorer_options';
import { metricToFormat } from './metric_to_format';
import { InfraFormatterType } from '../../../../../lib/lib';
import { SourceQuery } from '../../../../../graphql/types';
import { createMetricLabel } from './create_metric_label';
import { LinkDescriptor } from '../../../../../hooks/use_link_props';
/*
We've recently changed the default index pattern in Metrics UI from `metricbeat-*` to
We've recently changed the default index pattern in Metrics UI from `metricbeat-*` to
`metrics-*,metricbeat-*`. There is a bug in TSVB when there is an empty index in the pattern
the field dropdowns are not populated correctly. This index pattern is a temporary fix.
See: https://github.com/elastic/kibana/issues/73987
See: https://github.com/elastic/kibana/issues/73987
*/
const TSVB_WORKAROUND_INDEX_PATTERN = 'metric*';
@ -142,7 +142,7 @@ const createTSVBIndexPattern = (alias: string) => {
};
export const createTSVBLink = (
source: SourceQuery.Query['source']['configuration'] | undefined,
source: InfraSourceConfiguration | undefined,
options: MetricsExplorerOptions,
series: MetricsExplorerSeries,
timeRange: MetricsExplorerTimeOptions,

View file

@ -6,6 +6,7 @@
import { useState, useCallback, useContext } from 'react';
import { IIndexPattern } from 'src/plugins/data/public';
import { InfraSourceConfiguration } from '../../../../../common/http_api/source_api';
import {
MetricsExplorerMetric,
MetricsExplorerAggregation,
@ -17,7 +18,6 @@ import {
MetricsExplorerTimeOptions,
MetricsExplorerOptions,
} from './use_metrics_explorer_options';
import { SourceQuery } from '../../../../graphql/types';
export interface MetricExplorerViewState {
chartOptions: MetricsExplorerChartOptions;
@ -26,7 +26,7 @@ export interface MetricExplorerViewState {
}
export const useMetricsExplorerState = (
source: SourceQuery.Query['source']['configuration'],
source: InfraSourceConfiguration,
derivedIndexPattern: IIndexPattern,
shouldLoadImmediately = true
) => {

View file

@ -19,9 +19,9 @@ import {
createSeries,
} from '../../../../utils/fixtures/metrics_explorer';
import { MetricsExplorerOptions, MetricsExplorerTimeOptions } from './use_metrics_explorer_options';
import { SourceQuery } from '../../../../../common/graphql/types';
import { IIndexPattern } from '../../../../../../../../src/plugins/data/public';
import { HttpHandler } from 'kibana/public';
import { InfraSourceConfiguration } from '../../../../../common/http_api/source_api';
const mockedFetch = jest.fn();
@ -37,7 +37,7 @@ const renderUseMetricsExplorerDataHook = () => {
return renderHook(
(props: {
options: MetricsExplorerOptions;
source: SourceQuery.Query['source']['configuration'] | undefined;
source: InfraSourceConfiguration | undefined;
derivedIndexPattern: IIndexPattern;
timeRange: MetricsExplorerTimeOptions;
afterKey: string | null | Record<string, string | null>;

View file

@ -8,7 +8,7 @@ import DateMath from '@elastic/datemath';
import { isEqual } from 'lodash';
import { useEffect, useState, useCallback } from 'react';
import { IIndexPattern } from 'src/plugins/data/public';
import { SourceQuery } from '../../../../../common/graphql/types';
import { InfraSourceConfiguration } from '../../../../../common/http_api/source_api';
import {
MetricsExplorerResponse,
metricsExplorerResponseRT,
@ -24,7 +24,7 @@ function isSameOptions(current: MetricsExplorerOptions, next: MetricsExplorerOpt
export function useMetricsExplorerData(
options: MetricsExplorerOptions,
source: SourceQuery.Query['source']['configuration'] | undefined,
source: InfraSourceConfiguration | undefined,
derivedIndexPattern: IIndexPattern,
timerange: MetricsExplorerTimeOptions,
afterKey: string | null | Record<string, string | null>,

View file

@ -8,8 +8,8 @@ import { EuiErrorBoundary } from '@elastic/eui';
import { i18n } from '@kbn/i18n';
import React, { useEffect } from 'react';
import { IIndexPattern } from 'src/plugins/data/public';
import { InfraSourceConfiguration } from '../../../../common/http_api/source_api';
import { useTrackPageview } from '../../../../../observability/public';
import { SourceQuery } from '../../../../common/graphql/types';
import { DocumentTitle } from '../../../components/document_title';
import { NoData } from '../../../components/empty_states';
import { MetricsExplorerCharts } from './components/charts';
@ -18,7 +18,7 @@ import { useMetricsExplorerState } from './hooks/use_metric_explorer_state';
import { useSavedViewContext } from '../../../containers/saved_view/saved_view';
interface MetricsExplorerPageProps {
source: SourceQuery.Query['source']['configuration'];
source: InfraSourceConfiguration;
derivedIndexPattern: IIndexPattern;
}

View file

@ -1,86 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { InMemoryCache, IntrospectionFragmentMatcher } from 'apollo-cache-inmemory';
import ApolloClient from 'apollo-client';
import { ApolloLink } from 'apollo-link';
import { createHttpLink } from 'apollo-link-http';
import { withClientState } from 'apollo-link-state';
import { HttpFetchOptions, HttpHandler } from 'src/core/public';
import introspectionQueryResultData from '../graphql/introspection.json';
/**
 * Builds an Apollo client whose network transport is backed by
 * `core.http.fetch`.
 *
 * core.http.fetch isn't 100% compatible with the Fetch API that Apollo's http
 * link expects, so the fetch function handed to the link adapts the
 * differences (see inline comments).
 */
export const createApolloClient = (fetch: HttpHandler) => {
  const cache = new InMemoryCache({
    addTypename: false,
    fragmentMatcher: new IntrospectionFragmentMatcher({
      // @ts-expect-error apollo-cache-inmemory types don't match actual introspection data
      introspectionQueryResultData,
    }),
  });

  // NOTE(review): the previous implementation used
  // `new Promise(async (resolve, reject) => { try { fetch(...).then(...) } catch ... })`.
  // A rejection of the promise returned by `fetch` was never handled there —
  // the try/catch only covers synchronous throws — so a failed request could
  // leave the outer promise pending forever. Plain async/await propagates
  // both sync throws and rejections to the caller.
  const wrappedFetch = async (path: string, options: HttpFetchOptions) => {
    const res = await fetch(path, {
      ...options,
      // Set headers to undefined due to this bug: https://github.com/apollographql/apollo-link/issues/249,
      // Apollo will try to set a "content-type" header which will conflict with the "Content-Type" header that
      // core.http.fetch correctly sets.
      headers: undefined,
      asResponse: true,
    });

    if (!res.response) {
      throw new Error(`No response received for ${path}`);
    }

    // core.http.fetch will parse the Response and set a body before handing it back. As such .text() / .json()
    // will have already been called on the Response instance. However, Apollo will also want to call
    // .text() / .json() on the instance, as it expects the raw Response instance, rather than core's wrapper.
    // .text() / .json() can only be called once, and an Error will be thrown if those methods are accessed again.
    // This hacks around that by setting up a new .text() method that will restringify the JSON response we already have.
    // This does result in an extra stringify / parse cycle, which isn't ideal, but as we only have a few endpoints left using
    // GraphQL this shouldn't create excessive overhead.
    // Ref: https://github.com/apollographql/apollo-link/blob/master/packages/apollo-link-http/src/httpLink.ts#L134
    // and
    // https://github.com/apollographql/apollo-link/blob/master/packages/apollo-link-http-common/src/index.ts#L125
    return {
      ...res.response,
      text: () =>
        res.body
          ? Promise.resolve(JSON.stringify(res.body))
          : Promise.reject(new Error(`Response for ${path} had no parsed body`)),
    };
  };

  const HttpLink = createHttpLink({
    fetch: wrappedFetch,
    uri: `/api/infra/graphql`,
  });

  const graphQLOptions = {
    cache,
    link: ApolloLink.from([
      // Local (client-state) resolvers run before the HTTP link.
      withClientState({
        cache,
        resolvers: {},
      }),
      HttpLink,
    ]),
  };

  return new ApolloClient(graphQLOptions);
};

View file

@ -1,26 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { ApolloClient } from 'apollo-client';
import { createContext, useContext } from 'react';
/**
* This is a temporary provider and hook for use with hooks until react-apollo
* has upgraded to the new-style `createContext` api.
*/
/** React context carrying the shared Apollo client, if one has been provided. */
export const ApolloClientContext = createContext<ApolloClient<{}> | undefined>(undefined);

/** Reads the Apollo client from context; `undefined` when no provider is mounted. */
export const useApolloClient = () => useContext(ApolloClientContext);
/**
 * Error raised when a required dependency (e.g. the Apollo client) is absent
 * from the surrounding context.
 */
export class DependencyError extends Error {
  constructor(msg?: string) {
    super(msg);
    // Repair the prototype chain so `instanceof DependencyError` works even
    // when `extends Error` is downleveled by the compile target.
    Object.setPrototypeOf(this, new.target.prototype);
  }
}

View file

@ -4,14 +4,17 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { SourceConfigurationFields } from '../graphql/types';
import {
InfraSavedSourceConfigurationColumn,
InfraSavedSourceConfigurationFields,
InfraSourceConfigurationMessageColumn,
InfraSourceConfigurationTimestampColumn,
} from '../../common/http_api/source_api';
export type SourceConfiguration = SourceConfigurationFields.Fragment;
export type LogColumnConfiguration = SourceConfigurationFields.LogColumns;
export type FieldLogColumnConfiguration = SourceConfigurationFields.InfraSourceFieldLogColumnInlineFragment;
export type MessageLogColumnConfiguration = SourceConfigurationFields.InfraSourceMessageLogColumnInlineFragment;
export type TimestampLogColumnConfiguration = SourceConfigurationFields.InfraSourceTimestampLogColumnInlineFragment;
export type LogColumnConfiguration = InfraSavedSourceConfigurationColumn;
export type FieldLogColumnConfiguration = InfraSavedSourceConfigurationFields;
export type MessageLogColumnConfiguration = InfraSourceConfigurationMessageColumn;
export type TimestampLogColumnConfiguration = InfraSourceConfigurationTimestampColumn;
export const isFieldLogColumnConfiguration = (
logColumnConfiguration: LogColumnConfiguration

View file

@ -1,16 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { buildSchemaFromTypeDefinitions } from 'graphql-tools';
// eslint-disable-next-line @kbn/eslint/no-restricted-paths
import { schemas as serverSchemas } from '../server/graphql';
// Re-exposes the server-side schema fragments so the codegen tooling can
// combine client and server type definitions from one place.
export const schemas = [...serverSchemas];

// this default export is used to feed the combined types to the gql-gen tool
// which generates the corresponding typescript types
// eslint-disable-next-line import/no-default-export
export default buildSchemaFromTypeDefinitions(schemas);

View file

@ -1,75 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
require('../../../../src/setup_node_env');
const { join, resolve } = require('path');
// eslint-disable-next-line import/no-extraneous-dependencies, import/no-unresolved
const { generate } = require('graphql-code-generator');
// Globs (relative to the plugin root) locating the `.gql_query` documents
// that the codegen templates scan for operations and fragments.
const GRAPHQL_GLOBS = [
  join('public', 'containers', '**', '*.gql_query.ts{,x}'),
  join('public', 'store', '**', '*.gql_query.ts{,x}'),
  join('common', 'graphql', '**', '*.gql_query.ts{,x}'),
];

// Codegen configs live next to this script; outputs are written into the
// plugin's public/common/server trees.
const CLIENT_CONFIG_PATH = resolve(__dirname, 'gql_gen_client.json');
const SERVER_CONFIG_PATH = resolve(__dirname, 'gql_gen_server.json');
const OUTPUT_INTROSPECTION_PATH = resolve('public', 'graphql', 'introspection.json');
const OUTPUT_CLIENT_TYPES_PATH = resolve('public', 'graphql', 'types.ts');
const OUTPUT_COMMON_TYPES_PATH = resolve('common', 'graphql', 'types.ts');
const OUTPUT_SERVER_TYPES_PATH = resolve('server', 'graphql', 'types.ts');
// Combined client+server schema used as input for every generation pass.
const SCHEMA_PATH = resolve(__dirname, 'combined_schema.ts');
/**
 * Runs graphql-code-generator four times against the combined schema:
 * introspection JSON, client-side types, common (shared) types, and
 * server-side resolver types.
 */
async function main() {
  // 1. Introspection data (consumed by Apollo's fragment matcher).
  await generate(
    {
      args: GRAPHQL_GLOBS,
      config: SERVER_CONFIG_PATH,
      out: OUTPUT_INTROSPECTION_PATH,
      overwrite: true,
      schema: SCHEMA_PATH,
      template: 'graphql-codegen-introspection-template',
    },
    true
  );

  // 2. Client-side TypeScript types for the scanned operations.
  await generate(
    {
      args: GRAPHQL_GLOBS,
      config: CLIENT_CONFIG_PATH,
      out: OUTPUT_CLIENT_TYPES_PATH,
      overwrite: true,
      schema: SCHEMA_PATH,
      template: 'graphql-codegen-typescript-template',
    },
    true
  );

  // 3. The same types emitted into `common/` for code shared by both sides.
  await generate(
    {
      args: GRAPHQL_GLOBS,
      config: CLIENT_CONFIG_PATH,
      out: OUTPUT_COMMON_TYPES_PATH,
      overwrite: true,
      schema: SCHEMA_PATH,
      template: 'graphql-codegen-typescript-template',
    },
    true
  );

  // 4. Server resolver types (schema-driven, so no document globs).
  await generate(
    {
      args: [],
      config: SERVER_CONFIG_PATH,
      out: OUTPUT_SERVER_TYPES_PATH,
      overwrite: true,
      schema: SCHEMA_PATH,
      template: 'graphql-codegen-typescript-resolvers-template',
    },
    true
  );
}

// NOTE(review): a rejection from main() is unhandled here — consider
// `main().catch(...)` so the process exits non-zero on failure.
if (require.main === module) {
  main();
}

View file

@ -1,12 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { rootSchema } from '../../common/graphql/root/schema.gql';
import { sharedSchema } from '../../common/graphql/shared/schema.gql';
import { sourceStatusSchema } from './source_status/schema.gql';
import { sourcesSchema } from './sources/schema.gql';
export const schemas = [rootSchema, sharedSchema, sourcesSchema, sourceStatusSchema];

View file

@ -1,7 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
export { createSourceStatusResolvers } from './resolvers';

View file

@ -1,90 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { InfraIndexType, InfraSourceStatusResolvers } from '../../graphql/types';
import { InfraFieldsDomain } from '../../lib/domains/fields_domain';
import { InfraSourceStatus } from '../../lib/source_status';
import { ChildResolverOf, InfraResolverOf } from '../../utils/typed_resolvers';
import { QuerySourceResolver } from '../sources/resolvers';
/**
 * Resolver types for the fields of the GraphQL `InfraSourceStatus` type.
 * Each is declared as a child of `QuerySourceResolver`, so the resolver's
 * parent value is the already-resolved source.
 */
export type InfraSourceStatusMetricAliasExistsResolver = ChildResolverOf<
  InfraResolverOf<InfraSourceStatusResolvers.MetricAliasExistsResolver>,
  QuerySourceResolver
>;

export type InfraSourceStatusMetricIndicesExistResolver = ChildResolverOf<
  InfraResolverOf<InfraSourceStatusResolvers.MetricIndicesExistResolver>,
  QuerySourceResolver
>;

export type InfraSourceStatusMetricIndicesResolver = ChildResolverOf<
  InfraResolverOf<InfraSourceStatusResolvers.MetricIndicesResolver>,
  QuerySourceResolver
>;

export type InfraSourceStatusLogAliasExistsResolver = ChildResolverOf<
  InfraResolverOf<InfraSourceStatusResolvers.LogAliasExistsResolver>,
  QuerySourceResolver
>;

export type InfraSourceStatusLogIndicesExistResolver = ChildResolverOf<
  InfraResolverOf<InfraSourceStatusResolvers.LogIndicesExistResolver>,
  QuerySourceResolver
>;

export type InfraSourceStatusLogIndicesResolver = ChildResolverOf<
  InfraResolverOf<InfraSourceStatusResolvers.LogIndicesResolver>,
  QuerySourceResolver
>;

export type InfraSourceStatusIndexFieldsResolver = ChildResolverOf<
  InfraResolverOf<InfraSourceStatusResolvers.IndexFieldsResolver>,
  QuerySourceResolver
>;
/**
 * Builds the GraphQL resolvers for `InfraSourceStatus`, delegating all
 * checks to the injected source-status and fields libs. The parent value
 * (`source`) is the resolved source, so only its `id` is forwarded.
 */
export const createSourceStatusResolvers = (libs: {
  sourceStatus: InfraSourceStatus;
  fields: InfraFieldsDomain;
}): {
  InfraSourceStatus: {
    metricAliasExists: InfraSourceStatusMetricAliasExistsResolver;
    metricIndicesExist: InfraSourceStatusMetricIndicesExistResolver;
    metricIndices: InfraSourceStatusMetricIndicesResolver;
    logAliasExists: InfraSourceStatusLogAliasExistsResolver;
    logIndicesExist: InfraSourceStatusLogIndicesExistResolver;
    logIndices: InfraSourceStatusLogIndicesResolver;
    indexFields: InfraSourceStatusIndexFieldsResolver;
  };
} => ({
  InfraSourceStatus: {
    async metricAliasExists(source, args, { req }) {
      return await libs.sourceStatus.hasMetricAlias(req, source.id);
    },
    async metricIndicesExist(source, args, { req }) {
      return await libs.sourceStatus.hasMetricIndices(req, source.id);
    },
    async metricIndices(source, args, { req }) {
      return await libs.sourceStatus.getMetricIndexNames(req, source.id);
    },
    async logAliasExists(source, args, { req }) {
      return await libs.sourceStatus.hasLogAlias(req, source.id);
    },
    async logIndicesExist(source, args, { req }) {
      // getLogIndexStatus is not a boolean; any status other than 'missing'
      // counts as "indices exist".
      return (await libs.sourceStatus.getLogIndexStatus(req, source.id)) !== 'missing';
    },
    async logIndices(source, args, { req }) {
      return await libs.sourceStatus.getLogIndexNames(req, source.id);
    },
    async indexFields(source, args, { req }) {
      // Default to querying across all index types when no type is given.
      const fields = await libs.fields.getFields(
        req,
        source.id,
        args.indexType || InfraIndexType.ANY
      );
      return fields;
    },
  },
});

View file

@ -1,40 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import gql from 'graphql-tag';
/**
 * GraphQL schema for source status: per-field index metadata plus existence
 * checks for the configured log/metric aliases and indices. Extends the
 * `InfraSourceStatus` type declared in the sources schema.
 */
export const sourceStatusSchema = gql`
  "A descriptor of a field in an index"
  type InfraIndexField {
    "The name of the field"
    name: String!

    "The type of the field's values as recognized by Kibana"
    type: String!

    "Whether the field's values can be efficiently searched for"
    searchable: Boolean!

    "Whether the field's values can be aggregated"
    aggregatable: Boolean!

    "Whether the field should be displayed based on event.module and a ECS allowed list"
    displayable: Boolean!
  }

  extend type InfraSourceStatus {
    "Whether the configured metric alias exists"
    metricAliasExists: Boolean!

    "Whether the configured log alias exists"
    logAliasExists: Boolean!

    "Whether the configured alias or wildcard pattern resolve to any metric indices"
    metricIndicesExist: Boolean!

    "Whether the configured alias or wildcard pattern resolve to any log indices"
    logIndicesExist: Boolean!

    "The list of indices in the metric alias"
    metricIndices: [String!]!

    "The list of indices in the log alias"
    logIndices: [String!]!

    "The list of fields defined in the index mappings"
    indexFields(indexType: InfraIndexType = ANY): [InfraIndexField!]!
  }
`;

View file

@ -1,8 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
export { createSourcesResolvers } from './resolvers';
export { sourcesSchema } from './schema.gql';

View file

@ -1,202 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { UserInputError } from 'apollo-server-errors';
import { failure } from 'io-ts/lib/PathReporter';
import { identity } from 'fp-ts/lib/function';
import { pipe } from 'fp-ts/lib/pipeable';
import { fold } from 'fp-ts/lib/Either';
import {
InfraSourceLogColumn,
InfraSourceResolvers,
MutationResolvers,
QueryResolvers,
UpdateSourceLogColumnInput,
} from '../../graphql/types';
import { InfraSourceStatus } from '../../lib/source_status';
import {
InfraSources,
SavedSourceConfigurationFieldColumnRuntimeType,
SavedSourceConfigurationMessageColumnRuntimeType,
SavedSourceConfigurationTimestampColumnRuntimeType,
SavedSourceConfigurationColumnRuntimeType,
} from '../../lib/sources';
import {
ChildResolverOf,
InfraResolverOf,
InfraResolverWithFields,
ResultOf,
} from '../../utils/typed_resolvers';
// `Query.source` resolver with the listed result fields guaranteed present.
export type QuerySourceResolver = InfraResolverWithFields<
  QueryResolvers.SourceResolver,
  'id' | 'version' | 'updatedAt' | 'configuration'
>;

// `Query.allSources` resolver with the same field guarantees per element.
export type QueryAllSourcesResolver = InfraResolverWithFields<
  QueryResolvers.AllSourcesResolver,
  'id' | 'version' | 'updatedAt' | 'configuration'
>;

// `InfraSource.status` resolver; its parent is the result of `Query.source`.
export type InfraSourceStatusResolver = ChildResolverOf<
  InfraResolverOf<InfraSourceResolvers.StatusResolver<ResultOf<QuerySourceResolver>>>,
  QuerySourceResolver
>;

export type MutationCreateSourceResolver = InfraResolverOf<
  MutationResolvers.CreateSourceResolver<{
    source: ResultOf<QuerySourceResolver>;
  }>
>;

export type MutationDeleteSourceResolver = InfraResolverOf<MutationResolvers.DeleteSourceResolver>;

export type MutationUpdateSourceResolver = InfraResolverOf<
  MutationResolvers.UpdateSourceResolver<{
    source: ResultOf<QuerySourceResolver>;
  }>
>;

// Dependencies injected into `createSourcesResolvers`.
interface SourcesResolversDeps {
  sources: InfraSources;
  sourceStatus: InfraSourceStatus;
}
/**
 * Builds the GraphQL resolvers for source queries, mutations, and the
 * `InfraSourceLogColumn` union, delegating persistence to the injected
 * `InfraSources` lib (backed by saved objects).
 */
export const createSourcesResolvers = (
  libs: SourcesResolversDeps
): {
  Query: {
    source: QuerySourceResolver;
    allSources: QueryAllSourcesResolver;
  };
  InfraSource: {
    status: InfraSourceStatusResolver;
  };
  InfraSourceLogColumn: {
    __resolveType(
      logColumn: InfraSourceLogColumn
    ):
      | 'InfraSourceTimestampLogColumn'
      | 'InfraSourceMessageLogColumn'
      | 'InfraSourceFieldLogColumn'
      | null;
  };
  Mutation: {
    createSource: MutationCreateSourceResolver;
    deleteSource: MutationDeleteSourceResolver;
    updateSource: MutationUpdateSourceResolver;
  };
} => ({
  Query: {
    async source(root, args, { req }) {
      const requestedSourceConfiguration = await libs.sources.getSourceConfiguration(
        req.core.savedObjects.client,
        args.id
      );

      return requestedSourceConfiguration;
    },
    async allSources(root, args, { req }) {
      const sourceConfigurations = await libs.sources.getAllSourceConfigurations(
        req.core.savedObjects.client
      );

      return sourceConfigurations;
    },
  },
  InfraSource: {
    // The status fields are resolved lazily by the source-status resolvers,
    // so the source itself is handed through as the parent value.
    async status(source) {
      return source;
    },
  },
  InfraSourceLogColumn: {
    // Discriminates the log column union via io-ts runtime type guards.
    __resolveType(logColumn) {
      if (SavedSourceConfigurationTimestampColumnRuntimeType.is(logColumn)) {
        return 'InfraSourceTimestampLogColumn';
      }

      if (SavedSourceConfigurationMessageColumnRuntimeType.is(logColumn)) {
        return 'InfraSourceMessageLogColumn';
      }

      if (SavedSourceConfigurationFieldColumnRuntimeType.is(logColumn)) {
        return 'InfraSourceFieldLogColumn';
      }

      return null;
    },
  },
  Mutation: {
    async createSource(root, args, { req }) {
      // Strip null/undefined properties so absent inputs don't clobber
      // defaults, and validate the log column inputs before persisting.
      const sourceConfiguration = await libs.sources.createSourceConfiguration(
        req.core.savedObjects.client,
        args.id,
        compactObject({
          ...args.sourceProperties,
          fields: args.sourceProperties.fields
            ? compactObject(args.sourceProperties.fields)
            : undefined,
          logColumns: decodeLogColumns(args.sourceProperties.logColumns),
        })
      );

      return {
        source: sourceConfiguration,
      };
    },
    async deleteSource(root, args, { req }) {
      await libs.sources.deleteSourceConfiguration(req.core.savedObjects.client, args.id);

      return {
        id: args.id,
      };
    },
    async updateSource(root, args, { req }) {
      // Same compaction/validation as createSource: only provided properties
      // are applied to the stored configuration.
      const updatedSourceConfiguration = await libs.sources.updateSourceConfiguration(
        req.core.savedObjects.client,
        args.id,
        compactObject({
          ...args.sourceProperties,
          fields: args.sourceProperties.fields
            ? compactObject(args.sourceProperties.fields)
            : undefined,
          logColumns: decodeLogColumns(args.sourceProperties.logColumns),
        })
      );

      return {
        source: updatedSourceConfiguration,
      };
    },
  },
});
/** Object type with `null`/`undefined` removed from every property's type. */
type CompactObject<T> = { [K in keyof T]: NonNullable<T[K]> };

/**
 * Returns a shallow copy of `obj` with all `null`/`undefined` valued
 * properties removed. Falsy-but-present values (`0`, `''`, `false`) are kept.
 *
 * The previous reduce-with-spread implementation rebuilt the accumulator on
 * every entry (accidentally O(n²)) and needed an `as any` escape hatch;
 * filter + `Object.fromEntries` is linear and cast-free apart from the final
 * result assertion.
 */
const compactObject = <T>(obj: T): CompactObject<T> => {
  const definedEntries = Object.entries(obj).filter(
    ([, value]) => value !== null && value !== undefined
  );
  return Object.fromEntries(definedEntries) as CompactObject<T>;
};
/**
 * Validates raw log column inputs against the saved-object runtime type.
 *
 * Returns `undefined` when no columns were supplied; otherwise returns the
 * decoded columns. Throws a `UserInputError` listing every validation
 * failure (one per line) when any column does not conform.
 */
const decodeLogColumns = (logColumns?: UpdateSourceLogColumnInput[] | null) => {
  if (logColumns == null) {
    return undefined;
  }
  return logColumns.map((rawColumn) => {
    const decodeResult = SavedSourceConfigurationColumnRuntimeType.decode(rawColumn);
    return pipe(
      decodeResult,
      fold((errors) => {
        throw new UserInputError(failure(errors).join('\n'));
      }, identity)
    );
  });
};

View file

@ -1,209 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import gql from 'graphql-tag';
/**
 * GraphQL schema (SDL) for infrastructure data sources: the `InfraSource`
 * type with its configuration, field mappings, and log column union, plus
 * the query operations for reading sources and the mutations for creating,
 * updating, and deleting them. Consumed via `makeExecutableSchema` alongside
 * the matching resolvers.
 */
export const sourcesSchema = gql`
  "A source of infrastructure data"
  type InfraSource {
    "The id of the source"
    id: ID!
    "The version number the source configuration was last persisted with"
    version: String
    "The timestamp the source configuration was last persisted at"
    updatedAt: Float
    "The origin of the source (one of 'fallback', 'internal', 'stored')"
    origin: String!
    "The raw configuration of the source"
    configuration: InfraSourceConfiguration!
    "The status of the source"
    status: InfraSourceStatus!
  }

  "The status of an infrastructure data source"
  type InfraSourceStatus

  "A set of configuration options for an infrastructure data source"
  type InfraSourceConfiguration {
    "The name of the data source"
    name: String!
    "A description of the data source"
    description: String!
    "The alias to read metric data from"
    metricAlias: String!
    "The alias to read log data from"
    logAlias: String!
    "Default view for inventory"
    inventoryDefaultView: String!
    "Default view for Metrics Explorer"
    metricsExplorerDefaultView: String!
    "The field mapping to use for this source"
    fields: InfraSourceFields!
    "The columns to use for log display"
    logColumns: [InfraSourceLogColumn!]!
  }

  "A mapping of semantic fields to their document counterparts"
  type InfraSourceFields {
    "The field to identify a container by"
    container: String!
    "The fields to identify a host by"
    host: String!
    "The fields to use as the log message"
    message: [String!]!
    "The field to identify a pod by"
    pod: String!
    "The field to use as a tiebreaker for log events that have identical timestamps"
    tiebreaker: String!
    "The field to use as a timestamp for metrics and logs"
    timestamp: String!
  }

  "The built-in timestamp log column"
  type InfraSourceTimestampLogColumn {
    timestampColumn: InfraSourceTimestampLogColumnAttributes!
  }

  type InfraSourceTimestampLogColumnAttributes {
    "A unique id for the column"
    id: ID!
  }

  "The built-in message log column"
  type InfraSourceMessageLogColumn {
    messageColumn: InfraSourceMessageLogColumnAttributes!
  }

  type InfraSourceMessageLogColumnAttributes {
    "A unique id for the column"
    id: ID!
  }

  "A log column containing a field value"
  type InfraSourceFieldLogColumn {
    fieldColumn: InfraSourceFieldLogColumnAttributes!
  }

  type InfraSourceFieldLogColumnAttributes {
    "A unique id for the column"
    id: ID!
    "The field name this column refers to"
    field: String!
  }

  "All known log column types"
  union InfraSourceLogColumn =
      InfraSourceTimestampLogColumn
    | InfraSourceMessageLogColumn
    | InfraSourceFieldLogColumn

  extend type Query {
    """
    Get an infrastructure data source by id.

    The resolution order for the source configuration attributes is as follows
    with the first defined value winning:

    1. The attributes of the saved object with the given 'id'.
    2. The attributes defined in the static Kibana configuration key
       'xpack.infra.sources.default'.
    3. The hard-coded default values.

    As a consequence, querying a source that doesn't exist doesn't error out,
    but returns the configured or hardcoded defaults.
    """
    source("The id of the source" id: ID!): InfraSource!
    "Get a list of all infrastructure data sources"
    allSources: [InfraSource!]!
  }

  "The properties to update the source with"
  input UpdateSourceInput {
    "The name of the data source"
    name: String
    "A description of the data source"
    description: String
    "The alias to read metric data from"
    metricAlias: String
    "The alias to read log data from"
    logAlias: String
    "The field mapping to use for this source"
    fields: UpdateSourceFieldsInput
    "Name of default inventory view"
    inventoryDefaultView: String
    "Default view for Metrics Explorer"
    metricsExplorerDefaultView: String
    "The log columns to display for this source"
    logColumns: [UpdateSourceLogColumnInput!]
  }

  "The mapping of semantic fields of the source to be created"
  input UpdateSourceFieldsInput {
    "The field to identify a container by"
    container: String
    "The fields to identify a host by"
    host: String
    "The field to identify a pod by"
    pod: String
    "The field to use as a tiebreaker for log events that have identical timestamps"
    tiebreaker: String
    "The field to use as a timestamp for metrics and logs"
    timestamp: String
  }

  "One of the log column types to display for this source"
  input UpdateSourceLogColumnInput {
    "A custom field log column"
    fieldColumn: UpdateSourceFieldLogColumnInput
    "A built-in message log column"
    messageColumn: UpdateSourceMessageLogColumnInput
    "A built-in timestamp log column"
    timestampColumn: UpdateSourceTimestampLogColumnInput
  }

  input UpdateSourceFieldLogColumnInput {
    id: ID!
    field: String!
  }

  input UpdateSourceMessageLogColumnInput {
    id: ID!
  }

  input UpdateSourceTimestampLogColumnInput {
    id: ID!
  }

  "The result of a successful source update"
  type UpdateSourceResult {
    "The source that was updated"
    source: InfraSource!
  }

  "The result of a source deletion operations"
  type DeleteSourceResult {
    "The id of the source that was deleted"
    id: ID!
  }

  extend type Mutation {
    "Create a new source of infrastructure data"
    createSource(
      "The id of the source"
      id: ID!
      sourceProperties: UpdateSourceInput!
    ): UpdateSourceResult!
    "Modify an existing source"
    updateSource(
      "The id of the source"
      id: ID!
      "The properties to update the source with"
      sourceProperties: UpdateSourceInput!
    ): UpdateSourceResult!
    "Delete a source of infrastructure data"
    deleteSource("The id of the source" id: ID!): DeleteSourceResult!
  }
`;

File diff suppressed because it is too large Load diff

View file

@ -4,11 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { IResolvers, makeExecutableSchema } from 'graphql-tools';
import { initIpToHostName } from './routes/ip_to_hostname';
import { schemas } from './graphql';
import { createSourceStatusResolvers } from './graphql/source_status';
import { createSourcesResolvers } from './graphql/sources';
import { InfraBackendLibs } from './lib/infra_types';
import {
initGetLogEntryCategoriesRoute,
@ -44,16 +40,6 @@ import { initGetLogAlertsChartPreviewDataRoute } from './routes/log_alerts';
import { initProcessListRoute } from './routes/process_list';
export const initInfraServer = (libs: InfraBackendLibs) => {
const schema = makeExecutableSchema({
resolvers: [
createSourcesResolvers(libs) as IResolvers,
createSourceStatusResolvers(libs) as IResolvers,
],
typeDefs: schemas,
});
libs.framework.registerGraphQLEndpoint('/graphql', schema);
initIpToHostName(libs);
initGetLogEntryCategoriesRoute(libs);
initGetLogEntryCategoryDatasetsRoute(libs);

View file

@ -4,9 +4,6 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { GraphQLSchema } from 'graphql';
import { runHttpQuery } from 'apollo-server-core';
import { schema, TypeOf } from '@kbn/config-schema';
import {
InfraRouteConfig,
InfraTSVBResponse,
@ -23,7 +20,6 @@ import {
CoreSetup,
IRouter,
KibanaRequest,
KibanaResponseFactory,
RouteMethod,
} from '../../../../../../../src/core/server';
import { RequestHandler } from '../../../../../../../src/core/server';
@ -73,79 +69,6 @@ export class KibanaFramework {
}
}
/**
 * Registers a GraphQL endpoint at `/api/infra{routePath}` that serves the
 * given executable schema via apollo-server-core's `runHttpQuery`.
 *
 * Both GET and POST are registered with the same handler: POST reads the
 * GraphQL operation from the request body, GET from the query string.
 * Errors are mapped onto Kibana HTTP responses (see catch block).
 */
public registerGraphQLEndpoint(routePath: string, gqlSchema: GraphQLSchema) {
  // These endpoints are validated by GraphQL at runtime and with GraphQL generated types
  const body = schema.object({}, { unknowns: 'allow' });
  type Body = TypeOf<typeof body>;

  const routeOptions = {
    path: `/api/infra${routePath}`,
    validate: {
      body,
    },
    options: {
      tags: ['access:infra'],
    },
  };

  async function handler(
    context: InfraPluginRequestHandlerContext,
    request: KibanaRequest<unknown, unknown, Body>,
    response: KibanaResponseFactory
  ) {
    try {
      // POST carries the operation in the body; GET carries it in the query string.
      const query =
        request.route.method === 'post'
          ? (request.body as Record<string, any>)
          : (request.query as Record<string, any>);

      // The options callback builds the per-request resolver context
      // ({ req, rawReq }) that resolvers receive as their third argument.
      const gqlResponse = await runHttpQuery([context, request], {
        method: request.route.method.toUpperCase(),
        options: (req: InfraPluginRequestHandlerContext, rawReq: KibanaRequest) => ({
          context: { req, rawReq },
          schema: gqlSchema,
        }),
        query,
      });

      return response.ok({
        body: gqlResponse,
        headers: {
          'content-type': 'application/json',
        },
      });
    } catch (error) {
      const errorBody = {
        message: error.message,
      };

      // Anything that is not an apollo HttpQueryError is an unexpected
      // server-side failure.
      if ('HttpQueryError' !== error.name) {
        return response.internalError({
          body: errorBody,
        });
      }

      // GraphQL execution errors carry their own status code.
      if (error.isGraphQLError === true) {
        return response.customError({
          statusCode: error.statusCode,
          body: errorBody,
          headers: {
            'Content-Type': 'application/json',
          },
        });
      }

      // Other HttpQueryErrors (e.g. malformed requests): forward the status
      // and headers apollo attached, defaulting to 500.
      const { headers = [], statusCode = 500 } = error;
      return response.customError({
        statusCode,
        headers,
        body: errorBody,
      });
    }
  }

  this.router.post(routeOptions, handler);
  this.router.get(routeOptions, handler);
}
callWithRequest<Hit = {}, Aggregation = undefined>(
requestContext: InfraPluginRequestHandlerContext,
endpoint: 'search',
@ -187,7 +110,7 @@ export class KibanaFramework {
options?: CallWithRequestParams
): Promise<InfraDatabaseSearchResponse>;
public async callWithRequest<Hit = {}, Aggregation = undefined>(
public async callWithRequest(
requestContext: InfraPluginRequestHandlerContext,
endpoint: string,
params: CallWithRequestParams

View file

@ -1,15 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { ApolloError } from 'apollo-server-errors';
import { InfraMetricsErrorCodes } from '../../../../../common/errors';
/**
 * Apollo error carrying the shared `invalid_node` metrics error code, so
 * clients can distinguish "node could not be resolved" failures from other
 * GraphQL errors.
 */
export class InvalidNodeError extends ApolloError {
  constructor(message: string) {
    super(message, InfraMetricsErrorCodes.invalid_node);
    // ApolloError defaults `name` to 'ApolloError'; override it so logs and
    // serialized errors identify this class specifically.
    Object.defineProperty(this, 'name', { value: 'InvalidNodeError' });
  }
}

View file

@ -5,9 +5,8 @@
*/
import type { InfraPluginRequestHandlerContext } from '../../types';
import { InfraIndexField, InfraIndexType } from '../../graphql/types';
import { FieldsAdapter } from '../adapters/fields';
import { InfraSources } from '../sources';
import { InfraSourceIndexField, InfraSources } from '../sources';
export class InfraFieldsDomain {
constructor(
@ -18,14 +17,14 @@ export class InfraFieldsDomain {
public async getFields(
requestContext: InfraPluginRequestHandlerContext,
sourceId: string,
indexType: InfraIndexType
): Promise<InfraIndexField[]> {
indexType: 'LOGS' | 'METRICS' | 'ANY'
): Promise<InfraSourceIndexField[]> {
const { configuration } = await this.libs.sources.getSourceConfiguration(
requestContext.core.savedObjects.client,
sourceId
);
const includeMetricIndices = [InfraIndexType.ANY, InfraIndexType.METRICS].includes(indexType);
const includeLogIndices = [InfraIndexType.ANY, InfraIndexType.LOGS].includes(indexType);
const includeMetricIndices = ['ANY', 'METRICS'].includes(indexType);
const includeLogIndices = ['ANY', 'LOGS'].includes(indexType);
const fields = await this.adapter.getIndexFields(
requestContext,

View file

@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { InfraSourceConfiguration } from '../../common/graphql/types';
import { InfraSourceConfiguration } from '../../common/http_api/source_api';
import { InfraFieldsDomain } from './domains/fields_domain';
import { InfraLogEntriesDomain } from './domains/log_entries_domain';
import { InfraMetricsDomain } from './domains/metrics_domain';
@ -13,13 +13,6 @@ import { InfraSourceStatus } from './source_status';
import { InfraConfig } from '../plugin';
import { KibanaFramework } from './adapters/framework/kibana_framework_adapter';
// NP_TODO: We shouldn't need this context anymore but I am
// not sure how the graphql stuff uses it, so we can't remove it yet
export interface InfraContext {
req: any;
rawReq?: any;
}
export interface InfraDomainLibs {
fields: InfraFieldsDomain;
logEntries: InfraLogEntriesDomain;

View file

@ -11,7 +11,6 @@ import {
LOG_SOURCE_STATUS_PATH,
} from '../../../common/http_api/log_sources';
import { createValidationFunction } from '../../../common/runtime_types';
import { InfraIndexType } from '../../graphql/types';
import { InfraBackendLibs } from '../../lib/infra_types';
export const initLogSourceStatusRoutes = ({
@ -34,7 +33,7 @@ export const initLogSourceStatusRoutes = ({
const logIndexStatus = await sourceStatus.getLogIndexStatus(requestContext, sourceId);
const logIndexFields =
logIndexStatus !== 'missing'
? await fields.getFields(requestContext, sourceId, InfraIndexType.LOGS)
? await fields.getFields(requestContext, sourceId, 'LOGS')
: [];
return response.ok({

View file

@ -4,20 +4,25 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { schema } from '@kbn/config-schema';
import { SourceResponseRuntimeType } from '../../../common/http_api/source_api';
import Boom from '@hapi/boom';
import { createValidationFunction } from '../../../common/runtime_types';
import {
InfraSourceStatus,
SavedSourceConfigurationRuntimeType,
SourceResponseRuntimeType,
} from '../../../common/http_api/source_api';
import { InfraBackendLibs } from '../../lib/infra_types';
import { InfraIndexType } from '../../graphql/types';
import { hasData } from '../../lib/sources/has_data';
import { createSearchClient } from '../../lib/create_search_client';
const typeToInfraIndexType = (value: string | undefined) => {
switch (value) {
case 'metrics':
return InfraIndexType.METRICS;
return 'METRICS';
case 'logs':
return InfraIndexType.LOGS;
return 'LOGS';
default:
return InfraIndexType.ANY;
return 'ANY';
}
};
@ -50,14 +55,14 @@ export const initSourceRoute = (libs: InfraBackendLibs) => {
return response.notFound();
}
const status = {
const status: InfraSourceStatus = {
logIndicesExist: logIndexStatus !== 'missing',
metricIndicesExist,
indexFields,
};
return response.ok({
body: SourceResponseRuntimeType.encode({ source, status }),
body: SourceResponseRuntimeType.encode({ source: { ...source, status } }),
});
} catch (error) {
return response.internalError({
@ -67,6 +72,79 @@ export const initSourceRoute = (libs: InfraBackendLibs) => {
}
);
// PATCH /api/metrics/source/{sourceId}: partially update (or lazily create)
// a source configuration, then respond with the merged source plus its
// current index/field status.
framework.registerRoute(
  {
    method: 'patch',
    path: '/api/metrics/source/{sourceId}',
    validate: {
      params: schema.object({
        sourceId: schema.string(),
      }),
      body: createValidationFunction(SavedSourceConfigurationRuntimeType),
    },
  },
  framework.router.handleLegacyErrors(async (requestContext, request, response) => {
    const { sources } = libs;
    const { sourceId } = request.params;
    const patchedSourceConfigurationProperties = request.body;

    try {
      const sourceConfiguration = await sources.getSourceConfiguration(
        requestContext.core.savedObjects.client,
        sourceId
      );

      if (sourceConfiguration.origin === 'internal') {
        // Fix: previously the conflict response was created but never
        // returned, so execution fell through and patched the read-only
        // configuration anyway. Returning it aborts the request with a 409.
        return response.conflict({
          body: 'A conflicting read-only source configuration already exists.',
        });
      }

      // An origin of 'stored' means a saved object already exists and can be
      // updated in place; otherwise a new saved object must be created.
      const sourceConfigurationExists = sourceConfiguration.origin === 'stored';
      const patchedSourceConfiguration = await (sourceConfigurationExists
        ? sources.updateSourceConfiguration(
            requestContext.core.savedObjects.client,
            sourceId,
            patchedSourceConfigurationProperties
          )
        : sources.createSourceConfiguration(
            requestContext.core.savedObjects.client,
            sourceId,
            patchedSourceConfigurationProperties
          ));

      // These status lookups are independent, so run them in parallel.
      const [logIndexStatus, metricIndicesExist, indexFields] = await Promise.all([
        libs.sourceStatus.getLogIndexStatus(requestContext, sourceId),
        libs.sourceStatus.hasMetricIndices(requestContext, sourceId),
        libs.fields.getFields(requestContext, sourceId, typeToInfraIndexType('metrics')),
      ]);

      const status: InfraSourceStatus = {
        logIndicesExist: logIndexStatus !== 'missing',
        metricIndicesExist,
        indexFields,
      };

      return response.ok({
        body: SourceResponseRuntimeType.encode({
          source: { ...patchedSourceConfiguration, status },
        }),
      });
    } catch (error) {
      // Boom errors carry their own HTTP semantics and are translated by
      // `handleLegacyErrors`; rethrow them untouched.
      if (Boom.isBoom(error)) {
        throw error;
      }

      return response.customError({
        statusCode: error.statusCode ?? 500,
        body: {
          message: error.message ?? 'An unexpected error occurred',
        },
      });
    }
  })
);
framework.registerRoute(
{
method: 'get',

View file

@ -4,8 +4,6 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { UserInputError } from 'apollo-server-errors';
import { JsonObject } from '../../../../../src/plugins/kibana_utils/common';
export const parseFilterQuery = (
@ -26,9 +24,6 @@ export const parseFilterQuery = (
return undefined;
}
} catch (err) {
throw new UserInputError(`Failed to parse query: ${err}`, {
query: filterQuery,
originalError: err,
});
throw new Error(`Failed to parse query: ${err}`);
}
};

View file

@ -1,97 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { Resolver } from '../graphql/types';
// A resolver result that may be produced synchronously or as a promise.
type ResolverResult<R> = R | Promise<R>;

// The shapes infra resolvers are allowed to return: the value itself, a
// promise of it, or an object whose properties are (possibly async) thunks
// that GraphQL resolves lazily per requested field.
type InfraResolverResult<R> =
  | Promise<R>
  | Promise<{ [P in keyof R]: () => Promise<R[P]> }>
  | { [P in keyof R]: () => Promise<R[P]> }
  | { [P in keyof R]: () => R[P] }
  | R;

// Extracts the unwrapped result type of an infra resolver.
export type ResultOf<Resolver_> = Resolver_ extends Resolver<InfraResolverResult<infer Result>>
  ? Result
  : never;

// Narrows a resolver's result type to only the listed fields; the array case
// is handled separately so element types (not the array itself) are narrowed.
export type SubsetResolverWithFields<R, IncludedFields extends string> = R extends Resolver<
  Array<infer ResultInArray>,
  infer ParentInArray,
  infer ContextInArray,
  infer ArgsInArray
>
  ? Resolver<
      Array<Pick<ResultInArray, Extract<keyof ResultInArray, IncludedFields>>>,
      ParentInArray,
      ContextInArray,
      ArgsInArray
    >
  : R extends Resolver<infer Result, infer Parent, infer Context, infer Args>
  ? Resolver<Pick<Result, Extract<keyof Result, IncludedFields>>, Parent, Context, Args>
  : never;

// Complement of SubsetResolverWithFields: removes the listed fields from the
// resolver's result type (again unwrapping one level of array).
export type SubsetResolverWithoutFields<R, ExcludedFields extends string> = R extends Resolver<
  Array<infer ResultInArray>,
  infer ParentInArray,
  infer ContextInArray,
  infer ArgsInArray
>
  ? Resolver<
      Array<Pick<ResultInArray, Exclude<keyof ResultInArray, ExcludedFields>>>,
      ParentInArray,
      ContextInArray,
      ArgsInArray
    >
  : R extends Resolver<infer Result, infer Parent, infer Context, infer Args>
  ? Resolver<Pick<Result, Exclude<keyof Result, ExcludedFields>>, Parent, Context, Args>
  : never;

// Replaces a resolver's parent type while preserving result, context and args.
export type ResolverWithParent<Resolver_, Parent> = Resolver_ extends Resolver<
  infer Result,
  any,
  infer Context,
  infer Args
>
  ? Resolver<Result, Parent, Context, Args>
  : never;

// A resolver whose result may take any of the InfraResolverResult shapes.
export type InfraResolver<Result = any, Parent = any, Context = any, Args = any> = Resolver<
  InfraResolverResult<Result>,
  Parent,
  Context,
  Args
>;

// Converts a generated (graphql-codegen) resolver type into an InfraResolver,
// defaulting a `never` parent to `{}` so it can be implemented directly.
export type InfraResolverOf<Resolver_> = Resolver_ extends Resolver<
  ResolverResult<infer ResultWithNeverParent>,
  never,
  infer ContextWithNeverParent,
  infer ArgsWithNeverParent
>
  ? InfraResolver<ResultWithNeverParent, {}, ContextWithNeverParent, ArgsWithNeverParent>
  : Resolver_ extends Resolver<
      ResolverResult<infer Result>,
      infer Parent,
      infer Context,
      infer Args
    >
  ? InfraResolver<Result, Parent, Context, Args>
  : never;

// InfraResolverOf restricted to only the listed result fields.
export type InfraResolverWithFields<Resolver_, IncludedFields extends string> = InfraResolverOf<
  SubsetResolverWithFields<Resolver_, IncludedFields>
>;

// InfraResolverOf with the listed result fields removed.
export type InfraResolverWithoutFields<Resolver_, ExcludedFields extends string> = InfraResolverOf<
  SubsetResolverWithoutFields<Resolver_, ExcludedFields>
>;

// A resolver whose parent is the result type of another (parent) resolver,
// used to chain child field resolvers off their parent's output.
export type ChildResolverOf<Resolver_, ParentResolver> = ResolverWithParent<
  Resolver_,
  ResultOf<ParentResolver>
>;

View file

@ -1,11 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
// Minimal ambient declaration for the untyped 'graphql-fields' package so it
// can be imported under strict type checking.
declare module 'graphql-fields' {
  // Returns the field selection structure from a GraphQL resolve info object.
  function graphqlFields(info: any, obj?: any): any;
  // eslint-disable-next-line import/no-default-export
  export default graphqlFields;
}

View file

@ -1,252 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import expect from '@kbn/expect';
import gql from 'graphql-tag';
import { FtrProviderContext } from '../../ftr_provider_context';
// Minimal introspection query: requesting only the query type's name succeeds
// against any reachable GraphQL endpoint, so it doubles as an access probe
// for the feature-control tests below.
const introspectionQuery = gql`
  query Schema {
    __schema {
      queryType {
        name
      }
    }
  }
`;
/**
 * Functional tests verifying that the infra GraphQL endpoint honors Kibana
 * feature privileges and spaces: users without infra/logs access must receive
 * a 403, users with access must receive a GraphQL response.
 */
export default function ({ getService }: FtrProviderContext) {
  const security = getService('security');
  const spaces = getService('spaces');
  const clientFactory = getService('infraOpsGraphQLClientFactory');

  // Asserts that the query failed at the network level with a 403.
  const expectGraphQL403 = (result: any) => {
    expect(result.response).to.be(undefined);
    expect(result.error).not.to.be(undefined);
    expect(result.error).to.have.property('networkError');
    expect(result.error.networkError).to.have.property('statusCode', 403);
  };

  // Asserts that the query succeeded and returned a GraphQL data payload.
  const expectGraphQLResponse = (result: any) => {
    expect(result.error).to.be(undefined);
    expect(result.response).to.have.property('data');
    expect(result.response.data).to.be.an('object');
  };

  // Runs the introspection query as the given user (optionally inside a
  // space) and captures either the response or the thrown error, so callers
  // can assert on both success and failure cases uniformly.
  const executeGraphQLQuery = async (username: string, password: string, spaceId?: string) => {
    const queryOptions = {
      query: introspectionQuery,
    };
    const basePath = spaceId ? `/s/${spaceId}` : '';
    const client = clientFactory({ username, password, basePath });
    let error;
    let response;
    try {
      response = await client.query(queryOptions);
    } catch (err) {
      error = err;
    }
    return {
      error,
      response,
    };
  };

  describe('feature controls', () => {
    it(`APIs can't be accessed by user with logstash-* "read" privileges`, async () => {
      const username = 'logstash_read';
      const roleName = 'logstash_read';
      const password = `${username}-password`;
      // The try/finally ensures the temporary role and user are cleaned up
      // even when the assertions fail.
      try {
        // Elasticsearch index privileges only — no Kibana feature access.
        await security.role.create(roleName, {
          elasticsearch: {
            indices: [
              {
                names: ['logstash-*'],
                privileges: ['read', 'view_index_metadata'],
              },
            ],
          },
        });

        await security.user.create(username, {
          password,
          roles: [roleName],
          full_name: 'a kibana user',
        });

        const graphQLResult = await executeGraphQLQuery(username, password);
        expectGraphQL403(graphQLResult);
      } finally {
        await security.role.delete(roleName);
        await security.user.delete(username);
      }
    });

    it('APIs can be accessed user with global "all" and logstash-* "read" privileges', async () => {
      const username = 'global_all';
      const roleName = 'global_all';
      const password = `${username}-password`;
      try {
        // Base "all" access across all spaces includes the infra feature.
        await security.role.create(roleName, {
          elasticsearch: {
            indices: [
              {
                names: ['logstash-*'],
                privileges: ['read', 'view_index_metadata'],
              },
            ],
          },
          kibana: [
            {
              base: ['all'],
              spaces: ['*'],
            },
          ],
        });

        await security.user.create(username, {
          password,
          roles: [roleName],
          full_name: 'a kibana user',
        });

        const graphQLResult = await executeGraphQLQuery(username, password);
        expectGraphQLResponse(graphQLResult);
      } finally {
        await security.role.delete(roleName);
        await security.user.delete(username);
      }
    });

    // this could be any role which doesn't have access to the infra feature
    it(`APIs can't be accessed by user with dashboard "all" and logstash-* "read" privileges`, async () => {
      const username = 'dashboard_all';
      const roleName = 'dashboard_all';
      const password = `${username}-password`;
      try {
        await security.role.create(roleName, {
          elasticsearch: {
            indices: [
              {
                names: ['logstash-*'],
                privileges: ['read', 'view_index_metadata'],
              },
            ],
          },
          kibana: [
            {
              feature: {
                dashboard: ['all'],
              },
              spaces: ['*'],
            },
          ],
        });

        await security.user.create(username, {
          password,
          roles: [roleName],
          full_name: 'a kibana user',
        });

        const graphQLResult = await executeGraphQLQuery(username, password);
        expectGraphQL403(graphQLResult);
      } finally {
        await security.role.delete(roleName);
        await security.user.delete(username);
      }
    });

    describe('spaces', () => {
      // the following tests create a user_1 which has infrastructure read access to space_1, logs read access to space_2 and dashboard all access to space_3
      const space1Id = 'space_1';
      const space2Id = 'space_2';
      const space3Id = 'space_3';

      const roleName = 'user_1';
      const username = 'user_1';
      const password = 'user_1-password';

      before(async () => {
        await spaces.create({
          id: space1Id,
          name: space1Id,
          disabledFeatures: [],
        });
        await spaces.create({
          id: space2Id,
          name: space2Id,
          disabledFeatures: [],
        });
        await spaces.create({
          id: space3Id,
          name: space3Id,
          disabledFeatures: [],
        });
        // One role granting different feature privileges per space.
        await security.role.create(roleName, {
          elasticsearch: {
            indices: [
              {
                names: ['logstash-*'],
                privileges: ['read', 'view_index_metadata'],
              },
            ],
          },
          kibana: [
            {
              feature: {
                infrastructure: ['read'],
              },
              spaces: [space1Id],
            },
            {
              feature: {
                logs: ['read'],
              },
              spaces: [space2Id],
            },
            {
              feature: {
                dashboard: ['all'],
              },
              spaces: [space3Id],
            },
          ],
        });
        await security.user.create(username, {
          password,
          roles: [roleName],
        });
      });

      after(async () => {
        await spaces.delete(space1Id);
        await spaces.delete(space2Id);
        await spaces.delete(space3Id);
        await security.role.delete(roleName);
        await security.user.delete(username);
      });

      // infrastructure:read in space_1 -> access allowed
      it('user_1 can access APIs in space_1', async () => {
        const graphQLResult = await executeGraphQLQuery(username, password, space1Id);
        expectGraphQLResponse(graphQLResult);
      });

      // logs:read in space_2 also grants access to this endpoint
      it(`user_1 can access APIs in space_2`, async () => {
        const graphQLResult = await executeGraphQLQuery(username, password, space2Id);
        expectGraphQLResponse(graphQLResult);
      });

      // dashboard:all alone in space_3 does not grant infra access
      it(`user_1 can't access APIs in space_3`, async () => {
        const graphQLResult = await executeGraphQLQuery(username, password, space3Id);
        expectGraphQL403(graphQLResult);
      });
    });
  });
}

View file

@ -34,7 +34,7 @@ export default function ({ getService }: FtrProviderContext) {
before(() => esArchiver.load('infra/8.0.0/logs_and_metrics'));
after(() => esArchiver.unload('infra/8.0.0/logs_and_metrics'));
describe('/api/metrics/source/default/metrics', () => {
it('should just work', () => {
it('should just work', async () => {
const resp = fetchSource();
return resp.then((data) => {
expect(data).to.have.property('source');
@ -50,14 +50,14 @@ export default function ({ getService }: FtrProviderContext) {
tiebreaker: '_doc',
timestamp: '@timestamp',
});
expect(data).to.have.property('status');
expect(data?.status.metricIndicesExist).to.equal(true);
expect(data?.status.logIndicesExist).to.equal(true);
expect(data?.source).to.have.property('status');
expect(data?.source.status?.metricIndicesExist).to.equal(true);
expect(data?.source.status?.logIndicesExist).to.equal(true);
});
});
});
describe('/api/metrics/source/default/metrics/hasData', () => {
it('should just work', () => {
it('should just work', async () => {
const resp = fetchHasData('metrics');
return resp.then((data) => {
expect(data).to.have.property('hasData');
@ -66,7 +66,7 @@ export default function ({ getService }: FtrProviderContext) {
});
});
describe('/api/metrics/source/default/logs/hasData', () => {
it('should just work', () => {
it('should just work', async () => {
const resp = fetchHasData('logs');
return resp.then((data) => {
expect(data).to.have.property('hasData');

View file

@ -18,7 +18,6 @@ export default function ({ loadTestFile }) {
loadTestFile(require.resolve('./snapshot'));
loadTestFile(require.resolve('./metrics_alerting'));
loadTestFile(require.resolve('./metrics_explorer'));
loadTestFile(require.resolve('./feature_controls'));
loadTestFile(require.resolve('./ip_to_hostname'));
loadTestFile(require.resolve('./http_source'));
});

View file

@ -5,7 +5,6 @@
*/
import expect from '@kbn/expect';
import { InfraNodeType } from '../../../../plugins/infra/server/graphql/types';
import {
InfraMetadata,
InfraMetadataRequest,
@ -50,7 +49,7 @@ export default function ({ getService }: FtrProviderContext) {
const metadata = await fetchMetadata({
sourceId: 'default',
nodeId: 'demo-stack-mysql-01',
nodeType: InfraNodeType.host,
nodeType: 'host',
timeRange: timeRange700,
});
if (metadata) {
@ -70,7 +69,7 @@ export default function ({ getService }: FtrProviderContext) {
const metadata = await fetchMetadata({
sourceId: 'default',
nodeId: '631f36a845514442b93c3fdd2dc91bcd8feb680b8ac5832c7fb8fdc167bb938e',
nodeType: InfraNodeType.container,
nodeType: 'container',
timeRange: timeRange660,
});
if (metadata) {
@ -92,7 +91,7 @@ export default function ({ getService }: FtrProviderContext) {
const metadata = await fetchMetadata({
sourceId: 'default',
nodeId: 'gke-observability-8--observability-8--bc1afd95-f0zc',
nodeType: InfraNodeType.host,
nodeType: 'host',
timeRange: timeRange800withAws,
});
if (metadata) {
@ -140,7 +139,7 @@ export default function ({ getService }: FtrProviderContext) {
const metadata = await fetchMetadata({
sourceId: 'default',
nodeId: 'ip-172-31-47-9.us-east-2.compute.internal',
nodeType: InfraNodeType.host,
nodeType: 'host',
timeRange: timeRange800withAws,
});
if (metadata) {
@ -189,7 +188,7 @@ export default function ({ getService }: FtrProviderContext) {
const metadata = await fetchMetadata({
sourceId: 'default',
nodeId: '14887487-99f8-11e9-9a96-42010a84004d',
nodeType: InfraNodeType.pod,
nodeType: 'pod',
timeRange: timeRange800withAws,
});
if (metadata) {
@ -242,7 +241,7 @@ export default function ({ getService }: FtrProviderContext) {
const metadata = await fetchMetadata({
sourceId: 'default',
nodeId: 'c74b04834c6d7cc1800c3afbe31d0c8c0c267f06e9eb45c2b0c2df3e6cee40c5',
nodeType: InfraNodeType.container,
nodeType: 'container',
timeRange: timeRange800withAws,
});
if (metadata) {

View file

@ -7,8 +7,8 @@
import expect from '@kbn/expect';
import { first, last } from 'lodash';
import { InfraTimerangeInput } from '../../../../plugins/infra/common/http_api/snapshot_api';
import { InventoryMetric } from '../../../../plugins/infra/common/inventory_models/types';
import { InfraNodeType, InfraTimerangeInput } from '../../../../plugins/infra/public/graphql/types';
import { FtrProviderContext } from '../../ftr_provider_context';
import { DATES } from './constants';
@ -19,7 +19,7 @@ const { min, max } = DATES['7.0.0'].hosts;
interface NodeDetailsRequest {
metrics: InventoryMetric[];
nodeId: string;
nodeType: InfraNodeType;
nodeType: string;
sourceId: string;
timerange: InfraTimerangeInput;
cloudId?: string;
@ -44,7 +44,7 @@ export default function ({ getService }: FtrProviderContext) {
return response.body;
};
it('should basically work', () => {
it('should basically work', async () => {
const data = fetchNodeDetails({
sourceId: 'default',
metrics: ['hostCpuUsage'],
@ -54,7 +54,7 @@ export default function ({ getService }: FtrProviderContext) {
interval: '>=1m',
},
nodeId: 'demo-stack-mysql-01',
nodeType: 'host' as InfraNodeType,
nodeType: 'host',
});
return data.then((resp) => {
if (!resp) {
@ -73,7 +73,7 @@ export default function ({ getService }: FtrProviderContext) {
});
});
it('should support multiple metrics', () => {
it('should support multiple metrics', async () => {
const data = fetchNodeDetails({
sourceId: 'default',
metrics: ['hostCpuUsage', 'hostLoad'],
@ -83,7 +83,7 @@ export default function ({ getService }: FtrProviderContext) {
interval: '>=1m',
},
nodeId: 'demo-stack-mysql-01',
nodeType: 'host' as InfraNodeType,
nodeType: 'host',
});
return data.then((resp) => {
if (!resp) {
@ -104,7 +104,7 @@ export default function ({ getService }: FtrProviderContext) {
interval: '>=1m',
},
nodeId: 'demo-stack-mysql-01',
nodeType: 'host' as InfraNodeType,
nodeType: 'host',
});
return data.then((resp) => {
if (!resp) {

View file

@ -7,10 +7,6 @@
import expect from '@kbn/expect';
import { first, last } from 'lodash';
import {
InfraSnapshotMetricInput,
InfraNodeType,
} from '../../../../plugins/infra/server/graphql/types';
import { FtrProviderContext } from '../../ftr_provider_context';
import {
SnapshotNodeResponse,
@ -39,7 +35,7 @@ export default function ({ getService }: FtrProviderContext) {
before(() => esArchiver.load('infra/6.6.0/docker'));
after(() => esArchiver.unload('infra/6.6.0/docker'));
it('should basically work', () => {
it('should basically work', async () => {
const resp = fetchSnapshot({
sourceId: 'default',
timerange: {
@ -47,8 +43,8 @@ export default function ({ getService }: FtrProviderContext) {
from: min,
interval: '1m',
},
metrics: [{ type: 'cpu' }] as InfraSnapshotMetricInput[],
nodeType: 'container' as InfraNodeType,
metrics: [{ type: 'cpu' }],
nodeType: 'container',
groupBy: [],
});
return resp.then((data) => {
@ -86,7 +82,7 @@ export default function ({ getService }: FtrProviderContext) {
before(() => esArchiver.load('infra/8.0.0/logs_and_metrics'));
after(() => esArchiver.unload('infra/8.0.0/logs_and_metrics'));
it("should use the id for the label when the name doesn't exist", () => {
it("should use the id for the label when the name doesn't exist", async () => {
const resp = fetchSnapshot({
sourceId: 'default',
timerange: {
@ -94,8 +90,8 @@ export default function ({ getService }: FtrProviderContext) {
from: min,
interval: '1m',
},
metrics: [{ type: 'cpu' }] as InfraSnapshotMetricInput[],
nodeType: 'pod' as InfraNodeType,
metrics: [{ type: 'cpu' }],
nodeType: 'pod',
groupBy: [],
});
return resp.then((data) => {
@ -118,7 +114,7 @@ export default function ({ getService }: FtrProviderContext) {
}
});
});
it('should have an id and label', () => {
it('should have an id and label', async () => {
const resp = fetchSnapshot({
sourceId: 'default',
timerange: {
@ -126,8 +122,8 @@ export default function ({ getService }: FtrProviderContext) {
from: min,
interval: '1m',
},
metrics: [{ type: 'cpu' }] as InfraSnapshotMetricInput[],
nodeType: 'container' as InfraNodeType,
metrics: [{ type: 'cpu' }],
nodeType: 'container',
groupBy: [],
});
return resp.then((data) => {
@ -157,7 +153,7 @@ export default function ({ getService }: FtrProviderContext) {
before(() => esArchiver.load('infra/7.0.0/hosts'));
after(() => esArchiver.unload('infra/7.0.0/hosts'));
it('should basically work', () => {
it('should basically work', async () => {
const resp = fetchSnapshot({
sourceId: 'default',
timerange: {
@ -165,8 +161,8 @@ export default function ({ getService }: FtrProviderContext) {
from: min,
interval: '1m',
},
metrics: [{ type: 'cpu' }] as InfraSnapshotMetricInput[],
nodeType: 'host' as InfraNodeType,
metrics: [{ type: 'cpu' }],
nodeType: 'host',
groupBy: [],
});
return resp.then((data) => {
@ -193,7 +189,7 @@ export default function ({ getService }: FtrProviderContext) {
});
});
it('should allow for overrides for interval and ignoring lookback', () => {
it('should allow for overrides for interval and ignoring lookback', async () => {
const resp = fetchSnapshot({
sourceId: 'default',
timerange: {
@ -203,8 +199,8 @@ export default function ({ getService }: FtrProviderContext) {
forceInterval: true,
ignoreLookback: true,
},
metrics: [{ type: 'cpu' }] as InfraSnapshotMetricInput[],
nodeType: 'host' as InfraNodeType,
metrics: [{ type: 'cpu' }],
nodeType: 'host',
groupBy: [],
includeTimeseries: true,
});
@ -229,7 +225,7 @@ export default function ({ getService }: FtrProviderContext) {
});
});
it('should allow for overrides for lookback', () => {
it('should allow for overrides for lookback', async () => {
const resp = fetchSnapshot({
sourceId: 'default',
timerange: {
@ -238,8 +234,8 @@ export default function ({ getService }: FtrProviderContext) {
interval: '1m',
lookbackSize: 6,
},
metrics: [{ type: 'cpu' }] as InfraSnapshotMetricInput[],
nodeType: 'host' as InfraNodeType,
metrics: [{ type: 'cpu' }],
nodeType: 'host',
groupBy: [],
includeTimeseries: true,
});
@ -277,7 +273,7 @@ export default function ({ getService }: FtrProviderContext) {
id: '1',
},
] as SnapshotMetricInput[],
nodeType: 'host' as InfraNodeType,
nodeType: 'host',
groupBy: [],
});
@ -303,7 +299,7 @@ export default function ({ getService }: FtrProviderContext) {
}
});
it('should basically work with 1 grouping', () => {
it('should basically work with 1 grouping', async () => {
const resp = fetchSnapshot({
sourceId: 'default',
timerange: {
@ -311,8 +307,8 @@ export default function ({ getService }: FtrProviderContext) {
from: min,
interval: '1m',
},
metrics: [{ type: 'cpu' }] as InfraSnapshotMetricInput[],
nodeType: 'host' as InfraNodeType,
metrics: [{ type: 'cpu' }],
nodeType: 'host',
groupBy: [{ field: 'cloud.availability_zone' }],
});
return resp.then((data) => {
@ -330,7 +326,7 @@ export default function ({ getService }: FtrProviderContext) {
});
});
it('should basically work with 2 groupings', () => {
it('should basically work with 2 groupings', async () => {
const resp = fetchSnapshot({
sourceId: 'default',
timerange: {
@ -338,8 +334,8 @@ export default function ({ getService }: FtrProviderContext) {
from: min,
interval: '1m',
},
metrics: [{ type: 'cpu' }] as InfraSnapshotMetricInput[],
nodeType: 'host' as InfraNodeType,
metrics: [{ type: 'cpu' }],
nodeType: 'host',
groupBy: [{ field: 'cloud.provider' }, { field: 'cloud.availability_zone' }],
});
@ -359,7 +355,7 @@ export default function ({ getService }: FtrProviderContext) {
});
});
it('should show metrics for all nodes when grouping by service type', () => {
it('should show metrics for all nodes when grouping by service type', async () => {
const resp = fetchSnapshot({
sourceId: 'default',
timerange: {
@ -367,8 +363,8 @@ export default function ({ getService }: FtrProviderContext) {
from: min,
interval: '1m',
},
metrics: [{ type: 'cpu' }] as InfraSnapshotMetricInput[],
nodeType: 'host' as InfraNodeType,
metrics: [{ type: 'cpu' }],
nodeType: 'host',
groupBy: [{ field: 'service.type' }],
});
return resp.then((data) => {

View file

@ -5,20 +5,27 @@
*/
import expect from '@kbn/expect';
import gql from 'graphql-tag';
import { sourceQuery } from '../../../../plugins/infra/public/containers/source/query_source.gql_query';
import {
sourceConfigurationFieldsFragment,
sourceStatusFieldsFragment,
} from '../../../../plugins/infra/public/containers/source/source_fields_fragment.gql_query';
import { SourceQuery } from '../../../../plugins/infra/public/graphql/types';
SourceResponse,
InfraSavedSourceConfiguration,
SourceResponseRuntimeType,
} from '../../../../plugins/infra/common/http_api/source_api';
import { FtrProviderContext } from '../../ftr_provider_context';
import { sharedFragments } from '../../../../plugins/infra/common/graphql/shared';
export default function ({ getService }: FtrProviderContext) {
const esArchiver = getService('esArchiver');
const client = getService('infraOpsGraphQLClient');
const supertest = getService('supertest');
const patchRequest = async (
body: InfraSavedSourceConfiguration
): Promise<SourceResponse | undefined> => {
const response = await supertest
.patch('/api/metrics/source/default')
.set('kbn-xsrf', 'xxx')
.send(body)
.expect(200);
return response.body;
};
describe('sources', () => {
before(() => esArchiver.load('infra/metrics_and_logs'));
@ -26,409 +33,145 @@ export default function ({ getService }: FtrProviderContext) {
beforeEach(() => esArchiver.load('empty_kibana'));
afterEach(() => esArchiver.unload('empty_kibana'));
describe('query from container', () => {
it('returns the default source configuration when none has been saved', async () => {
const response = await client.query<SourceQuery.Query>({
query: sourceQuery,
variables: {
sourceId: 'default',
},
});
const sourceConfiguration = response.data.source.configuration;
const sourceStatus = response.data.source.status;
// shipped default values
expect(sourceConfiguration.name).to.be('Default');
expect(sourceConfiguration.metricAlias).to.be('metrics-*,metricbeat-*');
expect(sourceConfiguration.logAlias).to.be('logs-*,filebeat-*,kibana_sample_data_logs*');
expect(sourceConfiguration.fields.container).to.be('container.id');
expect(sourceConfiguration.fields.host).to.be('host.name');
expect(sourceConfiguration.fields.pod).to.be('kubernetes.pod.uid');
expect(sourceConfiguration.logColumns).to.have.length(3);
expect(sourceConfiguration.logColumns[0]).to.have.key('timestampColumn');
expect(sourceConfiguration.logColumns[1]).to.have.key('fieldColumn');
expect(sourceConfiguration.logColumns[2]).to.have.key('messageColumn');
// test data in x-pack/test/functional/es_archives/infra/data.json.gz
expect(sourceStatus.indexFields.length).to.be(1765);
expect(sourceStatus.logIndicesExist).to.be(true);
expect(sourceStatus.metricIndicesExist).to.be(true);
});
});
describe('createSource mutation', () => {
it('saves and returns source configurations', async () => {
const response = await client.mutate<any>({
mutation: createSourceMutation,
variables: {
sourceProperties: {
name: 'NAME',
description: 'DESCRIPTION',
logAlias: 'filebeat-**',
metricAlias: 'metricbeat-**',
fields: {
container: 'CONTAINER',
host: 'HOST',
pod: 'POD',
tiebreaker: 'TIEBREAKER',
timestamp: 'TIMESTAMP',
},
logColumns: [
{
messageColumn: {
id: 'MESSAGE_COLUMN',
},
},
],
},
sourceId: 'default',
},
});
const { version, updatedAt, configuration, status } =
response.data && response.data.createSource.source;
expect(version).to.be.a('string');
expect(updatedAt).to.be.greaterThan(0);
expect(configuration.name).to.be('NAME');
expect(configuration.description).to.be('DESCRIPTION');
expect(configuration.metricAlias).to.be('metricbeat-**');
expect(configuration.logAlias).to.be('filebeat-**');
expect(configuration.fields.container).to.be('CONTAINER');
expect(configuration.fields.host).to.be('HOST');
expect(configuration.fields.pod).to.be('POD');
expect(configuration.fields.tiebreaker).to.be('TIEBREAKER');
expect(configuration.fields.timestamp).to.be('TIMESTAMP');
expect(configuration.logColumns).to.have.length(1);
expect(configuration.logColumns[0]).to.have.key('messageColumn');
expect(status.logIndicesExist).to.be(true);
expect(status.metricIndicesExist).to.be(true);
});
it('saves partial source configuration and returns it amended with defaults', async () => {
const response = await client.mutate<any>({
mutation: createSourceMutation,
variables: {
sourceProperties: {
name: 'NAME',
},
sourceId: 'default',
},
});
const { version, updatedAt, configuration, status } =
response.data && response.data.createSource.source;
expect(version).to.be.a('string');
expect(updatedAt).to.be.greaterThan(0);
expect(configuration.name).to.be('NAME');
expect(configuration.description).to.be('');
expect(configuration.metricAlias).to.be('metrics-*,metricbeat-*');
expect(configuration.logAlias).to.be('logs-*,filebeat-*,kibana_sample_data_logs*');
expect(configuration.fields.container).to.be('container.id');
expect(configuration.fields.host).to.be('host.name');
expect(configuration.fields.pod).to.be('kubernetes.pod.uid');
expect(configuration.fields.tiebreaker).to.be('_doc');
expect(configuration.fields.timestamp).to.be('@timestamp');
expect(configuration.logColumns).to.have.length(3);
expect(status.logIndicesExist).to.be(true);
expect(status.metricIndicesExist).to.be(true);
});
it('refuses to overwrite an existing source', async () => {
await client.mutate<any>({
mutation: createSourceMutation,
variables: {
sourceProperties: {
name: 'NAME',
},
sourceId: 'default',
},
});
await client
.mutate<any>({
mutation: createSourceMutation,
variables: {
sourceProperties: {
name: 'NAME',
},
sourceId: 'default',
},
})
.then(
() => {
expect().fail('should have failed with a conflict');
},
(err) => {
expect(err.message).to.contain('conflict');
}
);
});
});
describe('deleteSource mutation', () => {
it('deletes an existing source', async () => {
const creationResponse = await client.mutate<any>({
mutation: createSourceMutation,
variables: {
sourceProperties: {
name: 'NAME',
},
sourceId: 'default',
},
});
const { version } = creationResponse.data && creationResponse.data.createSource.source;
expect(version).to.be.a('string');
const deletionResponse = await client.mutate<any>({
mutation: deleteSourceMutation,
variables: {
sourceId: 'default',
},
});
const { id } = deletionResponse.data && deletionResponse.data.deleteSource;
expect(id).to.be('default');
});
});
describe('updateSource mutation', () => {
describe('patch request', () => {
it('applies all top-level field updates to an existing source', async () => {
const creationResponse = await client.mutate<any>({
mutation: createSourceMutation,
variables: {
sourceProperties: {
name: 'NAME',
},
sourceId: 'default',
},
const creationResponse = await patchRequest({
name: 'NAME',
});
const { version: initialVersion, updatedAt: createdAt } =
creationResponse.data && creationResponse.data.createSource.source;
const initialVersion = creationResponse?.source.version;
const createdAt = creationResponse?.source.updatedAt;
expect(initialVersion).to.be.a('string');
expect(createdAt).to.be.greaterThan(0);
const updateResponse = await client.mutate<any>({
mutation: updateSourceMutation,
variables: {
sourceId: 'default',
sourceProperties: {
name: 'UPDATED_NAME',
description: 'UPDATED_DESCRIPTION',
metricAlias: 'metricbeat-**',
logAlias: 'filebeat-**',
},
},
const updateResponse = await patchRequest({
name: 'UPDATED_NAME',
description: 'UPDATED_DESCRIPTION',
metricAlias: 'metricbeat-**',
logAlias: 'filebeat-**',
});
const { version, updatedAt, configuration, status } =
updateResponse.data && updateResponse.data.updateSource.source;
expect(SourceResponseRuntimeType.is(updateResponse)).to.be(true);
const version = updateResponse?.source.version;
const updatedAt = updateResponse?.source.updatedAt;
const configuration = updateResponse?.source.configuration;
const status = updateResponse?.source.status;
expect(version).to.be.a('string');
expect(version).to.not.be(initialVersion);
expect(updatedAt).to.be.greaterThan(createdAt);
expect(configuration.name).to.be('UPDATED_NAME');
expect(configuration.description).to.be('UPDATED_DESCRIPTION');
expect(configuration.metricAlias).to.be('metricbeat-**');
expect(configuration.logAlias).to.be('filebeat-**');
expect(configuration.fields.host).to.be('host.name');
expect(configuration.fields.pod).to.be('kubernetes.pod.uid');
expect(configuration.fields.tiebreaker).to.be('_doc');
expect(configuration.fields.timestamp).to.be('@timestamp');
expect(configuration.fields.container).to.be('container.id');
expect(configuration.logColumns).to.have.length(3);
expect(status.logIndicesExist).to.be(true);
expect(status.metricIndicesExist).to.be(true);
expect(updatedAt).to.be.greaterThan(createdAt || 0);
expect(configuration?.name).to.be('UPDATED_NAME');
expect(configuration?.description).to.be('UPDATED_DESCRIPTION');
expect(configuration?.metricAlias).to.be('metricbeat-**');
expect(configuration?.logAlias).to.be('filebeat-**');
expect(configuration?.fields.host).to.be('host.name');
expect(configuration?.fields.pod).to.be('kubernetes.pod.uid');
expect(configuration?.fields.tiebreaker).to.be('_doc');
expect(configuration?.fields.timestamp).to.be('@timestamp');
expect(configuration?.fields.container).to.be('container.id');
expect(configuration?.logColumns).to.have.length(3);
expect(status?.logIndicesExist).to.be(true);
expect(status?.metricIndicesExist).to.be(true);
});
it('applies a single top-level update to an existing source', async () => {
const creationResponse = await client.mutate<any>({
mutation: createSourceMutation,
variables: {
sourceProperties: {
name: 'NAME',
},
sourceId: 'default',
},
const creationResponse = await patchRequest({
name: 'NAME',
});
const { version: initialVersion, updatedAt: createdAt } =
creationResponse.data && creationResponse.data.createSource.source;
const initialVersion = creationResponse?.source.version;
const createdAt = creationResponse?.source.updatedAt;
expect(initialVersion).to.be.a('string');
expect(createdAt).to.be.greaterThan(0);
const updateResponse = await client.mutate<any>({
mutation: updateSourceMutation,
variables: {
sourceId: 'default',
sourceProperties: {
metricAlias: 'metricbeat-**',
},
},
const updateResponse = await patchRequest({
name: 'UPDATED_NAME',
description: 'UPDATED_DESCRIPTION',
metricAlias: 'metricbeat-**',
});
const { version, updatedAt, configuration, status } =
updateResponse.data && updateResponse.data.updateSource.source;
const version = updateResponse?.source.version;
const updatedAt = updateResponse?.source.updatedAt;
const configuration = updateResponse?.source.configuration;
const status = updateResponse?.source.status;
expect(version).to.be.a('string');
expect(version).to.not.be(initialVersion);
expect(updatedAt).to.be.greaterThan(createdAt);
expect(configuration.metricAlias).to.be('metricbeat-**');
expect(configuration.logAlias).to.be('logs-*,filebeat-*,kibana_sample_data_logs*');
expect(status.logIndicesExist).to.be(true);
expect(status.metricIndicesExist).to.be(true);
expect(updatedAt).to.be.greaterThan(createdAt || 0);
expect(configuration?.metricAlias).to.be('metricbeat-**');
expect(configuration?.logAlias).to.be('logs-*,filebeat-*,kibana_sample_data_logs*');
expect(status?.logIndicesExist).to.be(true);
expect(status?.metricIndicesExist).to.be(true);
});
it('applies a single nested field update to an existing source', async () => {
const creationResponse = await client.mutate<any>({
mutation: createSourceMutation,
variables: {
sourceProperties: {
name: 'NAME',
fields: {
host: 'HOST',
},
},
sourceId: 'default',
const creationResponse = await patchRequest({
name: 'NAME',
fields: {
host: 'HOST',
},
});
const { version: initialVersion, updatedAt: createdAt } =
creationResponse.data && creationResponse.data.createSource.source;
const initialVersion = creationResponse?.source.version;
const createdAt = creationResponse?.source.updatedAt;
expect(initialVersion).to.be.a('string');
expect(createdAt).to.be.greaterThan(0);
const updateResponse = await client.mutate<any>({
mutation: updateSourceMutation,
variables: {
sourceId: 'default',
sourceProperties: {
fields: {
container: 'UPDATED_CONTAINER',
},
},
const updateResponse = await patchRequest({
fields: {
container: 'UPDATED_CONTAINER',
},
});
const { version, updatedAt, configuration } =
updateResponse.data && updateResponse.data.updateSource.source;
const version = updateResponse?.source.version;
const updatedAt = updateResponse?.source.updatedAt;
const configuration = updateResponse?.source.configuration;
expect(version).to.be.a('string');
expect(version).to.not.be(initialVersion);
expect(updatedAt).to.be.greaterThan(createdAt);
expect(configuration.fields.container).to.be('UPDATED_CONTAINER');
expect(configuration.fields.host).to.be('HOST');
expect(configuration.fields.pod).to.be('kubernetes.pod.uid');
expect(configuration.fields.tiebreaker).to.be('_doc');
expect(configuration.fields.timestamp).to.be('@timestamp');
expect(updatedAt).to.be.greaterThan(createdAt || 0);
expect(configuration?.fields.container).to.be('UPDATED_CONTAINER');
expect(configuration?.fields.host).to.be('HOST');
expect(configuration?.fields.pod).to.be('kubernetes.pod.uid');
expect(configuration?.fields.tiebreaker).to.be('_doc');
expect(configuration?.fields.timestamp).to.be('@timestamp');
});
it('applies a log column update to an existing source', async () => {
const creationResponse = await client.mutate<any>({
mutation: createSourceMutation,
variables: {
sourceProperties: {
name: 'NAME',
},
sourceId: 'default',
},
const creationResponse = await patchRequest({
name: 'NAME',
});
const { version: initialVersion, updatedAt: createdAt } =
creationResponse.data && creationResponse.data.createSource.source;
const initialVersion = creationResponse?.source.version;
const createdAt = creationResponse?.source.updatedAt;
expect(initialVersion).to.be.a('string');
expect(createdAt).to.be.greaterThan(0);
const updateResponse = await client.mutate<any>({
mutation: updateSourceMutation,
variables: {
sourceId: 'default',
sourceProperties: {
logColumns: [
{
fieldColumn: {
id: 'ADDED_COLUMN_ID',
field: 'ADDED_COLUMN_FIELD',
},
},
],
const updateResponse = await patchRequest({
logColumns: [
{
fieldColumn: {
id: 'ADDED_COLUMN_ID',
field: 'ADDED_COLUMN_FIELD',
},
},
},
],
});
const { version, updatedAt, configuration } =
updateResponse.data && updateResponse.data.updateSource.source;
const version = updateResponse?.source.version;
const updatedAt = updateResponse?.source.updatedAt;
const configuration = updateResponse?.source.configuration;
expect(version).to.be.a('string');
expect(version).to.not.be(initialVersion);
expect(updatedAt).to.be.greaterThan(createdAt);
expect(configuration.logColumns).to.have.length(1);
expect(configuration.logColumns[0]).to.have.key('fieldColumn');
expect(configuration.logColumns[0].fieldColumn).to.have.property('id', 'ADDED_COLUMN_ID');
expect(configuration.logColumns[0].fieldColumn).to.have.property(
'field',
'ADDED_COLUMN_FIELD'
);
expect(updatedAt).to.be.greaterThan(createdAt || 0);
expect(configuration?.logColumns).to.have.length(1);
expect(configuration?.logColumns[0]).to.have.key('fieldColumn');
const fieldColumn = (configuration?.logColumns[0] as any).fieldColumn;
expect(fieldColumn).to.have.property('id', 'ADDED_COLUMN_ID');
expect(fieldColumn).to.have.property('field', 'ADDED_COLUMN_FIELD');
});
});
});
}
const createSourceMutation = gql`
mutation createSource($sourceId: ID!, $sourceProperties: UpdateSourceInput!) {
createSource(id: $sourceId, sourceProperties: $sourceProperties) {
source {
...InfraSourceFields
configuration {
...SourceConfigurationFields
}
status {
...SourceStatusFields
}
}
}
}
${sharedFragments.InfraSourceFields}
${sourceConfigurationFieldsFragment}
${sourceStatusFieldsFragment}
`;
const deleteSourceMutation = gql`
mutation deleteSource($sourceId: ID!) {
deleteSource(id: $sourceId) {
id
}
}
`;
const updateSourceMutation = gql`
mutation updateSource($sourceId: ID!, $sourceProperties: UpdateSourceInput!) {
updateSource(id: $sourceId, sourceProperties: $sourceProperties) {
source {
...InfraSourceFields
configuration {
...SourceConfigurationFields
}
status {
...SourceStatusFields
}
}
}
}
${sharedFragments.InfraSourceFields}
${sourceConfigurationFieldsFragment}
${sourceStatusFieldsFragment}
`;

View file

@ -15,10 +15,6 @@ import { EsSupertestWithoutAuthProvider } from './es_supertest_without_auth';
import { SupertestWithoutAuthProvider } from './supertest_without_auth';
import { UsageAPIProvider } from './usage_api';
import {
InfraOpsGraphQLClientProvider,
InfraOpsGraphQLClientFactoryProvider,
} from './infraops_graphql_client';
import {
SecuritySolutionGraphQLClientProvider,
SecuritySolutionGraphQLClientFactoryProvider,
@ -37,8 +33,6 @@ export const services = {
legacyEs: LegacyEsProvider,
esSupertestWithoutAuth: EsSupertestWithoutAuthProvider,
infraOpsGraphQLClient: InfraOpsGraphQLClientProvider,
infraOpsGraphQLClientFactory: InfraOpsGraphQLClientFactoryProvider,
infraOpsSourceConfiguration: InfraOpsSourceConfigurationProvider,
infraLogSourceConfiguration: InfraLogSourceConfigurationProvider,
securitySolutionGraphQLClient: SecuritySolutionGraphQLClientProvider,

View file

@ -1,68 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { format as formatUrl } from 'url';
import fetch from 'node-fetch';
import { InMemoryCache, IntrospectionFragmentMatcher } from 'apollo-cache-inmemory';
import { ApolloClient } from 'apollo-client';
import { HttpLink } from 'apollo-link-http';
import { FtrProviderContext } from '../ftr_provider_context';
import introspectionQueryResultData from '../../../plugins/infra/public/graphql/introspection.json';
export function InfraOpsGraphQLClientProvider(context: FtrProviderContext) {
return InfraOpsGraphQLClientFactoryProvider(context)();
}
interface InfraOpsGraphQLClientFactoryOptions {
username?: string;
password?: string;
basePath?: string;
}
export function InfraOpsGraphQLClientFactoryProvider({ getService }: FtrProviderContext) {
const config = getService('config');
const superAuth: string = config.get('servers.elasticsearch.auth');
const [superUsername, superPassword] = superAuth.split(':');
return function (options?: InfraOpsGraphQLClientFactoryOptions) {
const { username = superUsername, password = superPassword, basePath = null } = options || {};
const kbnURLWithoutAuth = formatUrl({ ...config.get('servers.kibana'), auth: false });
const httpLink = new HttpLink({
credentials: 'same-origin',
fetch: fetch as any,
headers: {
'kbn-xsrf': 'xxx',
authorization: `Basic ${Buffer.from(`${username}:${password}`).toString('base64')}`,
},
uri: `${kbnURLWithoutAuth}${basePath || ''}/api/infra/graphql`,
});
return new ApolloClient({
cache: new InMemoryCache({
fragmentMatcher: new IntrospectionFragmentMatcher({
// @ts-expect-error apollo-cache-inmemory types don't match actual introspection data
introspectionQueryResultData,
}),
}),
defaultOptions: {
query: {
fetchPolicy: 'no-cache',
},
watchQuery: {
fetchPolicy: 'no-cache',
},
mutate: {
fetchPolicy: 'no-cache',
} as any,
},
link: httpLink,
});
};
}

View file

@ -4,65 +4,36 @@
* you may not use this file except in compliance with the Elastic License.
*/
import gql from 'graphql-tag';
import {
InfraSavedSourceConfiguration,
SourceResponse,
} from '../../../plugins/infra/common/http_api/source_api';
import { FtrProviderContext } from '../ftr_provider_context';
import { UpdateSourceInput, UpdateSourceResult } from '../../../plugins/infra/public/graphql/types';
const createSourceMutation = gql`
mutation createSource($sourceId: ID!, $sourceProperties: UpdateSourceInput!) {
createSource(id: $sourceId, sourceProperties: $sourceProperties) {
source {
id
version
configuration {
name
logColumns {
... on InfraSourceTimestampLogColumn {
timestampColumn {
id
}
}
... on InfraSourceMessageLogColumn {
messageColumn {
id
}
}
... on InfraSourceFieldLogColumn {
fieldColumn {
id
field
}
}
}
}
}
}
}
`;
export function InfraOpsSourceConfigurationProvider({ getService }: FtrProviderContext) {
const client = getService('infraOpsGraphQLClient');
const log = getService('log');
const supertest = getService('supertest');
const patchRequest = async (
body: InfraSavedSourceConfiguration
): Promise<SourceResponse | undefined> => {
const response = await supertest
.patch('/api/metrics/source/default')
.set('kbn-xsrf', 'xxx')
.send(body)
.expect(200);
return response.body;
};
return {
async createConfiguration(sourceId: string, sourceProperties: UpdateSourceInput) {
async createConfiguration(sourceId: string, sourceProperties: InfraSavedSourceConfiguration) {
log.debug(
`Creating Infra UI source configuration "${sourceId}" with properties ${JSON.stringify(
sourceProperties
)}`
);
const response = await client.mutate({
mutation: createSourceMutation,
variables: {
sourceProperties,
sourceId,
},
});
const result: UpdateSourceResult = response.data!.createSource;
return result.source.version;
const response = await patchRequest(sourceProperties);
return response?.source.version;
},
};
}