Mirror of https://github.com/elastic/kibana.git (synced 2025-04-24 01:38:56 -04:00)
[Infra UI] Read and write data sources from and to saved objects (#28016)
This commit is contained in:
parent b3eacc04d2
commit 9588443601

48 changed files with 2338 additions and 458 deletions

kibana.d.ts (vendored, 3 changes)
@@ -32,9 +32,12 @@ import * as LegacyKibanaServer from './src/server/kbn_server';
 */
// tslint:disable:no-namespace
export namespace Legacy {
  export type IndexPatternsService = LegacyKibanaServer.IndexPatternsService;
  export type KibanaConfig = LegacyKibanaServer.KibanaConfig;
  export type Request = LegacyKibanaServer.Request;
  export type ResponseToolkit = LegacyKibanaServer.ResponseToolkit;
  export type SavedObjectsClient = LegacyKibanaServer.SavedObjectsClient;
  export type SavedObjectsService = LegacyKibanaServer.SavedObjectsService;
  export type Server = LegacyKibanaServer.Server;

  export namespace Plugins {
src/server/index_patterns/index.d.ts (vendored, new file, 21 lines)
@@ -0,0 +1,21 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

export { IndexPatternsService } from './service';
export { IndexPatternsServiceFactory } from './mixin';
src/server/index_patterns/mixin.d.ts (vendored, new file, 24 lines)
@@ -0,0 +1,24 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import { IndexPatternsService } from './service';

export type IndexPatternsServiceFactory = (
  args: { callCluster: (endpoint: string, clientParams: any, options: any) => Promise<any> }
) => IndexPatternsService;
src/server/index_patterns/service/index.d.ts (vendored, new file, 20 lines)
@@ -0,0 +1,20 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

export { IndexPatternsService } from './index_patterns_service';
src/server/index_patterns/service/index_patterns_service.d.ts (vendored, new file, 32 lines)
@@ -0,0 +1,32 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

export interface IndexPatternsService {
  // ATTENTION: this interface is incomplete

  getFieldsForWildcard(options: { pattern: string | string[] }): Promise<FieldDescriptor[]>;
}

export interface FieldDescriptor {
  aggregatable: boolean;
  name: string;
  readFromDocValues: boolean;
  searchable: boolean;
  type: string;
}
src/server/kbn_server.d.ts (vendored, 14 changes)
@@ -18,7 +18,9 @@
 */

import { Server } from 'hapi';
import { ElasticsearchPlugin } from '../legacy/core_plugins/elasticsearch';
import { CallClusterWithRequest, ElasticsearchPlugin } from '../legacy/core_plugins/elasticsearch';
import { IndexPatternsServiceFactory } from './index_patterns';
import { SavedObjectsService } from './saved_objects';

export interface KibanaConfig {
  get<T>(key: string): T;
@@ -34,6 +36,12 @@ declare module 'hapi' {

  interface Server {
    config: () => KibanaConfig;
    indexPatternsServiceFactory: IndexPatternsServiceFactory;
    savedObjects: SavedObjectsService;
  }

  interface Request {
    getBasePath: () => string;
  }

  interface Request {
@@ -58,3 +66,7 @@ export default class KbnServer {

// Re-export commonly used hapi types.
export { Server, Request, ResponseToolkit } from 'hapi';

// Re-export commonly accessed api types.
export { IndexPatternsService } from './index_patterns';
export { SavedObject, SavedObjectsClient, SavedObjectsService } from './saved_objects';
@@ -17,7 +17,8 @@
 * under the License.
 */

import { ScopedSavedObjectsClientProvider } from './lib';
import { SavedObjectsRepository, ScopedSavedObjectsClientProvider } from './lib';
import { SavedObjectsClient } from './saved_objects_client';

export interface SavedObjectsService<Request = any> {
  // ATTENTION: these types are incomplete

@@ -25,5 +26,10 @@ export interface SavedObjectsService<Request = any> {
  addScopedSavedObjectsClientWrapperFactory: ScopedSavedObjectsClientProvider<
    Request
  >['addClientWrapperFactory'];
  getSavedObjectsRepository: (
    callCluster: (endpoint: string, clientParams: any, options: any) => Promise<any>
  ) => SavedObjectsRepository;
  getScopedSavedObjectsClient: ScopedSavedObjectsClientProvider<Request>['getClient'];
  SavedObjectsClient: typeof SavedObjectsClient;
  types: string[];
}
src/server/saved_objects/service/lib/errors.d.ts (vendored, new file, 27 lines)
@@ -0,0 +1,27 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

export function isBadRequestError(maybeError: any): boolean;
export function isNotAuthorizedError(maybeError: any): boolean;
export function isForbiddenError(maybeError: any): boolean;
export function isRequestEntityTooLargeError(maybeError: any): boolean;
export function isNotFoundError(maybeError: any): boolean;
export function isConflictError(maybeError: any): boolean;
export function isEsUnavailableError(maybeError: any): boolean;
export function isEsAutoCreateIndexError(maybeError: any): boolean;
@@ -17,6 +17,12 @@
 * under the License.
 */

import errors from './errors';

export { errors };

export { SavedObjectsRepository, SavedObjectsRepositoryOptions } from './repository';

export {
  SavedObjectsClientWrapperFactory,
  SavedObjectsClientWrapperOptions,
src/server/saved_objects/service/lib/repository.d.ts (vendored, new file, 34 lines)
@@ -0,0 +1,34 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

export interface SavedObjectsRepositoryOptions {
  index: string | string[];
  mappings: unknown;
  callCluster: unknown;
  schema: unknown;
  serializer: unknown;
  migrator: unknown;
  onBeforeWrite: unknown;
}

export declare class SavedObjectsRepository {
  // ATTENTION: this interface is incomplete

  constructor(options: SavedObjectsRepositoryOptions);
}
@@ -35,4 +35,5 @@ export interface ScopedSavedObjectsClientProvider<Request = any> {
    priority: number,
    wrapperFactory: SavedObjectsClientWrapperFactory<Request>
  ): void;
  getClient(request: Request): SavedObjectsClient;
}
@@ -17,6 +17,8 @@
 * under the License.
 */

import { errors, SavedObjectsRepository } from './lib';

export interface BaseOptions {
  namespace?: string;
}
@@ -47,7 +49,7 @@ export interface FindOptions extends BaseOptions {
}

export interface FindResponse {
  savedObjects: SavedObject[];
  saved_objects: SavedObject[];
  total: number;
  perPage: number;
  page: number;
@@ -68,36 +70,41 @@ export interface BulkGetResponse {
}

export interface SavedObjectAttributes {
  [key: string]: string | number | boolean | null;
  [key: string]: SavedObjectAttributes | string | number | boolean | null;
}

export interface SavedObject {
  id: string;
  type: string;
  version?: number;
  updatedAt?: string;
  updated_at?: string;
  error?: {
    message: string;
  };
  attributes: SavedObjectAttributes;
}

export interface SavedObjectsClient {
  errors: any;
  create: (
export declare class SavedObjectsClient {
  public static errors: typeof errors;
  public errors: typeof errors;
  public create: (
    type: string,
    attributes: SavedObjectAttributes,
    options?: CreateOptions
  ) => Promise<SavedObject>;
  bulkCreate: (objects: BulkCreateObject[], options?: CreateOptions) => Promise<BulkCreateResponse>;
  delete: (type: string, id: string, options?: BaseOptions) => Promise<{}>;
  find: (options: FindOptions) => Promise<FindResponse>;
  bulkGet: (objects: BulkGetObjects, options?: BaseOptions) => Promise<BulkGetResponse>;
  get: (type: string, id: string, options?: BaseOptions) => Promise<SavedObject>;
  update: (
  public bulkCreate: (
    objects: BulkCreateObject[],
    options?: CreateOptions
  ) => Promise<BulkCreateResponse>;
  public delete: (type: string, id: string, options?: BaseOptions) => Promise<{}>;
  public find: (options: FindOptions) => Promise<FindResponse>;
  public bulkGet: (objects: BulkGetObjects, options?: BaseOptions) => Promise<BulkGetResponse>;
  public get: (type: string, id: string, options?: BaseOptions) => Promise<SavedObject>;
  public update: (
    type: string,
    id: string,
    attributes: SavedObjectAttributes,
    options?: UpdateOptions
  ) => Promise<SavedObject>;
  constructor(repository: SavedObjectsRepository);
}
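For orientation, here is a minimal sketch of how a plugin could use the typed client declared above to persist a source configuration and read it back. The 'infra-source' type name, the attribute shape, and the import path are illustrative assumptions, not part of this commit.

import { SavedObject, SavedObjectAttributes, SavedObjectsClient } from 'src/server/kbn_server';

// Hypothetical attribute shape; plain string values satisfy SavedObjectAttributes.
interface SourceConfigurationAttributes extends SavedObjectAttributes {
  name: string;
  description: string;
}

// Create a saved object of an assumed 'infra-source' type and read it back.
async function roundTripSource(
  savedObjectsClient: SavedObjectsClient,
  attributes: SourceConfigurationAttributes
): Promise<SavedObject> {
  const created = await savedObjectsClient.create('infra-source', attributes);
  return savedObjectsClient.get('infra-source', created.id);
}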
@@ -9,10 +9,10 @@ import gql from 'graphql-tag';
export const rootSchema = gql`
  schema {
    query: Query
    #mutation: Mutation
    mutation: Mutation
  }

  type Query

  #type Mutation
  type Mutation
`;
x-pack/plugins/infra/common/utility_types.ts (new file, 39 lines)
@@ -0,0 +1,39 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

export type Pick2<T, K1 extends keyof T, K2 extends keyof T[K1]> = {
  [P1 in K1]: { [P2 in K2]: (T[K1])[P2] }
};
export type Pick3<T, K1 extends keyof T, K2 extends keyof T[K1], K3 extends keyof T[K1][K2]> = {
  [P1 in K1]: { [P2 in K2]: { [P3 in K3]: ((T[K1])[K2])[P3] } }
};

/**
 * Portions of below code are derived from https://github.com/tycho01/typical
 * under the MIT License
 *
 * Copyright (c) 2017 Thomas Crockett
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 */

export type DeepPartial<T> = T extends any[]
  ? DeepPartialArray<T[number]>
  : T extends object
    ? DeepPartialObject<T>
    : T;

interface DeepPartialArray<T> extends Array<DeepPartial<T>> {}

type DeepPartialObject<T> = { [P in keyof T]+?: DeepPartial<T[P]> };
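As a quick illustration of these helpers, the sketch below applies them to a hypothetical SourceConfiguration shape (the shape and the relative import path are assumptions): DeepPartial lets an update payload mention only the nested properties that change, and Pick2 extracts a doubly-nested slice of a type.

import { DeepPartial, Pick2 } from './utility_types';

// Hypothetical configuration shape used only for illustration.
interface SourceConfiguration {
  name: string;
  fields: { host: string; pod: string };
}

// Only the nested properties being changed need to be present.
const patch: DeepPartial<SourceConfiguration> = {
  fields: { pod: 'kubernetes.pod.uid' },
};

// Resolves to { fields: { pod: string } }.
type PodFieldSlice = Pick2<SourceConfiguration, 'fields', 'pod'>;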
@@ -8,7 +8,13 @@ import { i18n } from '@kbn/i18n';
import JoiNamespace from 'joi';
import { resolve } from 'path';

import { getConfigSchema, initServerWithKibana, KbnServer } from './server/kibana.index';
import {
  getConfigSchema,
  // getDeprecations,
  initServerWithKibana,
  KbnServer,
} from './server/kibana.index';
import { savedObjectMappings } from './server/saved_objects';

const APP_ID = 'infra';
@@ -60,10 +66,14 @@ export function infra(kibana: any) {
        url: `/app/${APP_ID}#/logs`,
      },
    ],
    mappings: savedObjectMappings,
  },
  config(Joi: typeof JoiNamespace) {
    return getConfigSchema(Joi);
  },
  // deprecations(helpers: any) {
  //   return getDeprecations(helpers);
  // },
  init(server: KbnServer) {
    initServerWithKibana(server);
  },
|
@ -1,7 +1,7 @@
|
|||
{
|
||||
"__schema": {
|
||||
"queryType": { "name": "Query" },
|
||||
"mutationType": null,
|
||||
"mutationType": { "name": "Mutation" },
|
||||
"subscriptionType": null,
|
||||
"types": [
|
||||
{
|
||||
|
@ -11,7 +11,7 @@
|
|||
"fields": [
|
||||
{
|
||||
"name": "source",
|
||||
"description": "Get an infrastructure data source by id",
|
||||
"description": "Get an infrastructure data source by id.\n\nThe resolution order for the source configuration attributes is as follows\nwith the first defined value winning:\n\n1. The attributes of the saved object with the given 'id'.\n2. The attributes defined in the static Kibana configuration key\n 'xpack.infra.sources.default'.\n3. The hard-coded default values.\n\nAs a consequence, querying a source without a corresponding saved object\ndoesn't error out, but returns the configured or hardcoded defaults.",
|
||||
"args": [
|
||||
{
|
||||
"name": "id",
|
||||
|
@ -85,6 +85,22 @@
|
|||
"isDeprecated": false,
|
||||
"deprecationReason": null
|
||||
},
|
||||
{
|
||||
"name": "version",
|
||||
"description": "The version number the source configuration was last persisted with",
|
||||
"args": [],
|
||||
"type": { "kind": "SCALAR", "name": "Float", "ofType": null },
|
||||
"isDeprecated": false,
|
||||
"deprecationReason": null
|
||||
},
|
||||
{
|
||||
"name": "updatedAt",
|
||||
"description": "The timestamp the source configuration was last persisted at",
|
||||
"args": [],
|
||||
"type": { "kind": "SCALAR", "name": "Float", "ofType": null },
|
||||
"isDeprecated": false,
|
||||
"deprecationReason": null
|
||||
},
|
||||
{
|
||||
"name": "configuration",
|
||||
"description": "The raw configuration of the source",
|
||||
|
@ -391,11 +407,45 @@
|
|||
"enumValues": null,
|
||||
"possibleTypes": null
|
||||
},
|
||||
{
|
||||
"kind": "SCALAR",
|
||||
"name": "Float",
|
||||
"description": "The `Float` scalar type represents signed double-precision fractional values as specified by [IEEE 754](http://en.wikipedia.org/wiki/IEEE_floating_point). ",
|
||||
"fields": null,
|
||||
"inputFields": null,
|
||||
"interfaces": null,
|
||||
"enumValues": null,
|
||||
"possibleTypes": null
|
||||
},
|
||||
{
|
||||
"kind": "OBJECT",
|
||||
"name": "InfraSourceConfiguration",
|
||||
"description": "A set of configuration options for an infrastructure data source",
|
||||
"fields": [
|
||||
{
|
||||
"name": "name",
|
||||
"description": "The name of the data source",
|
||||
"args": [],
|
||||
"type": {
|
||||
"kind": "NON_NULL",
|
||||
"name": null,
|
||||
"ofType": { "kind": "SCALAR", "name": "String", "ofType": null }
|
||||
},
|
||||
"isDeprecated": false,
|
||||
"deprecationReason": null
|
||||
},
|
||||
{
|
||||
"name": "description",
|
||||
"description": "A description of the data source",
|
||||
"args": [],
|
||||
"type": {
|
||||
"kind": "NON_NULL",
|
||||
"name": null,
|
||||
"ofType": { "kind": "SCALAR", "name": "String", "ofType": null }
|
||||
},
|
||||
"isDeprecated": false,
|
||||
"deprecationReason": null
|
||||
},
|
||||
{
|
||||
"name": "metricAlias",
|
||||
"description": "The alias to read metric data from",
|
||||
|
@ -477,26 +527,6 @@
|
|||
"isDeprecated": false,
|
||||
"deprecationReason": null
|
||||
},
|
||||
{
|
||||
"name": "message",
|
||||
"description": "The fields that may contain the log event message. The first field found win.",
|
||||
"args": [],
|
||||
"type": {
|
||||
"kind": "NON_NULL",
|
||||
"name": null,
|
||||
"ofType": {
|
||||
"kind": "LIST",
|
||||
"name": null,
|
||||
"ofType": {
|
||||
"kind": "NON_NULL",
|
||||
"name": null,
|
||||
"ofType": { "kind": "SCALAR", "name": "String", "ofType": null }
|
||||
}
|
||||
}
|
||||
},
|
||||
"isDeprecated": false,
|
||||
"deprecationReason": null
|
||||
},
|
||||
{
|
||||
"name": "pod",
|
||||
"description": "The field to identify a pod by",
|
||||
|
@ -888,16 +918,6 @@
|
|||
"enumValues": null,
|
||||
"possibleTypes": null
|
||||
},
|
||||
{
|
||||
"kind": "SCALAR",
|
||||
"name": "Float",
|
||||
"description": "The `Float` scalar type represents signed double-precision fractional values as specified by [IEEE 754](http://en.wikipedia.org/wiki/IEEE_floating_point). ",
|
||||
"fields": null,
|
||||
"inputFields": null,
|
||||
"interfaces": null,
|
||||
"enumValues": null,
|
||||
"possibleTypes": null
|
||||
},
|
||||
{
|
||||
"kind": "SCALAR",
|
||||
"name": "Int",
|
||||
|
@ -1595,7 +1615,7 @@
|
|||
"deprecationReason": null
|
||||
},
|
||||
{
|
||||
"name": "displayName",
|
||||
"name": "label",
|
||||
"description": "",
|
||||
"args": [],
|
||||
"type": {
|
||||
|
@ -1933,6 +1953,418 @@
|
|||
"enumValues": null,
|
||||
"possibleTypes": null
|
||||
},
|
||||
{
|
||||
"kind": "OBJECT",
|
||||
"name": "Mutation",
|
||||
"description": "",
|
||||
"fields": [
|
||||
{
|
||||
"name": "createSource",
|
||||
"description": "Create a new source of infrastructure data",
|
||||
"args": [
|
||||
{
|
||||
"name": "id",
|
||||
"description": "The id of the source",
|
||||
"type": {
|
||||
"kind": "NON_NULL",
|
||||
"name": null,
|
||||
"ofType": { "kind": "SCALAR", "name": "ID", "ofType": null }
|
||||
},
|
||||
"defaultValue": null
|
||||
},
|
||||
{
|
||||
"name": "source",
|
||||
"description": "",
|
||||
"type": {
|
||||
"kind": "NON_NULL",
|
||||
"name": null,
|
||||
"ofType": { "kind": "INPUT_OBJECT", "name": "CreateSourceInput", "ofType": null }
|
||||
},
|
||||
"defaultValue": null
|
||||
}
|
||||
],
|
||||
"type": {
|
||||
"kind": "NON_NULL",
|
||||
"name": null,
|
||||
"ofType": { "kind": "OBJECT", "name": "CreateSourceResult", "ofType": null }
|
||||
},
|
||||
"isDeprecated": false,
|
||||
"deprecationReason": null
|
||||
},
|
||||
{
|
||||
"name": "updateSource",
|
||||
"description": "Modify an existing source using the given sequence of update operations",
|
||||
"args": [
|
||||
{
|
||||
"name": "id",
|
||||
"description": "The id of the source",
|
||||
"type": {
|
||||
"kind": "NON_NULL",
|
||||
"name": null,
|
||||
"ofType": { "kind": "SCALAR", "name": "ID", "ofType": null }
|
||||
},
|
||||
"defaultValue": null
|
||||
},
|
||||
{
|
||||
"name": "changes",
|
||||
"description": "A sequence of update operations",
|
||||
"type": {
|
||||
"kind": "NON_NULL",
|
||||
"name": null,
|
||||
"ofType": {
|
||||
"kind": "LIST",
|
||||
"name": null,
|
||||
"ofType": {
|
||||
"kind": "NON_NULL",
|
||||
"name": null,
|
||||
"ofType": {
|
||||
"kind": "INPUT_OBJECT",
|
||||
"name": "UpdateSourceInput",
|
||||
"ofType": null
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"defaultValue": null
|
||||
}
|
||||
],
|
||||
"type": {
|
||||
"kind": "NON_NULL",
|
||||
"name": null,
|
||||
"ofType": { "kind": "OBJECT", "name": "UpdateSourceResult", "ofType": null }
|
||||
},
|
||||
"isDeprecated": false,
|
||||
"deprecationReason": null
|
||||
},
|
||||
{
|
||||
"name": "deleteSource",
|
||||
"description": "Delete a source of infrastructure data",
|
||||
"args": [
|
||||
{
|
||||
"name": "id",
|
||||
"description": "The id of the source",
|
||||
"type": {
|
||||
"kind": "NON_NULL",
|
||||
"name": null,
|
||||
"ofType": { "kind": "SCALAR", "name": "ID", "ofType": null }
|
||||
},
|
||||
"defaultValue": null
|
||||
}
|
||||
],
|
||||
"type": {
|
||||
"kind": "NON_NULL",
|
||||
"name": null,
|
||||
"ofType": { "kind": "OBJECT", "name": "DeleteSourceResult", "ofType": null }
|
||||
},
|
||||
"isDeprecated": false,
|
||||
"deprecationReason": null
|
||||
}
|
||||
],
|
||||
"inputFields": null,
|
||||
"interfaces": [],
|
||||
"enumValues": null,
|
||||
"possibleTypes": null
|
||||
},
|
||||
{
|
||||
"kind": "INPUT_OBJECT",
|
||||
"name": "CreateSourceInput",
|
||||
"description": "The source to be created",
|
||||
"fields": null,
|
||||
"inputFields": [
|
||||
{
|
||||
"name": "name",
|
||||
"description": "The name of the data source",
|
||||
"type": {
|
||||
"kind": "NON_NULL",
|
||||
"name": null,
|
||||
"ofType": { "kind": "SCALAR", "name": "String", "ofType": null }
|
||||
},
|
||||
"defaultValue": null
|
||||
},
|
||||
{
|
||||
"name": "description",
|
||||
"description": "A description of the data source",
|
||||
"type": { "kind": "SCALAR", "name": "String", "ofType": null },
|
||||
"defaultValue": null
|
||||
},
|
||||
{
|
||||
"name": "metricAlias",
|
||||
"description": "The alias to read metric data from",
|
||||
"type": { "kind": "SCALAR", "name": "String", "ofType": null },
|
||||
"defaultValue": null
|
||||
},
|
||||
{
|
||||
"name": "logAlias",
|
||||
"description": "The alias to read log data from",
|
||||
"type": { "kind": "SCALAR", "name": "String", "ofType": null },
|
||||
"defaultValue": null
|
||||
},
|
||||
{
|
||||
"name": "fields",
|
||||
"description": "The field mapping to use for this source",
|
||||
"type": { "kind": "INPUT_OBJECT", "name": "CreateSourceFieldsInput", "ofType": null },
|
||||
"defaultValue": null
|
||||
}
|
||||
],
|
||||
"interfaces": null,
|
||||
"enumValues": null,
|
||||
"possibleTypes": null
|
||||
},
|
||||
{
|
||||
"kind": "INPUT_OBJECT",
|
||||
"name": "CreateSourceFieldsInput",
|
||||
"description": "The mapping of semantic fields of the source to be created",
|
||||
"fields": null,
|
||||
"inputFields": [
|
||||
{
|
||||
"name": "container",
|
||||
"description": "The field to identify a container by",
|
||||
"type": { "kind": "SCALAR", "name": "String", "ofType": null },
|
||||
"defaultValue": null
|
||||
},
|
||||
{
|
||||
"name": "host",
|
||||
"description": "The fields to identify a host by",
|
||||
"type": { "kind": "SCALAR", "name": "String", "ofType": null },
|
||||
"defaultValue": null
|
||||
},
|
||||
{
|
||||
"name": "pod",
|
||||
"description": "The field to identify a pod by",
|
||||
"type": { "kind": "SCALAR", "name": "String", "ofType": null },
|
||||
"defaultValue": null
|
||||
},
|
||||
{
|
||||
"name": "tiebreaker",
|
||||
"description": "The field to use as a tiebreaker for log events that have identical timestamps",
|
||||
"type": { "kind": "SCALAR", "name": "String", "ofType": null },
|
||||
"defaultValue": null
|
||||
},
|
||||
{
|
||||
"name": "timestamp",
|
||||
"description": "The field to use as a timestamp for metrics and logs",
|
||||
"type": { "kind": "SCALAR", "name": "String", "ofType": null },
|
||||
"defaultValue": null
|
||||
}
|
||||
],
|
||||
"interfaces": null,
|
||||
"enumValues": null,
|
||||
"possibleTypes": null
|
||||
},
|
||||
{
|
||||
"kind": "OBJECT",
|
||||
"name": "CreateSourceResult",
|
||||
"description": "The result of a successful source creation",
|
||||
"fields": [
|
||||
{
|
||||
"name": "source",
|
||||
"description": "The source that was created",
|
||||
"args": [],
|
||||
"type": {
|
||||
"kind": "NON_NULL",
|
||||
"name": null,
|
||||
"ofType": { "kind": "OBJECT", "name": "InfraSource", "ofType": null }
|
||||
},
|
||||
"isDeprecated": false,
|
||||
"deprecationReason": null
|
||||
}
|
||||
],
|
||||
"inputFields": null,
|
||||
"interfaces": [],
|
||||
"enumValues": null,
|
||||
"possibleTypes": null
|
||||
},
|
||||
{
|
||||
"kind": "INPUT_OBJECT",
|
||||
"name": "UpdateSourceInput",
|
||||
"description": "The update operations to be performed",
|
||||
"fields": null,
|
||||
"inputFields": [
|
||||
{
|
||||
"name": "setName",
|
||||
"description": "The name update operation to be performed",
|
||||
"type": { "kind": "INPUT_OBJECT", "name": "UpdateSourceNameInput", "ofType": null },
|
||||
"defaultValue": null
|
||||
},
|
||||
{
|
||||
"name": "setDescription",
|
||||
"description": "The description update operation to be performed",
|
||||
"type": {
|
||||
"kind": "INPUT_OBJECT",
|
||||
"name": "UpdateSourceDescriptionInput",
|
||||
"ofType": null
|
||||
},
|
||||
"defaultValue": null
|
||||
},
|
||||
{
|
||||
"name": "setAliases",
|
||||
"description": "The alias update operation to be performed",
|
||||
"type": { "kind": "INPUT_OBJECT", "name": "UpdateSourceAliasInput", "ofType": null },
|
||||
"defaultValue": null
|
||||
},
|
||||
{
|
||||
"name": "setFields",
|
||||
"description": "The field update operation to be performed",
|
||||
"type": { "kind": "INPUT_OBJECT", "name": "UpdateSourceFieldsInput", "ofType": null },
|
||||
"defaultValue": null
|
||||
}
|
||||
],
|
||||
"interfaces": null,
|
||||
"enumValues": null,
|
||||
"possibleTypes": null
|
||||
},
|
||||
{
|
||||
"kind": "INPUT_OBJECT",
|
||||
"name": "UpdateSourceNameInput",
|
||||
"description": "A name update operation",
|
||||
"fields": null,
|
||||
"inputFields": [
|
||||
{
|
||||
"name": "name",
|
||||
"description": "The new name to be set",
|
||||
"type": {
|
||||
"kind": "NON_NULL",
|
||||
"name": null,
|
||||
"ofType": { "kind": "SCALAR", "name": "String", "ofType": null }
|
||||
},
|
||||
"defaultValue": null
|
||||
}
|
||||
],
|
||||
"interfaces": null,
|
||||
"enumValues": null,
|
||||
"possibleTypes": null
|
||||
},
|
||||
{
|
||||
"kind": "INPUT_OBJECT",
|
||||
"name": "UpdateSourceDescriptionInput",
|
||||
"description": "A description update operation",
|
||||
"fields": null,
|
||||
"inputFields": [
|
||||
{
|
||||
"name": "description",
|
||||
"description": "The new description to be set",
|
||||
"type": {
|
||||
"kind": "NON_NULL",
|
||||
"name": null,
|
||||
"ofType": { "kind": "SCALAR", "name": "String", "ofType": null }
|
||||
},
|
||||
"defaultValue": null
|
||||
}
|
||||
],
|
||||
"interfaces": null,
|
||||
"enumValues": null,
|
||||
"possibleTypes": null
|
||||
},
|
||||
{
|
||||
"kind": "INPUT_OBJECT",
|
||||
"name": "UpdateSourceAliasInput",
|
||||
"description": "An alias update operation",
|
||||
"fields": null,
|
||||
"inputFields": [
|
||||
{
|
||||
"name": "logAlias",
|
||||
"description": "The new log index pattern or alias to bet set",
|
||||
"type": { "kind": "SCALAR", "name": "String", "ofType": null },
|
||||
"defaultValue": null
|
||||
},
|
||||
{
|
||||
"name": "metricAlias",
|
||||
"description": "The new metric index pattern or alias to bet set",
|
||||
"type": { "kind": "SCALAR", "name": "String", "ofType": null },
|
||||
"defaultValue": null
|
||||
}
|
||||
],
|
||||
"interfaces": null,
|
||||
"enumValues": null,
|
||||
"possibleTypes": null
|
||||
},
|
||||
{
|
||||
"kind": "INPUT_OBJECT",
|
||||
"name": "UpdateSourceFieldsInput",
|
||||
"description": "A field update operations",
|
||||
"fields": null,
|
||||
"inputFields": [
|
||||
{
|
||||
"name": "container",
|
||||
"description": "The new container field to be set",
|
||||
"type": { "kind": "SCALAR", "name": "String", "ofType": null },
|
||||
"defaultValue": null
|
||||
},
|
||||
{
|
||||
"name": "host",
|
||||
"description": "The new host field to be set",
|
||||
"type": { "kind": "SCALAR", "name": "String", "ofType": null },
|
||||
"defaultValue": null
|
||||
},
|
||||
{
|
||||
"name": "pod",
|
||||
"description": "The new pod field to be set",
|
||||
"type": { "kind": "SCALAR", "name": "String", "ofType": null },
|
||||
"defaultValue": null
|
||||
},
|
||||
{
|
||||
"name": "tiebreaker",
|
||||
"description": "The new tiebreaker field to be set",
|
||||
"type": { "kind": "SCALAR", "name": "String", "ofType": null },
|
||||
"defaultValue": null
|
||||
},
|
||||
{
|
||||
"name": "timestamp",
|
||||
"description": "The new timestamp field to be set",
|
||||
"type": { "kind": "SCALAR", "name": "String", "ofType": null },
|
||||
"defaultValue": null
|
||||
}
|
||||
],
|
||||
"interfaces": null,
|
||||
"enumValues": null,
|
||||
"possibleTypes": null
|
||||
},
|
||||
{
|
||||
"kind": "OBJECT",
|
||||
"name": "UpdateSourceResult",
|
||||
"description": "The result of a sequence of source update operations",
|
||||
"fields": [
|
||||
{
|
||||
"name": "source",
|
||||
"description": "The source after the operations were performed",
|
||||
"args": [],
|
||||
"type": {
|
||||
"kind": "NON_NULL",
|
||||
"name": null,
|
||||
"ofType": { "kind": "OBJECT", "name": "InfraSource", "ofType": null }
|
||||
},
|
||||
"isDeprecated": false,
|
||||
"deprecationReason": null
|
||||
}
|
||||
],
|
||||
"inputFields": null,
|
||||
"interfaces": [],
|
||||
"enumValues": null,
|
||||
"possibleTypes": null
|
||||
},
|
||||
{
|
||||
"kind": "OBJECT",
|
||||
"name": "DeleteSourceResult",
|
||||
"description": "The result of a source deletion operations",
|
||||
"fields": [
|
||||
{
|
||||
"name": "id",
|
||||
"description": "The id of the source that was deleted",
|
||||
"args": [],
|
||||
"type": {
|
||||
"kind": "NON_NULL",
|
||||
"name": null,
|
||||
"ofType": { "kind": "SCALAR", "name": "ID", "ofType": null }
|
||||
},
|
||||
"isDeprecated": false,
|
||||
"deprecationReason": null
|
||||
}
|
||||
],
|
||||
"inputFields": null,
|
||||
"interfaces": [],
|
||||
"enumValues": null,
|
||||
"possibleTypes": null
|
||||
},
|
||||
{
|
||||
"kind": "OBJECT",
|
||||
"name": "__Schema",
|
||||
|
|
|
@ -9,7 +9,7 @@
|
|||
// ====================================================
|
||||
|
||||
export interface Query {
|
||||
/** Get an infrastructure data source by id */
|
||||
/** Get an infrastructure data source by id. The resolution order for the source configuration attributes is as follows, with the first defined value winning: 1. The attributes of the saved object with the given 'id'. 2. The attributes defined in the static Kibana configuration key 'xpack.infra.sources.default'. 3. The hard-coded default values. As a consequence, querying a source without a corresponding saved object doesn't error out, but returns the configured or hardcoded defaults. */
|
||||
source: InfraSource;
|
||||
/** Get a list of all infrastructure data sources */
|
||||
allSources: InfraSource[];
|
||||
|
@ -18,6 +18,10 @@ export interface Query {
|
|||
export interface InfraSource {
|
||||
/** The id of the source */
|
||||
id: string;
|
||||
/** The version number the source configuration was last persisted with */
|
||||
version?: number | null;
|
||||
/** The timestamp the source configuration was last persisted at */
|
||||
updatedAt?: number | null;
|
||||
/** The raw configuration of the source */
|
||||
configuration: InfraSourceConfiguration;
|
||||
/** The status of the source */
|
||||
|
@ -37,6 +41,10 @@ export interface InfraSource {
|
|||
}
|
||||
/** A set of configuration options for an infrastructure data source */
|
||||
export interface InfraSourceConfiguration {
|
||||
/** The name of the data source */
|
||||
name: string;
|
||||
/** A description of the data source */
|
||||
description: string;
|
||||
/** The alias to read metric data from */
|
||||
metricAlias: string;
|
||||
/** The alias to read log data from */
|
||||
|
@ -50,8 +58,6 @@ export interface InfraSourceFields {
|
|||
container: string;
|
||||
/** The fields to identify a host by */
|
||||
host: string;
|
||||
/** The fields that may contain the log event message. The first field found win. */
|
||||
message: string[];
|
||||
/** The field to identify a pod by */
|
||||
pod: string;
|
||||
/** The field to use as a tiebreaker for log events that have identical timestamps */
|
||||
|
@ -211,6 +217,30 @@ export interface InfraDataPoint {
|
|||
value?: number | null;
|
||||
}
|
||||
|
||||
export interface Mutation {
|
||||
/** Create a new source of infrastructure data */
|
||||
createSource: CreateSourceResult;
|
||||
/** Modify an existing source using the given sequence of update operations */
|
||||
updateSource: UpdateSourceResult;
|
||||
/** Delete a source of infrastructure data */
|
||||
deleteSource: DeleteSourceResult;
|
||||
}
|
||||
/** The result of a successful source creation */
|
||||
export interface CreateSourceResult {
|
||||
/** The source that was created */
|
||||
source: InfraSource;
|
||||
}
|
||||
/** The result of a sequence of source update operations */
|
||||
export interface UpdateSourceResult {
|
||||
/** The source after the operations were performed */
|
||||
source: InfraSource;
|
||||
}
|
||||
/** The result of a source deletion operations */
|
||||
export interface DeleteSourceResult {
|
||||
/** The id of the source that was deleted */
|
||||
id: string;
|
||||
}
|
||||
|
||||
// ====================================================
|
||||
// InputTypes
|
||||
// ====================================================
|
||||
|
@ -252,6 +282,73 @@ export interface InfraMetricInput {
|
|||
/** The type of metric */
|
||||
type: InfraMetricType;
|
||||
}
|
||||
/** The source to be created */
|
||||
export interface CreateSourceInput {
|
||||
/** The name of the data source */
|
||||
name: string;
|
||||
/** A description of the data source */
|
||||
description?: string | null;
|
||||
/** The alias to read metric data from */
|
||||
metricAlias?: string | null;
|
||||
/** The alias to read log data from */
|
||||
logAlias?: string | null;
|
||||
/** The field mapping to use for this source */
|
||||
fields?: CreateSourceFieldsInput | null;
|
||||
}
|
||||
/** The mapping of semantic fields of the source to be created */
|
||||
export interface CreateSourceFieldsInput {
|
||||
/** The field to identify a container by */
|
||||
container?: string | null;
|
||||
/** The fields to identify a host by */
|
||||
host?: string | null;
|
||||
/** The field to identify a pod by */
|
||||
pod?: string | null;
|
||||
/** The field to use as a tiebreaker for log events that have identical timestamps */
|
||||
tiebreaker?: string | null;
|
||||
/** The field to use as a timestamp for metrics and logs */
|
||||
timestamp?: string | null;
|
||||
}
|
||||
/** The update operations to be performed */
|
||||
export interface UpdateSourceInput {
|
||||
/** The name update operation to be performed */
|
||||
setName?: UpdateSourceNameInput | null;
|
||||
/** The description update operation to be performed */
|
||||
setDescription?: UpdateSourceDescriptionInput | null;
|
||||
/** The alias update operation to be performed */
|
||||
setAliases?: UpdateSourceAliasInput | null;
|
||||
/** The field update operation to be performed */
|
||||
setFields?: UpdateSourceFieldsInput | null;
|
||||
}
|
||||
/** A name update operation */
|
||||
export interface UpdateSourceNameInput {
|
||||
/** The new name to be set */
|
||||
name: string;
|
||||
}
|
||||
/** A description update operation */
|
||||
export interface UpdateSourceDescriptionInput {
|
||||
/** The new description to be set */
|
||||
description: string;
|
||||
}
|
||||
/** An alias update operation */
|
||||
export interface UpdateSourceAliasInput {
|
||||
/** The new log index pattern or alias to bet set */
|
||||
logAlias?: string | null;
|
||||
/** The new metric index pattern or alias to bet set */
|
||||
metricAlias?: string | null;
|
||||
}
|
||||
/** A field update operations */
|
||||
export interface UpdateSourceFieldsInput {
|
||||
/** The new container field to be set */
|
||||
container?: string | null;
|
||||
/** The new host field to be set */
|
||||
host?: string | null;
|
||||
/** The new pod field to be set */
|
||||
pod?: string | null;
|
||||
/** The new tiebreaker field to be set */
|
||||
tiebreaker?: string | null;
|
||||
/** The new timestamp field to be set */
|
||||
timestamp?: string | null;
|
||||
}
|
||||
|
||||
// ====================================================
|
||||
// Arguments
|
||||
|
@ -320,6 +417,22 @@ export interface NodesInfraResponseArgs {
|
|||
|
||||
metric: InfraMetricInput;
|
||||
}
|
||||
export interface CreateSourceMutationArgs {
|
||||
/** The id of the source */
|
||||
id: string;
|
||||
|
||||
source: CreateSourceInput;
|
||||
}
|
||||
export interface UpdateSourceMutationArgs {
|
||||
/** The id of the source */
|
||||
id: string;
|
||||
/** A sequence of update operations */
|
||||
changes: UpdateSourceInput[];
|
||||
}
|
||||
export interface DeleteSourceMutationArgs {
|
||||
/** The id of the source */
|
||||
id: string;
|
||||
}
|
||||
|
||||
// ====================================================
|
||||
// Enums
|
||||
|
|
|
@ -14,7 +14,7 @@ export const nodesSchema: any = gql`
|
|||
|
||||
type InfraNodePath {
|
||||
value: String!
|
||||
label: String
|
||||
label: String!
|
||||
}
|
||||
|
||||
type InfraNode {
|
||||
|
|
|
@ -4,7 +4,13 @@
|
|||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { InfraSourceResolvers, QueryResolvers } from '../../graphql/types';
|
||||
import {
|
||||
InfraSourceConfiguration,
|
||||
InfraSourceResolvers,
|
||||
MutationResolvers,
|
||||
QueryResolvers,
|
||||
UpdateSourceInput,
|
||||
} from '../../graphql/types';
|
||||
import { InfraSourceStatus } from '../../lib/source_status';
|
||||
import { InfraSources } from '../../lib/sources';
|
||||
import {
|
||||
|
@ -16,12 +22,12 @@ import {
|
|||
|
||||
export type QuerySourceResolver = InfraResolverWithFields<
|
||||
QueryResolvers.SourceResolver,
|
||||
'id' | 'configuration'
|
||||
'id' | 'version' | 'updatedAt' | 'configuration'
|
||||
>;
|
||||
|
||||
export type QueryAllSourcesResolver = InfraResolverWithFields<
|
||||
QueryResolvers.AllSourcesResolver,
|
||||
'id' | 'configuration'
|
||||
'id' | 'version' | 'updatedAt' | 'configuration'
|
||||
>;
|
||||
|
||||
export type InfraSourceStatusResolver = ChildResolverOf<
|
||||
|
@ -29,6 +35,20 @@ export type InfraSourceStatusResolver = ChildResolverOf<
|
|||
QuerySourceResolver
|
||||
>;
|
||||
|
||||
export type MutationCreateSourceResolver = InfraResolverOf<
|
||||
MutationResolvers.CreateSourceResolver<{
|
||||
source: ResultOf<QuerySourceResolver>;
|
||||
}>
|
||||
>;
|
||||
|
||||
export type MutationDeleteSourceResolver = InfraResolverOf<MutationResolvers.DeleteSourceResolver>;
|
||||
|
||||
export type MutationUpdateSourceResolver = InfraResolverOf<
|
||||
MutationResolvers.UpdateSourceResolver<{
|
||||
source: ResultOf<QuerySourceResolver>;
|
||||
}>
|
||||
>;
|
||||
|
||||
interface SourcesResolversDeps {
|
||||
sources: InfraSources;
|
||||
sourceStatus: InfraSourceStatus;
|
||||
|
@ -44,23 +64,22 @@ export const createSourcesResolvers = (
|
|||
InfraSource: {
|
||||
status: InfraSourceStatusResolver;
|
||||
};
|
||||
Mutation: {
|
||||
createSource: MutationCreateSourceResolver;
|
||||
deleteSource: MutationDeleteSourceResolver;
|
||||
updateSource: MutationUpdateSourceResolver;
|
||||
};
|
||||
} => ({
|
||||
Query: {
|
||||
async source(root, args) {
|
||||
const requestedSourceConfiguration = await libs.sources.getConfiguration(args.id);
|
||||
async source(root, args, { req }) {
|
||||
const requestedSourceConfiguration = await libs.sources.getSourceConfiguration(req, args.id);
|
||||
|
||||
return {
|
||||
id: args.id,
|
||||
configuration: requestedSourceConfiguration,
|
||||
};
|
||||
return requestedSourceConfiguration;
|
||||
},
|
||||
async allSources() {
|
||||
const sourceConfigurations = await libs.sources.getAllConfigurations();
|
||||
async allSources(root, args, { req }) {
|
||||
const sourceConfigurations = await libs.sources.getAllSourceConfigurations(req);
|
||||
|
||||
return Object.entries(sourceConfigurations).map(([sourceName, sourceConfiguration]) => ({
|
||||
id: sourceName,
|
||||
configuration: sourceConfiguration,
|
||||
}));
|
||||
return sourceConfigurations;
|
||||
},
|
||||
},
|
||||
InfraSource: {
|
||||
|
@ -68,4 +87,88 @@ export const createSourcesResolvers = (
|
|||
return source;
|
||||
},
|
||||
},
|
||||
Mutation: {
|
||||
async createSource(root, args, { req }) {
|
||||
const sourceConfiguration = await libs.sources.createSourceConfiguration(
|
||||
req,
|
||||
args.id,
|
||||
compactObject({
|
||||
...args.source,
|
||||
fields: args.source.fields ? compactObject(args.source.fields) : undefined,
|
||||
})
|
||||
);
|
||||
|
||||
return {
|
||||
source: sourceConfiguration,
|
||||
};
|
||||
},
|
||||
async deleteSource(root, args, { req }) {
|
||||
await libs.sources.deleteSourceConfiguration(req, args.id);
|
||||
|
||||
return {
|
||||
id: args.id,
|
||||
};
|
||||
},
|
||||
async updateSource(root, args, { req }) {
|
||||
const updaters = args.changes.map(convertChangeToUpdater);
|
||||
|
||||
const updatedSourceConfiguration = await libs.sources.updateSourceConfiguration(
|
||||
req,
|
||||
args.id,
|
||||
updaters
|
||||
);
|
||||
|
||||
return {
|
||||
source: updatedSourceConfiguration,
|
||||
};
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
type CompactObject<T> = { [K in keyof T]: NonNullable<T[K]> };
|
||||
|
||||
const compactObject = <T>(obj: T): CompactObject<T> =>
|
||||
Object.entries(obj).reduce<CompactObject<T>>(
|
||||
(accumulatedObj, [key, value]) =>
|
||||
typeof value === 'undefined' || value === null
|
||||
? accumulatedObj
|
||||
: {
|
||||
...(accumulatedObj as any),
|
||||
[key]: value,
|
||||
},
|
||||
{} as CompactObject<T>
|
||||
);
|
||||
|
||||
const convertChangeToUpdater = (change: UpdateSourceInput) => (
|
||||
configuration: InfraSourceConfiguration
|
||||
): InfraSourceConfiguration => {
|
||||
const updaters: Array<(c: InfraSourceConfiguration) => InfraSourceConfiguration> = [
|
||||
c => (change.setName ? { ...c, name: change.setName.name } : c),
|
||||
c => (change.setDescription ? { ...c, description: change.setDescription.description } : c),
|
||||
c =>
|
||||
change.setAliases
|
||||
? {
|
||||
...c,
|
||||
metricAlias: change.setAliases.metricAlias || c.metricAlias,
|
||||
logAlias: change.setAliases.logAlias || c.logAlias,
|
||||
}
|
||||
: c,
|
||||
c =>
|
||||
change.setFields
|
||||
? {
|
||||
...c,
|
||||
fields: {
|
||||
container: change.setFields.container || c.fields.container,
|
||||
host: change.setFields.host || c.fields.host,
|
||||
pod: change.setFields.pod || c.fields.pod,
|
||||
tiebreaker: change.setFields.tiebreaker || c.fields.tiebreaker,
|
||||
timestamp: change.setFields.timestamp || c.fields.timestamp,
|
||||
},
|
||||
}
|
||||
: c,
|
||||
];
|
||||
return updaters.reduce(
|
||||
(updatedConfiguration, updater) => updater(updatedConfiguration),
|
||||
configuration
|
||||
);
|
||||
};
|
||||
|
|
|
@ -11,6 +11,10 @@ export const sourcesSchema = gql`
|
|||
type InfraSource {
|
||||
"The id of the source"
|
||||
id: ID!
|
||||
"The version number the source configuration was last persisted with"
|
||||
version: Float
|
||||
"The timestamp the source configuration was last persisted at"
|
||||
updatedAt: Float
|
||||
"The raw configuration of the source"
|
||||
configuration: InfraSourceConfiguration!
|
||||
"The status of the source"
|
||||
|
@ -22,6 +26,10 @@ export const sourcesSchema = gql`
|
|||
|
||||
"A set of configuration options for an infrastructure data source"
|
||||
type InfraSourceConfiguration {
|
||||
"The name of the data source"
|
||||
name: String!
|
||||
"A description of the data source"
|
||||
description: String!
|
||||
"The alias to read metric data from"
|
||||
metricAlias: String!
|
||||
"The alias to read log data from"
|
||||
|
@ -36,8 +44,6 @@ export const sourcesSchema = gql`
|
|||
container: String!
|
||||
"The fields to identify a host by"
|
||||
host: String!
|
||||
"The fields that may contain the log event message. The first field found win."
|
||||
message: [String!]!
|
||||
"The field to identify a pod by"
|
||||
pod: String!
|
||||
"The field to use as a tiebreaker for log events that have identical timestamps"
|
||||
|
@ -47,9 +53,128 @@ export const sourcesSchema = gql`
|
|||
}
|
||||
|
||||
extend type Query {
|
||||
"Get an infrastructure data source by id"
|
||||
"""
|
||||
Get an infrastructure data source by id.
|
||||
|
||||
The resolution order for the source configuration attributes is as follows
|
||||
with the first defined value winning:
|
||||
|
||||
1. The attributes of the saved object with the given 'id'.
|
||||
2. The attributes defined in the static Kibana configuration key
|
||||
'xpack.infra.sources.default'.
|
||||
3. The hard-coded default values.
|
||||
|
||||
As a consequence, querying a source that doesn't exist doesn't error out,
|
||||
but returns the configured or hardcoded defaults.
|
||||
"""
|
||||
source("The id of the source" id: ID!): InfraSource!
|
||||
"Get a list of all infrastructure data sources"
|
||||
allSources: [InfraSource!]!
|
||||
}
|
||||
|
||||
"The source to be created"
|
||||
input CreateSourceInput {
|
||||
"The name of the data source"
|
||||
name: String!
|
||||
"A description of the data source"
|
||||
description: String
|
||||
"The alias to read metric data from"
|
||||
metricAlias: String
|
||||
"The alias to read log data from"
|
||||
logAlias: String
|
||||
"The field mapping to use for this source"
|
||||
fields: CreateSourceFieldsInput
|
||||
}
|
||||
|
||||
"The mapping of semantic fields of the source to be created"
|
||||
input CreateSourceFieldsInput {
|
||||
"The field to identify a container by"
|
||||
container: String
|
||||
"The fields to identify a host by"
|
||||
host: String
|
||||
"The field to identify a pod by"
|
||||
pod: String
|
||||
"The field to use as a tiebreaker for log events that have identical timestamps"
|
||||
tiebreaker: String
|
||||
"The field to use as a timestamp for metrics and logs"
|
||||
timestamp: String
|
||||
}
|
||||
|
||||
"The result of a successful source creation"
|
||||
type CreateSourceResult {
|
||||
"The source that was created"
|
||||
source: InfraSource!
|
||||
}
|
||||
|
||||
"The update operations to be performed"
|
||||
input UpdateSourceInput {
|
||||
"The name update operation to be performed"
|
||||
setName: UpdateSourceNameInput
|
||||
"The description update operation to be performed"
|
||||
setDescription: UpdateSourceDescriptionInput
|
||||
"The alias update operation to be performed"
|
||||
setAliases: UpdateSourceAliasInput
|
||||
"The field update operation to be performed"
|
||||
setFields: UpdateSourceFieldsInput
|
||||
}
|
||||
|
||||
"A name update operation"
|
||||
input UpdateSourceNameInput {
|
||||
"The new name to be set"
|
||||
name: String!
|
||||
}
|
||||
|
||||
"A description update operation"
|
||||
input UpdateSourceDescriptionInput {
|
||||
"The new description to be set"
|
||||
description: String!
|
||||
}
|
||||
|
||||
"An alias update operation"
|
||||
input UpdateSourceAliasInput {
|
||||
"The new log index pattern or alias to bet set"
|
||||
logAlias: String
|
||||
"The new metric index pattern or alias to bet set"
|
||||
metricAlias: String
|
||||
}
|
||||
|
||||
"A field update operations"
|
||||
input UpdateSourceFieldsInput {
|
||||
"The new container field to be set"
|
||||
container: String
|
||||
"The new host field to be set"
|
||||
host: String
|
||||
"The new pod field to be set"
|
||||
pod: String
|
||||
"The new tiebreaker field to be set"
|
||||
tiebreaker: String
|
||||
"The new timestamp field to be set"
|
||||
timestamp: String
|
||||
}
|
||||
|
||||
"The result of a sequence of source update operations"
|
||||
type UpdateSourceResult {
|
||||
"The source after the operations were performed"
|
||||
source: InfraSource!
|
||||
}
|
||||
|
||||
"The result of a source deletion operations"
|
||||
type DeleteSourceResult {
|
||||
"The id of the source that was deleted"
|
||||
id: ID!
|
||||
}
|
||||
|
||||
extend type Mutation {
|
||||
"Create a new source of infrastructure data"
|
||||
createSource("The id of the source" id: ID!, source: CreateSourceInput!): CreateSourceResult!
|
||||
"Modify an existing source using the given sequence of update operations"
|
||||
updateSource(
|
||||
"The id of the source"
|
||||
id: ID!
|
||||
"A sequence of update operations"
|
||||
changes: [UpdateSourceInput!]!
|
||||
): UpdateSourceResult!
|
||||
"Delete a source of infrastructure data"
|
||||
deleteSource("The id of the source" id: ID!): DeleteSourceResult!
|
||||
}
|
||||
`;
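The resolution order documented on the `source` query above amounts to a layered merge. Below is a minimal sketch, assuming hypothetical names for the static configuration and the saved object attributes; the concrete default values are placeholders, not the commit's actual defaults.

// Later spreads win: hard-coded defaults < 'xpack.infra.sources.default' < saved object.
interface PartialSourceConfiguration {
  name?: string;
  description?: string;
  metricAlias?: string;
  logAlias?: string;
}

const hardCodedDefaults: PartialSourceConfiguration = {
  name: 'Default',
  metricAlias: 'metricbeat-*',
  logAlias: 'filebeat-*',
};

function resolveSourceConfiguration(
  staticDefaultConfiguration: PartialSourceConfiguration,
  savedObjectAttributes: PartialSourceConfiguration
) {
  // A missing saved object contributes an empty object, so the query still
  // succeeds and returns the configured or hard-coded defaults.
  return {
    ...hardCodedDefaults,
    ...staticDefaultConfiguration,
    ...savedObjectAttributes,
  };
}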
|
||||
|
|
|
@ -37,7 +37,7 @@ export type SubscriptionResolver<Result, Parent = any, Context = any, Args = nev
|
|||
// ====================================================
|
||||
|
||||
export interface Query {
|
||||
/** Get an infrastructure data source by id */
|
||||
/** Get an infrastructure data source by id. The resolution order for the source configuration attributes is as follows, with the first defined value winning: 1. The attributes of the saved object with the given 'id'. 2. The attributes defined in the static Kibana configuration key 'xpack.infra.sources.default'. 3. The hard-coded default values. As a consequence, querying a source without a corresponding saved object doesn't error out, but returns the configured or hardcoded defaults. */
|
||||
source: InfraSource;
|
||||
/** Get a list of all infrastructure data sources */
|
||||
allSources: InfraSource[];
|
||||
|
@ -46,6 +46,10 @@ export interface Query {
|
|||
export interface InfraSource {
|
||||
/** The id of the source */
|
||||
id: string;
|
||||
/** The version number the source configuration was last persisted with */
|
||||
version?: number | null;
|
||||
/** The timestamp the source configuration was last persisted at */
|
||||
updatedAt?: number | null;
|
||||
/** The raw configuration of the source */
|
||||
configuration: InfraSourceConfiguration;
|
||||
/** The status of the source */
|
||||
|
@ -65,6 +69,10 @@ export interface InfraSource {
|
|||
}
|
||||
/** A set of configuration options for an infrastructure data source */
|
||||
export interface InfraSourceConfiguration {
|
||||
/** The name of the data source */
|
||||
name: string;
|
||||
/** A description of the data source */
|
||||
description: string;
|
||||
/** The alias to read metric data from */
|
||||
metricAlias: string;
|
||||
/** The alias to read log data from */
|
||||
|
@ -78,8 +86,6 @@ export interface InfraSourceFields {
|
|||
container: string;
|
||||
/** The fields to identify a host by */
|
||||
host: string;
|
||||
/** The fields that may contain the log event message. The first field found win. */
|
||||
message: string[];
|
||||
/** The field to identify a pod by */
|
||||
pod: string;
|
||||
/** The field to use as a tiebreaker for log events that have identical timestamps */
|
||||
|
@ -239,6 +245,30 @@ export interface InfraDataPoint {
|
|||
value?: number | null;
|
||||
}
|
||||
|
||||
export interface Mutation {
|
||||
/** Create a new source of infrastructure data */
|
||||
createSource: CreateSourceResult;
|
||||
/** Modify an existing source using the given sequence of update operations */
|
||||
updateSource: UpdateSourceResult;
|
||||
/** Delete a source of infrastructure data */
|
||||
deleteSource: DeleteSourceResult;
|
||||
}
|
||||
/** The result of a successful source creation */
|
||||
export interface CreateSourceResult {
|
||||
/** The source that was created */
|
||||
source: InfraSource;
|
||||
}
|
||||
/** The result of a sequence of source update operations */
|
||||
export interface UpdateSourceResult {
|
||||
/** The source after the operations were performed */
|
||||
source: InfraSource;
|
||||
}
|
||||
/** The result of a source deletion operations */
|
||||
export interface DeleteSourceResult {
|
||||
/** The id of the source that was deleted */
|
||||
id: string;
|
||||
}
|
||||
|
||||
// ====================================================
|
||||
// InputTypes
|
||||
// ====================================================
|
||||
|
@ -280,6 +310,73 @@ export interface InfraMetricInput {
|
|||
/** The type of metric */
|
||||
type: InfraMetricType;
|
||||
}
|
||||
/** The source to be created */
|
||||
export interface CreateSourceInput {
|
||||
/** The name of the data source */
|
||||
name: string;
|
||||
/** A description of the data source */
|
||||
description?: string | null;
|
||||
/** The alias to read metric data from */
|
||||
metricAlias?: string | null;
|
||||
/** The alias to read log data from */
|
||||
logAlias?: string | null;
|
||||
/** The field mapping to use for this source */
|
||||
fields?: CreateSourceFieldsInput | null;
|
||||
}
|
||||
/** The mapping of semantic fields of the source to be created */
|
||||
export interface CreateSourceFieldsInput {
|
||||
/** The field to identify a container by */
|
||||
container?: string | null;
|
||||
/** The fields to identify a host by */
|
||||
host?: string | null;
|
||||
/** The field to identify a pod by */
|
||||
pod?: string | null;
|
||||
/** The field to use as a tiebreaker for log events that have identical timestamps */
|
||||
tiebreaker?: string | null;
|
||||
/** The field to use as a timestamp for metrics and logs */
|
||||
timestamp?: string | null;
|
||||
}
|
||||
/** The update operations to be performed */
|
||||
export interface UpdateSourceInput {
|
||||
/** The name update operation to be performed */
|
||||
setName?: UpdateSourceNameInput | null;
|
||||
/** The description update operation to be performed */
|
||||
setDescription?: UpdateSourceDescriptionInput | null;
|
||||
/** The alias update operation to be performed */
|
||||
setAliases?: UpdateSourceAliasInput | null;
|
||||
/** The field update operation to be performed */
|
||||
setFields?: UpdateSourceFieldsInput | null;
|
||||
}
|
||||
/** A name update operation */
|
||||
export interface UpdateSourceNameInput {
|
||||
/** The new name to be set */
|
||||
name: string;
|
||||
}
|
||||
/** A description update operation */
|
||||
export interface UpdateSourceDescriptionInput {
|
||||
/** The new description to be set */
|
||||
description: string;
|
||||
}
|
||||
/** An alias update operation */
|
||||
export interface UpdateSourceAliasInput {
|
||||
/** The new log index pattern or alias to bet set */
|
||||
logAlias?: string | null;
|
||||
/** The new metric index pattern or alias to bet set */
|
||||
metricAlias?: string | null;
|
||||
}
|
||||
/** A field update operations */
|
||||
export interface UpdateSourceFieldsInput {
|
||||
/** The new container field to be set */
|
||||
container?: string | null;
|
||||
/** The new host field to be set */
|
||||
host?: string | null;
|
||||
/** The new pod field to be set */
|
||||
pod?: string | null;
|
||||
/** The new tiebreaker field to be set */
|
||||
tiebreaker?: string | null;
|
||||
/** The new timestamp field to be set */
|
||||
timestamp?: string | null;
|
||||
}
|
||||
|
||||
// ====================================================
|
||||
// Arguments
|
||||
|
@ -348,6 +445,22 @@ export interface NodesInfraResponseArgs {
|
|||
|
||||
metric: InfraMetricInput;
|
||||
}
|
||||
export interface CreateSourceMutationArgs {
|
||||
/** The id of the source */
|
||||
id: string;
|
||||
|
||||
source: CreateSourceInput;
|
||||
}
|
||||
export interface UpdateSourceMutationArgs {
|
||||
/** The id of the source */
|
||||
id: string;
|
||||
/** A sequence of update operations */
|
||||
changes: UpdateSourceInput[];
|
||||
}
|
||||
export interface DeleteSourceMutationArgs {
|
||||
/** The id of the source */
|
||||
id: string;
|
||||
}
|
||||
|
||||
// ====================================================
|
||||
// Enums
|
||||
|
@ -438,7 +551,7 @@ export type InfraLogMessageSegment = InfraLogMessageFieldSegment | InfraLogMessa
|
|||
|
||||
export namespace QueryResolvers {
|
||||
export interface Resolvers<Context = InfraContext, TypeParent = never> {
|
||||
/** Get an infrastructure data source by id */
|
||||
/** Get an infrastructure data source by id.The resolution order for the source configuration attributes is as followswith the first defined value winning:1. The attributes of the saved object with the given 'id'.2. The attributes defined in the static Kibana configuration key'xpack.infra.sources.default'.3. The hard-coded default values.As a consequence, querying a source without a corresponding saved objectdoesn't error out, but returns the configured or hardcoded defaults. */
|
||||
source?: SourceResolver<InfraSource, TypeParent, Context>;
|
||||
/** Get a list of all infrastructure data sources */
|
||||
allSources?: AllSourcesResolver<InfraSource[], TypeParent, Context>;
|
||||
|
@ -466,6 +579,10 @@ export namespace InfraSourceResolvers {
|
|||
export interface Resolvers<Context = InfraContext, TypeParent = InfraSource> {
|
||||
/** The id of the source */
|
||||
id?: IdResolver<string, TypeParent, Context>;
|
||||
/** The version number the source configuration was last persisted with */
|
||||
version?: VersionResolver<number | null, TypeParent, Context>;
|
||||
/** The timestamp the source configuration was last persisted at */
|
||||
updatedAt?: UpdatedAtResolver<number | null, TypeParent, Context>;
|
||||
/** The raw configuration of the source */
|
||||
configuration?: ConfigurationResolver<InfraSourceConfiguration, TypeParent, Context>;
|
||||
/** The status of the source */
|
||||
|
@ -489,6 +606,16 @@ export namespace InfraSourceResolvers {
|
|||
Parent,
|
||||
Context
|
||||
>;
|
||||
export type VersionResolver<
|
||||
R = number | null,
|
||||
Parent = InfraSource,
|
||||
Context = InfraContext
|
||||
> = Resolver<R, Parent, Context>;
|
||||
export type UpdatedAtResolver<
|
||||
R = number | null,
|
||||
Parent = InfraSource,
|
||||
Context = InfraContext
|
||||
> = Resolver<R, Parent, Context>;
|
||||
export type ConfigurationResolver<
|
||||
R = InfraSourceConfiguration,
|
||||
Parent = InfraSource,
|
||||
|
@ -589,6 +716,10 @@ export namespace InfraSourceResolvers {
|
|||
/** A set of configuration options for an infrastructure data source */
|
||||
export namespace InfraSourceConfigurationResolvers {
|
||||
export interface Resolvers<Context = InfraContext, TypeParent = InfraSourceConfiguration> {
|
||||
/** The name of the data source */
|
||||
name?: NameResolver<string, TypeParent, Context>;
|
||||
/** A description of the data source */
|
||||
description?: DescriptionResolver<string, TypeParent, Context>;
|
||||
/** The alias to read metric data from */
|
||||
metricAlias?: MetricAliasResolver<string, TypeParent, Context>;
|
||||
/** The alias to read log data from */
|
||||
|
@ -597,6 +728,16 @@ export namespace InfraSourceConfigurationResolvers {
|
|||
fields?: FieldsResolver<InfraSourceFields, TypeParent, Context>;
|
||||
}
|
||||
|
||||
export type NameResolver<
|
||||
R = string,
|
||||
Parent = InfraSourceConfiguration,
|
||||
Context = InfraContext
|
||||
> = Resolver<R, Parent, Context>;
|
||||
export type DescriptionResolver<
|
||||
R = string,
|
||||
Parent = InfraSourceConfiguration,
|
||||
Context = InfraContext
|
||||
> = Resolver<R, Parent, Context>;
|
||||
export type MetricAliasResolver<
|
||||
R = string,
|
||||
Parent = InfraSourceConfiguration,
|
||||
|
@ -620,8 +761,6 @@ export namespace InfraSourceFieldsResolvers {
|
|||
container?: ContainerResolver<string, TypeParent, Context>;
|
||||
/** The fields to identify a host by */
|
||||
host?: HostResolver<string, TypeParent, Context>;
|
||||
/** The fields that may contain the log event message. The first field found win. */
|
||||
message?: MessageResolver<string[], TypeParent, Context>;
|
||||
/** The field to identify a pod by */
|
||||
pod?: PodResolver<string, TypeParent, Context>;
|
||||
/** The field to use as a tiebreaker for log events that have identical timestamps */
|
||||
|
@ -640,11 +779,6 @@ export namespace InfraSourceFieldsResolvers {
|
|||
Parent = InfraSourceFields,
|
||||
Context = InfraContext
|
||||
> = Resolver<R, Parent, Context>;
|
||||
export type MessageResolver<
|
||||
R = string[],
|
||||
Parent = InfraSourceFields,
|
||||
Context = InfraContext
|
||||
> = Resolver<R, Parent, Context>;
|
||||
export type PodResolver<
|
||||
R = string,
|
||||
Parent = InfraSourceFields,
|
||||
|
@ -1049,7 +1183,7 @@ export namespace InfraNodePathResolvers {
|
|||
export interface Resolvers<Context = InfraContext, TypeParent = InfraNodePath> {
|
||||
value?: ValueResolver<string, TypeParent, Context>;
|
||||
|
||||
label?: DisplayNameResolver<string, TypeParent, Context>;
|
||||
label?: LabelResolver<string, TypeParent, Context>;
|
||||
}
|
||||
|
||||
export type ValueResolver<R = string, Parent = InfraNodePath, Context = InfraContext> = Resolver<
|
||||
|
@ -1057,11 +1191,11 @@ export namespace InfraNodePathResolvers {
|
|||
Parent,
|
||||
Context
|
||||
>;
|
||||
export type DisplayNameResolver<
|
||||
R = string,
|
||||
Parent = InfraNodePath,
|
||||
Context = InfraContext
|
||||
> = Resolver<R, Parent, Context>;
|
||||
export type LabelResolver<R = string, Parent = InfraNodePath, Context = InfraContext> = Resolver<
|
||||
R,
|
||||
Parent,
|
||||
Context
|
||||
>;
|
||||
}
|
||||
|
||||
export namespace InfraNodeMetricResolvers {
|
||||
|
@ -1139,3 +1273,87 @@ export namespace InfraDataPointResolvers {
|
|||
Context = InfraContext
|
||||
> = Resolver<R, Parent, Context>;
|
||||
}
|
||||
|
||||
export namespace MutationResolvers {
|
||||
export interface Resolvers<Context = InfraContext, TypeParent = never> {
|
||||
/** Create a new source of infrastructure data */
|
||||
createSource?: CreateSourceResolver<CreateSourceResult, TypeParent, Context>;
|
||||
/** Modify an existing source using the given sequence of update operations */
|
||||
updateSource?: UpdateSourceResolver<UpdateSourceResult, TypeParent, Context>;
|
||||
/** Delete a source of infrastructure data */
|
||||
deleteSource?: DeleteSourceResolver<DeleteSourceResult, TypeParent, Context>;
|
||||
}
|
||||
|
||||
export type CreateSourceResolver<
|
||||
R = CreateSourceResult,
|
||||
Parent = never,
|
||||
Context = InfraContext
|
||||
> = Resolver<R, Parent, Context, CreateSourceArgs>;
|
||||
export interface CreateSourceArgs {
|
||||
/** The id of the source */
|
||||
id: string;
|
||||
|
||||
source: CreateSourceInput;
|
||||
}
|
||||
|
||||
export type UpdateSourceResolver<
|
||||
R = UpdateSourceResult,
|
||||
Parent = never,
|
||||
Context = InfraContext
|
||||
> = Resolver<R, Parent, Context, UpdateSourceArgs>;
|
||||
export interface UpdateSourceArgs {
|
||||
/** The id of the source */
|
||||
id: string;
|
||||
/** A sequence of update operations */
|
||||
changes: UpdateSourceInput[];
|
||||
}
|
||||
|
||||
export type DeleteSourceResolver<
|
||||
R = DeleteSourceResult,
|
||||
Parent = never,
|
||||
Context = InfraContext
|
||||
> = Resolver<R, Parent, Context, DeleteSourceArgs>;
|
||||
export interface DeleteSourceArgs {
|
||||
/** The id of the source */
|
||||
id: string;
|
||||
}
|
||||
}
|
||||
/** The result of a successful source creation */
|
||||
export namespace CreateSourceResultResolvers {
|
||||
export interface Resolvers<Context = InfraContext, TypeParent = CreateSourceResult> {
|
||||
/** The source that was created */
|
||||
source?: SourceResolver<InfraSource, TypeParent, Context>;
|
||||
}
|
||||
|
||||
export type SourceResolver<
|
||||
R = InfraSource,
|
||||
Parent = CreateSourceResult,
|
||||
Context = InfraContext
|
||||
> = Resolver<R, Parent, Context>;
|
||||
}
|
||||
/** The result of a sequence of source update operations */
|
||||
export namespace UpdateSourceResultResolvers {
|
||||
export interface Resolvers<Context = InfraContext, TypeParent = UpdateSourceResult> {
|
||||
/** The source after the operations were performed */
|
||||
source?: SourceResolver<InfraSource, TypeParent, Context>;
|
||||
}
|
||||
|
||||
export type SourceResolver<
|
||||
R = InfraSource,
|
||||
Parent = UpdateSourceResult,
|
||||
Context = InfraContext
|
||||
> = Resolver<R, Parent, Context>;
|
||||
}
|
||||
/** The result of a source deletion operations */
|
||||
export namespace DeleteSourceResultResolvers {
|
||||
export interface Resolvers<Context = InfraContext, TypeParent = DeleteSourceResult> {
|
||||
/** The id of the source that was deleted */
|
||||
id?: IdResolver<string, TypeParent, Context>;
|
||||
}
|
||||
|
||||
export type IdResolver<
|
||||
R = string,
|
||||
Parent = DeleteSourceResult,
|
||||
Context = InfraContext
|
||||
> = Resolver<R, Parent, Context>;
|
||||
}
|
||||
|
|
|
@ -29,20 +29,12 @@ export const getConfigSchema = (Joi: typeof JoiNamespace) => {
|
|||
fields: Joi.object({
|
||||
container: Joi.string(),
|
||||
host: Joi.string(),
|
||||
message: Joi.array()
|
||||
.items(Joi.string())
|
||||
.single(),
|
||||
pod: Joi.string(),
|
||||
tiebreaker: Joi.string(),
|
||||
timestamp: Joi.string(),
|
||||
}),
|
||||
});
|
||||
|
||||
const InfraSourceConfigSchema = InfraDefaultSourceConfigSchema.keys({
|
||||
metricAlias: Joi.reach(InfraDefaultSourceConfigSchema, 'metricAlias').required(),
|
||||
logAlias: Joi.reach(InfraDefaultSourceConfigSchema, 'logAlias').required(),
|
||||
});
|
||||
|
||||
const InfraRootConfigSchema = Joi.object({
|
||||
enabled: Joi.boolean().default(true),
|
||||
query: Joi.object({
|
||||
|
@ -53,9 +45,15 @@ export const getConfigSchema = (Joi: typeof JoiNamespace) => {
|
|||
.keys({
|
||||
default: InfraDefaultSourceConfigSchema,
|
||||
})
|
||||
.pattern(/.*/, InfraSourceConfigSchema)
|
||||
.default(),
|
||||
}).default();
|
||||
|
||||
return InfraRootConfigSchema;
|
||||
};
|
||||
|
||||
export const getDeprecations = () => [];
|
||||
|
||||
// interface DeprecationHelpers {
|
||||
// rename(oldKey: string, newKey: string): (settings: unknown, log: unknown) => void;
|
||||
// unused(oldKey: string): (settings: unknown, log: unknown) => void;
|
||||
// }
|
||||
|
|
|
@ -4,6 +4,16 @@
|
|||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
export interface InfraConfigurationAdapter<Configuration> {
|
||||
export interface InfraConfigurationAdapter<
|
||||
Configuration extends InfraBaseConfiguration = InfraBaseConfiguration
|
||||
> {
|
||||
get(): Promise<Configuration>;
|
||||
}
|
||||
|
||||
export interface InfraBaseConfiguration {
|
||||
enabled: boolean;
|
||||
query: {
|
||||
partitionSize: number;
|
||||
partitionFactor: number;
|
||||
};
|
||||
}
|
||||
|
|
|
@ -4,9 +4,9 @@
|
|||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { InfraConfigurationAdapter } from './adapter_types';
|
||||
import { InfraBaseConfiguration, InfraConfigurationAdapter } from './adapter_types';
|
||||
|
||||
export class InfraInmemoryConfigurationAdapter<Configuration>
|
||||
export class InfraInmemoryConfigurationAdapter<Configuration extends InfraBaseConfiguration>
|
||||
implements InfraConfigurationAdapter<Configuration> {
|
||||
constructor(private readonly configuration: Configuration) {}
|
||||
|
||||
|
|
|
@ -6,10 +6,9 @@
|
|||
|
||||
import Joi from 'joi';
|
||||
|
||||
import { InfraConfigurationAdapter } from './adapter_types';
|
||||
import { InfraBaseConfiguration, InfraConfigurationAdapter } from './adapter_types';
|
||||
|
||||
export class InfraKibanaConfigurationAdapter<Configuration>
|
||||
implements InfraConfigurationAdapter<Configuration> {
|
||||
export class InfraKibanaConfigurationAdapter implements InfraConfigurationAdapter {
|
||||
private readonly server: ServerWithConfig;
|
||||
|
||||
constructor(server: any) {
|
||||
|
@ -28,16 +27,15 @@ export class InfraKibanaConfigurationAdapter<Configuration>
|
|||
}
|
||||
|
||||
const configuration = config.get('xpack.infra') || {};
|
||||
const configurationWithDefaults = {
|
||||
const configurationWithDefaults: InfraBaseConfiguration = {
|
||||
enabled: true,
|
||||
query: {
|
||||
partitionSize: 75,
|
||||
partitionFactor: 1.2,
|
||||
...(configuration.query || {}),
|
||||
},
|
||||
sources: {},
|
||||
...configuration,
|
||||
} as Configuration;
|
||||
};
|
||||
|
||||
// we assume this to be the configuration because Kibana would have already validated it
|
||||
return configurationWithDefaults;
|
||||
|
|
|
@ -7,9 +7,10 @@
|
|||
import { SearchResponse } from 'elasticsearch';
|
||||
import { GraphQLSchema } from 'graphql';
|
||||
import { Lifecycle, ResponseToolkit, RouteOptions } from 'hapi';
|
||||
import { InfraMetricModel } from '../metrics/adapter_types';
|
||||
import { Legacy } from 'kibana';
|
||||
|
||||
import { JsonObject } from '../../../../common/typed_json';
|
||||
import { InfraMetricModel } from '../metrics/adapter_types';
|
||||
|
||||
export const internalInfraFrameworkRequest = Symbol('internalInfraFrameworkRequest');
|
||||
|
||||
|
@ -50,7 +51,8 @@ export interface InfraBackendFrameworkAdapter {
|
|||
method: string,
|
||||
options?: object
|
||||
): Promise<InfraDatabaseSearchResponse>;
|
||||
getIndexPatternsService(req: InfraFrameworkRequest<any>): InfraFrameworkIndexPatternsService;
|
||||
getIndexPatternsService(req: InfraFrameworkRequest<any>): Legacy.IndexPatternsService;
|
||||
getSavedObjectsService(): Legacy.SavedObjectsService;
|
||||
makeTSVBRequest(
|
||||
req: InfraFrameworkRequest,
|
||||
model: InfraMetricModel,
|
||||
|
@ -178,20 +180,6 @@ export interface InfraFieldDef {
|
|||
[type: string]: InfraFieldDetails;
|
||||
}
|
||||
|
||||
interface InfraFrameworkIndexFieldDescriptor {
|
||||
name: string;
|
||||
type: string;
|
||||
searchable: boolean;
|
||||
aggregatable: boolean;
|
||||
readFromDocValues: boolean;
|
||||
}
|
||||
|
||||
export interface InfraFrameworkIndexPatternsService {
|
||||
getFieldsForWildcard(options: {
|
||||
pattern: string | string[];
|
||||
}): Promise<InfraFrameworkIndexFieldDescriptor[]>;
|
||||
}
|
||||
|
||||
export interface InfraTSVBResponse {
|
||||
[key: string]: InfraTSVBPanel;
|
||||
}
|
||||
|
|
|
@ -4,14 +4,13 @@
|
|||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { GenericParams } from 'elasticsearch';
|
||||
import { GraphQLSchema } from 'graphql';
|
||||
import { Legacy } from 'kibana';
|
||||
|
||||
import { GenericParams } from 'elasticsearch';
|
||||
import { InfraMetricModel } from '../metrics/adapter_types';
|
||||
import {
|
||||
InfraBackendFrameworkAdapter,
|
||||
InfraFrameworkIndexPatternsService,
|
||||
InfraFrameworkRequest,
|
||||
InfraFrameworkRouteOptions,
|
||||
InfraResponse,
|
||||
|
@ -28,11 +27,9 @@ import {
|
|||
|
||||
export class InfraKibanaBackendFrameworkAdapter implements InfraBackendFrameworkAdapter {
|
||||
public version: string;
|
||||
private server: Legacy.Server;
|
||||
|
||||
constructor(hapiServer: Legacy.Server) {
|
||||
this.server = hapiServer;
|
||||
this.version = hapiServer.plugins.kibana.status.plugin.version;
|
||||
constructor(private server: Legacy.Server) {
|
||||
this.version = server.plugins.kibana.status.plugin.version;
|
||||
}
|
||||
|
||||
public exposeStaticDir(urlPath: string, dir: string): void {
|
||||
|
@ -61,10 +58,10 @@ export class InfraKibanaBackendFrameworkAdapter implements InfraBackendFramework
|
|||
|
||||
this.server.register<HapiGraphiQLPluginOptions>({
|
||||
options: {
|
||||
graphiqlOptions: {
|
||||
endpointURL: routePath,
|
||||
graphiqlOptions: request => ({
|
||||
endpointURL: request ? `${request.getBasePath()}${routePath}` : routePath,
|
||||
passHeader: `'kbn-version': '${this.version}'`,
|
||||
},
|
||||
}),
|
||||
path: `${routePath}/graphiql`,
|
||||
},
|
||||
plugin: graphiqlHapi,
|
||||
|
@ -106,10 +103,7 @@ export class InfraKibanaBackendFrameworkAdapter implements InfraBackendFramework
|
|||
|
||||
public getIndexPatternsService(
|
||||
request: InfraFrameworkRequest<Legacy.Request>
|
||||
): InfraFrameworkIndexPatternsService {
|
||||
if (!isServerWithIndexPatternsServiceFactory(this.server)) {
|
||||
throw new Error('Failed to access indexPatternsService for the request');
|
||||
}
|
||||
): Legacy.IndexPatternsService {
|
||||
return this.server.indexPatternsServiceFactory({
|
||||
callCluster: async (method: string, args: [GenericParams], ...rest: any[]) => {
|
||||
const fieldCaps = await this.callWithRequest(
|
||||
|
@ -123,6 +117,10 @@ export class InfraKibanaBackendFrameworkAdapter implements InfraBackendFramework
|
|||
});
|
||||
}
|
||||
|
||||
public getSavedObjectsService() {
|
||||
return this.server.savedObjects;
|
||||
}
|
||||
|
||||
public async makeTSVBRequest(
|
||||
req: InfraFrameworkRequest<Legacy.Request>,
|
||||
model: InfraMetricModel,
|
||||
|
@ -163,14 +161,3 @@ export function wrapRequest<InternalRequest extends InfraWrappableRequest>(
|
|||
query,
|
||||
};
|
||||
}
|
||||
|
||||
interface ServerWithIndexPatternsServiceFactory extends Legacy.Server {
|
||||
indexPatternsServiceFactory(options: {
|
||||
callCluster: (...args: any[]) => any;
|
||||
}): InfraFrameworkIndexPatternsService;
|
||||
}
|
||||
|
||||
const isServerWithIndexPatternsServiceFactory = (
|
||||
server: Legacy.Server
|
||||
): server is ServerWithIndexPatternsServiceFactory =>
|
||||
typeof (server as any).indexPatternsServiceFactory === 'function';
|
||||
|
|
|
@ -1,21 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { InfraSourceConfiguration } from '../../sources';
|
||||
|
||||
export type PartialInfraSourceConfigurations = {
|
||||
default?: PartialInfraDefaultSourceConfiguration;
|
||||
} & {
|
||||
[sourceId: string]: PartialInfraSourceConfiguration;
|
||||
};
|
||||
|
||||
export type PartialInfraDefaultSourceConfiguration = {
|
||||
fields?: Partial<InfraSourceConfiguration['fields']>;
|
||||
} & Partial<Pick<InfraSourceConfiguration, Exclude<keyof InfraSourceConfiguration, 'fields'>>>;
|
||||
|
||||
export type PartialInfraSourceConfiguration = {
|
||||
fields?: Partial<InfraSourceConfiguration['fields']>;
|
||||
} & Pick<InfraSourceConfiguration, Exclude<keyof InfraSourceConfiguration, 'fields'>>;
|
|
@ -1,111 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { InfraInmemoryConfigurationAdapter } from '../configuration/inmemory_configuration_adapter';
|
||||
import { PartialInfraSourceConfiguration } from './adapter_types';
|
||||
import { InfraConfigurationSourcesAdapter } from './configuration_sources_adapter';
|
||||
|
||||
describe('the InfraConfigurationSourcesAdapter', () => {
|
||||
test('adds the default source when no sources are configured', async () => {
|
||||
const sourcesAdapter = new InfraConfigurationSourcesAdapter(
|
||||
new InfraInmemoryConfigurationAdapter({ sources: {} })
|
||||
);
|
||||
|
||||
expect(await sourcesAdapter.getAll()).toMatchObject({
|
||||
default: {
|
||||
metricAlias: expect.any(String),
|
||||
logAlias: expect.any(String),
|
||||
fields: {
|
||||
container: expect.any(String),
|
||||
host: expect.any(String),
|
||||
message: expect.arrayContaining([expect.any(String)]),
|
||||
pod: expect.any(String),
|
||||
tiebreaker: expect.any(String),
|
||||
timestamp: expect.any(String),
|
||||
},
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
test('adds missing aliases to default source when they are missing from the configuration', async () => {
|
||||
const sourcesAdapter = new InfraConfigurationSourcesAdapter(
|
||||
new InfraInmemoryConfigurationAdapter({
|
||||
sources: {
|
||||
default: {} as PartialInfraSourceConfiguration,
|
||||
},
|
||||
})
|
||||
);
|
||||
|
||||
expect(await sourcesAdapter.getAll()).toMatchObject({
|
||||
default: {
|
||||
metricAlias: expect.any(String),
|
||||
logAlias: expect.any(String),
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
test('adds missing fields to default source when they are missing from the configuration', async () => {
|
||||
const sourcesAdapter = new InfraConfigurationSourcesAdapter(
|
||||
new InfraInmemoryConfigurationAdapter({
|
||||
sources: {
|
||||
default: {
|
||||
metricAlias: 'METRIC_ALIAS',
|
||||
logAlias: 'LOG_ALIAS',
|
||||
fields: {
|
||||
container: 'DIFFERENT_CONTAINER_FIELD',
|
||||
},
|
||||
} as PartialInfraSourceConfiguration,
|
||||
},
|
||||
})
|
||||
);
|
||||
|
||||
expect(await sourcesAdapter.getAll()).toMatchObject({
|
||||
default: {
|
||||
metricAlias: 'METRIC_ALIAS',
|
||||
logAlias: 'LOG_ALIAS',
|
||||
fields: {
|
||||
container: 'DIFFERENT_CONTAINER_FIELD',
|
||||
host: expect.any(String),
|
||||
message: expect.arrayContaining([expect.any(String)]),
|
||||
pod: expect.any(String),
|
||||
tiebreaker: expect.any(String),
|
||||
timestamp: expect.any(String),
|
||||
},
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
test('adds missing fields to non-default sources when they are missing from the configuration', async () => {
|
||||
const sourcesAdapter = new InfraConfigurationSourcesAdapter(
|
||||
new InfraInmemoryConfigurationAdapter({
|
||||
sources: {
|
||||
sourceOne: {
|
||||
metricAlias: 'METRIC_ALIAS',
|
||||
logAlias: 'LOG_ALIAS',
|
||||
fields: {
|
||||
container: 'DIFFERENT_CONTAINER_FIELD',
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
);
|
||||
|
||||
expect(await sourcesAdapter.getAll()).toMatchObject({
|
||||
sourceOne: {
|
||||
metricAlias: 'METRIC_ALIAS',
|
||||
logAlias: 'LOG_ALIAS',
|
||||
fields: {
|
||||
container: 'DIFFERENT_CONTAINER_FIELD',
|
||||
host: expect.any(String),
|
||||
message: expect.arrayContaining([expect.any(String)]),
|
||||
pod: expect.any(String),
|
||||
tiebreaker: expect.any(String),
|
||||
timestamp: expect.any(String),
|
||||
},
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
|
@ -1,64 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { InfraSourceConfigurations, InfraSourcesAdapter } from '../../sources';
|
||||
import { InfraConfigurationAdapter } from '../configuration';
|
||||
import { PartialInfraSourceConfigurations } from './adapter_types';
|
||||
|
||||
interface ConfigurationWithSources {
|
||||
sources?: PartialInfraSourceConfigurations;
|
||||
}
|
||||
|
||||
export class InfraConfigurationSourcesAdapter implements InfraSourcesAdapter {
|
||||
private readonly configuration: InfraConfigurationAdapter<ConfigurationWithSources>;
|
||||
|
||||
constructor(configuration: InfraConfigurationAdapter<ConfigurationWithSources>) {
|
||||
this.configuration = configuration;
|
||||
}
|
||||
|
||||
public async getAll() {
|
||||
const sourceConfigurations = (await this.configuration.get()).sources || {
|
||||
default: DEFAULT_SOURCE,
|
||||
};
|
||||
const sourceConfigurationsWithDefault = {
|
||||
...sourceConfigurations,
|
||||
default: {
|
||||
...DEFAULT_SOURCE,
|
||||
...(sourceConfigurations.default || {}),
|
||||
},
|
||||
} as PartialInfraSourceConfigurations;
|
||||
|
||||
return Object.entries(sourceConfigurationsWithDefault).reduce<InfraSourceConfigurations>(
|
||||
(result, [sourceId, sourceConfiguration]) =>
|
||||
({
|
||||
...result,
|
||||
[sourceId]: {
|
||||
...sourceConfiguration,
|
||||
fields: {
|
||||
...DEFAULT_FIELDS,
|
||||
...(sourceConfiguration.fields || {}),
|
||||
},
|
||||
},
|
||||
} as InfraSourceConfigurations),
|
||||
{}
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
const DEFAULT_FIELDS = {
|
||||
container: 'docker.container.id',
|
||||
host: 'host.name',
|
||||
message: ['message', '@message'],
|
||||
pod: 'kubernetes.pod.uid',
|
||||
tiebreaker: '_doc',
|
||||
timestamp: '@timestamp',
|
||||
};
|
||||
|
||||
const DEFAULT_SOURCE = {
|
||||
metricAlias: 'metricbeat-*',
|
||||
logAlias: 'filebeat-*',
|
||||
fields: DEFAULT_FIELDS,
|
||||
};
|
|
@ -14,20 +14,22 @@ import { ElasticsearchMetadataAdapter } from '../adapters/metadata/elasticsearch
|
|||
import { KibanaMetricsAdapter } from '../adapters/metrics/kibana_metrics_adapter';
|
||||
import { ElasticsearchNodesAdapter } from '../adapters/nodes/elasticsearch_nodes_adapter';
|
||||
import { InfraElasticsearchSourceStatusAdapter } from '../adapters/source_status';
|
||||
import { InfraConfigurationSourcesAdapter } from '../adapters/sources/configuration_sources_adapter';
|
||||
import { InfraFieldsDomain } from '../domains/fields_domain';
|
||||
import { InfraLogEntriesDomain } from '../domains/log_entries_domain';
|
||||
import { InfraMetadataDomain } from '../domains/metadata_domain';
|
||||
import { InfraMetricsDomain } from '../domains/metrics_domain';
|
||||
import { InfraNodesDomain } from '../domains/nodes_domain';
|
||||
import { InfraBackendLibs, InfraConfiguration, InfraDomainLibs } from '../infra_types';
|
||||
import { InfraBackendLibs, InfraDomainLibs } from '../infra_types';
|
||||
import { InfraSourceStatus } from '../source_status';
|
||||
import { InfraSources } from '../sources';
|
||||
|
||||
export function compose(server: Server): InfraBackendLibs {
|
||||
const configuration = new InfraKibanaConfigurationAdapter<InfraConfiguration>(server);
|
||||
const configuration = new InfraKibanaConfigurationAdapter(server);
|
||||
const framework = new InfraKibanaBackendFrameworkAdapter(server);
|
||||
const sources = new InfraSources(new InfraConfigurationSourcesAdapter(configuration));
|
||||
const sources = new InfraSources({
|
||||
configuration,
|
||||
savedObjects: framework.getSavedObjectsService(),
|
||||
});
|
||||
const sourceStatus = new InfraSourceStatus(new InfraElasticsearchSourceStatusAdapter(framework), {
|
||||
sources,
|
||||
});
|
||||
|
|
|
@ -20,13 +20,13 @@ export class InfraFieldsDomain {
|
|||
sourceId: string,
|
||||
indexType: InfraIndexType
|
||||
): Promise<InfraIndexField[]> {
|
||||
const sourceConfiguration = await this.libs.sources.getConfiguration(sourceId);
|
||||
const { configuration } = await this.libs.sources.getSourceConfiguration(request, sourceId);
|
||||
const includeMetricIndices = [InfraIndexType.ANY, InfraIndexType.METRICS].includes(indexType);
|
||||
const includeLogIndices = [InfraIndexType.ANY, InfraIndexType.LOGS].includes(indexType);
|
||||
|
||||
const fields = await this.adapter.getIndexFields(request, [
|
||||
...(includeMetricIndices ? [sourceConfiguration.metricAlias] : []),
|
||||
...(includeLogIndices ? [sourceConfiguration.logAlias] : []),
|
||||
...(includeMetricIndices ? [configuration.metricAlias] : []),
|
||||
...(includeLogIndices ? [configuration.logAlias] : []),
|
||||
]);
|
||||
|
||||
return fields;
|
||||
|
|
|
@ -38,12 +38,12 @@ export class InfraLogEntriesDomain {
|
|||
};
|
||||
}
|
||||
|
||||
const sourceConfiguration = await this.libs.sources.getConfiguration(sourceId);
|
||||
const { configuration } = await this.libs.sources.getSourceConfiguration(request, sourceId);
|
||||
const formattingRules = compileFormattingRules(builtinRules);
|
||||
|
||||
const documentsBefore = await this.adapter.getAdjacentLogEntryDocuments(
|
||||
request,
|
||||
sourceConfiguration,
|
||||
configuration,
|
||||
formattingRules.requiredFields,
|
||||
key,
|
||||
'desc',
|
||||
|
@ -61,7 +61,7 @@ export class InfraLogEntriesDomain {
|
|||
|
||||
const documentsAfter = await this.adapter.getAdjacentLogEntryDocuments(
|
||||
request,
|
||||
sourceConfiguration,
|
||||
configuration,
|
||||
formattingRules.requiredFields,
|
||||
lastKeyBefore,
|
||||
'asc',
|
||||
|
@ -86,11 +86,11 @@ export class InfraLogEntriesDomain {
|
|||
filterQuery?: LogEntryQuery,
|
||||
highlightQuery?: string
|
||||
): Promise<InfraLogEntry[]> {
|
||||
const sourceConfiguration = await this.libs.sources.getConfiguration(sourceId);
|
||||
const { configuration } = await this.libs.sources.getSourceConfiguration(request, sourceId);
|
||||
const formattingRules = compileFormattingRules(builtinRules);
|
||||
const documents = await this.adapter.getContainedLogEntryDocuments(
|
||||
request,
|
||||
sourceConfiguration,
|
||||
configuration,
|
||||
formattingRules.requiredFields,
|
||||
startKey,
|
||||
endKey,
|
||||
|
@ -109,10 +109,10 @@ export class InfraLogEntriesDomain {
|
|||
bucketSize: number,
|
||||
filterQuery?: LogEntryQuery
|
||||
): Promise<InfraLogSummaryBucket[]> {
|
||||
const sourceConfiguration = await this.libs.sources.getConfiguration(sourceId);
|
||||
const { configuration } = await this.libs.sources.getSourceConfiguration(request, sourceId);
|
||||
const dateRangeBuckets = await this.adapter.getContainedLogSummaryBuckets(
|
||||
request,
|
||||
sourceConfiguration,
|
||||
configuration,
|
||||
start,
|
||||
end,
|
||||
bucketSize,
|
||||
|
|
|
@ -20,14 +20,9 @@ export class InfraMetadataDomain {
|
|||
nodeId: string,
|
||||
nodeType: string
|
||||
) {
|
||||
const sourceConfiguration = await this.libs.sources.getConfiguration(sourceId);
|
||||
const metricsPromise = this.adapter.getMetricMetadata(
|
||||
req,
|
||||
sourceConfiguration,
|
||||
nodeId,
|
||||
nodeType
|
||||
);
|
||||
const logsPromise = this.adapter.getLogMetadata(req, sourceConfiguration, nodeId, nodeType);
|
||||
const { configuration } = await this.libs.sources.getSourceConfiguration(req, sourceId);
|
||||
const metricsPromise = this.adapter.getMetricMetadata(req, configuration, nodeId, nodeType);
|
||||
const logsPromise = this.adapter.getLogMetadata(req, configuration, nodeId, nodeType);
|
||||
|
||||
const metrics = await metricsPromise;
|
||||
const logs = await logsPromise;
|
||||
|
|
|
@ -4,6 +4,7 @@
|
|||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { InfraSourceConfiguration } from '../../public/graphql/types';
|
||||
import { InfraConfigurationAdapter } from './adapters/configuration';
|
||||
import { InfraBackendFrameworkAdapter, InfraFrameworkRequest } from './adapters/framework';
|
||||
import { InfraFieldsDomain } from './domains/fields_domain';
|
||||
|
@ -12,7 +13,7 @@ import { InfraMetadataDomain } from './domains/metadata_domain';
|
|||
import { InfraMetricsDomain } from './domains/metrics_domain';
|
||||
import { InfraNodesDomain } from './domains/nodes_domain';
|
||||
import { InfraSourceStatus } from './source_status';
|
||||
import { InfraSourceConfigurations, InfraSources } from './sources';
|
||||
import { InfraSources } from './sources';
|
||||
|
||||
export interface InfraDomainLibs {
|
||||
metadata: InfraMetadataDomain;
|
||||
|
@ -23,7 +24,7 @@ export interface InfraDomainLibs {
|
|||
}
|
||||
|
||||
export interface InfraBackendLibs extends InfraDomainLibs {
|
||||
configuration: InfraConfigurationAdapter<InfraConfiguration>;
|
||||
configuration: InfraConfigurationAdapter;
|
||||
framework: InfraBackendFrameworkAdapter;
|
||||
sources: InfraSources;
|
||||
sourceStatus: InfraSourceStatus;
|
||||
|
@ -35,7 +36,9 @@ export interface InfraConfiguration {
|
|||
partitionSize: number;
|
||||
partitionFactor: number;
|
||||
};
|
||||
sources: InfraSourceConfigurations;
|
||||
sources: {
|
||||
default: InfraSourceConfiguration;
|
||||
};
|
||||
}
|
||||
|
||||
export interface InfraContext {
|
||||
|
|
|
@ -17,39 +17,57 @@ export class InfraSourceStatus {
|
|||
request: InfraFrameworkRequest,
|
||||
sourceId: string
|
||||
): Promise<string[]> {
|
||||
const sourceConfiguration = await this.libs.sources.getConfiguration(sourceId);
|
||||
const indexNames = await this.adapter.getIndexNames(request, sourceConfiguration.logAlias);
|
||||
const sourceConfiguration = await this.libs.sources.getSourceConfiguration(request, sourceId);
|
||||
const indexNames = await this.adapter.getIndexNames(
|
||||
request,
|
||||
sourceConfiguration.configuration.logAlias
|
||||
);
|
||||
return indexNames;
|
||||
}
|
||||
public async getMetricIndexNames(
|
||||
request: InfraFrameworkRequest,
|
||||
sourceId: string
|
||||
): Promise<string[]> {
|
||||
const sourceConfiguration = await this.libs.sources.getConfiguration(sourceId);
|
||||
const indexNames = await this.adapter.getIndexNames(request, sourceConfiguration.metricAlias);
|
||||
const sourceConfiguration = await this.libs.sources.getSourceConfiguration(request, sourceId);
|
||||
const indexNames = await this.adapter.getIndexNames(
|
||||
request,
|
||||
sourceConfiguration.configuration.metricAlias
|
||||
);
|
||||
return indexNames;
|
||||
}
|
||||
public async hasLogAlias(request: InfraFrameworkRequest, sourceId: string): Promise<boolean> {
|
||||
const sourceConfiguration = await this.libs.sources.getConfiguration(sourceId);
|
||||
const hasAlias = await this.adapter.hasAlias(request, sourceConfiguration.logAlias);
|
||||
const sourceConfiguration = await this.libs.sources.getSourceConfiguration(request, sourceId);
|
||||
const hasAlias = await this.adapter.hasAlias(
|
||||
request,
|
||||
sourceConfiguration.configuration.logAlias
|
||||
);
|
||||
return hasAlias;
|
||||
}
|
||||
public async hasMetricAlias(request: InfraFrameworkRequest, sourceId: string): Promise<boolean> {
|
||||
const sourceConfiguration = await this.libs.sources.getConfiguration(sourceId);
|
||||
const hasAlias = await this.adapter.hasAlias(request, sourceConfiguration.metricAlias);
|
||||
const sourceConfiguration = await this.libs.sources.getSourceConfiguration(request, sourceId);
|
||||
const hasAlias = await this.adapter.hasAlias(
|
||||
request,
|
||||
sourceConfiguration.configuration.metricAlias
|
||||
);
|
||||
return hasAlias;
|
||||
}
|
||||
public async hasLogIndices(request: InfraFrameworkRequest, sourceId: string): Promise<boolean> {
|
||||
const sourceConfiguration = await this.libs.sources.getConfiguration(sourceId);
|
||||
const hasIndices = await this.adapter.hasIndices(request, sourceConfiguration.logAlias);
|
||||
const sourceConfiguration = await this.libs.sources.getSourceConfiguration(request, sourceId);
|
||||
const hasIndices = await this.adapter.hasIndices(
|
||||
request,
|
||||
sourceConfiguration.configuration.logAlias
|
||||
);
|
||||
return hasIndices;
|
||||
}
|
||||
public async hasMetricIndices(
|
||||
request: InfraFrameworkRequest,
|
||||
sourceId: string
|
||||
): Promise<boolean> {
|
||||
const sourceConfiguration = await this.libs.sources.getConfiguration(sourceId);
|
||||
const hasIndices = await this.adapter.hasIndices(request, sourceConfiguration.metricAlias);
|
||||
const sourceConfiguration = await this.libs.sources.getSourceConfiguration(request, sourceId);
|
||||
const hasIndices = await this.adapter.hasIndices(
|
||||
request,
|
||||
sourceConfiguration.configuration.metricAlias
|
||||
);
|
||||
return hasIndices;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,53 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
import { i18n } from '@kbn/i18n';
|
||||
|
||||
export class InfraSources {
|
||||
constructor(private readonly adapter: InfraSourcesAdapter) {}
|
||||
|
||||
public async getConfiguration(sourceId: string) {
|
||||
const sourceConfigurations = await this.getAllConfigurations();
|
||||
const requestedSourceConfiguration = sourceConfigurations[sourceId];
|
||||
|
||||
if (!requestedSourceConfiguration) {
|
||||
throw new Error(
|
||||
i18n.translate('xpack.infra.infraSources.failedToFindSourceErrorMessage', {
|
||||
defaultMessage: 'Failed to find source {sourceId}',
|
||||
values: {
|
||||
sourceId: `'${sourceId}'`,
|
||||
},
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
return requestedSourceConfiguration;
|
||||
}
|
||||
|
||||
public getAllConfigurations() {
|
||||
return this.adapter.getAll();
|
||||
}
|
||||
}
|
||||
|
||||
export interface InfraSourcesAdapter {
|
||||
getAll(): Promise<InfraSourceConfigurations>;
|
||||
}
|
||||
|
||||
export interface InfraSourceConfigurations {
|
||||
[sourceId: string]: InfraSourceConfiguration;
|
||||
}
|
||||
|
||||
export interface InfraSourceConfiguration {
|
||||
metricAlias: string;
|
||||
logAlias: string;
|
||||
fields: {
|
||||
container: string;
|
||||
host: string;
|
||||
message: string[];
|
||||
pod: string;
|
||||
tiebreaker: string;
|
||||
timestamp: string;
|
||||
};
|
||||
}
|
19
x-pack/plugins/infra/server/lib/sources/defaults.ts
Normal file
19
x-pack/plugins/infra/server/lib/sources/defaults.ts
Normal file
|
@ -0,0 +1,19 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
export const defaultSourceConfiguration = {
|
||||
name: 'Unnamed Source',
|
||||
description: '',
|
||||
metricAlias: 'metricbeat-*',
|
||||
logAlias: 'filebeat-*',
|
||||
fields: {
|
||||
container: 'docker.container.id',
|
||||
host: 'host.name',
|
||||
pod: 'kubernetes.pod.uid',
|
||||
tiebreaker: '_doc',
|
||||
timestamp: '@timestamp',
|
||||
},
|
||||
};
|
|
@ -4,4 +4,7 @@
|
|||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
export { InfraConfigurationSourcesAdapter } from './configuration_sources_adapter';
|
||||
export * from './defaults';
|
||||
export * from './saved_object_mappings';
|
||||
export * from './sources';
|
||||
export * from './types';
|
|
@ -0,0 +1,50 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { ElasticsearchMappingOf } from '../../utils/typed_elasticsearch_mappings';
|
||||
import { InfraSourceConfiguration } from './types';
|
||||
|
||||
export const infraSourceConfigurationSavedObjectType = 'infrastructure-ui-source';
|
||||
|
||||
export const infraSourceConfigurationSavedObjectMappings: {
|
||||
[infraSourceConfigurationSavedObjectType]: ElasticsearchMappingOf<InfraSourceConfiguration>;
|
||||
} = {
|
||||
[infraSourceConfigurationSavedObjectType]: {
|
||||
properties: {
|
||||
name: {
|
||||
type: 'text',
|
||||
},
|
||||
description: {
|
||||
type: 'text',
|
||||
},
|
||||
metricAlias: {
|
||||
type: 'keyword',
|
||||
},
|
||||
logAlias: {
|
||||
type: 'keyword',
|
||||
},
|
||||
fields: {
|
||||
properties: {
|
||||
container: {
|
||||
type: 'keyword',
|
||||
},
|
||||
host: {
|
||||
type: 'keyword',
|
||||
},
|
||||
pod: {
|
||||
type: 'keyword',
|
||||
},
|
||||
tiebreaker: {
|
||||
type: 'keyword',
|
||||
},
|
||||
timestamp: {
|
||||
type: 'keyword',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
157
x-pack/plugins/infra/server/lib/sources/sources.test.ts
Normal file
157
x-pack/plugins/infra/server/lib/sources/sources.test.ts
Normal file
|
@ -0,0 +1,157 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { InfraInmemoryConfigurationAdapter } from '../adapters/configuration/inmemory_configuration_adapter';
|
||||
import { InfraSources } from './sources';
|
||||
|
||||
describe('the InfraSources lib', () => {
|
||||
describe('getSourceConfiguration method', () => {
|
||||
test('returns a source configuration if it exists', async () => {
|
||||
const sourcesLib = new InfraSources({
|
||||
configuration: createMockStaticConfiguration({}),
|
||||
savedObjects: createMockSavedObjectsService({
|
||||
id: 'TEST_ID',
|
||||
version: 1,
|
||||
updated_at: '2000-01-01T00:00:00.000Z',
|
||||
attributes: {
|
||||
metricAlias: 'METRIC_ALIAS',
|
||||
logAlias: 'LOG_ALIAS',
|
||||
fields: {
|
||||
container: 'CONTAINER',
|
||||
host: 'HOST',
|
||||
pod: 'POD',
|
||||
tiebreaker: 'TIEBREAKER',
|
||||
timestamp: 'TIMESTAMP',
|
||||
},
|
||||
},
|
||||
}),
|
||||
});
|
||||
|
||||
const request: any = Symbol();
|
||||
|
||||
expect(await sourcesLib.getSourceConfiguration(request, 'TEST_ID')).toMatchObject({
|
||||
id: 'TEST_ID',
|
||||
version: 1,
|
||||
updatedAt: 946684800000,
|
||||
configuration: {
|
||||
metricAlias: 'METRIC_ALIAS',
|
||||
logAlias: 'LOG_ALIAS',
|
||||
fields: {
|
||||
container: 'CONTAINER',
|
||||
host: 'HOST',
|
||||
pod: 'POD',
|
||||
tiebreaker: 'TIEBREAKER',
|
||||
timestamp: 'TIMESTAMP',
|
||||
},
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
test('adds missing attributes from the static configuration to a source configuration', async () => {
|
||||
const sourcesLib = new InfraSources({
|
||||
configuration: createMockStaticConfiguration({
|
||||
default: {
|
||||
metricAlias: 'METRIC_ALIAS',
|
||||
logAlias: 'LOG_ALIAS',
|
||||
fields: {
|
||||
host: 'HOST',
|
||||
pod: 'POD',
|
||||
tiebreaker: 'TIEBREAKER',
|
||||
timestamp: 'TIMESTAMP',
|
||||
},
|
||||
},
|
||||
}),
|
||||
savedObjects: createMockSavedObjectsService({
|
||||
id: 'TEST_ID',
|
||||
version: 1,
|
||||
updated_at: '2000-01-01T00:00:00.000Z',
|
||||
attributes: {
|
||||
fields: {
|
||||
container: 'CONTAINER',
|
||||
},
|
||||
},
|
||||
}),
|
||||
});
|
||||
|
||||
const request: any = Symbol();
|
||||
|
||||
expect(await sourcesLib.getSourceConfiguration(request, 'TEST_ID')).toMatchObject({
|
||||
id: 'TEST_ID',
|
||||
version: 1,
|
||||
updatedAt: 946684800000,
|
||||
configuration: {
|
||||
metricAlias: 'METRIC_ALIAS',
|
||||
logAlias: 'LOG_ALIAS',
|
||||
fields: {
|
||||
container: 'CONTAINER',
|
||||
host: 'HOST',
|
||||
pod: 'POD',
|
||||
tiebreaker: 'TIEBREAKER',
|
||||
timestamp: 'TIMESTAMP',
|
||||
},
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
test('adds missing attributes from the default configuration to a source configuration', async () => {
|
||||
const sourcesLib = new InfraSources({
|
||||
configuration: createMockStaticConfiguration({}),
|
||||
savedObjects: createMockSavedObjectsService({
|
||||
id: 'TEST_ID',
|
||||
version: 1,
|
||||
updated_at: '2000-01-01T00:00:00.000Z',
|
||||
attributes: {},
|
||||
}),
|
||||
});
|
||||
|
||||
const request: any = Symbol();
|
||||
|
||||
expect(await sourcesLib.getSourceConfiguration(request, 'TEST_ID')).toMatchObject({
|
||||
id: 'TEST_ID',
|
||||
version: 1,
|
||||
updatedAt: 946684800000,
|
||||
configuration: {
|
||||
metricAlias: expect.any(String),
|
||||
logAlias: expect.any(String),
|
||||
fields: {
|
||||
container: expect.any(String),
|
||||
host: expect.any(String),
|
||||
pod: expect.any(String),
|
||||
tiebreaker: expect.any(String),
|
||||
timestamp: expect.any(String),
|
||||
},
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
const createMockStaticConfiguration = (sources: any) =>
|
||||
new InfraInmemoryConfigurationAdapter({
|
||||
enabled: true,
|
||||
query: {
|
||||
partitionSize: 1,
|
||||
partitionFactor: 1,
|
||||
},
|
||||
sources,
|
||||
});
|
||||
|
||||
const createMockSavedObjectsService = (savedObject?: any) => ({
|
||||
getScopedSavedObjectsClient() {
|
||||
return {
|
||||
async get() {
|
||||
return savedObject;
|
||||
},
|
||||
} as any;
|
||||
},
|
||||
SavedObjectsClient: {
|
||||
errors: {
|
||||
isNotFoundError() {
|
||||
return typeof savedObject === 'undefined';
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
211
x-pack/plugins/infra/server/lib/sources/sources.ts
Normal file
211
x-pack/plugins/infra/server/lib/sources/sources.ts
Normal file
|
@ -0,0 +1,211 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import * as runtimeTypes from 'io-ts';
|
||||
import { failure } from 'io-ts/lib/PathReporter';
|
||||
import { Legacy } from 'kibana';
|
||||
|
||||
import { Pick3 } from '../../../common/utility_types';
|
||||
import { InfraConfigurationAdapter } from '../adapters/configuration';
|
||||
import { InfraFrameworkRequest, internalInfraFrameworkRequest } from '../adapters/framework';
|
||||
import { defaultSourceConfiguration } from './defaults';
|
||||
import { infraSourceConfigurationSavedObjectType } from './saved_object_mappings';
|
||||
import {
|
||||
InfraSavedSourceConfigurationRuntimeType,
|
||||
InfraSourceConfiguration,
|
||||
PartialInfraSourceConfiguration,
|
||||
PartialInfraSourceConfigurationRuntimeType,
|
||||
} from './types';
|
||||
|
||||
export class InfraSources {
|
||||
constructor(
|
||||
private readonly libs: {
|
||||
configuration: InfraConfigurationAdapter;
|
||||
savedObjects: Pick<Legacy.SavedObjectsService, 'getScopedSavedObjectsClient'> &
|
||||
Pick3<Legacy.SavedObjectsService, 'SavedObjectsClient', 'errors', 'isNotFoundError'>;
|
||||
}
|
||||
) {}
|
||||
|
||||
public async getSourceConfiguration(request: InfraFrameworkRequest, sourceId: string) {
|
||||
const staticDefaultSourceConfiguration = await this.getStaticDefaultSourceConfiguration();
|
||||
|
||||
const savedSourceConfiguration = await this.getSavedSourceConfiguration(request, sourceId).then(
|
||||
result => ({
|
||||
...result,
|
||||
configuration: mergeSourceConfiguration(
|
||||
staticDefaultSourceConfiguration,
|
||||
result.configuration
|
||||
),
|
||||
}),
|
||||
err =>
|
||||
this.libs.savedObjects.SavedObjectsClient.errors.isNotFoundError(err)
|
||||
? Promise.resolve({
|
||||
id: sourceId,
|
||||
version: undefined,
|
||||
updatedAt: undefined,
|
||||
configuration: staticDefaultSourceConfiguration,
|
||||
})
|
||||
: Promise.reject(err)
|
||||
);
|
||||
|
||||
return savedSourceConfiguration;
|
||||
}
|
||||
|
||||
public async getAllSourceConfigurations(request: InfraFrameworkRequest) {
|
||||
const staticDefaultSourceConfiguration = await this.getStaticDefaultSourceConfiguration();
|
||||
|
||||
const savedSourceConfigurations = await this.getAllSavedSourceConfigurations(request);
|
||||
|
||||
return savedSourceConfigurations.map(savedSourceConfiguration => ({
|
||||
...savedSourceConfiguration,
|
||||
configuration: mergeSourceConfiguration(
|
||||
staticDefaultSourceConfiguration,
|
||||
savedSourceConfiguration.configuration
|
||||
),
|
||||
}));
|
||||
}
|
||||
|
||||
public async createSourceConfiguration(
|
||||
request: InfraFrameworkRequest,
|
||||
sourceId: string,
|
||||
source: PartialInfraSourceConfiguration
|
||||
) {
|
||||
const staticDefaultSourceConfiguration = await this.getStaticDefaultSourceConfiguration();
|
||||
|
||||
const newSourceConfiguration = mergeSourceConfiguration(
|
||||
staticDefaultSourceConfiguration,
|
||||
source
|
||||
);
|
||||
|
||||
const createdSourceConfiguration = convertSavedObjectToSavedSourceConfiguration(
|
||||
await this.libs.savedObjects
|
||||
.getScopedSavedObjectsClient(request[internalInfraFrameworkRequest])
|
||||
.create(
|
||||
infraSourceConfigurationSavedObjectType,
|
||||
{ ...newSourceConfiguration },
|
||||
{ id: sourceId }
|
||||
)
|
||||
);
|
||||
|
||||
return {
|
||||
...createdSourceConfiguration,
|
||||
configuration: mergeSourceConfiguration(
|
||||
staticDefaultSourceConfiguration,
|
||||
createdSourceConfiguration.configuration
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
public async deleteSourceConfiguration(request: InfraFrameworkRequest, sourceId: string) {
|
||||
await this.libs.savedObjects
|
||||
.getScopedSavedObjectsClient(request[internalInfraFrameworkRequest])
|
||||
.delete(infraSourceConfigurationSavedObjectType, sourceId);
|
||||
}
|
||||
|
||||
public async updateSourceConfiguration(
|
||||
request: InfraFrameworkRequest,
|
||||
sourceId: string,
|
||||
updaters: Array<(configuration: InfraSourceConfiguration) => InfraSourceConfiguration>
|
||||
) {
|
||||
const staticDefaultSourceConfiguration = await this.getStaticDefaultSourceConfiguration();
|
||||
|
||||
const { configuration, version } = await this.getSourceConfiguration(request, sourceId);
|
||||
|
||||
const updatedConfigurationAttributes = updaters.reduce(
|
||||
(accumulatedConfiguration, updater) => updater(accumulatedConfiguration),
|
||||
configuration
|
||||
);
|
||||
|
||||
const updatedSourceConfiguration = convertSavedObjectToSavedSourceConfiguration(
|
||||
await this.libs.savedObjects
|
||||
.getScopedSavedObjectsClient(request[internalInfraFrameworkRequest])
|
||||
.update(
|
||||
infraSourceConfigurationSavedObjectType,
|
||||
sourceId,
|
||||
{ ...updatedConfigurationAttributes },
|
||||
{
|
||||
version,
|
||||
}
|
||||
)
|
||||
);
|
||||
|
||||
return {
|
||||
...updatedSourceConfiguration,
|
||||
configuration: mergeSourceConfiguration(
|
||||
staticDefaultSourceConfiguration,
|
||||
updatedSourceConfiguration.configuration
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
private async getStaticDefaultSourceConfiguration() {
|
||||
const staticConfiguration = await this.libs.configuration.get();
|
||||
const staticSourceConfiguration = runtimeTypes
|
||||
.type({
|
||||
sources: runtimeTypes.type({
|
||||
default: PartialInfraSourceConfigurationRuntimeType,
|
||||
}),
|
||||
})
|
||||
.decode(staticConfiguration)
|
||||
.map(({ sources: { default: defaultConfiguration } }) => defaultConfiguration)
|
||||
.getOrElse({});
|
||||
|
||||
return mergeSourceConfiguration(defaultSourceConfiguration, staticSourceConfiguration);
|
||||
}
|
||||
|
||||
private async getSavedSourceConfiguration(request: InfraFrameworkRequest, sourceId: string) {
|
||||
const savedObjectsClient = this.libs.savedObjects.getScopedSavedObjectsClient(
|
||||
request[internalInfraFrameworkRequest]
|
||||
);
|
||||
|
||||
const savedObject = await savedObjectsClient.get(
|
||||
infraSourceConfigurationSavedObjectType,
|
||||
sourceId
|
||||
);
|
||||
|
||||
return convertSavedObjectToSavedSourceConfiguration(savedObject);
|
||||
}
|
||||
|
||||
private async getAllSavedSourceConfigurations(request: InfraFrameworkRequest) {
|
||||
const savedObjectsClient = this.libs.savedObjects.getScopedSavedObjectsClient(
|
||||
request[internalInfraFrameworkRequest]
|
||||
);
|
||||
|
||||
const savedObjects = await savedObjectsClient.find({
|
||||
type: infraSourceConfigurationSavedObjectType,
|
||||
});
|
||||
|
||||
return savedObjects.saved_objects.map(convertSavedObjectToSavedSourceConfiguration);
|
||||
}
|
||||
}
|
||||
|
||||
const mergeSourceConfiguration = (
|
||||
first: InfraSourceConfiguration,
|
||||
...others: PartialInfraSourceConfiguration[]
|
||||
) =>
|
||||
others.reduce<InfraSourceConfiguration>(
|
||||
(previousSourceConfiguration, currentSourceConfiguration) => ({
|
||||
...previousSourceConfiguration,
|
||||
...currentSourceConfiguration,
|
||||
fields: {
|
||||
...previousSourceConfiguration.fields,
|
||||
...currentSourceConfiguration.fields,
|
||||
},
|
||||
}),
|
||||
first
|
||||
);
|
||||
|
||||
const convertSavedObjectToSavedSourceConfiguration = (savedObject: unknown) =>
|
||||
InfraSavedSourceConfigurationRuntimeType.decode(savedObject)
|
||||
.map(savedSourceConfiguration => ({
|
||||
id: savedSourceConfiguration.id,
|
||||
version: savedSourceConfiguration.version,
|
||||
updatedAt: savedSourceConfiguration.updated_at,
|
||||
configuration: savedSourceConfiguration.attributes,
|
||||
}))
|
||||
.getOrElseL(errors => {
|
||||
throw new Error(failure(errors).join('\n'));
|
||||
});
|
60
x-pack/plugins/infra/server/lib/sources/types.ts
Normal file
60
x-pack/plugins/infra/server/lib/sources/types.ts
Normal file
|
@ -0,0 +1,60 @@
|
|||
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import * as runtimeTypes from 'io-ts';
import moment from 'moment';

export const TimestampFromString = new runtimeTypes.Type<number, string>(
  'TimestampFromString',
  (input): input is number => typeof input === 'number',
  (input, context) =>
    runtimeTypes.string.validate(input, context).chain(stringInput => {
      const momentValue = moment(stringInput);
      return momentValue.isValid()
        ? runtimeTypes.success(momentValue.valueOf())
        : runtimeTypes.failure(stringInput, context);
    }),
  output => new Date(output).toISOString()
);

export const InfraSourceConfigurationRuntimeType = runtimeTypes.type({
  name: runtimeTypes.string,
  description: runtimeTypes.string,
  metricAlias: runtimeTypes.string,
  logAlias: runtimeTypes.string,
  fields: runtimeTypes.type({
    container: runtimeTypes.string,
    host: runtimeTypes.string,
    pod: runtimeTypes.string,
    tiebreaker: runtimeTypes.string,
    timestamp: runtimeTypes.string,
  }),
});

export interface InfraSourceConfiguration
  extends runtimeTypes.TypeOf<typeof InfraSourceConfigurationRuntimeType> {}

export const PartialInfraSourceConfigurationRuntimeType = runtimeTypes.partial({
  ...InfraSourceConfigurationRuntimeType.props,
  fields: runtimeTypes.partial(InfraSourceConfigurationRuntimeType.props.fields.props),
});

export interface PartialInfraSourceConfiguration
  extends runtimeTypes.TypeOf<typeof PartialInfraSourceConfigurationRuntimeType> {}

export const InfraSavedSourceConfigurationRuntimeType = runtimeTypes.intersection([
  runtimeTypes.type({
    id: runtimeTypes.string,
    attributes: PartialInfraSourceConfigurationRuntimeType,
  }),
  runtimeTypes.partial({
    version: runtimeTypes.number,
    updated_at: TimestampFromString,
  }),
]);

export interface InfraSavedSourceConfiguration
  extends runtimeTypes.TypeOf<typeof InfraSavedSourceConfigurationRuntimeType> {}
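
Illustration only, not part of the diff: a minimal sketch of decoding a raw saved object with the runtime type above, assuming the Either-style io-ts / fp-ts 1.x API that the sources library also uses (decode returning an Either with fold). The raw document below is hypothetical.

// Hypothetical raw saved object, e.g. as returned by the saved objects client.
const rawSavedObject: unknown = {
  id: 'default',
  version: 2,
  updated_at: '2019-01-15T10:30:00.000Z',
  attributes: {
    name: 'NAME',
    fields: { container: 'CONTAINER' },
  },
};

// decode() returns an Either; on success, TimestampFromString has already
// converted the ISO string in `updated_at` into epoch milliseconds.
InfraSavedSourceConfigurationRuntimeType.decode(rawSavedObject).fold(
  errors => {
    throw new Error(`Invalid source configuration saved object: ${errors.length} error(s)`);
  },
  savedSourceConfiguration => {
    console.log(savedSourceConfiguration.updated_at); // 1547548200000 (a number)
    console.log(savedSourceConfiguration.attributes.name); // 'NAME'
  }
);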

11
x-pack/plugins/infra/server/saved_objects.ts
Normal file

@ -0,0 +1,11 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { infraSourceConfigurationSavedObjectMappings } from './lib/sources';

export const savedObjectMappings = {
  ...infraSourceConfigurationSavedObjectMappings,
};
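
Illustration only, not part of the diff: a hypothetical sketch of how the aggregated savedObjectMappings might be wired into the legacy plugin definition, assuming the legacy Kibana pattern of registering saved object mappings through uiExports.mappings; the actual plugin definition is not shown in this excerpt.

// Hypothetical legacy plugin definition (e.g. x-pack/plugins/infra/index.ts);
// the registration point is an assumption, not shown in this diff.
import { savedObjectMappings } from './server/saved_objects';

export const infra = (kibana: any) =>
  new kibana.Plugin({
    id: 'infra',
    uiExports: {
      // Presumed registration of the source configuration saved object mappings.
      mappings: savedObjectMappings,
    },
  });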

@ -0,0 +1,35 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

export type ElasticsearchMappingOf<Type> = Type extends string
  ? ElasticsearchStringFieldMapping
  : Type extends number
    ? ElasticsearchNumberFieldMapping
    : Type extends {}
      ? ElasticsearchObjectFieldMapping<Type>
      : never;

export interface ElasticsearchStringFieldMapping {
  type: 'keyword' | 'text';
}

export interface ElasticsearchNumberFieldMapping {
  type:
    | 'long'
    | 'integer'
    | 'short'
    | 'byte'
    | 'double'
    | 'float'
    | 'half_float'
    | 'scaled_float'
    | 'date';
}

export interface ElasticsearchObjectFieldMapping<Obj extends {}> {
  type?: 'object';
  properties: { [K in keyof Obj]-?: ElasticsearchMappingOf<Obj[K]> };
}
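
Illustration only, not part of the diff: a sketch of how ElasticsearchMappingOf lets the compiler check that a saved object mapping covers every property of a TypeScript interface, assuming the type above is in scope. The interface and mapping below are hypothetical.

// Hypothetical document shape and its statically checked Elasticsearch mapping.
interface ExampleSourceConfiguration {
  name: string;
  description: string;
  fields: {
    host: string;
    timestamp: string;
  };
}

// Omitting a property (or giving a string field a number mapping) would be a
// compile-time error, because `properties` is a mapped type over the keys of
// the interface.
const exampleMapping: ElasticsearchMappingOf<ExampleSourceConfiguration> = {
  properties: {
    name: { type: 'text' },
    description: { type: 'text' },
    fields: {
      properties: {
        host: { type: 'keyword' },
        timestamp: { type: 'keyword' },
      },
    },
  },
};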

@ -28,7 +28,7 @@ const createMockRequest = (space: Partial<Space>) => ({
});

const createMockClient = () => {
  const errors = Symbol();
  const errors = Symbol() as any;

  return {
    get: jest.fn(),
@ -48,10 +48,6 @@ const baseConfig: TestConfig = {

// Merge / extend default interfaces for hapi. This is all faked out below.
declare module 'hapi' {
  interface Server {
    savedObjects: any;
  }

  interface PluginProperties {
    spaces: any;
  }
@ -5,6 +5,7 @@
 */

import expect from 'expect.js';
import gql from 'graphql-tag';

import { sourceQuery } from '../../../../plugins/infra/public/containers/with_source/query_source.gql_query';
import { SourceQuery } from '../../../../plugins/infra/public/graphql/types';
@ -17,34 +18,388 @@ const sourcesTests: KbnTestProvider = ({ getService }) => {
  describe('sources', () => {
    before(() => esArchiver.load('infra/metrics_and_logs'));
    after(() => esArchiver.unload('infra/metrics_and_logs'));
    beforeEach(() => esArchiver.load('empty_kibana'));
    afterEach(() => esArchiver.unload('empty_kibana'));

    it('supports the redux store query', () => {
      return client
        .query<SourceQuery.Query>({
    describe('query from container', () => {
      it('returns the default source configuration when none has been saved', async () => {
        const response = await client.query<SourceQuery.Query>({
          query: sourceQuery,
          variables: {
            sourceId: 'default',
          },
        })
        .then(resp => {
          const sourceConfiguration = resp.data.source.configuration;
          const sourceStatus = resp.data.source.status;

          // shipped default values
          expect(sourceConfiguration.metricAlias).to.be('metricbeat-*');
          expect(sourceConfiguration.logAlias).to.be('filebeat-*');
          expect(sourceConfiguration.fields.container).to.be('docker.container.id');
          expect(sourceConfiguration.fields.host).to.be('host.name');
          expect(sourceConfiguration.fields.pod).to.be('kubernetes.pod.uid');

          // test data in x-pack/test/functional/es_archives/infra/data.json.gz
          expect(sourceStatus.indexFields.length).to.be(1765);
          expect(sourceStatus.logIndicesExist).to.be(true);
          expect(sourceStatus.metricIndicesExist).to.be(true);
        });

        const sourceConfiguration = response.data.source.configuration;
        const sourceStatus = response.data.source.status;

        // shipped default values
        expect(sourceConfiguration.metricAlias).to.be('metricbeat-*');
        expect(sourceConfiguration.logAlias).to.be('filebeat-*');
        expect(sourceConfiguration.fields.container).to.be('docker.container.id');
        expect(sourceConfiguration.fields.host).to.be('host.name');
        expect(sourceConfiguration.fields.pod).to.be('kubernetes.pod.uid');

        // test data in x-pack/test/functional/es_archives/infra/data.json.gz
        expect(sourceStatus.indexFields.length).to.be(1765);
        expect(sourceStatus.logIndicesExist).to.be(true);
        expect(sourceStatus.metricIndicesExist).to.be(true);
      });
    });

    describe('createSource mutation', () => {
      it('saves and returns source configurations', async () => {
        const response = await client.mutate<any>({
          mutation: createSourceMutation,
          variables: {
            source: {
              name: 'NAME',
              description: 'DESCRIPTION',
              logAlias: 'filebeat-**',
              metricAlias: 'metricbeat-**',
              fields: {
                container: 'CONTAINER',
                host: 'HOST',
                pod: 'POD',
                tiebreaker: 'TIEBREAKER',
                timestamp: 'TIMESTAMP',
              },
            },
            sourceId: 'default',
          },
        });

        const { version, updatedAt, configuration, status } =
          response.data && response.data.createSource.source;

        expect(version).to.be.greaterThan(0);
        expect(updatedAt).to.be.greaterThan(0);
        expect(configuration.name).to.be('NAME');
        expect(configuration.description).to.be('DESCRIPTION');
        expect(configuration.metricAlias).to.be('metricbeat-**');
        expect(configuration.logAlias).to.be('filebeat-**');
        expect(configuration.fields.container).to.be('CONTAINER');
        expect(configuration.fields.host).to.be('HOST');
        expect(configuration.fields.pod).to.be('POD');
        expect(configuration.fields.tiebreaker).to.be('TIEBREAKER');
        expect(configuration.fields.timestamp).to.be('TIMESTAMP');
        expect(status.logIndicesExist).to.be(true);
        expect(status.metricIndicesExist).to.be(true);
      });

      it('saves partial source configuration and returns it amended with defaults', async () => {
        const response = await client.mutate<any>({
          mutation: createSourceMutation,
          variables: {
            source: {
              name: 'NAME',
            },
            sourceId: 'default',
          },
        });

        const { version, updatedAt, configuration, status } =
          response.data && response.data.createSource.source;

        expect(version).to.be.greaterThan(0);
        expect(updatedAt).to.be.greaterThan(0);
        expect(configuration.name).to.be('NAME');
        expect(configuration.description).to.be('');
        expect(configuration.metricAlias).to.be('metricbeat-*');
        expect(configuration.logAlias).to.be('filebeat-*');
        expect(configuration.fields.container).to.be('docker.container.id');
        expect(configuration.fields.host).to.be('host.name');
        expect(configuration.fields.pod).to.be('kubernetes.pod.uid');
        expect(configuration.fields.tiebreaker).to.be('_doc');
        expect(configuration.fields.timestamp).to.be('@timestamp');
        expect(status.logIndicesExist).to.be(true);
        expect(status.metricIndicesExist).to.be(true);
      });

      it('refuses to overwrite an existing source', async () => {
        await client.mutate<any>({
          mutation: createSourceMutation,
          variables: {
            source: {
              name: 'NAME',
            },
            sourceId: 'default',
          },
        });

        await client
          .mutate<any>({
            mutation: createSourceMutation,
            variables: {
              source: {
                name: 'NAME',
              },
              sourceId: 'default',
            },
          })
          .then(
            () => {
              expect().fail('should have failed with a conflict');
            },
            err => {
              expect(err.message).to.contain('conflict');
            }
          );
      });
    });
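
Illustration only, not part of the diff: the conflict exercised by the last test above is the behaviour of Kibana's saved objects client when creating an object under an id that already exists without opting into overwriting. A minimal sketch under that assumption; the saved object type string and helper name below are hypothetical.

// Sketch: creating a saved object with a fixed id fails with a conflict error
// if an object of that type and id already exists, unless `overwrite: true`
// is passed. The createSource resolver presumably relies on this to refuse
// overwriting the 'default' source configuration.
async function createSourceConfigurationOnce(
  savedObjectsClient: {
    create: (
      type: string,
      attributes: object,
      options?: { id?: string; overwrite?: boolean }
    ) => Promise<object>;
  },
  attributes: object
) {
  try {
    return await savedObjectsClient.create(
      'infra-source-configuration', // hypothetical saved object type name
      attributes,
      { id: 'default' } // no `overwrite`, so an existing 'default' causes a conflict
    );
  } catch (error: any) {
    // Surfaced to the GraphQL client as an error message containing 'conflict'.
    throw new Error(`Failed to create source configuration: ${error.message}`);
  }
}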

    describe('deleteSource mutation', () => {
      it('deletes an existing source', async () => {
        const creationResponse = await client.mutate<any>({
          mutation: createSourceMutation,
          variables: {
            source: {
              name: 'NAME',
            },
            sourceId: 'default',
          },
        });

        const { version } = creationResponse.data && creationResponse.data.createSource.source;

        expect(version).to.be.greaterThan(0);

        const deletionResponse = await client.mutate<any>({
          mutation: deleteSourceMutation,
          variables: {
            sourceId: 'default',
          },
        });

        const { id } = deletionResponse.data && deletionResponse.data.deleteSource;

        expect(id).to.be('default');
      });
    });

    describe('updateSource mutation', () => {
      it('applies multiple updates to an existing source', async () => {
        const creationResponse = await client.mutate<any>({
          mutation: createSourceMutation,
          variables: {
            source: {
              name: 'NAME',
            },
            sourceId: 'default',
          },
        });

        const { version: initialVersion, updatedAt: createdAt } =
          creationResponse.data && creationResponse.data.createSource.source;

        expect(initialVersion).to.be.greaterThan(0);
        expect(createdAt).to.be.greaterThan(0);

        const updateResponse = await client.mutate<any>({
          mutation: updateSourceMutation,
          variables: {
            sourceId: 'default',
            changes: [
              {
                setName: {
                  name: 'UPDATED_NAME',
                },
              },
              {
                setDescription: {
                  description: 'UPDATED_DESCRIPTION',
                },
              },
              {
                setAliases: {
                  logAlias: 'filebeat-**',
                  metricAlias: 'metricbeat-**',
                },
              },
              {
                setFields: {
                  container: 'UPDATED_CONTAINER',
                  host: 'UPDATED_HOST',
                  pod: 'UPDATED_POD',
                  tiebreaker: 'UPDATED_TIEBREAKER',
                  timestamp: 'UPDATED_TIMESTAMP',
                },
              },
            ],
          },
        });

        const { version, updatedAt, configuration, status } =
          updateResponse.data && updateResponse.data.updateSource.source;

        expect(version).to.be.greaterThan(initialVersion);
        expect(updatedAt).to.be.greaterThan(createdAt);
        expect(configuration.name).to.be('UPDATED_NAME');
        expect(configuration.description).to.be('UPDATED_DESCRIPTION');
        expect(configuration.metricAlias).to.be('metricbeat-**');
        expect(configuration.logAlias).to.be('filebeat-**');
        expect(configuration.fields.container).to.be('UPDATED_CONTAINER');
        expect(configuration.fields.host).to.be('UPDATED_HOST');
        expect(configuration.fields.pod).to.be('UPDATED_POD');
        expect(configuration.fields.tiebreaker).to.be('UPDATED_TIEBREAKER');
        expect(configuration.fields.timestamp).to.be('UPDATED_TIMESTAMP');
        expect(status.logIndicesExist).to.be(true);
        expect(status.metricIndicesExist).to.be(true);
      });

      it('updates a single alias', async () => {
        const creationResponse = await client.mutate<any>({
          mutation: createSourceMutation,
          variables: {
            source: {
              name: 'NAME',
            },
            sourceId: 'default',
          },
        });

        const { version: initialVersion, updatedAt: createdAt } =
          creationResponse.data && creationResponse.data.createSource.source;

        expect(initialVersion).to.be.greaterThan(0);
        expect(createdAt).to.be.greaterThan(0);

        const updateResponse = await client.mutate<any>({
          mutation: updateSourceMutation,
          variables: {
            sourceId: 'default',
            changes: [
              {
                setAliases: {
                  metricAlias: 'metricbeat-**',
                },
              },
            ],
          },
        });

        const { version, updatedAt, configuration, status } =
          updateResponse.data && updateResponse.data.updateSource.source;

        expect(version).to.be.greaterThan(initialVersion);
        expect(updatedAt).to.be.greaterThan(createdAt);
        expect(configuration.metricAlias).to.be('metricbeat-**');
        expect(configuration.logAlias).to.be('filebeat-*');
        expect(status.logIndicesExist).to.be(true);
        expect(status.metricIndicesExist).to.be(true);
      });

      it('updates a single field', async () => {
        const creationResponse = await client.mutate<any>({
          mutation: createSourceMutation,
          variables: {
            source: {
              name: 'NAME',
            },
            sourceId: 'default',
          },
        });

        const { version: initialVersion, updatedAt: createdAt } =
          creationResponse.data && creationResponse.data.createSource.source;

        expect(initialVersion).to.be.greaterThan(0);
        expect(createdAt).to.be.greaterThan(0);

        const updateResponse = await client.mutate<any>({
          mutation: updateSourceMutation,
          variables: {
            sourceId: 'default',
            changes: [
              {
                setFields: {
                  container: 'UPDATED_CONTAINER',
                },
              },
            ],
          },
        });

        const { version, updatedAt, configuration } =
          updateResponse.data && updateResponse.data.updateSource.source;

        expect(version).to.be.greaterThan(initialVersion);
        expect(updatedAt).to.be.greaterThan(createdAt);
        expect(configuration.fields.container).to.be('UPDATED_CONTAINER');
        expect(configuration.fields.host).to.be('host.name');
        expect(configuration.fields.pod).to.be('kubernetes.pod.uid');
        expect(configuration.fields.tiebreaker).to.be('_doc');
        expect(configuration.fields.timestamp).to.be('@timestamp');
      });
    });
  });
};

// tslint:disable-next-line no-default-export
export default sourcesTests;

const createSourceMutation = gql`
  mutation createSource($sourceId: ID!, $source: CreateSourceInput!) {
    createSource(id: $sourceId, source: $source) {
      source {
        id
        version
        updatedAt
        configuration {
          name
          description
          metricAlias
          logAlias
          fields {
            container
            host
            pod
            tiebreaker
            timestamp
          }
        }
        status {
          logIndicesExist
          metricIndicesExist
        }
      }
    }
  }
`;

const deleteSourceMutation = gql`
  mutation deleteSource($sourceId: ID!) {
    deleteSource(id: $sourceId) {
      id
    }
  }
`;

const updateSourceMutation = gql`
  mutation updateSource($sourceId: ID!, $changes: [UpdateSourceInput!]!) {
    updateSource(id: $sourceId, changes: $changes) {
      source {
        id
        version
        updatedAt
        configuration {
          name
          description
          metricAlias
          logAlias
          fields {
            container
            host
            pod
            tiebreaker
            timestamp
          }
        }
        status {
          logIndicesExist
          metricIndicesExist
        }
      }
    }
  }
`;
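
Illustration only, not part of the diff: the updateSource tests above imply that the list of changes is folded into a single partial configuration before being merged with what is already saved, so untouched properties keep their previous or default values. A sketch of such a fold, with hypothetical types mirroring the GraphQL change shapes used in the tests.

// Hypothetical shape of a single change, mirroring the GraphQL inputs above.
interface SketchUpdateSourceChange {
  setName?: { name: string };
  setDescription?: { description: string };
  setAliases?: { logAlias?: string; metricAlias?: string };
  setFields?: {
    container?: string;
    host?: string;
    pod?: string;
    tiebreaker?: string;
    timestamp?: string;
  };
}

interface SketchPartialConfiguration {
  name?: string;
  description?: string;
  logAlias?: string;
  metricAlias?: string;
  fields?: SketchUpdateSourceChange['setFields'];
}

// Fold the change list into one partial configuration; later changes win,
// and field updates are merged key by key.
const foldChanges = (changes: SketchUpdateSourceChange[]): SketchPartialConfiguration =>
  changes.reduce<SketchPartialConfiguration>(
    (partial, change) => ({
      ...partial,
      ...change.setName,
      ...change.setDescription,
      ...change.setAliases,
      fields: { ...partial.fields, ...change.setFields },
    }),
    {}
  );

// foldChanges([{ setAliases: { metricAlias: 'metricbeat-**' } }])
// => { metricAlias: 'metricbeat-**', fields: {} } — logAlias stays untouched,
// so the merged result keeps the default 'filebeat-*', as asserted above.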