Mirror of https://github.com/elastic/kibana.git, synced 2025-04-24 09:48:58 -04:00
* Add shims for new platform structure. Defines both our Plugin class and the factory function.
* Simplify server init code path. We were doing some work with the server, then dropping into another function to further mutate it. I'm moving this all to the same level (initServerWithKibana) to make decoupling from Hapi simpler to follow.
* Remove unnecessary casting. Server has newPlatform on it now.
* Remove unused arguments. These had their usage removed in #36662 but their signature remained. Since we're trying to pin down the existing interface with hapi, this is just noise that should be deleted.
* Remove unneeded destructuring. This was only needed for a type assertion, originally.
* Document current interface with hapi via ServerFacade. This is everything we're using from hapi's server right now. The next step is moving what we can to the new platform and abstracting the rest behind a facade layer.
* Include NP plugin in initialization path.
  * Instantiates plugin and passes NP modules.
  * We're just passing the LoggerFactory from the init context, for now.
* Whitelist functionality from Server, pass through as ServerFacade (see the sketch after this list).
  * This will verify our facade at runtime in addition to typechecking.
  * Uses Pick and Partial to use existing types while shimming properties as we go.
* Remove redundant logging mechanism. We were logging in two different ways, but we can move to the NP version now that it's ready.
* Bind server's route function context. This was causing a test failure and needed to be bound similarly to the register function. Slight rename of variables as well.
* Type Hapi.Request usage via RequestFacade. This is everything we're currently using from Hapi.Request; as we move things away we can update this interface and let TS tell us what's broken. Remove any typing of our request payloads: these _can_ have fields like `variables` and `operationName` from GraphQL, or they might not. In the majority of cases the payload was typed as any, and to cut down on churn, and because I have no confidence in typing each individual request, we're going to make that the default.
* Inline our GraphQL Hapi plugin. As with uptime, this effectively just moves the call to server.route() into the function itself, rather than registering a Hapi plugin that ultimately does the same.
* Remove register from our list of Hapi dependencies. This was only used to register our GraphQL plugin(s), which are now inlined and use `route`.
* Invoke existing init path from plugin's setup. This isn't the final format, as we're eventually doing away with __legacy entirely, but this makes our plugin 'live' and is a step in the right direction.
* Remove usage of Pick in favor of a type assertion. The onus here should be on the shim invocation, not the plugin itself.
* Pass existing NP modules into our plugin. Another step toward the proper plugin signature. We're needlessly destructuring coreContext just to get the logger and pass it right back in. The other logger usage will be removed momentarily when we change the signature of `initServerWithKibana`. Also adds core and plugins to our setup method, which are currently unused.
* Remove dependence on newPlatform property in Server. We now get these through NP's initializerContext, so we do that in the plugin instead. The extra parameters are gross but temporary (initServerWithKibana will eventually go away), and it's much better to shrink the size of ServerFacade.
* Remove unused mocking fn/logging/tests. These are relics from the initial GraphQL implementation and are no longer used.
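For illustration only, a minimal TypeScript sketch of the whitelisting idea described above; the names here (LegacyServerSketch, ServerFacadeSketch, makeServerFacade) are hypothetical stand-ins, not the actual Kibana interfaces touched by this commit:

// Hypothetical shapes for illustration; the real code picks members from Kibana's Legacy.Server.
interface LegacyServerSketch {
  route(options: object): void;
  register(plugin: object): Promise<void>;
  log(tags: string[], message: string): void;
}

// Pick whitelists the members we still rely on; Partial marks ones still being shimmed in.
type ServerFacadeSketch = Pick<LegacyServerSketch, 'route' | 'log'> &
  Partial<Pick<LegacyServerSketch, 'register'>>;

const makeServerFacade = (server: LegacyServerSketch): ServerFacadeSketch => ({
  // bind so `this` still points at the underlying server when called through the facade
  route: server.route.bind(server),
  log: server.log.bind(server),
});

Because the facade is a plain object built from the real server, a missing member fails both the type check and at runtime, which is the verification the bullet above is after.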
* Move log statement into plugin. Makes more sense to decorate the init invocation than to log 'start' from outside and 'end' from inside.
* Fix shape of our InitializerContext. While the PluginInitializerContext type has the env property, the newPlatform object on the Server has it in a different shape.
* Ensure our request payloads are typed (see the sketch below). Rather than the free-for-all of `any`, let's instead type our payload as unknown and actually name the request types whose payloads are being reached into. A type assertion in the resolver for these requests is the secret sauce here.
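A sketch of that payload-typing approach, again with illustrative names (RequestFacadeSketch, FieldsRequestSketch) rather than the actual SIEM types: the shared facade exposes the payload as unknown, and only the resolver that knows the request's shape asserts it.

// Illustrative only: the shared request facade deliberately leaves the payload untyped.
interface RequestFacadeSketch {
  payload: unknown;
}

// A request whose payload shape this one resolver actually knows (e.g. a GraphQL POST body).
interface FieldsRequestSketch extends RequestFacadeSketch {
  payload: { variables: { defaultIndex: string[] } };
}

const resolveIndexFields = (req: RequestFacadeSketch): string[] => {
  // The resolver is the single place that narrows the request to the named type.
  const fieldsReq = req as FieldsRequestSketch;
  return fieldsReq.payload.variables.defaultIndex;
};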
This commit is contained in: parent 78af6c4b23, commit cd8fe02ed0.
24 changed files with 256 additions and 319 deletions.
@@ -8,8 +8,8 @@ import { i18n } from '@kbn/i18n';
import { resolve } from 'path';
import { Server } from 'hapi';

import KbnServer from '../../../../src/legacy/server/kbn_server';
import { initServerWithKibana } from './server/kibana.index';
import { PluginInitializerContext } from 'src/core/server';
import { plugin } from './server';
import { savedObjectMappings } from './server/saved_objects';

import {
@@ -24,9 +24,7 @@ import {
  DEFAULT_FROM,
  DEFAULT_TO,
} from './common/constants';
import { signalsAlertType } from './server/lib/detection_engine/alerts/signals_alert_type';
import { defaultIndexPattern } from './default_index_pattern';
import { isAlertExecutor } from './server/lib/detection_engine/alerts/types';

// eslint-disable-next-line @typescript-eslint/no-explicit-any
export const siem = (kibana: any) => {
@@ -125,17 +123,39 @@ export const siem = (kibana: any) => {
      mappings: savedObjectMappings,
    },
    init(server: Server) {
      const newPlatform = ((server as unknown) as KbnServer).newPlatform;
      if (server.plugins.alerting != null) {
        const type = signalsAlertType({
          logger: newPlatform.coreContext.logger.get('plugins', APP_ID),
        });
        if (isAlertExecutor(type)) {
          server.plugins.alerting.setup.registerType(type);
        }
      }
      server.injectUiAppVars('siem', async () => server.getInjectedUiAppVars('kibana'));
      initServerWithKibana(server);
      const {
        config,
        getInjectedUiAppVars,
        indexPatternsServiceFactory,
        injectUiAppVars,
        newPlatform,
        plugins,
        route,
        savedObjects,
      } = server;

      const {
        env,
        coreContext: { logger },
        setup,
      } = newPlatform;
      const initializerContext = { logger, env };

      const serverFacade = {
        config,
        getInjectedUiAppVars,
        indexPatternsServiceFactory,
        injectUiAppVars,
        plugins: { alerting: plugins.alerting, xpack_main: plugins.xpack_main },
        route: route.bind(server),
        savedObjects,
      };

      plugin(initializerContext as PluginInitializerContext).setup(
        setup.core,
        setup.plugins,
        serverFacade
      );
    },
  });
};
@@ -9,6 +9,7 @@ import { AppResolverOf, ChildResolverOf } from '../../lib/framework';
import { IndexFields } from '../../lib/index_fields';
import { SourceStatus } from '../../lib/source_status';
import { QuerySourceResolver } from '../sources/resolvers';
import { FrameworkFieldsRequest } from '../../lib/index_fields/types';

export type SourceStatusIndicesExistResolver = ChildResolverOf<
  AppResolverOf<SourceStatusResolvers.IndicesExistResolver>,
@@ -46,7 +47,7 @@ export const createSourceStatusResolvers = (libs: {
    ) {
      return [];
    }
    return libs.fields.getFields(req, args.defaultIndex);
    return libs.fields.getFields(req as FrameworkFieldsRequest, args.defaultIndex);
  },
},
});
x-pack/legacy/plugins/siem/server/index.ts (new file, 12 lines)
@@ -0,0 +1,12 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { PluginInitializerContext } from 'src/core/server';
import { Plugin } from './plugin';

export const plugin = (context: PluginInitializerContext) => {
  return new Plugin(context);
};
@@ -27,15 +27,9 @@ import { createTimelineResolvers } from './graphql/timeline';
import { createUncommonProcessesResolvers } from './graphql/uncommon_processes';
import { createWhoAmIResolvers } from './graphql/who_am_i';
import { AppBackendLibs } from './lib/types';
import { Logger } from './utils/logger';
import { createTlsResolvers } from './graphql/tls';

export interface Config {
  mocking: boolean;
  logger: Logger;
}

export const initServer = (libs: AppBackendLibs, config: Config) => {
export const initServer = (libs: AppBackendLibs) => {
  const schema = makeExecutableSchema({
    resolvers: [
      createAuthenticationsResolvers(libs) as IResolvers,
@@ -1,25 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { amMocking } from './kibana.index';

describe('kibana.index', () => {
  describe('#amMocking', () => {
    afterEach(() => delete process.env.INGEST_MOCKS);

    test('should return true when process.ENV.mocking is set to a lower case string true', () => {
      process.env.INGEST_MOCKS = 'true';
      expect(amMocking()).toEqual(true);
    });
    test('should return false when process.ENV.mocking is not set', () => {
      expect(amMocking()).toEqual(false);
    });
    test('should return false when process.ENV.mocking is not set to a lower case string (since I am picky)', () => {
      process.env.INGEST_MOCKS = 'TRUE';
      expect(amMocking()).toEqual(false);
    });
  });
});
@@ -5,41 +5,42 @@
 */

import { i18n } from '@kbn/i18n';
import { Server } from 'hapi';

import { Logger, EnvironmentMode } from 'src/core/server';
import { initServer } from './init_server';
import { compose } from './lib/compose/kibana';
import { createLogger } from './utils/logger';
import {
  noteSavedObjectType,
  pinnedEventSavedObjectType,
  timelineSavedObjectType,
} from './saved_objects';

import { signalsAlertType } from './lib/detection_engine/alerts/signals_alert_type';
import { isAlertExecutor } from './lib/detection_engine/alerts/types';
import { createSignalsRoute } from './lib/detection_engine/routes/create_signals_route';
import { readSignalsRoute } from './lib/detection_engine/routes/read_signals_route';
import { findSignalsRoute } from './lib/detection_engine/routes/find_signals_route';
import { deleteSignalsRoute } from './lib/detection_engine/routes/delete_signals_route';
import { updateSignalsRoute } from './lib/detection_engine/routes/update_signals_route';
import { ServerFacade } from './types';

const APP_ID = 'siem';

export const amMocking = (): boolean => process.env.INGEST_MOCKS === 'true';

export const initServerWithKibana = (kbnServer: Server) => {
  // bind is so "this" binds correctly to the logger since hapi server does not auto-bind its methods
  const logger = createLogger(kbnServer.log.bind(kbnServer));
  logger.info('Plugin initializing');

  const mocking = amMocking();
  if (mocking) {
    logger.info(
      `Mocks for ${APP_ID} is activated. No real ${APP_ID} data will be used, only mocks will be used.`
    );
export const initServerWithKibana = (
  kbnServer: ServerFacade,
  logger: Logger,
  mode: EnvironmentMode
) => {
  if (kbnServer.plugins.alerting != null) {
    const type = signalsAlertType({ logger });
    if (isAlertExecutor(type)) {
      kbnServer.plugins.alerting.setup.registerType(type);
    }
  }
  kbnServer.injectUiAppVars('siem', async () => kbnServer.getInjectedUiAppVars('kibana'));

  const libs = compose(kbnServer);
  initServer(libs, { mocking, logger });
  const libs = compose(kbnServer, mode);
  initServer(libs);
  if (
    kbnServer.config().has('xpack.actions.enabled') &&
    kbnServer.config().get('xpack.actions.enabled') === true &&
@@ -55,7 +56,6 @@ export const initServerWithKibana = (kbnServer: Server) => {
    deleteSignalsRoute(kbnServer);
    findSignalsRoute(kbnServer);
  }
  logger.info('Plugin done initializing');

  const xpackMainPlugin = kbnServer.plugins.xpack_main;
  xpackMainPlugin.registerFeature({
@@ -4,8 +4,8 @@
 * you may not use this file except in compliance with the Elastic License.
 */

import { Server } from 'hapi';

import { EnvironmentMode } from 'src/core/server';
import { ServerFacade } from '../../types';
import { Authentications } from '../authentications';
import { ElasticsearchAuthenticationAdapter } from '../authentications/elasticsearch_adapter';
import { KibanaConfigurationAdapter } from '../configuration/kibana_configuration_adapter';
@@ -32,9 +32,9 @@ import { Note } from '../note/saved_object';
import { PinnedEvent } from '../pinned_event/saved_object';
import { Timeline } from '../timeline/saved_object';

export function compose(server: Server): AppBackendLibs {
export function compose(server: ServerFacade, mode: EnvironmentMode): AppBackendLibs {
  const configuration = new KibanaConfigurationAdapter<Configuration>(server);
  const framework = new KibanaBackendFrameworkAdapter(server);
  const framework = new KibanaBackendFrameworkAdapter(server, mode);
  const sources = new Sources(new ConfigurationSourcesAdapter(configuration));
  const sourceStatus = new SourceStatus(new ElasticsearchSourceStatusAdapter(framework));
@@ -5,8 +5,8 @@
 */

import { schema } from '@kbn/config-schema';
import { Logger } from 'src/core/server';
import { SIGNALS_ID, DEFAULT_SIGNALS_INDEX } from '../../../../common/constants';
import { Logger } from '../../../../../../../../src/core/server';
// TODO: Remove this for the build_events_query call eventually
import { buildEventsReIndex } from './build_events_reindex';
@@ -6,7 +6,6 @@

import { get } from 'lodash/fp';

import Hapi from 'hapi';
import { SIGNALS_ID } from '../../../../common/constants';
import {
  Alert,
@@ -16,6 +15,7 @@ import {
} from '../../../../../alerting/server/types';
import { AlertsClient } from '../../../../../alerting/server/alerts_client';
import { ActionsClient } from '../../../../../actions/server/actions_client';
import { RequestFacade } from '../../../types';
import { SearchResponse } from '../../types';
import { esFilters } from '../../../../../../../../src/plugins/data/server';

@@ -89,7 +89,7 @@ export type DeleteSignalParams = Clients & {
  ruleId: string | undefined | null;
};

export interface FindSignalsRequest extends Omit<Hapi.Request, 'query'> {
export interface FindSignalsRequest extends Omit<RequestFacade, 'query'> {
  query: {
    per_page: number;
    page: number;
@@ -125,11 +125,11 @@ export type SignalAlertType = Alert & {
  alertTypeParams: AlertTypeParams;
};

export interface SignalsRequest extends Hapi.Request {
export interface SignalsRequest extends RequestFacade {
  payload: SignalAlertParamsRest;
}

export interface UpdateSignalsRequest extends Hapi.Request {
export interface UpdateSignalsRequest extends RequestFacade {
  payload: UpdateSignalAlertParamsRest;
}

@@ -164,7 +164,7 @@ export interface BulkResponse {
export type SignalSearchResponse = SearchResponse<SignalSource>;
export type SignalSourceHit = SignalSearchResponse['hits']['hits'][0];

export type QueryRequest = Omit<Hapi.Request, 'query'> & {
export type QueryRequest = Omit<RequestFacade, 'query'> & {
  query: { id: string | undefined; rule_id: string | undefined };
};
@@ -12,6 +12,7 @@ import { DETECTION_ENGINE_RULES_URL } from '../../../../common/constants';
import { createSignals } from '../alerts/create_signals';
import { SignalsRequest } from '../alerts/types';
import { createSignalsSchema } from './schemas';
import { ServerFacade } from '../../../types';
import { readSignals } from '../alerts/read_signals';
import { transformOrError } from './utils';

@@ -98,6 +99,6 @@ export const createCreateSignalsRoute: Hapi.ServerRoute = {
  },
};

export const createSignalsRoute = (server: Hapi.Server) => {
export const createSignalsRoute = (server: ServerFacade) => {
  server.route(createCreateSignalsRoute);
};
@@ -9,6 +9,7 @@ import { isFunction } from 'lodash/fp';

import { DETECTION_ENGINE_RULES_URL } from '../../../../common/constants';
import { deleteSignals } from '../alerts/delete_signals';
import { ServerFacade } from '../../../types';
import { querySignalSchema } from './schemas';
import { QueryRequest } from '../alerts/types';
import { getIdError, transformOrError } from './utils';
@@ -49,6 +50,6 @@ export const createDeleteSignalsRoute: Hapi.ServerRoute = {
  },
};

export const deleteSignalsRoute = (server: Hapi.Server): void => {
export const deleteSignalsRoute = (server: ServerFacade): void => {
  server.route(createDeleteSignalsRoute);
};
@@ -10,6 +10,7 @@ import { DETECTION_ENGINE_RULES_URL } from '../../../../common/constants';
import { findSignals } from '../alerts/find_signals';
import { FindSignalsRequest } from '../alerts/types';
import { findSignalsSchema } from './schemas';
import { ServerFacade } from '../../../types';
import { transformFindAlertsOrError } from './utils';

export const createFindSignalRoute: Hapi.ServerRoute = {
@@ -43,6 +44,6 @@ export const createFindSignalRoute: Hapi.ServerRoute = {
  },
};

export const findSignalsRoute = (server: Hapi.Server) => {
export const findSignalsRoute = (server: ServerFacade) => {
  server.route(createFindSignalRoute);
};
@@ -10,6 +10,7 @@ import { DETECTION_ENGINE_RULES_URL } from '../../../../common/constants';
import { getIdError, transformOrError } from './utils';

import { readSignals } from '../alerts/read_signals';
import { ServerFacade } from '../../../types';
import { querySignalSchema } from './schemas';
import { QueryRequest } from '../alerts/types';

@@ -46,6 +47,6 @@ export const createReadSignalsRoute: Hapi.ServerRoute = {
  },
};

export const readSignalsRoute = (server: Hapi.Server) => {
export const readSignalsRoute = (server: ServerFacade) => {
  server.route(createReadSignalsRoute);
};
@@ -10,6 +10,7 @@ import { DETECTION_ENGINE_RULES_URL } from '../../../../common/constants';
import { updateSignal } from '../alerts/update_signals';
import { UpdateSignalsRequest } from '../alerts/types';
import { updateSignalSchema } from './schemas';
import { ServerFacade } from '../../../types';
import { getIdError, transformOrError } from './utils';

export const createUpdateSignalsRoute: Hapi.ServerRoute = {
@@ -94,6 +95,6 @@ export const createUpdateSignalsRoute: Hapi.ServerRoute = {
  },
};

export const updateSignalsRoute = (server: Hapi.Server) => {
export const updateSignalsRoute = (server: ServerFacade) => {
  server.route(createUpdateSignalsRoute);
};
@@ -1,118 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import * as GraphiQL from 'apollo-server-module-graphiql';
import Boom from 'boom';
import { Plugin, Request, ResponseToolkit, RouteOptions, Server } from 'hapi';

import { GraphQLOptions, runHttpQuery } from 'apollo-server-core';

export type HapiOptionsFunction = (req: Request) => GraphQLOptions | Promise<GraphQLOptions>;

export interface HapiGraphQLPluginOptions {
  path: string;
  vhost?: string;
  route?: RouteOptions;
  graphqlOptions: GraphQLOptions | HapiOptionsFunction;
}

export const graphqlHapi: Plugin<HapiGraphQLPluginOptions> = {
  name: 'graphql-siem',
  register: (server: Server, options: HapiGraphQLPluginOptions) => {
    if (!options || !options.graphqlOptions) {
      throw new Error('Apollo Server requires options.');
    }

    server.route({
      options: options.route || {},
      handler: async (request: Request, h: ResponseToolkit) => {
        try {
          const query =
            request.method === 'post'
              ? (request.payload as Record<string, any>) // eslint-disable-line @typescript-eslint/no-explicit-any
              : (request.query as Record<string, any>); // eslint-disable-line @typescript-eslint/no-explicit-any

          const gqlResponse = await runHttpQuery([request], {
            method: request.method.toUpperCase(),
            options: options.graphqlOptions,
            query,
          });

          return h.response(gqlResponse).type('application/json');
        } catch (error) {
          if ('HttpQueryError' !== error.name) {
            const queryError = Boom.boomify(error);

            queryError.output.payload.message = error.message;

            return queryError;
          }

          if (error.isGraphQLError === true) {
            return h
              .response(error.message)
              .code(error.statusCode)
              .type('application/json');
          }

          const genericError = new Boom(error.message, { statusCode: error.statusCode });

          if (error.headers) {
            Object.keys(error.headers).forEach(header => {
              genericError.output.headers[header] = error.headers[header];
            });
          }

          // Boom hides the error when status code is 500

          genericError.output.payload.message = error.message;

          throw genericError;
        }
      },
      method: ['GET', 'POST'],
      path: options.path || '/graphql',
      vhost: options.vhost || undefined,
    });
  },
};

export type HapiGraphiQLOptionsFunction = (
  req?: Request
) => GraphiQL.GraphiQLData | Promise<GraphiQL.GraphiQLData>;

export interface HapiGraphiQLPluginOptions {
  path: string;

  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  route?: any;

  graphiqlOptions: GraphiQL.GraphiQLData | HapiGraphiQLOptionsFunction;
}

export const graphiqlHapi: Plugin<HapiGraphiQLPluginOptions> = {
  name: 'graphiql-siem',
  register: (server: Server, options: HapiGraphiQLPluginOptions) => {
    if (!options || !options.graphiqlOptions) {
      throw new Error('Apollo Server GraphiQL requires options.');
    }

    server.route({
      options: options.route || {},
      handler: async (request: Request, h: ResponseToolkit) => {
        const graphiqlString = await GraphiQL.resolveGraphiQLString(
          request.query,
          options.graphiqlOptions,
          request
        );

        return h.response(graphiqlString).type('text/html');
      },
      method: 'GET',
      path: options.path || '/graphiql',
    });
  },
};
@@ -5,16 +5,14 @@
 */

import { GenericParams } from 'elasticsearch';
import * as GraphiQL from 'apollo-server-module-graphiql';
import Boom from 'boom';
import { ResponseToolkit } from 'hapi';
import { EnvironmentMode } from 'kibana/public';
import { GraphQLSchema } from 'graphql';
import { Legacy } from 'kibana';
import { runHttpQuery } from 'apollo-server-core';
import { ServerFacade, RequestFacade } from '../../types';

import {
  graphiqlHapi,
  graphqlHapi,
  HapiGraphiQLPluginOptions,
  HapiGraphQLPluginOptions,
} from './apollo_server_hapi';
import {
  FrameworkAdapter,
  FrameworkIndexPatternsService,
@@ -31,13 +29,13 @@ export class KibanaBackendFrameworkAdapter implements FrameworkAdapter {
  public version: string;
  public envMode: EnvironmentMode;

  constructor(private server: Legacy.Server) {
  constructor(private server: ServerFacade, mode: EnvironmentMode) {
    this.version = server.config().get('pkg.version');
    this.envMode = server.newPlatform.env.mode;
    this.envMode = mode;
  }

  public async callWithRequest(
    req: FrameworkRequest<Legacy.Request>,
    req: FrameworkRequest,
    endpoint: string,
    params: CallWithRequestParams,
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
@@ -79,40 +77,87 @@ export class KibanaBackendFrameworkAdapter implements FrameworkAdapter {
  }

  public registerGraphQLEndpoint(routePath: string, schema: GraphQLSchema): void {
    this.server.register<HapiGraphQLPluginOptions>({
    this.server.route({
      options: {
        graphqlOptions: (req: Legacy.Request) => ({
          context: { req: wrapRequest(req) },
          schema,
        }),
        path: routePath,
        route: {
          tags: ['access:siem'],
        },
        tags: ['access:siem'],
      },
      plugin: graphqlHapi,
      handler: async (request: RequestFacade, h: ResponseToolkit) => {
        try {
          const query =
            request.method === 'post'
              ? (request.payload as Record<string, any>) // eslint-disable-line @typescript-eslint/no-explicit-any
              : (request.query as Record<string, any>); // eslint-disable-line @typescript-eslint/no-explicit-any

          const gqlResponse = await runHttpQuery([request], {
            method: request.method.toUpperCase(),
            options: (req: RequestFacade) => ({
              context: { req: wrapRequest(req) },
              schema,
            }),

            query,
          });

          return h.response(gqlResponse).type('application/json');
        } catch (error) {
          if ('HttpQueryError' !== error.name) {
            const queryError = Boom.boomify(error);

            queryError.output.payload.message = error.message;

            return queryError;
          }

          if (error.isGraphQLError === true) {
            return h
              .response(error.message)
              .code(error.statusCode)
              .type('application/json');
          }

          const genericError = new Boom(error.message, { statusCode: error.statusCode });

          if (error.headers) {
            Object.keys(error.headers).forEach(header => {
              genericError.output.headers[header] = error.headers[header];
            });
          }

          // Boom hides the error when status code is 500
          genericError.output.payload.message = error.message;

          throw genericError;
        }
      },
      method: ['GET', 'POST'],
      path: routePath,
      vhost: undefined,
    });

    if (!this.envMode.prod) {
      this.server.register<HapiGraphiQLPluginOptions>({
      this.server.route({
        options: {
          graphiqlOptions: {
            endpointURL: routePath,
            passHeader: `'kbn-version': '${this.version}'`,
          },
          path: `${routePath}/graphiql`,
          route: {
            tags: ['access:siem'],
          },
          tags: ['access:siem'],
        },
        plugin: graphiqlHapi,
        handler: async (request: RequestFacade, h: ResponseToolkit) => {
          const graphiqlString = await GraphiQL.resolveGraphiQLString(
            request.query,
            {
              endpointURL: routePath,
              passHeader: `'kbn-version': '${this.version}'`,
            },
            request
          );

          return h.response(graphiqlString).type('text/html');
        },
        method: 'GET',
        path: `${routePath}/graphiql`,
      });
    }
  }

  public getIndexPatternsService(
    request: FrameworkRequest<Legacy.Request>
  ): FrameworkIndexPatternsService {
  public getIndexPatternsService(request: FrameworkRequest): FrameworkIndexPatternsService {
    return this.server.indexPatternsServiceFactory({
      // eslint-disable-next-line @typescript-eslint/no-explicit-any
      callCluster: async (method: string, args: [GenericParams], ...rest: any[]) => {
@@ -17,6 +17,7 @@ import {
  SourceConfiguration,
  TimerangeInput,
} from '../../graphql/types';
import { RequestFacade } from '../../types';

export * from '../../utils/typed_resolvers';

@@ -51,12 +52,11 @@ export interface FrameworkAdapter {
    method: 'indices.getAlias' | 'indices.get', // eslint-disable-line
    options?: object
  ): Promise<DatabaseGetIndicesResponse>;
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  getIndexPatternsService(req: FrameworkRequest<any>): FrameworkIndexPatternsService;
  getIndexPatternsService(req: FrameworkRequest): FrameworkIndexPatternsService;
  getSavedObjectsService(): Legacy.SavedObjectsService;
}

export interface FrameworkRequest<InternalRequest extends WrappableRequest = WrappableRequest> {
export interface FrameworkRequest<InternalRequest extends WrappableRequest = RequestFacade> {
  [internalFrameworkRequest]: InternalRequest;
  payload: InternalRequest['payload'];
  params: InternalRequest['params'];
@@ -14,16 +14,18 @@ import {
  hasDocumentation,
  IndexAlias,
} from '../../utils/beat_schema';
import { FrameworkAdapter, FrameworkRequest } from '../framework';

import { FieldsAdapter, IndexFieldDescriptor } from './types';
import { FrameworkAdapter } from '../framework';
import { FieldsAdapter, IndexFieldDescriptor, FrameworkFieldsRequest } from './types';

type IndexesAliasIndices = Record<string, string[]>;

export class ElasticsearchIndexFieldAdapter implements FieldsAdapter {
  constructor(private readonly framework: FrameworkAdapter) {}

  public async getIndexFields(request: FrameworkRequest, indices: string[]): Promise<IndexField[]> {
  public async getIndexFields(
    request: FrameworkFieldsRequest,
    indices: string[]
  ): Promise<IndexField[]> {
    const indexPatternsService = this.framework.getIndexPatternsService(request);
    const indexesAliasIndices: IndexesAliasIndices = indices.reduce(
      (accumulator: IndexesAliasIndices, indice: string) => {
@@ -5,15 +5,17 @@
 */

import { IndexField } from '../../graphql/types';
import { FrameworkRequest } from '../framework';

import { FieldsAdapter } from './types';
import { FieldsAdapter, FrameworkFieldsRequest } from './types';
export { ElasticsearchIndexFieldAdapter } from './elasticsearch_adapter';

export class IndexFields {
  constructor(private readonly adapter: FieldsAdapter) {}

  public async getFields(request: FrameworkRequest, defaultIndex: string[]): Promise<IndexField[]> {
  public async getFields(
    request: FrameworkFieldsRequest,
    defaultIndex: string[]
  ): Promise<IndexField[]> {
    return this.adapter.getIndexFields(request, defaultIndex);
  }
}
@@ -6,9 +6,20 @@

import { IndexField } from '../../graphql/types';
import { FrameworkRequest } from '../framework';
import { RequestFacade } from '../../types';

type IndexFieldsRequest = RequestFacade & {
  payload: {
    variables: {
      defaultIndex: string[];
    };
  };
};

export type FrameworkFieldsRequest = FrameworkRequest<IndexFieldsRequest>;

export interface FieldsAdapter {
  getIndexFields(req: FrameworkRequest, indices: string[]): Promise<IndexField[]>;
  getIndexFields(req: FrameworkFieldsRequest, indices: string[]): Promise<IndexField[]>;
}

export interface IndexFieldDescriptor {
x-pack/legacy/plugins/siem/server/plugin.ts (new file, 30 lines)
@@ -0,0 +1,30 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { CoreSetup, EnvironmentMode, PluginInitializerContext, Logger } from 'src/core/server';
import { ServerFacade } from './types';
import { initServerWithKibana } from './kibana.index';

export class Plugin {
  name = 'siem';
  private mode: EnvironmentMode;
  private logger: Logger;

  constructor({ env, logger }: PluginInitializerContext) {
    this.logger = logger.get('plugins', this.name);
    this.mode = env.mode;

    this.logger.info('NP plugin initialized');
  }

  public setup(core: CoreSetup, dependencies: {}, __legacy: ServerFacade) {
    this.logger.info('NP plugin setup');

    initServerWithKibana(__legacy, this.logger, this.mode);

    this.logger.info('NP plugin setup complete');
  }
}
x-pack/legacy/plugins/siem/server/types.ts (new file, 35 lines)
@@ -0,0 +1,35 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { Legacy } from 'kibana';

export interface ServerFacade {
  config: Legacy.Server['config'];
  getInjectedUiAppVars: Legacy.Server['getInjectedUiAppVars'];
  indexPatternsServiceFactory: Legacy.Server['indexPatternsServiceFactory'];
  injectUiAppVars: Legacy.Server['injectUiAppVars'];
  plugins: {
    alerting?: Legacy.Server['plugins']['alerting'];
    xpack_main: Legacy.Server['plugins']['xpack_main'];
  };
  route: Legacy.Server['route'];
  savedObjects: Legacy.Server['savedObjects'];
}

export interface RequestFacade {
  auth: Legacy.Request['auth'];
  getAlertsClient?: Legacy.Request['getAlertsClient'];
  getActionsClient?: Legacy.Request['getActionsClient'];
  getUiSettingsService: Legacy.Request['getUiSettingsService'];
  headers: Legacy.Request['headers'];
  method: Legacy.Request['method'];
  params: Legacy.Request['params'];
  payload: unknown;
  query: Legacy.Request['query'];
  server: {
    plugins: { elasticsearch: Legacy.Request['server']['plugins']['elasticsearch'] };
  };
}
@@ -1,47 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { createLogger } from './logger';

const APP_ID = 'siem';

const createMockHapiLogger = () => jest.fn();

describe('logger', () => {
  describe('#createLogger', () => {
    test('should log out debug', () => {
      const hapiLogger = createMockHapiLogger();
      const logger = createLogger(hapiLogger);
      logger.debug('debug information');
      expect(hapiLogger.mock.calls[0][0]).toEqual(['debug', APP_ID]);
      expect(hapiLogger.mock.calls[0][1]).toEqual('debug information');
    });

    test('should log out info', () => {
      const hapiLogger = createMockHapiLogger();
      const logger = createLogger(hapiLogger);
      logger.info('info information');
      expect(hapiLogger.mock.calls[0][0]).toEqual(['info', APP_ID]);
      expect(hapiLogger.mock.calls[0][1]).toEqual('info information');
    });

    test('should log out warn', () => {
      const hapiLogger = createMockHapiLogger();
      const logger = createLogger(hapiLogger);
      logger.warn('warn information');
      expect(hapiLogger.mock.calls[0][0]).toEqual(['warning', APP_ID]);
      expect(hapiLogger.mock.calls[0][1]).toEqual('warn information');
    });

    test('should log out error', () => {
      const hapiLogger = createMockHapiLogger();
      const logger = createLogger(hapiLogger);
      logger.error('error information');
      expect(hapiLogger.mock.calls[0][0]).toEqual(['error', APP_ID]);
      expect(hapiLogger.mock.calls[0][1]).toEqual('error information');
    });
  });
});
@@ -1,30 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

const LOGGING_TAGS = ['siem'];

// Definition is from:
// https://github.com/DefinitelyTyped/DefinitelyTyped/blob/master/types/hapi/v16/index.d.ts#L318
export type HapiLogger = (
  tags: string | string[],
  // eslint-disable-next-line
  data?: string | Object | Function,
  timestamp?: number
) => void;

export interface Logger {
  debug: (message: string) => void;
  info: (message: string) => void;
  warn: (message: string) => void;
  error: (message: string) => void;
}

export const createLogger = (logger: HapiLogger): Readonly<Logger> => ({
  debug: (message: string) => logger(['debug', ...LOGGING_TAGS], message),
  info: (message: string) => logger(['info', ...LOGGING_TAGS], message),
  warn: (message: string) => logger(['warning', ...LOGGING_TAGS], message),
  error: (message: string) => logger(['error', ...LOGGING_TAGS], message),
});