mirror of
https://github.com/elastic/kibana.git
synced 2025-04-23 17:28:26 -04:00
[Security Solution] add initial workflow insights service (#199606)
## Summary Adds a SecurityWorkflowInsightsService that is set up during security solution plugin initialization. The service setup installs the component templates, index template, and datastream used by the service. Depends on: - https://github.com/elastic/elasticsearch/pull/116485 ### Checklist - [x] [Unit or functional tests](https://www.elastic.co/guide/en/kibana/master/development-tests.html) were updated or added to match the most common scenarios ### For maintainers - [x] This was checked for breaking API changes and was [labeled appropriately](https://www.elastic.co/guide/en/kibana/master/contributing.html#_add_your_labels) Co-authored-by: Konrad Szwarc <konrad.szwarc@elastic.co>
This commit is contained in:
parent
655cb79333
commit
2e004f867f
9 changed files with 665 additions and 1 deletions
|
@ -9,4 +9,5 @@ export * from './artifacts';
|
|||
export * from './actions';
|
||||
export * from './agent';
|
||||
export * from './artifacts_exception_list';
|
||||
export * from './workflow_insights';
|
||||
export type { FeatureKeys } from './feature_usage';
|
||||
|
|
|
@ -0,0 +1,14 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
export const DATA_STREAM_PREFIX = '.security-workflow-insights';
|
||||
export const COMPONENT_TEMPLATE_NAME = `${DATA_STREAM_PREFIX}-component-template`;
|
||||
export const INDEX_TEMPLATE_NAME = `${DATA_STREAM_PREFIX}-index-template`;
|
||||
export const INGEST_PIPELINE_NAME = `${DATA_STREAM_PREFIX}-ingest-pipeline`;
|
||||
export const DATA_STREAM_NAME = `${DATA_STREAM_PREFIX}-default`;
|
||||
|
||||
export const TOTAL_FIELDS_LIMIT = 2000;
|
|
@ -0,0 +1,14 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { EndpointError } from '../../../../common/endpoint/errors';
|
||||
|
||||
export class SecurityWorkflowInsightsFailedInitialized extends EndpointError {
|
||||
constructor(msg: string) {
|
||||
super(`security workflow insights service failed to initialize with error: ${msg}`);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,183 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
import type { FieldMap } from '@kbn/data-stream-adapter';
|
||||
|
||||
export const securityWorkflowInsightsFieldMap: FieldMap = {
|
||||
'@timestamp': {
|
||||
type: 'date',
|
||||
array: false,
|
||||
required: true,
|
||||
},
|
||||
message: {
|
||||
type: 'text',
|
||||
array: false,
|
||||
required: true,
|
||||
},
|
||||
// endpoint or other part of security
|
||||
category: {
|
||||
type: 'keyword',
|
||||
array: false,
|
||||
required: true,
|
||||
},
|
||||
// incompatible_virus, noisy_process_tree, etc
|
||||
type: {
|
||||
type: 'keyword',
|
||||
array: false,
|
||||
required: true,
|
||||
},
|
||||
// the creator of the insight
|
||||
source: {
|
||||
type: 'nested',
|
||||
array: false,
|
||||
required: true,
|
||||
},
|
||||
// kibana-insight-task, llm-request-id, etc
|
||||
'source.id': {
|
||||
type: 'keyword',
|
||||
array: false,
|
||||
required: true,
|
||||
},
|
||||
// endpoint, kibana, llm, etc
|
||||
'source.type': {
|
||||
type: 'keyword',
|
||||
array: false,
|
||||
required: true,
|
||||
},
|
||||
// starting timestamp of the source data used to generate the insight
|
||||
'source.data_range_start': {
|
||||
type: 'date',
|
||||
array: false,
|
||||
required: true,
|
||||
},
|
||||
// ending timestamp of the source data used to generate the insight
|
||||
'source.data_range_end': {
|
||||
type: 'date',
|
||||
array: false,
|
||||
required: true,
|
||||
},
|
||||
// the target that this insight is created for
|
||||
target: {
|
||||
type: 'nested',
|
||||
array: false,
|
||||
required: true,
|
||||
},
|
||||
// endpoint, policy, etc
|
||||
'target.id': {
|
||||
type: 'keyword',
|
||||
array: true,
|
||||
required: true,
|
||||
},
|
||||
// endpoint ids, policy ids, etc
|
||||
'target.type': {
|
||||
type: 'keyword',
|
||||
array: false,
|
||||
required: true,
|
||||
},
|
||||
// latest action taken on the insight
|
||||
action: {
|
||||
type: 'nested',
|
||||
array: false,
|
||||
required: true,
|
||||
},
|
||||
// refreshed, remediated, suppressed, dismissed
|
||||
'action.type': {
|
||||
type: 'keyword',
|
||||
array: false,
|
||||
required: true,
|
||||
},
|
||||
'action.timestamp': {
|
||||
type: 'date',
|
||||
array: false,
|
||||
required: true,
|
||||
},
|
||||
// unique key for this insight, used for deduplicating insights.
|
||||
// ie. crowdstrike or windows_defender
|
||||
value: {
|
||||
type: 'keyword',
|
||||
array: false,
|
||||
required: true,
|
||||
},
|
||||
// suggested remediation for insight
|
||||
remediation: {
|
||||
type: 'object',
|
||||
array: false,
|
||||
required: true,
|
||||
},
|
||||
// if remediation includes exception list items
|
||||
'remediation.exception_list_items': {
|
||||
type: 'object',
|
||||
array: true,
|
||||
required: false,
|
||||
},
|
||||
'remediation.exception_list_items.list_id': {
|
||||
type: 'keyword',
|
||||
array: false,
|
||||
required: true,
|
||||
},
|
||||
'remediation.exception_list_items.name': {
|
||||
type: 'text',
|
||||
array: false,
|
||||
required: true,
|
||||
},
|
||||
'remediation.exception_list_items.description': {
|
||||
type: 'text',
|
||||
array: false,
|
||||
required: false,
|
||||
},
|
||||
'remediation.exception_list_items.entries': {
|
||||
type: 'object',
|
||||
array: true,
|
||||
required: true,
|
||||
},
|
||||
'remediation.exception_list_items.entries.field': {
|
||||
type: 'keyword',
|
||||
array: false,
|
||||
required: true,
|
||||
},
|
||||
'remediation.exception_list_items.entries.operator': {
|
||||
type: 'keyword',
|
||||
array: false,
|
||||
required: true,
|
||||
},
|
||||
'remediation.exception_list_items.entries.type': {
|
||||
type: 'keyword',
|
||||
array: false,
|
||||
required: true,
|
||||
},
|
||||
'remediation.exception_list_items.entries.value': {
|
||||
type: 'text',
|
||||
array: false,
|
||||
required: true,
|
||||
},
|
||||
'remediation.exception_list_items.tags': {
|
||||
type: 'keyword',
|
||||
array: true,
|
||||
required: true,
|
||||
},
|
||||
'remediation.exception_list_items.os_types': {
|
||||
type: 'keyword',
|
||||
array: true,
|
||||
required: true,
|
||||
},
|
||||
metadata: {
|
||||
type: 'object',
|
||||
array: false,
|
||||
required: true,
|
||||
},
|
||||
// optional KV for notes
|
||||
'metadata.notes': {
|
||||
type: 'object',
|
||||
array: false,
|
||||
required: false,
|
||||
},
|
||||
// optional i8n variables
|
||||
'metadata.message_variables': {
|
||||
type: 'text',
|
||||
array: true,
|
||||
required: false,
|
||||
},
|
||||
} as const;
|
|
@ -0,0 +1,80 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import type { ElasticsearchClient } from '@kbn/core/server';
|
||||
import { elasticsearchServiceMock } from '@kbn/core-elasticsearch-server-mocks';
|
||||
import { DataStreamSpacesAdapter } from '@kbn/data-stream-adapter';
|
||||
import { kibanaPackageJson } from '@kbn/repo-info';
|
||||
|
||||
import { createDatastream, createPipeline } from './helpers';
|
||||
import {
|
||||
DATA_STREAM_PREFIX,
|
||||
COMPONENT_TEMPLATE_NAME,
|
||||
INDEX_TEMPLATE_NAME,
|
||||
INGEST_PIPELINE_NAME,
|
||||
TOTAL_FIELDS_LIMIT,
|
||||
} from './constants';
|
||||
import { securityWorkflowInsightsFieldMap } from './field_map_configurations';
|
||||
|
||||
jest.mock('@kbn/data-stream-adapter', () => ({
|
||||
DataStreamSpacesAdapter: jest.fn().mockImplementation(() => ({
|
||||
setComponentTemplate: jest.fn(),
|
||||
setIndexTemplate: jest.fn(),
|
||||
})),
|
||||
}));
|
||||
|
||||
describe('helpers', () => {
|
||||
describe('createDatastream', () => {
|
||||
it('should create a DataStreamSpacesAdapter with the correct configuration', () => {
|
||||
const kibanaVersion = kibanaPackageJson.version;
|
||||
const ds = createDatastream(kibanaVersion);
|
||||
|
||||
expect(DataStreamSpacesAdapter).toHaveBeenCalledTimes(1);
|
||||
expect(DataStreamSpacesAdapter).toHaveBeenCalledWith(DATA_STREAM_PREFIX, {
|
||||
kibanaVersion,
|
||||
totalFieldsLimit: TOTAL_FIELDS_LIMIT,
|
||||
});
|
||||
expect(ds.setComponentTemplate).toHaveBeenCalledTimes(1);
|
||||
expect(ds.setComponentTemplate).toHaveBeenCalledWith({
|
||||
name: COMPONENT_TEMPLATE_NAME,
|
||||
fieldMap: securityWorkflowInsightsFieldMap,
|
||||
});
|
||||
expect(ds.setIndexTemplate).toHaveBeenCalledTimes(1);
|
||||
expect(ds.setIndexTemplate).toHaveBeenCalledWith({
|
||||
name: INDEX_TEMPLATE_NAME,
|
||||
componentTemplateRefs: [COMPONENT_TEMPLATE_NAME],
|
||||
template: {
|
||||
settings: {
|
||||
default_pipeline: INGEST_PIPELINE_NAME,
|
||||
},
|
||||
},
|
||||
hidden: true,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('createPipeline', () => {
|
||||
let esClient: ElasticsearchClient;
|
||||
|
||||
beforeEach(() => {
|
||||
esClient = elasticsearchServiceMock.createElasticsearchClient();
|
||||
});
|
||||
|
||||
it('should create an ingest pipeline with the correct configuration', async () => {
|
||||
await createPipeline(esClient);
|
||||
|
||||
expect(esClient.ingest.putPipeline).toHaveBeenCalledTimes(1);
|
||||
expect(esClient.ingest.putPipeline).toHaveBeenCalledWith({
|
||||
id: INGEST_PIPELINE_NAME,
|
||||
processors: [],
|
||||
_meta: {
|
||||
managed: true,
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
|
@ -0,0 +1,62 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import type { ElasticsearchClient } from '@kbn/core/server';
|
||||
|
||||
import { DataStreamSpacesAdapter } from '@kbn/data-stream-adapter';
|
||||
|
||||
import {
|
||||
COMPONENT_TEMPLATE_NAME,
|
||||
DATA_STREAM_PREFIX,
|
||||
INDEX_TEMPLATE_NAME,
|
||||
INGEST_PIPELINE_NAME,
|
||||
TOTAL_FIELDS_LIMIT,
|
||||
} from './constants';
|
||||
import { securityWorkflowInsightsFieldMap } from './field_map_configurations';
|
||||
|
||||
export function createDatastream(kibanaVersion: string): DataStreamSpacesAdapter {
|
||||
const ds = new DataStreamSpacesAdapter(DATA_STREAM_PREFIX, {
|
||||
kibanaVersion,
|
||||
totalFieldsLimit: TOTAL_FIELDS_LIMIT,
|
||||
});
|
||||
ds.setComponentTemplate({
|
||||
name: COMPONENT_TEMPLATE_NAME,
|
||||
fieldMap: securityWorkflowInsightsFieldMap,
|
||||
});
|
||||
ds.setIndexTemplate({
|
||||
name: INDEX_TEMPLATE_NAME,
|
||||
componentTemplateRefs: [COMPONENT_TEMPLATE_NAME],
|
||||
template: {
|
||||
settings: {
|
||||
default_pipeline: INGEST_PIPELINE_NAME,
|
||||
},
|
||||
},
|
||||
hidden: true,
|
||||
});
|
||||
return ds;
|
||||
}
|
||||
|
||||
export async function createPipeline(esClient: ElasticsearchClient): Promise<boolean> {
|
||||
const response = await esClient.ingest.putPipeline({
|
||||
id: INGEST_PIPELINE_NAME,
|
||||
processors: [
|
||||
// requires @elastic/elasticsearch 8.16.0
|
||||
// {
|
||||
// fingerprint: {
|
||||
// fields: ['type', 'category', 'value', 'target.type', 'target.id'],
|
||||
// target_field: '_id',
|
||||
// method: 'SHA-256',
|
||||
// if: 'ctx._id == null',
|
||||
// },
|
||||
// },
|
||||
],
|
||||
_meta: {
|
||||
managed: true,
|
||||
},
|
||||
});
|
||||
return response.acknowledged;
|
||||
}
|
|
@ -0,0 +1,163 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { ReplaySubject } from 'rxjs';
|
||||
|
||||
import type { ElasticsearchClient, Logger } from '@kbn/core/server';
|
||||
import { elasticsearchServiceMock } from '@kbn/core-elasticsearch-server-mocks';
|
||||
import { DataStreamSpacesAdapter } from '@kbn/data-stream-adapter';
|
||||
import { loggerMock } from '@kbn/logging-mocks';
|
||||
import { kibanaPackageJson } from '@kbn/repo-info';
|
||||
|
||||
import { createDatastream, createPipeline } from './helpers';
|
||||
import { securityWorkflowInsightsService } from '.';
|
||||
import { DATA_STREAM_NAME } from './constants';
|
||||
|
||||
jest.mock('./helpers', () => ({
|
||||
createDatastream: jest.fn(),
|
||||
createPipeline: jest.fn(),
|
||||
}));
|
||||
|
||||
describe('SecurityWorkflowInsightsService', () => {
|
||||
let logger: Logger;
|
||||
let esClient: ElasticsearchClient;
|
||||
|
||||
beforeEach(() => {
|
||||
logger = loggerMock.create();
|
||||
esClient = elasticsearchServiceMock.createElasticsearchClient();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
describe('setup', () => {
|
||||
it('should set up the data stream', () => {
|
||||
const createDatastreamMock = createDatastream as jest.Mock;
|
||||
createDatastreamMock.mockReturnValueOnce(
|
||||
new DataStreamSpacesAdapter(DATA_STREAM_NAME, {
|
||||
kibanaVersion: kibanaPackageJson.version,
|
||||
})
|
||||
);
|
||||
|
||||
securityWorkflowInsightsService.setup({
|
||||
kibanaVersion: kibanaPackageJson.version,
|
||||
logger,
|
||||
isFeatureEnabled: true,
|
||||
});
|
||||
|
||||
expect(createDatastreamMock).toHaveBeenCalledTimes(1);
|
||||
expect(createDatastreamMock).toHaveBeenCalledWith(kibanaPackageJson.version);
|
||||
});
|
||||
|
||||
it('should log a warning if createDatastream throws an error', () => {
|
||||
const createDatastreamMock = createDatastream as jest.Mock;
|
||||
createDatastreamMock.mockImplementation(() => {
|
||||
throw new Error('test error');
|
||||
});
|
||||
|
||||
securityWorkflowInsightsService.setup({
|
||||
kibanaVersion: kibanaPackageJson.version,
|
||||
logger,
|
||||
isFeatureEnabled: true,
|
||||
});
|
||||
|
||||
expect(logger.warn).toHaveBeenCalledTimes(1);
|
||||
expect(logger.warn).toHaveBeenCalledWith(expect.stringContaining('test error'));
|
||||
});
|
||||
});
|
||||
|
||||
describe('start', () => {
|
||||
it('should start the service', async () => {
|
||||
const createDatastreamMock = createDatastream as jest.Mock;
|
||||
const ds = new DataStreamSpacesAdapter(DATA_STREAM_NAME, {
|
||||
kibanaVersion: kibanaPackageJson.version,
|
||||
});
|
||||
const dsInstallSpy = jest.spyOn(ds, 'install');
|
||||
dsInstallSpy.mockResolvedValueOnce();
|
||||
createDatastreamMock.mockReturnValueOnce(ds);
|
||||
const createPipelineMock = createPipeline as jest.Mock;
|
||||
createPipelineMock.mockResolvedValueOnce(true);
|
||||
const createDataStreamMock = esClient.indices.createDataStream as jest.Mock;
|
||||
|
||||
securityWorkflowInsightsService.setup({
|
||||
kibanaVersion: kibanaPackageJson.version,
|
||||
logger,
|
||||
isFeatureEnabled: true,
|
||||
});
|
||||
expect(createDatastreamMock).toHaveBeenCalledTimes(1);
|
||||
expect(createDatastreamMock).toHaveBeenCalledWith(kibanaPackageJson.version);
|
||||
|
||||
await securityWorkflowInsightsService.start({ esClient });
|
||||
|
||||
expect(createPipelineMock).toHaveBeenCalledTimes(1);
|
||||
expect(createPipelineMock).toHaveBeenCalledWith(esClient);
|
||||
expect(dsInstallSpy).toHaveBeenCalledTimes(1);
|
||||
expect(dsInstallSpy).toHaveBeenCalledWith({
|
||||
logger,
|
||||
esClient,
|
||||
pluginStop$: expect.any(ReplaySubject),
|
||||
});
|
||||
expect(createDataStreamMock).toHaveBeenCalledTimes(1);
|
||||
expect(createDataStreamMock).toHaveBeenCalledWith({ name: DATA_STREAM_NAME });
|
||||
});
|
||||
|
||||
it('should log a warning if createPipeline or ds.install throws an error', async () => {
|
||||
securityWorkflowInsightsService.setup({
|
||||
kibanaVersion: kibanaPackageJson.version,
|
||||
logger,
|
||||
isFeatureEnabled: true,
|
||||
});
|
||||
|
||||
const createPipelineMock = createPipeline as jest.Mock;
|
||||
createPipelineMock.mockImplementationOnce(() => {
|
||||
throw new Error('test error');
|
||||
});
|
||||
|
||||
await securityWorkflowInsightsService.start({ esClient });
|
||||
|
||||
expect(logger.warn).toHaveBeenCalledTimes(2);
|
||||
expect(logger.warn).toHaveBeenNthCalledWith(1, expect.stringContaining('test error'));
|
||||
});
|
||||
});
|
||||
|
||||
describe('create', () => {
|
||||
it('should wait for initialization', async () => {
|
||||
const isInitializedSpy = jest
|
||||
.spyOn(securityWorkflowInsightsService, 'isInitialized', 'get')
|
||||
.mockResolvedValueOnce([undefined, undefined]);
|
||||
|
||||
await securityWorkflowInsightsService.create();
|
||||
|
||||
expect(isInitializedSpy).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('update', () => {
|
||||
it('should wait for initialization', async () => {
|
||||
const isInitializedSpy = jest
|
||||
.spyOn(securityWorkflowInsightsService, 'isInitialized', 'get')
|
||||
.mockResolvedValueOnce([undefined, undefined]);
|
||||
|
||||
await securityWorkflowInsightsService.update();
|
||||
|
||||
expect(isInitializedSpy).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('fetch', () => {
|
||||
it('should wait for initialization', async () => {
|
||||
const isInitializedSpy = jest
|
||||
.spyOn(securityWorkflowInsightsService, 'isInitialized', 'get')
|
||||
.mockResolvedValueOnce([undefined, undefined]);
|
||||
|
||||
await securityWorkflowInsightsService.fetch();
|
||||
|
||||
expect(isInitializedSpy).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
});
|
||||
});
|
|
@ -0,0 +1,130 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { ReplaySubject, firstValueFrom, combineLatest } from 'rxjs';
|
||||
|
||||
import type { ElasticsearchClient, Logger } from '@kbn/core/server';
|
||||
|
||||
import type { DataStreamSpacesAdapter } from '@kbn/data-stream-adapter';
|
||||
|
||||
import { SecurityWorkflowInsightsFailedInitialized } from './errors';
|
||||
import { createDatastream, createPipeline } from './helpers';
|
||||
import { DATA_STREAM_NAME } from './constants';
|
||||
|
||||
interface SetupInterface {
|
||||
kibanaVersion: string;
|
||||
logger: Logger;
|
||||
isFeatureEnabled: boolean;
|
||||
}
|
||||
|
||||
interface StartInterface {
|
||||
esClient: ElasticsearchClient;
|
||||
}
|
||||
|
||||
class SecurityWorkflowInsightsService {
|
||||
private setup$ = new ReplaySubject<void>(1);
|
||||
private start$ = new ReplaySubject<void>(1);
|
||||
private stop$ = new ReplaySubject<void>(1);
|
||||
private ds: DataStreamSpacesAdapter | undefined;
|
||||
// private _esClient: ElasticsearchClient | undefined;
|
||||
private _logger: Logger | undefined;
|
||||
private _isInitialized: Promise<[void, void]> = firstValueFrom(
|
||||
combineLatest<[void, void]>([this.setup$, this.start$])
|
||||
);
|
||||
private isFeatureEnabled = false;
|
||||
|
||||
public get isInitialized() {
|
||||
return this._isInitialized;
|
||||
}
|
||||
|
||||
public setup({ kibanaVersion, logger, isFeatureEnabled }: SetupInterface) {
|
||||
this.isFeatureEnabled = isFeatureEnabled;
|
||||
if (!isFeatureEnabled) {
|
||||
return;
|
||||
}
|
||||
|
||||
this._logger = logger;
|
||||
|
||||
try {
|
||||
this.ds = createDatastream(kibanaVersion);
|
||||
} catch (err) {
|
||||
this.logger.warn(new SecurityWorkflowInsightsFailedInitialized(err.message).message);
|
||||
return;
|
||||
}
|
||||
|
||||
this.setup$.next();
|
||||
}
|
||||
|
||||
public async start({ esClient }: StartInterface) {
|
||||
if (!this.isFeatureEnabled) {
|
||||
return;
|
||||
}
|
||||
|
||||
// this._esClient = esClient;
|
||||
await firstValueFrom(this.setup$);
|
||||
|
||||
try {
|
||||
await createPipeline(esClient);
|
||||
await this.ds?.install({
|
||||
logger: this.logger,
|
||||
esClient,
|
||||
pluginStop$: this.stop$,
|
||||
});
|
||||
await esClient.indices.createDataStream({ name: DATA_STREAM_NAME });
|
||||
} catch (err) {
|
||||
// ignore datastream already exists error
|
||||
if (err?.body?.error?.type === 'resource_already_exists_exception') {
|
||||
return;
|
||||
}
|
||||
|
||||
this.logger.warn(new SecurityWorkflowInsightsFailedInitialized(err.message).message);
|
||||
return;
|
||||
}
|
||||
|
||||
this.start$.next();
|
||||
}
|
||||
|
||||
public stop() {
|
||||
this.setup$.next();
|
||||
this.setup$.complete();
|
||||
this.start$.next();
|
||||
this.start$.complete();
|
||||
this.stop$.next();
|
||||
this.stop$.complete();
|
||||
}
|
||||
|
||||
public async create() {
|
||||
await this.isInitialized;
|
||||
}
|
||||
|
||||
public async update() {
|
||||
await this.isInitialized;
|
||||
}
|
||||
|
||||
public async fetch() {
|
||||
await this.isInitialized;
|
||||
}
|
||||
|
||||
// to be used in create/update/fetch above
|
||||
// private get esClient(): ElasticsearchClient {
|
||||
// if (!this._esClient) {
|
||||
// throw new SecurityWorkflowInsightsFailedInitialized('no elasticsearch client found');
|
||||
// }
|
||||
|
||||
// return this._esClient;
|
||||
// }
|
||||
|
||||
private get logger(): Logger {
|
||||
if (!this._logger) {
|
||||
throw new SecurityWorkflowInsightsFailedInitialized('no logger found');
|
||||
}
|
||||
|
||||
return this._logger;
|
||||
}
|
||||
}
|
||||
|
||||
export const securityWorkflowInsightsService = new SecurityWorkflowInsightsService();
|
|
@ -56,7 +56,11 @@ import { registerEndpointRoutes } from './endpoint/routes/metadata';
|
|||
import { registerPolicyRoutes } from './endpoint/routes/policy';
|
||||
import { registerActionRoutes } from './endpoint/routes/actions';
|
||||
import { registerEndpointSuggestionsRoutes } from './endpoint/routes/suggestions';
|
||||
import { EndpointArtifactClient, ManifestManager } from './endpoint/services';
|
||||
import {
|
||||
EndpointArtifactClient,
|
||||
ManifestManager,
|
||||
securityWorkflowInsightsService,
|
||||
} from './endpoint/services';
|
||||
import { EndpointAppContextService } from './endpoint/endpoint_app_context_services';
|
||||
import type { EndpointAppContext } from './endpoint/types';
|
||||
import { initUsageCollectors } from './usage';
|
||||
|
@ -519,6 +523,12 @@ export class Plugin implements ISecuritySolutionPlugin {
|
|||
|
||||
featureUsageService.setup(plugins.licensing);
|
||||
|
||||
securityWorkflowInsightsService.setup({
|
||||
kibanaVersion: pluginContext.env.packageInfo.version,
|
||||
logger: this.logger,
|
||||
isFeatureEnabled: config.experimentalFeatures.defendInsights,
|
||||
});
|
||||
|
||||
return {
|
||||
setProductFeaturesConfigurator:
|
||||
productFeaturesService.setProductFeaturesConfigurator.bind(productFeaturesService),
|
||||
|
@ -672,6 +682,12 @@ export class Plugin implements ISecuritySolutionPlugin {
|
|||
this.telemetryReceiver
|
||||
);
|
||||
|
||||
securityWorkflowInsightsService
|
||||
.start({
|
||||
esClient: core.elasticsearch.client.asInternalUser,
|
||||
})
|
||||
.catch(() => {});
|
||||
|
||||
const endpointPkgInstallationPromise = this.endpointContext.service
|
||||
.getInternalFleetServices()
|
||||
.packages.getInstallation(FLEET_ENDPOINT_PACKAGE);
|
||||
|
@ -727,6 +743,7 @@ export class Plugin implements ISecuritySolutionPlugin {
|
|||
this.policyWatcher?.stop();
|
||||
this.completeExternalResponseActionsTask.stop().catch(() => {});
|
||||
this.siemMigrationsService.stop();
|
||||
securityWorkflowInsightsService.stop();
|
||||
licenseService.stop();
|
||||
}
|
||||
}
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue