Mirror of https://github.com/elastic/kibana.git, synced 2025-04-24 01:38:56 -04:00
Endpoint Timeline telemetry (#132626)
* Staging branch. Update. Fix up timeline task types + received logic. Switch between internal / current user based on the context of the call. Send to telemetry endpoint.
* [CI] Auto-commit changed files from 'node scripts/eslint --no-cache --fix'
* Add test for the entire task.
* This code shouldn't have been deleted.
* Return 100 every 3 hours.
* Don't be explicit about types for test.

Co-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com>
This commit is contained in:
parent f9b065e228
commit 65054ae3b0

18 changed files with 3195 additions and 157 deletions
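The change that recurs throughout this diff is selecting the Elasticsearch client by caller context: background telemetry tasks have no user request to scope to, so they search as the internal user, while route handlers keep searching as the current user. A minimal sketch of the pattern the queries below inline as a ternary (the helper name here is illustrative, not part of the commit):

import type { ElasticsearchClient, IScopedClusterClient } from '@kbn/core/server';

// Telemetry tasks pass isInternalRequest = true; user-facing routes leave it false.
function pickClient(
  client: IScopedClusterClient,
  isInternalRequest: boolean
): ElasticsearchClient {
  return isInternalRequest ? client.asInternalUser : client.asCurrentUser;
}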
@@ -13,7 +13,7 @@ import {
 } from '../../../common/endpoint/schema/resolver';
 import { handleTree } from './resolver/tree/handler';
-import { handleEntities } from './resolver/entity';
+import { handleEntities } from './resolver/entity/handler';
 import { handleEvents } from './resolver/events';

 export function registerResolverRoutes(router: IRouter) {
@@ -1,136 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
- */
-
-import _ from 'lodash';
-import { RequestHandler } from '@kbn/core/server';
-import { TypeOf } from '@kbn/config-schema';
-import { validateEntities } from '../../../../common/endpoint/schema/resolver';
-import { ResolverEntityIndex, ResolverSchema } from '../../../../common/endpoint/types';
-
-interface SupportedSchema {
-  /**
-   * A name for the schema being used
-   */
-  name: string;
-
-  /**
-   * A constraint to search for in the documented returned by Elasticsearch
-   */
-  constraints: Array<{ field: string; value: string }>;
-
-  /**
-   * Schema to return to the frontend so that it can be passed in to call to the /tree API
-   */
-  schema: ResolverSchema;
-}
-
-/**
- * This structure defines the preset supported schemas for a resolver graph. We'll probably want convert this
- * implementation to something similar to how row renderers is implemented.
- */
-const supportedSchemas: SupportedSchema[] = [
-  {
-    name: 'endpoint',
-    constraints: [
-      {
-        field: 'agent.type',
-        value: 'endpoint',
-      },
-    ],
-    schema: {
-      id: 'process.entity_id',
-      parent: 'process.parent.entity_id',
-      ancestry: 'process.Ext.ancestry',
-      name: 'process.name',
-    },
-  },
-  {
-    name: 'winlogbeat',
-    constraints: [
-      {
-        field: 'agent.type',
-        value: 'winlogbeat',
-      },
-      {
-        field: 'event.module',
-        value: 'sysmon',
-      },
-    ],
-    schema: {
-      id: 'process.entity_id',
-      parent: 'process.parent.entity_id',
-      name: 'process.name',
-    },
-  },
-];
-
-function getFieldAsString(doc: unknown, field: string): string | undefined {
-  const value = _.get(doc, field);
-  if (value === undefined) {
-    return undefined;
-  }
-
-  return String(value);
-}
-
-/**
- * This is used to get an 'entity_id' which is an internal-to-Resolver concept, from an `_id`, which
- * is the artificial ID generated by ES for each document.
- */
-export function handleEntities(): RequestHandler<unknown, TypeOf<typeof validateEntities.query>> {
-  return async (context, request, response) => {
-    const {
-      query: { _id, indices },
-    } = request;
-
-    const esClient = (await context.core).elasticsearch.client;
-    const queryResponse = await esClient.asCurrentUser.search({
-      ignore_unavailable: true,
-      index: indices,
-      body: {
-        // only return 1 match at most
-        size: 1,
-        query: {
-          bool: {
-            filter: [
-              {
-                // only return documents with the matching _id
-                ids: {
-                  values: _id,
-                },
-              },
-            ],
-          },
-        },
-      },
-    });
-
-    const responseBody: ResolverEntityIndex = [];
-    for (const hit of queryResponse.hits.hits) {
-      for (const supportedSchema of supportedSchemas) {
-        let foundSchema = true;
-        // check that the constraint and id fields are defined and that the id field is not an empty string
-        const id = getFieldAsString(hit._source, supportedSchema.schema.id);
-        for (const constraint of supportedSchema.constraints) {
-          const fieldValue = getFieldAsString(hit._source, constraint.field);
-          // track that all the constraints are true, if one of them is false then this schema is not valid so mark it
-          // that we did not find the schema
-          foundSchema = foundSchema && fieldValue?.toLowerCase() === constraint.value.toLowerCase();
-        }
-
-        if (foundSchema && id !== undefined && id !== '') {
-          responseBody.push({
-            name: supportedSchema.name,
-            schema: supportedSchema.schema,
-            id,
-          });
-        }
-      }
-    }
-    return response.ok({ body: responseBody });
-  };
-}
@@ -0,0 +1,49 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { RequestHandler } from '@kbn/core/server';
+import { TypeOf } from '@kbn/config-schema';
+import { validateEntities } from '../../../../../common/endpoint/schema/resolver';
+import { ResolverEntityIndex } from '../../../../../common/endpoint/types';
+import { resolverEntity } from './utils/build_resolver_entity';
+
+/**
+ * This is used to get an 'entity_id' which is an internal-to-Resolver concept, from an `_id`, which
+ * is the artificial ID generated by ES for each document.
+ */
+export function handleEntities(): RequestHandler<unknown, TypeOf<typeof validateEntities.query>> {
+  return async (context, request, response) => {
+    const {
+      query: { _id, indices },
+    } = request;
+
+    const esClient = (await context.core).elasticsearch.client;
+    const queryResponse = await esClient.asCurrentUser.search({
+      ignore_unavailable: true,
+      index: indices,
+      body: {
+        // only return 1 match at most
+        size: 1,
+        query: {
+          bool: {
+            filter: [
+              {
+                // only return documents with the matching _id
+                ids: {
+                  values: _id,
+                },
+              },
+            ],
+          },
+        },
+      },
+    });
+
+    const responseBody: ResolverEntityIndex = resolverEntity(queryResponse.hits.hits);
+    return response.ok({ body: responseBody });
+  };
+}
@@ -0,0 +1,37 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
+import { getFieldAsString, supportedSchemas } from './supported_schemas';
+import { ResolverEntityIndex } from '../../../../../../common/endpoint/types';
+
+export function resolverEntity(hits: Array<estypes.SearchHit<unknown>>) {
+  const responseBody: ResolverEntityIndex = [];
+  for (const hit of hits) {
+    for (const supportedSchema of supportedSchemas) {
+      let foundSchema = true;
+      // check that the constraint and id fields are defined and that the id field is not an empty string
+      const id = getFieldAsString(hit._source, supportedSchema.schema.id);
+      for (const constraint of supportedSchema.constraints) {
+        const fieldValue = getFieldAsString(hit._source, constraint.field);
+        // track that all the constraints are true, if one of them is false then this schema is not valid so mark it
+        // that we did not find the schema
+        foundSchema = foundSchema && fieldValue?.toLowerCase() === constraint.value.toLowerCase();
+      }
+
+      if (foundSchema && id !== undefined && id !== '') {
+        responseBody.push({
+          name: supportedSchema.name,
+          schema: supportedSchema.schema,
+          id,
+        });
+      }
+    }
+  }

+  return responseBody;
+}
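`resolverEntity` is the piece of the old route handler that the telemetry task also needs: it matches raw Elasticsearch hits against the supported schemas and returns entity descriptors. A sketch of a call, assuming a made-up endpoint alert hit (the `_id` and entity value are invented):

import { resolverEntity } from './utils/build_resolver_entity';

const hit = {
  _index: '.alerts-security.alerts-default',
  _id: 'dGVzdC1kb2M=',
  _source: {
    agent: { type: 'endpoint' },
    process: { entity_id: 'entity-1' },
  },
};

const entities = resolverEntity([hit]);
// => [{ name: 'endpoint', schema: { id: 'process.entity_id', ... }, id: 'entity-1' }]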
@@ -0,0 +1,75 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import _ from 'lodash';
+import { ResolverSchema } from '../../../../../../common/endpoint/types';
+
+interface SupportedSchema {
+  /**
+   * A name for the schema being used
+   */
+  name: string;
+
+  /**
+   * A constraint to search for in the documented returned by Elasticsearch
+   */
+  constraints: Array<{ field: string; value: string }>;
+
+  /**
+   * Schema to return to the frontend so that it can be passed in to call to the /tree API
+   */
+  schema: ResolverSchema;
+}
+
+/**
+ * This structure defines the preset supported schemas for a resolver graph. We'll probably want convert this
+ * implementation to something similar to how row renderers is implemented.
+ */
+export const supportedSchemas: SupportedSchema[] = [
+  {
+    name: 'endpoint',
+    constraints: [
+      {
+        field: 'agent.type',
+        value: 'endpoint',
+      },
+    ],
+    schema: {
+      id: 'process.entity_id',
+      parent: 'process.parent.entity_id',
+      ancestry: 'process.Ext.ancestry',
+      name: 'process.name',
+    },
+  },
+  {
+    name: 'winlogbeat',
+    constraints: [
+      {
+        field: 'agent.type',
+        value: 'winlogbeat',
+      },
+      {
+        field: 'event.module',
+        value: 'sysmon',
+      },
+    ],
+    schema: {
+      id: 'process.entity_id',
+      parent: 'process.parent.entity_id',
+      name: 'process.name',
+    },
+  },
+];
+
+export function getFieldAsString(doc: unknown, field: string): string | undefined {
+  const value = _.get(doc, field);
+  if (value === undefined) {
+    return undefined;
+  }
+
+  return String(value);
+}
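`getFieldAsString` leans on lodash's `_.get`, which walks dotted paths through nested objects; that is what lets the schema constraints name fields flatly, e.g. 'agent.type'. A small illustration of the behaviour it wraps:

import _ from 'lodash';

const doc = { agent: { type: 'endpoint' }, process: { pid: 42 } };
_.get(doc, 'agent.type'); // 'endpoint'
_.get(doc, 'process.pid'); // 42 (getFieldAsString would coerce this to '42')
_.get(doc, 'missing.path'); // undefined, which getFieldAsString returns unchanged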
@@ -46,7 +46,6 @@ export function handleEvents(): RequestHandler<
       timeRange: body.timeRange,
     });
     const results = await query.search(client, body.filter);

     return res.ok({
       body: createEvents(results, PaginationBuilder.buildCursorRequestLimit(limit, results)),
     });
@@ -15,6 +15,7 @@ interface DescendantsParams {
   schema: ResolverSchema;
   indexPatterns: string | string[];
   timeRange: TimeRange;
+  isInternalRequest: boolean;
 }

 /**
@@ -25,12 +26,14 @@ export class DescendantsQuery {
   private readonly indexPatterns: string | string[];
   private readonly timeRange: TimeRange;
   private readonly docValueFields: JsonValue[];
+  private readonly isInternalRequest: boolean;

-  constructor({ schema, indexPatterns, timeRange }: DescendantsParams) {
+  constructor({ schema, indexPatterns, timeRange, isInternalRequest }: DescendantsParams) {
     this.docValueFields = docValueFields(schema);
     this.schema = schema;
     this.indexPatterns = indexPatterns;
     this.timeRange = timeRange;
+    this.isInternalRequest = isInternalRequest;
   }

   private query(nodes: NodeID[], size: number): JsonObject {
@@ -198,14 +201,16 @@ export class DescendantsQuery {
       return [];
     }

+    const esClient = this.isInternalRequest ? client.asInternalUser : client.asCurrentUser;
+
     let response: estypes.SearchResponse<unknown>;
     if (this.schema.ancestry) {
-      response = await client.asCurrentUser.search({
+      response = await esClient.search({
         body: this.queryWithAncestryArray(validNodes, this.schema.ancestry, limit),
         index: this.indexPatterns,
       });
     } else {
-      response = await client.asCurrentUser.search({
+      response = await esClient.search({
         body: this.query(validNodes, limit),
         index: this.indexPatterns,
       });
@@ -14,6 +14,7 @@ interface LifecycleParams {
   schema: ResolverSchema;
   indexPatterns: string | string[];
   timeRange: TimeRange;
+  isInternalRequest: boolean;
 }

 /**
@@ -24,11 +25,13 @@ export class LifecycleQuery {
   private readonly indexPatterns: string | string[];
   private readonly timeRange: TimeRange;
   private readonly docValueFields: JsonValue[];
-  constructor({ schema, indexPatterns, timeRange }: LifecycleParams) {
+  private readonly isInternalRequest: boolean;
+  constructor({ schema, indexPatterns, timeRange, isInternalRequest }: LifecycleParams) {
     this.docValueFields = docValueFields(schema);
     this.schema = schema;
     this.indexPatterns = indexPatterns;
     this.timeRange = timeRange;
+    this.isInternalRequest = isInternalRequest;
   }

   private query(nodes: NodeID[]): JsonObject {
@@ -91,7 +94,9 @@ export class LifecycleQuery {
       return [];
     }

-    const body = await client.asCurrentUser.search({
+    const esClient = this.isInternalRequest ? client.asInternalUser : client.asCurrentUser;
+
+    const body = await esClient.search({
       body: this.query(validNodes),
       index: this.indexPatterns,
     });
@@ -29,6 +29,7 @@ interface StatsParams {
   schema: ResolverSchema;
   indexPatterns: string | string[];
   timeRange: TimeRange;
+  isInternalRequest: boolean;
 }

 /**
@@ -38,10 +39,13 @@ export class StatsQuery {
   private readonly schema: ResolverSchema;
   private readonly indexPatterns: string | string[];
   private readonly timeRange: TimeRange;
-  constructor({ schema, indexPatterns, timeRange }: StatsParams) {
+  private readonly isInternalRequest: boolean;
+
+  constructor({ schema, indexPatterns, timeRange, isInternalRequest }: StatsParams) {
     this.schema = schema;
     this.indexPatterns = indexPatterns;
     this.timeRange = timeRange;
+    this.isInternalRequest = isInternalRequest;
   }

   private query(nodes: NodeID[]): JsonObject {
@@ -122,8 +126,10 @@ export class StatsQuery {
       return {};
     }

+    const esClient = this.isInternalRequest ? client.asInternalUser : client.asCurrentUser;
+
     // leaving unknown here because we don't actually need the hits part of the body
-    const body = await client.asCurrentUser.search({
+    const body = await esClient.search({
       body: this.query(nodes),
       index: this.indexPatterns,
     });
@@ -49,10 +49,13 @@ export class Fetcher {
    *
    * @param options the options for retrieving the structure of the tree.
    */
-  public async tree(options: TreeOptions): Promise<ResolverNode[]> {
+  public async tree(
+    options: TreeOptions,
+    isInternalRequest: boolean = false
+  ): Promise<ResolverNode[]> {
     const treeParts = await Promise.all([
-      this.retrieveAncestors(options),
-      this.retrieveDescendants(options),
+      this.retrieveAncestors(options, isInternalRequest),
+      this.retrieveDescendants(options, isInternalRequest),
     ]);

     const tree = treeParts.reduce((results, partArray) => {
@@ -60,12 +63,13 @@ export class Fetcher {
       return results;
     }, []);

-    return this.formatResponse(tree, options);
+    return this.formatResponse(tree, options, isInternalRequest);
   }

   private async formatResponse(
     treeNodes: FieldsObject[],
-    options: TreeOptions
+    options: TreeOptions,
+    isInternalRequest: boolean
   ): Promise<ResolverNode[]> {
     const statsIDs: NodeID[] = [];
     for (const node of treeNodes) {
@@ -79,6 +83,7 @@ export class Fetcher {
       indexPatterns: options.indexPatterns,
       schema: options.schema,
       timeRange: options.timeRange,
+      isInternalRequest,
     });

     const eventStats = await query.search(this.client, statsIDs);
@@ -133,12 +138,16 @@ export class Fetcher {
     return nodes;
   }

-  private async retrieveAncestors(options: TreeOptions): Promise<FieldsObject[]> {
+  private async retrieveAncestors(
+    options: TreeOptions,
+    isInternalRequest: boolean
+  ): Promise<FieldsObject[]> {
     const ancestors: FieldsObject[] = [];
     const query = new LifecycleQuery({
       schema: options.schema,
       indexPatterns: options.indexPatterns,
       timeRange: options.timeRange,
+      isInternalRequest,
     });

     let nodes = options.nodes;
@@ -179,12 +188,16 @@ export class Fetcher {
     return ancestors;
   }

-  private async retrieveDescendants(options: TreeOptions): Promise<FieldsObject[]> {
+  private async retrieveDescendants(
+    options: TreeOptions,
+    isInternalRequest: boolean
+  ): Promise<FieldsObject[]> {
     const descendants: FieldsObject[] = [];
     const query = new DescendantsQuery({
       schema: options.schema,
       indexPatterns: options.indexPatterns,
       timeRange: options.timeRange,
+      isInternalRequest,
     });

     let nodes: NodeID[] = options.nodes;
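Note that `isInternalRequest` defaults to false on `Fetcher.tree`, so existing Resolver route call sites compile and behave exactly as before, while the telemetry receiver opts in explicitly (it calls `tree(request, true)` later in this diff). A sketch of the two call styles:

// Route handler: user-scoped search, unchanged behaviour.
const nodes = await fetcher.tree(options);

// Telemetry task: no user on the request, so search as the internal user.
const telemetryNodes = await fetcher.tree(options, true);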
@@ -5,11 +5,13 @@
  * 2.0.
  */

+import moment from 'moment';
 import { ConcreteTaskInstance, TaskStatus } from '@kbn/task-manager-plugin/server';
 import { TelemetryEventsSender } from '../sender';
 import { TelemetryReceiver } from '../receiver';
 import { SecurityTelemetryTaskConfig } from '../task';
 import { PackagePolicy } from '@kbn/fleet-plugin/common/types/models/package_policy';
+import { stubEndpointAlertResponse, stubProcessTree, stubFetchTimelineEvents } from './timeline';

 export const createMockTelemetryEventsSender = (
   enableTelemetry?: boolean
@@ -29,13 +31,45 @@ export const createMockTelemetryEventsSender = (
   } as unknown as jest.Mocked<TelemetryEventsSender>;
 };

+const stubClusterInfo = {
+  name: 'Stub-MacBook-Pro.local',
+  cluster_name: 'elasticsearch',
+  cluster_uuid: '5Pr5PXRQQpGJUTn0czAvKQ',
+  version: {
+    number: '8.0.0-SNAPSHOT',
+    build_type: 'tar',
+    build_hash: '38537ab4a726b42ce8f034aad78d8fca4d4f3e51',
+    build_date: moment().toISOString(),
+    build_snapshot: true,
+    lucene_version: '9.2.0',
+    minimum_wire_compatibility_version: '7.17.0',
+    minimum_index_compatibility_version: '7.0.0',
+  },
+  tagline: 'You Know, for Search',
+};
+
+const stubLicenseInfo = {
+  status: 'active',
+  uid: '4a7dde08-e5f8-4e50-80f8-bc85b72b4934',
+  type: 'trial',
+  issue_date: moment().toISOString(),
+  issue_date_in_millis: 1653299879146,
+  expiry_date: moment().toISOString(),
+  expiry_date_in_millis: 1655891879146,
+  max_nodes: 1000,
+  max_resource_units: null,
+  issued_to: 'elasticsearch',
+  issuer: 'elasticsearch',
+  start_date_in_millis: -1,
+};
+
 export const createMockTelemetryReceiver = (
   diagnosticsAlert?: unknown
 ): jest.Mocked<TelemetryReceiver> => {
   return {
     start: jest.fn(),
-    fetchClusterInfo: jest.fn(),
-    fetchLicenseInfo: jest.fn(),
+    fetchClusterInfo: jest.fn().mockReturnValue(stubClusterInfo),
+    fetchLicenseInfo: jest.fn().mockReturnValue(stubLicenseInfo),
     copyLicenseFields: jest.fn(),
     fetchFleetAgents: jest.fn(),
     fetchDiagnosticAlerts: jest.fn().mockReturnValue(diagnosticsAlert ?? jest.fn()),
@@ -45,6 +79,11 @@ export const createMockTelemetryReceiver = (
     fetchEndpointList: jest.fn(),
     fetchDetectionRules: jest.fn().mockReturnValue({ body: null }),
     fetchEndpointMetadata: jest.fn(),
+    fetchTimelineEndpointAlerts: jest
+      .fn()
+      .mockReturnValue(Promise.resolve(stubEndpointAlertResponse())),
+    buildProcessTree: jest.fn().mockReturnValue(Promise.resolve(stubProcessTree())),
+    fetchTimelineEvents: jest.fn().mockReturnValue(Promise.resolve(stubFetchTimelineEvents())),
   } as unknown as jest.Mocked<TelemetryReceiver>;
 };
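Because every stubbed method hands back a resolved value, a test can drive the whole timeline task without a cluster. A sketch of how the mock behaves (stub names as defined above):

const receiver = createMockTelemetryReceiver();

// Resolves immediately to stubEndpointAlertResponse() from './timeline'.
const alerts = await receiver.fetchTimelineEndpointAlerts(3);
expect(receiver.fetchTimelineEndpointAlerts).toHaveBeenCalledWith(3);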
(File diff suppressed because it is too large)
@@ -21,6 +21,8 @@ export const TELEMETRY_CHANNEL_ENDPOINT_META = 'endpoint-metadata';

 export const TELEMETRY_CHANNEL_DETECTION_ALERTS = 'alerts-detections';

+export const TELEMETRY_CHANNEL_TIMELINE = 'alerts-timeline';
+
 export const LIST_DETECTION_RULE_EXCEPTION = 'detection_rule_exception';

 export const LIST_ENDPOINT_EXCEPTION = 'endpoint_exception';
@@ -8,6 +8,7 @@
 import {
   Logger,
   CoreStart,
+  IScopedClusterClient,
   ElasticsearchClient,
   SavedObjectsClientContract,
 } from '@kbn/core/server';
@@ -37,8 +38,16 @@
   trustedApplicationToTelemetryEntry,
   ruleExceptionListItemToTelemetryEvent,
 } from './helpers';
+import { Fetcher } from '../../endpoint/routes/resolver/tree/utils/fetch';
+import type { TreeOptions } from '../../endpoint/routes/resolver/tree/utils/fetch';
+import type {
+  ResolverNode,
+  SafeEndpointEvent,
+  ResolverSchema,
+} from '../../../common/endpoint/types';
 import type {
   TelemetryEvent,
+  EnhancedAlertEvent,
   ESLicense,
   ESClusterInfo,
   GetEndpointListResponse,
@@ -134,6 +143,21 @@ export interface ITelemetryReceiver {
   };

   fetchPrebuiltRuleAlerts(): Promise<TelemetryEvent[]>;
+
+  fetchTimelineEndpointAlerts(
+    interval: number
+  ): Promise<SearchResponse<EnhancedAlertEvent, Record<string, AggregationsAggregate>>>;
+
+  buildProcessTree(
+    entityId: string,
+    resolverSchema: ResolverSchema,
+    startOfDay: string,
+    endOfDay: string
+  ): Promise<ResolverNode[]>;
+
+  fetchTimelineEvents(
+    nodeIds: string[]
+  ): Promise<SearchResponse<SafeEndpointEvent, Record<string, AggregationsAggregate>>>;
 }

 export class TelemetryReceiver implements ITelemetryReceiver {
@@ -146,6 +170,7 @@ export class TelemetryReceiver implements ITelemetryReceiver {
   private kibanaIndex?: string;
   private alertsIndex?: string;
   private clusterInfo?: ESClusterInfo;
+  private processTreeFetcher?: Fetcher;
   private readonly maxRecords = 10_000 as const;

   constructor(logger: Logger) {
@@ -168,6 +193,9 @@ export class TelemetryReceiver implements ITelemetryReceiver {
     this.soClient =
       core?.savedObjects.createInternalRepository() as unknown as SavedObjectsClientContract;
     this.clusterInfo = await this.fetchClusterInfo();
+
+    const elasticsearch = core?.elasticsearch.client as unknown as IScopedClusterClient;
+    this.processTreeFetcher = new Fetcher(elasticsearch);
   }

   public getClusterInfo(): ESClusterInfo | undefined {
@@ -176,7 +204,7 @@ export class TelemetryReceiver implements ITelemetryReceiver {

   public async fetchFleetAgents() {
     if (this.esClient === undefined || this.esClient === null) {
-      throw Error('elasticsearch client is unavailable: cannot retrieve fleet policy responses');
+      throw Error('elasticsearch client is unavailable: cannot retrieve fleet agents');
     }

     return this.agentClient?.listAgents({
@@ -430,7 +458,7 @@ export class TelemetryReceiver implements ITelemetryReceiver {
    */
   public async fetchDetectionRules() {
     if (this.esClient === undefined || this.esClient === null) {
-      throw Error('elasticsearch client is unavailable: cannot retrieve diagnostic alerts');
+      throw Error('elasticsearch client is unavailable: cannot retrieve detection rules');
     }

     const query: SearchRequest = {
@@ -509,7 +537,7 @@ export class TelemetryReceiver implements ITelemetryReceiver {
    */
   public async fetchPrebuiltRuleAlerts() {
     if (this.esClient === undefined || this.esClient === null) {
-      throw Error('elasticsearch client is unavailable: cannot retrieve detection rule alerts');
+      throw Error('elasticsearch client is unavailable: cannot retrieve pre-built rule alerts');
     }

     const query: SearchRequest = {
@@ -630,6 +658,131 @@ export class TelemetryReceiver implements ITelemetryReceiver {
     return telemetryEvents;
   }

+  public async fetchTimelineEndpointAlerts(interval: number) {
+    if (this.esClient === undefined || this.esClient === null) {
+      throw Error('elasticsearch client is unavailable: cannot retrieve cluster infomation');
+    }
+
+    const query: SearchRequest = {
+      expand_wildcards: ['open' as const, 'hidden' as const],
+      index: `${this.alertsIndex}*`,
+      ignore_unavailable: true,
+      size: 100,
+      body: {
+        query: {
+          bool: {
+            filter: [
+              {
+                bool: {
+                  should: [
+                    {
+                      match_phrase: {
+                        'event.module': 'endpoint',
+                      },
+                    },
+                  ],
+                },
+              },
+              {
+                bool: {
+                  should: [
+                    {
+                      match_phrase: {
+                        'kibana.alert.rule.parameters.immutable': 'true',
+                      },
+                    },
+                  ],
+                },
+              },
+              {
+                range: {
+                  '@timestamp': {
+                    gte: `now-${interval}h`,
+                    lte: 'now',
+                  },
+                },
+              },
+            ],
+          },
+        },
+      },
+    };
+
+    return this.esClient.search<EnhancedAlertEvent>(query);
+  }
+
+  public async buildProcessTree(
+    entityId: string,
+    resolverSchema: ResolverSchema,
+    startOfDay: string,
+    endOfDay: string
+  ): Promise<ResolverNode[]> {
+    if (this.processTreeFetcher === undefined || this.processTreeFetcher === null) {
+      throw Error(
+        'resolver tree builder is unavailable: cannot build encoded endpoint event graph'
+      );
+    }
+
+    const request: TreeOptions = {
+      ancestors: 200,
+      descendants: 500,
+      timeRange: {
+        from: startOfDay,
+        to: endOfDay,
+      },
+      schema: resolverSchema,
+      nodes: [entityId],
+      indexPatterns: [`${this.alertsIndex}*`, 'logs-*'],
+      descendantLevels: 20,
+    };
+
+    return this.processTreeFetcher.tree(request, true);
+  }
+
+  public async fetchTimelineEvents(nodeIds: string[]) {
+    if (this.esClient === undefined || this.esClient === null) {
+      throw Error('elasticsearch client is unavailable: cannot retrieve timeline endpoint events');
+    }
+
+    const query: SearchRequest = {
+      expand_wildcards: ['open' as const, 'hidden' as const],
+      index: [`${this.alertsIndex}*`, 'logs-*'],
+      ignore_unavailable: true,
+      size: 100,
+      body: {
+        _source: {
+          include: [
+            '@timestamp',
+            'process',
+            'event',
+            'file',
+            'network',
+            'dns',
+            'kibana.rule.alert.uuid',
+          ],
+        },
+        query: {
+          bool: {
+            filter: [
+              {
+                terms: {
+                  'process.entity_id': nodeIds,
+                },
+              },
+              {
+                term: {
+                  'event.category': 'process',
+                },
+              },
+            ],
+          },
+        },
+      },
+    };
+
+    return this.esClient.search<SafeEndpointEvent>(query);
+  }
+
   public async fetchClusterInfo(): Promise<ESClusterInfo> {
     if (this.esClient === undefined || this.esClient === null) {
       throw Error('elasticsearch client is unavailable: cannot retrieve cluster infomation');
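Taken together, the three new receiver methods form the pipeline the timelines task runs: fetch recent endpoint alerts, map each alert to a Resolver entity, build its process tree with the internal user, then pull lineage events for the tree's nodes. A condensed sketch of that flow (error handling and record building omitted):

const alerts = await receiver.fetchTimelineEndpointAlerts(3);
for (const alert of alerts.hits.hits) {
  const [entity] = resolverEntity([alert]);
  const tree = await receiver.buildProcessTree(entity.id, entity.schema, startOfDay, endOfDay);
  const events = await receiver.fetchTimelineEvents(tree.map((node) => String(node.id)));
}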
@@ -11,6 +11,7 @@ import { createTelemetryEndpointTaskConfig } from './endpoint';
 import { createTelemetrySecurityListTaskConfig } from './security_lists';
 import { createTelemetryDetectionRuleListsTaskConfig } from './detection_rule';
 import { createTelemetryPrebuiltRuleAlertsTaskConfig } from './prebuilt_rule_alerts';
+import { createTelemetryTimelineTaskConfig } from './timelines';
 import {
   MAX_SECURITY_LIST_TELEMETRY_BATCH,
   MAX_ENDPOINT_TELEMETRY_BATCH,
@@ -25,5 +26,6 @@ export function createTelemetryTaskConfigs(): SecurityTelemetryTaskConfig[] {
     createTelemetrySecurityListTaskConfig(MAX_ENDPOINT_TELEMETRY_BATCH),
     createTelemetryDetectionRuleListsTaskConfig(MAX_DETECTION_RULE_TELEMETRY_BATCH),
     createTelemetryPrebuiltRuleAlertsTaskConfig(MAX_DETECTION_ALERTS_BATCH),
+    createTelemetryTimelineTaskConfig(),
   ];
 }
@@ -0,0 +1,40 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { loggingSystemMock } from '@kbn/core/server/mocks';
+import { createTelemetryTimelineTaskConfig } from './timelines';
+import { createMockTelemetryEventsSender, createMockTelemetryReceiver } from '../__mocks__';
+
+describe('timeline telemetry task test', () => {
+  let logger: ReturnType<typeof loggingSystemMock.createLogger>;
+
+  beforeEach(() => {
+    logger = loggingSystemMock.createLogger();
+  });
+
+  test('timeline telemetry task should be correctly set up', async () => {
+    const testTaskExecutionPeriod = {
+      last: undefined,
+      current: new Date().toISOString(),
+    };
+    const mockTelemetryEventsSender = createMockTelemetryEventsSender();
+    const mockTelemetryReceiver = createMockTelemetryReceiver();
+    const telemetryTelemetryTaskConfig = createTelemetryTimelineTaskConfig();
+
+    await telemetryTelemetryTaskConfig.runTask(
+      'test-timeline-task-id',
+      logger,
+      mockTelemetryReceiver,
+      mockTelemetryEventsSender,
+      testTaskExecutionPeriod
+    );
+
+    expect(mockTelemetryReceiver.buildProcessTree).toHaveBeenCalled();
+    expect(mockTelemetryReceiver.fetchTimelineEvents).toHaveBeenCalled();
+    expect(mockTelemetryReceiver.fetchTimelineEndpointAlerts).toHaveBeenCalled();
+  });
+});
@@ -0,0 +1,149 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import moment from 'moment';
+import { Logger } from '@kbn/core/server';
+import { SafeEndpointEvent } from '../../../../common/endpoint/types';
+import { ITelemetryEventsSender } from '../sender';
+import { ITelemetryReceiver } from '../receiver';
+import type { TaskExecutionPeriod } from '../task';
+import type {
+  ESClusterInfo,
+  ESLicense,
+  TimelineTelemetryTemplate,
+  TimelineTelemetryEvent,
+} from '../types';
+import { TELEMETRY_CHANNEL_TIMELINE } from '../constants';
+import { resolverEntity } from '../../../endpoint/routes/resolver/entity/utils/build_resolver_entity';
+
+export function createTelemetryTimelineTaskConfig() {
+  return {
+    type: 'security:telemetry-timelines',
+    title: 'Security Solution Timeline telemetry',
+    interval: '3h',
+    timeout: '10m',
+    version: '1.0.0',
+    runTask: async (
+      taskId: string,
+      logger: Logger,
+      receiver: ITelemetryReceiver,
+      sender: ITelemetryEventsSender,
+      taskExecutionPeriod: TaskExecutionPeriod
+    ) => {
+      let counter = 0;
+
+      logger.debug(`Running task: ${taskId}`);
+
+      const [clusterInfoPromise, licenseInfoPromise] = await Promise.allSettled([
+        receiver.fetchClusterInfo(),
+        receiver.fetchLicenseInfo(),
+      ]);
+
+      const clusterInfo =
+        clusterInfoPromise.status === 'fulfilled'
+          ? clusterInfoPromise.value
+          : ({} as ESClusterInfo);
+
+      const licenseInfo =
+        licenseInfoPromise.status === 'fulfilled'
+          ? licenseInfoPromise.value
+          : ({} as ESLicense | undefined);
+
+      const now = moment();
+      const startOfDay = now.startOf('day').toISOString();
+      const endOfDay = now.endOf('day').toISOString();
+
+      const baseDocument = {
+        version: clusterInfo.version?.number,
+        cluster_name: clusterInfo.cluster_name,
+        cluster_uuid: clusterInfo.cluster_uuid,
+        license_uuid: licenseInfo?.uid,
+      };
+
+      // Fetch EP Alerts
+
+      const endpointAlerts = await receiver.fetchTimelineEndpointAlerts(3);
+
+      // No EP Alerts -> Nothing to do
+
+      if (
+        endpointAlerts.hits.hits?.length === 0 ||
+        endpointAlerts.hits.hits?.length === undefined
+      ) {
+        logger.debug('no endpoint alerts received. exiting telemetry task.');
+        return counter;
+      }
+
+      // Build process tree for each EP Alert recieved
+
+      for (const alert of endpointAlerts.hits.hits) {
+        const eventId = alert._source ? alert._source['event.id'] : 'unknown';
+        const alertUUID = alert._source ? alert._source['kibana.alert.uuid'] : 'unknown';
+
+        const entities = resolverEntity([alert]);
+
+        // Build Tree
+
+        const tree = await receiver.buildProcessTree(
+          entities[0].id,
+          entities[0].schema,
+          startOfDay,
+          endOfDay
+        );
+
+        const nodeIds = [] as string[];
+        for (const node of tree) {
+          const nodeId = node?.id.toString();
+          nodeIds.push(nodeId);
+        }
+
+        // Fetch event lineage
+
+        const timelineEvents = await receiver.fetchTimelineEvents(nodeIds);
+
+        const eventsStore = new Map<string, SafeEndpointEvent>();
+        for (const event of timelineEvents.hits.hits) {
+          const doc = event._source;
+
+          if (doc !== null && doc !== undefined) {
+            const entityId = doc?.process?.entity_id?.toString();
+            if (entityId !== null && entityId !== undefined) eventsStore.set(entityId, doc);
+          }
+        }
+
+        // Create telemetry record
+
+        const telemetryTimeline: TimelineTelemetryEvent[] = [];
+        for (const node of tree) {
+          const id = node.id.toString();
+          const event = eventsStore.get(id);
+
+          const timelineTelemetryEvent: TimelineTelemetryEvent = {
+            ...node,
+            event,
+          };
+
+          telemetryTimeline.push(timelineTelemetryEvent);
+        }
+
+        const record: TimelineTelemetryTemplate = {
+          '@timestamp': moment().toISOString(),
+          ...baseDocument,
+          alert_id: alertUUID,
+          event_id: eventId,
+          timeline: telemetryTimeline,
+        };
+
+        sender.sendOnDemand(TELEMETRY_CHANNEL_TIMELINE, [record]);
+        counter += 1;
+      }
+
+      logger.debug(`sent ${counter} timelines. exiting telemetry task.`);
+      return counter;
+    },
+  };
+}
@@ -6,6 +6,7 @@
  */

 import { schema, TypeOf } from '@kbn/config-schema';
+import { AlertEvent, ResolverNode, SafeResolverEvent } from '../../../common/endpoint/types';

 type BaseSearchTypes = string | number | boolean | object;
 export type SearchTypes = BaseSearchTypes | BaseSearchTypes[] | undefined;
@@ -357,3 +358,20 @@ export interface RuleSearchResult {
     params: DetectionRuleParms;
   };
 }
+
+// EP Timeline telemetry
+
+export type EnhancedAlertEvent = AlertEvent & { 'event.id': string; 'kibana.alert.uuid': string };
+
+export type TimelineTelemetryEvent = ResolverNode & { event: SafeResolverEvent | undefined };
+
+export interface TimelineTelemetryTemplate {
+  '@timestamp': string;
+  cluster_uuid: string;
+  cluster_name: string;
+  version: string | undefined;
+  license_uuid: string | undefined;
+  alert_id: string | undefined;
+  event_id: string;
+  timeline: TimelineTelemetryEvent[];
+}
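For reference, this is roughly what one record sent to the 'alerts-timeline' channel looks like, filled with the stub cluster and license values used in the mocks above (the alert and event ids are placeholders, and the timeline is left empty):

import moment from 'moment';

const record: TimelineTelemetryTemplate = {
  '@timestamp': moment().toISOString(),
  cluster_uuid: '5Pr5PXRQQpGJUTn0czAvKQ',
  cluster_name: 'elasticsearch',
  version: '8.0.0-SNAPSHOT',
  license_uuid: '4a7dde08-e5f8-4e50-80f8-bc85b72b4934',
  alert_id: 'placeholder-alert-uuid',
  event_id: 'placeholder-event-id',
  timeline: [],
};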