mirror of
https://github.com/elastic/kibana.git
synced 2025-04-24 09:48:58 -04:00
[Attack discovery] Add utils for testing (#182918)
## Summary Add `load_attack_discovery_data` script that populates alerts data for Attack discovery testing To load Attack discovery data to the existing instance: ``` node x-pack/solutions/security/plugins/security_solution/scripts/load_attack_discovery_data.js --kibanaUrl http://127.0.0.1:5620 --elasticsearchUrl http://127.0.0.1:9220 ``` --------- Co-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com> Co-authored-by: Mark Hopkin <mark.hopkin@elastic.co>
This commit is contained in:
parent
0640f38b7a
commit
2c9e55dc44
22 changed files with 116346 additions and 1 deletions
|
@ -52,7 +52,7 @@
|
|||
"@kbn/data-views-plugin",
|
||||
"@kbn/core-analytics-server",
|
||||
"@kbn/llm-tasks-plugin",
|
||||
"@kbn/product-doc-base-plugin"
|
||||
"@kbn/product-doc-base-plugin",
|
||||
],
|
||||
"exclude": [
|
||||
"target/**/*",
|
||||
|
|
|
@ -0,0 +1,64 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { run } from '@kbn/dev-cli-runner';
|
||||
import { createEsClient, createKbnClient } from '../endpoint/common/stack_services';
|
||||
import { HORIZONTAL_LINE } from '../endpoint/common/constants';
|
||||
import { createToolingLogger } from '../../common/endpoint/data_loaders/utils';
|
||||
import { loadAttackDiscoveryData } from './load';
|
||||
|
||||
export const cli = () => {
|
||||
run(
|
||||
async (cliContext) => {
|
||||
createToolingLogger.setDefaultLogLevelFromCliFlags(cliContext.flags);
|
||||
|
||||
const log = cliContext.log;
|
||||
const kbnClient = createKbnClient({
|
||||
log,
|
||||
url: cliContext.flags.kibanaUrl as string,
|
||||
username: cliContext.flags.username as string,
|
||||
password: cliContext.flags.password as string,
|
||||
});
|
||||
const esClient = createEsClient({
|
||||
log,
|
||||
url: cliContext.flags.elasticsearchUrl as string,
|
||||
username: cliContext.flags.username as string,
|
||||
password: cliContext.flags.password as string,
|
||||
});
|
||||
|
||||
log.info(`${HORIZONTAL_LINE}
|
||||
Environment Data Loader
|
||||
${HORIZONTAL_LINE}
|
||||
`);
|
||||
log.info(`Loading data to: ${kbnClient.resolveUrl('')}`);
|
||||
|
||||
await loadAttackDiscoveryData({ kbnClient, esClient, log });
|
||||
},
|
||||
|
||||
// Options
|
||||
{
|
||||
description: `Loads data into a environment for testing/development`,
|
||||
flags: {
|
||||
string: ['kibanaUrl', 'elasticsearchUrl', 'username', 'password'],
|
||||
default: {
|
||||
kibanaUrl: 'http://127.0.0.1:5601',
|
||||
elasticsearchUrl: 'http://127.0.0.1:9200',
|
||||
username: 'elastic',
|
||||
password: 'changeme',
|
||||
},
|
||||
allowUnexpected: false,
|
||||
help: `
|
||||
--username User name to be used for auth against elasticsearch and
|
||||
kibana (Default: elastic).
|
||||
--password User name Password (Default: changeme)
|
||||
--kibanaUrl The url to Kibana (Default: http://127.0.0.1:5601)
|
||||
--elasticsearchUrl The url to Elasticsearch (Default: http://127.0.0.1:9200)
|
||||
`,
|
||||
},
|
||||
}
|
||||
);
|
||||
};
|
|
@ -0,0 +1,328 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import type { Client } from '@elastic/elasticsearch';
|
||||
import type { ToolingLog } from '@kbn/tooling-log';
|
||||
import type { KbnClient } from '@kbn/test';
|
||||
|
||||
// Name of the ingest pipeline the bulk loader routes documents through
// (created on demand by createPipeline below).
const PIPELINE_NAME = 'insights_pipeline';
// Directory holding the attack-discovery fixtures shared with the Cypress
// suite: NDJSON event/alert files plus the index mapping.
const DIRECTORY_PATH = path.resolve(
  __dirname,
  '../../../../../../test/security_solution_cypress/cypress/fixtures/assistant/attack_discovery'
);
// Shared field mapping applied to every index this script creates.
const MAPPING_FILE_PATH = path.join(DIRECTORY_PATH, 'mapping.json');
|
||||
const enableRule = async ({
|
||||
kbnClient,
|
||||
ruleId,
|
||||
log,
|
||||
}: {
|
||||
kbnClient: KbnClient;
|
||||
ruleId: string;
|
||||
log: ToolingLog;
|
||||
}) => {
|
||||
log.info(`Enabling rule with ID: ${ruleId}...`);
|
||||
try {
|
||||
await kbnClient.request({
|
||||
method: 'POST',
|
||||
path: `/api/detection_engine/rules/_bulk_action`,
|
||||
body: {
|
||||
action: 'enable',
|
||||
ids: [ruleId],
|
||||
},
|
||||
headers: {
|
||||
'elastic-api-version': '2023-10-31',
|
||||
},
|
||||
});
|
||||
|
||||
log.info(`Rule with ID ${ruleId} has been enabled.`);
|
||||
} catch (error) {
|
||||
log.error(`Error enabling rule with ID ${ruleId}:`);
|
||||
log.error(error);
|
||||
}
|
||||
};
|
||||
|
||||
const getRule = async ({ kbnClient, log }: { kbnClient: KbnClient; log: ToolingLog }) => {
|
||||
const response = await kbnClient.request<{
|
||||
total: number;
|
||||
data?: Array<{ id: string; enabled: boolean; rule_id: string }>;
|
||||
}>({
|
||||
method: 'GET',
|
||||
path: `/api/detection_engine/rules/_find?page=1&per_page=5&sort_field=enabled&sort_order=asc&filter=alert.attributes.name:%22Endpoint%20Security%20%5BInsights%5D%22`,
|
||||
headers: {
|
||||
'elastic-api-version': '2023-10-31',
|
||||
},
|
||||
});
|
||||
|
||||
return response.data.data?.[0];
|
||||
};
|
||||
|
||||
const importRule = async ({ kbnClient, log }: { kbnClient: KbnClient; log: ToolingLog }) => {
|
||||
log.info('Importing rule from endpoint_alert.ndjson...');
|
||||
|
||||
const RULE_FILE_PATH = path.join(DIRECTORY_PATH, 'endpoint_alert.ndjson');
|
||||
|
||||
try {
|
||||
await kbnClient.request({
|
||||
method: 'POST',
|
||||
path: `/api/detection_engine/rules/_import`,
|
||||
headers: {
|
||||
'kbn-xsrf': 'true',
|
||||
'Content-Type': 'multipart/form-data',
|
||||
'elastic-api-version': '2023-10-31',
|
||||
},
|
||||
body: {
|
||||
file: fs.createReadStream(RULE_FILE_PATH),
|
||||
},
|
||||
});
|
||||
|
||||
const ruleId = (await getRule({ kbnClient, log }))?.id;
|
||||
|
||||
if (!ruleId) throw new Error('Failed to import rule');
|
||||
|
||||
await enableRule({ kbnClient, ruleId, log });
|
||||
} catch (error) {
|
||||
log.error('Error importing rule:');
|
||||
log.error(error);
|
||||
}
|
||||
};
|
||||
|
||||
const createPipeline = async ({ esClient, log }: { esClient: Client; log: ToolingLog }) => {
|
||||
try {
|
||||
await esClient.ingest.getPipeline({ id: PIPELINE_NAME });
|
||||
|
||||
log.info(`Ingest pipeline ${PIPELINE_NAME} already exists.`);
|
||||
} catch (error) {
|
||||
if (error.meta?.statusCode === 404) {
|
||||
log.info(`Creating ingest pipeline ${PIPELINE_NAME}...`);
|
||||
|
||||
const pipelineConfig = {
|
||||
description: 'Ingest pipeline created by script',
|
||||
processors: [
|
||||
{
|
||||
date: {
|
||||
field: '@timestamp',
|
||||
formats: ['ISO8601'],
|
||||
output_format: "yyyy-MM-dd'T'HH:mm:ss.SSSSSSSSSXXX",
|
||||
},
|
||||
},
|
||||
{
|
||||
set: {
|
||||
field: 'event.ingested',
|
||||
value: '{{_ingest.timestamp}}',
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
await esClient.ingest.putPipeline({
|
||||
id: PIPELINE_NAME,
|
||||
body: pipelineConfig,
|
||||
});
|
||||
} else {
|
||||
log.error('Error checking or creating ingest pipeline:');
|
||||
log.error(error);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const createAndConfigureIndex = async ({
|
||||
esClient,
|
||||
epNum,
|
||||
indexType,
|
||||
log,
|
||||
}: {
|
||||
esClient: Client;
|
||||
epNum: string;
|
||||
indexType: string;
|
||||
log: ToolingLog;
|
||||
}) => {
|
||||
const indexNameSuffix = new Date().toISOString().slice(0, 10).replace(/-/g, '.');
|
||||
let indexName: string;
|
||||
|
||||
if (indexType === 'alerts') {
|
||||
indexName = `insights-alerts-ep${epNum}-${indexNameSuffix}`;
|
||||
} else {
|
||||
indexName = `logs-endpoint.events.insights.ep${epNum}.${indexNameSuffix}`;
|
||||
}
|
||||
|
||||
try {
|
||||
const mappingData = fs.readFileSync(MAPPING_FILE_PATH, 'utf8');
|
||||
|
||||
const indexExists = await esClient.indices.exists({ index: indexName });
|
||||
|
||||
if (!indexExists) {
|
||||
log.info(`Creating and configuring Elasticsearch index: ${indexName}`);
|
||||
await esClient.indices.create({
|
||||
index: indexName,
|
||||
body: {
|
||||
settings: {
|
||||
'index.mapping.total_fields.limit': '6000',
|
||||
},
|
||||
mappings: JSON.parse(mappingData),
|
||||
},
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
log.error(`Error creating and configuring index ${indexName}:`);
|
||||
log.error(error);
|
||||
}
|
||||
};
|
||||
|
||||
const processFile = async ({
|
||||
esClient,
|
||||
file,
|
||||
indexType,
|
||||
log,
|
||||
}: {
|
||||
esClient: Client;
|
||||
file: string;
|
||||
indexType: string;
|
||||
log: ToolingLog;
|
||||
}) => {
|
||||
const epNum = path.basename(file).match(/ep(\d+)/)?.[1];
|
||||
|
||||
await createAndConfigureIndex({ esClient, epNum: epNum as string, indexType, log });
|
||||
|
||||
const indexNameSuffix = new Date().toISOString().slice(0, 10).replace(/-/g, '.');
|
||||
let indexName: string;
|
||||
|
||||
if (indexType === 'alerts') {
|
||||
indexName = `insights-alerts-ep${epNum}-${indexNameSuffix}`;
|
||||
} else {
|
||||
indexName = `logs-endpoint.events.insights.ep${epNum}.${indexNameSuffix}`;
|
||||
}
|
||||
|
||||
log.info(`Processing and indexing file: ${file} ...`);
|
||||
|
||||
const fileData = await fs.readFileSync(file).toString().split('\n');
|
||||
|
||||
try {
|
||||
const response = await esClient.bulk<string>({
|
||||
pipeline: PIPELINE_NAME,
|
||||
pretty: true,
|
||||
body: [
|
||||
fileData.reduce((acc, item) => {
|
||||
if (!item.length) return acc;
|
||||
|
||||
return acc.concat(`{ "index" : { "_index" : "${indexName}" } }\n${item}\n`);
|
||||
}, ''),
|
||||
],
|
||||
});
|
||||
if (!response.errors) {
|
||||
log.info('Success.');
|
||||
} else {
|
||||
log.info(`Failed with errors.`);
|
||||
}
|
||||
} catch (error) {
|
||||
log.error('Error indexing data:');
|
||||
log.error(error);
|
||||
}
|
||||
};
|
||||
|
||||
const processFilesForEpisode = async ({
|
||||
esClient,
|
||||
epNum,
|
||||
log,
|
||||
}: {
|
||||
esClient: Client;
|
||||
epNum: string;
|
||||
log: ToolingLog;
|
||||
}) => {
|
||||
const dataFiles = fs
|
||||
.readdirSync(DIRECTORY_PATH)
|
||||
.filter((file) => file.includes(`ep${epNum}data.ndjson`));
|
||||
const alertFiles = fs
|
||||
.readdirSync(DIRECTORY_PATH)
|
||||
.filter((file) => file.includes(`ep${epNum}alerts.ndjson`));
|
||||
|
||||
for (const file of dataFiles) {
|
||||
await processFile({ esClient, file: path.join(DIRECTORY_PATH, file), indexType: 'data', log });
|
||||
}
|
||||
|
||||
for (const file of alertFiles) {
|
||||
await processFile({
|
||||
esClient,
|
||||
file: path.join(DIRECTORY_PATH, file),
|
||||
indexType: 'alerts',
|
||||
log,
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const checkRuleExistsAndStatus = async ({
|
||||
kbnClient,
|
||||
log,
|
||||
}: {
|
||||
kbnClient: KbnClient;
|
||||
log: ToolingLog;
|
||||
}) => {
|
||||
log.info("Checking if the rule 'Endpoint Security [Insights]' exists and its status...");
|
||||
|
||||
try {
|
||||
const rule = await getRule({ kbnClient, log });
|
||||
|
||||
if (!rule) {
|
||||
await importRule({ kbnClient, log });
|
||||
} else if (!rule.enabled) {
|
||||
await enableRule({ kbnClient, ruleId: rule.id, log });
|
||||
}
|
||||
} catch (error) {
|
||||
log.error('Error checking rule status:');
|
||||
log.error(error);
|
||||
}
|
||||
};
|
||||
|
||||
const checkDeleteIndices = async ({ esClient, log }: { esClient: Client; log: ToolingLog }) => {
|
||||
const promptDeletion = async (pattern: string, description: string) => {
|
||||
try {
|
||||
const response = await esClient.cat.indices({
|
||||
index: [pattern],
|
||||
h: 'index',
|
||||
});
|
||||
|
||||
const existingIndices = (response as unknown as string).trim().split(' ');
|
||||
|
||||
if (existingIndices.length > 0) {
|
||||
log.info(
|
||||
`Found existing ${description} indices matching pattern '${pattern}': ${existingIndices.join(
|
||||
', '
|
||||
)}`
|
||||
);
|
||||
} else {
|
||||
log.info(`No ${description} indices matching pattern '${pattern}' found.`);
|
||||
}
|
||||
} catch (error) {
|
||||
log.error(`Error checking or deleting ${description} indices:`);
|
||||
log.error(error);
|
||||
}
|
||||
};
|
||||
|
||||
await promptDeletion('logs-endpoint.events.insights.*', 'data');
|
||||
await promptDeletion('insights-alerts-*', 'alerts');
|
||||
};
|
||||
export const loadAttackDiscoveryData = async ({
|
||||
kbnClient,
|
||||
esClient,
|
||||
log,
|
||||
}: {
|
||||
kbnClient: KbnClient;
|
||||
esClient: Client;
|
||||
log: ToolingLog;
|
||||
}) => {
|
||||
await checkRuleExistsAndStatus({ kbnClient, log });
|
||||
await checkDeleteIndices({ esClient, log });
|
||||
await createPipeline({ esClient, log });
|
||||
|
||||
for (const epNum of ['1', '2']) {
|
||||
await processFilesForEpisode({ esClient, epNum, log });
|
||||
}
|
||||
|
||||
return null;
|
||||
};
|
|
@ -0,0 +1,9 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
// Load Kibana's shared Node setup first (presumably registers transpilation
// hooks so the TypeScript module below can be required directly — see
// src/setup_node_env).
require('../../../../../../src/setup_node_env');
// Run the Attack discovery data-loader CLI.
require('./attack_discovery').cli();
|
|
@ -0,0 +1,2 @@
|
|||
{"id":"7b75f973-e958-4a09-bc49-6dd05d88e7e4","updated_at":"2024-03-25T00:42:30.397Z","updated_by":"james.spiteri@elastic.co","created_at":"2024-03-25T00:39:58.881Z","created_by":"james.spiteri@elastic.co","name":"Endpoint Security [Insights]","tags":["Data Source: Elastic Defend"],"interval":"5m","enabled":false,"revision":2,"description":"Generates a detection alert each time an Elastic Endpoint Security alert is received. Enabling this rule allows you to immediately begin investigating your Endpoint alerts.","risk_score":47,"severity":"medium","license":"Elastic License v2","output_index":"","meta":{"from":"5m","kibana_siem_app_url":"https://ffa0e3abcb444c9ab93ffcefcbdb60a8.europe-west1.gcp.cloud.es.io/app/security"},"rule_name_override":"message","timestamp_override":"event.ingested","timestamp_override_fallback_disabled":false,"author":["Elastic"],"false_positives":[],"from":"now-600s","rule_id":"61e90241-c8f2-47bc-8e47-238420a34fb6","max_signals":10000,"risk_score_mapping":[{"field":"event.risk_score","operator":"equals","value":""}],"severity_mapping":[{"field":"event.severity","operator":"equals","severity":"low","value":"21"},{"field":"event.severity","operator":"equals","severity":"medium","value":"47"},{"field":"event.severity","operator":"equals","severity":"high","value":"73"},{"field":"event.severity","operator":"equals","severity":"critical","value":"99"}],"threat":[],"to":"now","references":[],"version":102,"exceptions_list":[],"immutable":false,"related_integrations":[],"required_fields":[],"setup":"","type":"query","language":"kuery","index":["insights-alerts-*"],"query":"event.kind:alert and event.module:(endpoint and not endgame)\n","filters":[],"actions":[]}
|
||||
{"exported_count":1,"exported_rules_count":1,"missing_rules":[],"missing_rules_count":0,"exported_exception_list_count":0,"exported_exception_list_item_count":0,"missing_exception_list_item_count":0,"missing_exception_list_items":[],"missing_exception_lists":[],"missing_exception_lists_count":0,"exported_action_connector_count":0,"missing_action_connection_count":0,"missing_action_connections":[],"excluded_action_connection_count":0,"excluded_action_connections":[]}
|
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because it is too large
Load diff
Loading…
Add table
Add a link
Reference in a new issue