[Security solution] Script to add ebt fields to the correct security solution data view in staging (#184259)
This commit is contained in:
parent dfbfe050d6
commit 7d7deae332

8 changed files with 356 additions and 2 deletions
@@ -19,7 +19,7 @@ export const alertsGroupingToggledEvent: TelemetryEvent = {
       },
     },
     tableId: {
-      type: 'text',
+      type: 'keyword',
       _meta: {
         description: 'Table ID',
         optional: false,
@@ -0,0 +1,27 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { telemetryEvents } from './telemetry_events';

describe('telemetry events', () => {
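  // All of these events land in a single shared EBT data view, so a schema property
  // that appears in more than one event must always be declared with the same type.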
  it('ensure properties have consistent types', () => {
    const propertyTypes: Record<string, string> = {};
    telemetryEvents.forEach((event) => {
      expect(event).toHaveProperty('eventType');
      expect(event).toHaveProperty('schema'); // schema is an object
      Object.keys(event.schema).forEach((item) => {
        // @ts-ignore
        const eventType = event.schema[item].type;
        if (!propertyTypes[item]) {
          propertyTypes[item] = eventType;
        } else {
          expect(propertyTypes[item]).toEqual(eventType);
        }
      });
    });
  });
});
@@ -0,0 +1,38 @@
## Telemetry data view generation script

This script maps telemetry fields to runtime fields on the appropriate security solution ebt data views on the staging cluster, automating the addition of new fields to the data views.

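For orientation, this is roughly the transformation applied to each schema field. A minimal sketch: the sample field below is illustrative, while the `properties.` prefix and the type remapping come from `build_ebt_data_view.ts`.

```ts
// A sample schema entry as it appears in the telemetry event definitions.
const sampleSchema = {
  tableId: { type: 'text', _meta: { description: 'Table ID', optional: false } },
};

// EBT schema types that are not valid runtime field types get remapped by the script.
const valueMap: Record<string, string> = { text: 'keyword', integer: 'long' };

// The field is flattened to "tableId" and upserted as a runtime field named
// "properties.tableId" with the remapped type ("keyword" in this case).
const runtimeFieldPayload = {
  name: 'properties.tableId',
  runtimeField: { type: valueMap[sampleSchema.tableId.type] ?? sampleSchema.tableId.type },
};
```
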
### Events

- The browser ebt events come from `telemetryEvents`, imported from `x-pack/plugins/security_solution/public/common/lib/telemetry/events/telemetry_events`
- The server ebt events come from:
  - `events`, imported from `x-pack/plugins/elastic-assistant/server/lib/telemetry/event_based_telemetry`
  - `telemetryEvents`, imported from `x-pack/plugins/security_solution_serverless/server/telemetry/event_based_telemetry`
  - `events`, imported from `x-pack/plugins/security_solution/server/lib/telemetry/event_based/events`

If you have further events that should be included in the data views, please update the script to include their event schemas.

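For reference, an event definition the script consumes has roughly this shape. The event type and field names below are hypothetical; the `eventType`/`schema`/`_meta` structure mirrors the existing events.

```ts
// Hypothetical event entry; only the shape (eventType plus a schema of typed,
// documented fields) is taken from the existing telemetry events.
const myNewEvent = {
  eventType: 'My Hypothetical Event',
  schema: {
    resultCount: {
      type: 'long',
      _meta: { description: 'Number of results returned', optional: false },
    },
  },
};
```
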
### Usage

1. Log in with Vault (`vault login -method github`) and ensure you have siem-team access. If you have never accessed Vault before, follow [these instructions](https://github.com/elastic/infra/blob/master/docs/vault/README.md)
2. cd into this directory
3. Run the script with the appropriate arguments. By default, the script runs for the `security-solution-ebt-kibana-browser` data view in the `securitysolution` space. To run it for the server data view, pass the `--telemetry_type` argument with the value `server`.

```bash
# Run the script for the security-solution-ebt-kibana-browser data view
./build_ebt_data_view.sh

# Run the script for the security-solution-ebt-server data view
./build_ebt_data_view.sh --telemetry_type=server
```

### Data view recovery

If a security solution ebt data view is deleted for some reason, upload the saved object committed in this directory to the staging cluster. This recreates the data view with the correct mappings; you can then run this script to ensure any new fields are added.

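If you prefer the API over the Saved Objects UI, the upload can be sketched as follows. This is only a sketch: it assumes the `form-data` package, and the `.ndjson` path, `kibanaUrl`, and `apiKey` are placeholders to replace with the real values.

```ts
import fs from 'fs';
import axios from 'axios';
import FormData from 'form-data';

// Sketch: import the committed saved object via Kibana's saved objects import API.
async function importDataViewSavedObject(kibanaUrl: string, apiKey: string) {
  const form = new FormData();
  // Placeholder path; use the saved object file committed in this directory.
  form.append('file', fs.createReadStream('path/to/committed-data-view.ndjson'));
  await axios.post(
    `${kibanaUrl}/s/securitysolution/api/saved_objects/_import?overwrite=true`,
    form,
    {
      headers: {
        ...form.getHeaders(),
        Authorization: `ApiKey ${apiKey}`,
        'kbn-xsrf': 'xxx',
      },
    }
  );
}
```
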
#### Why upload the saved object? Why not just run this script?

Some fields are not covered by this script, at least for the security-solution-ebt-kibana-browser data view (for example, `day_of_week`), and their origin is unclear. To be safe, the data view saved objects will be updated each minor release to ensure that all fields are covered.

### Production data views

This script manages the staging data views. To update the production data views, export the saved objects from staging and upload them to production.
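As a sketch of that export step (using Kibana's saved objects export API; `kibanaUrl` and `apiKey` are placeholders for the staging values), the data views in the space can be pulled like this and then imported into production with the same kind of import call shown above.

```ts
import fs from 'fs';
import axios from 'axios';

// Sketch: export the data view saved objects (index-pattern type) from the
// securitysolution space on staging as an .ndjson file.
async function exportDataViews(kibanaUrl: string, apiKey: string) {
  const response = await axios.post(
    `${kibanaUrl}/s/securitysolution/api/saved_objects/_export`,
    { type: ['index-pattern'], includeReferencesDeep: true },
    {
      headers: {
        Authorization: `ApiKey ${apiKey}`,
        'kbn-xsrf': 'xxx',
        'Content-Type': 'application/json',
      },
      responseType: 'text',
    }
  );
  fs.writeFileSync('ebt-data-views.ndjson', response.data);
}
```
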
@@ -0,0 +1,75 @@
#!/usr/bin/env sh

#
# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
# or more contributor license agreements. Licensed under the Elastic License
# 2.0; you may not use this file except in compliance with the Elastic License
# 2.0.
#

# Set default values for optional arguments if not provided
space_id="securitysolution"
telemetry_type="browser"

# Function to read from Vault and check for permission errors
vault_read() {
  local secret_path=$1
  local field=$2
  output=$(vault read --field="$field" "$secret_path" 2>&1)
  if echo "$output" | grep -q "permission denied"; then
    echo "Error: Permission denied. Please log in to Vault and ensure you have siem-team access: https://github.com/elastic/infra/blob/master/docs/vault/README.md" >&2
    exit 1
  fi
  echo "$output"
}

# Fetch the Kibana URL from Vault and check it is defined
kibana_url=$(vault_read secret/siem-team/elastic-cloud/telemetry-v2-staging url)
if [ -z "$kibana_url" ]; then
  echo "Error: kibana_url is a mandatory argument." >&2
  exit 1
fi

# Fetch the API key from Vault and check it is defined
api_key=$(vault_read secret/siem-team/elastic-cloud/telemetry-v2-staging api_key)
if [ -z "$api_key" ]; then
  echo "Error: api_key is a mandatory argument." >&2
  exit 1
fi


# Parse named arguments (these override the values fetched from Vault)
while [ "$#" -gt 0 ]; do
  case "$1" in
    --api_key=*)
      api_key="${1#*=}"
      ;;
    --kibana_url=*)
      kibana_url="${1#*=}"
      ;;
    --space_id=*)
      space_id="${1#*=}"
      ;;
    --telemetry_type=*)
      telemetry_type="${1#*=}"
      ;;
    *)
      echo "Error: Invalid argument: $1" >&2
      exit 1
      ;;
  esac
  shift
done


# Validate telemetry_type
if [ "$telemetry_type" != "browser" ] && [ "$telemetry_type" != "server" ]; then
  echo "Error: telemetry_type must be either 'browser' or 'server'." >&2
  exit 1
fi

npx ts-node "$(dirname "${0}")/build_ebt_data_view.ts" \
  --api_key="$api_key" \
  --kibana_url="$kibana_url" \
  --space_id="$space_id" \
  --telemetry_type="$telemetry_type"
@@ -0,0 +1,210 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { ToolingLog } from '@kbn/tooling-log';
import axios from 'axios';
import { events as genAiEvents } from '@kbn/elastic-assistant-plugin/server/lib/telemetry/event_based_telemetry';

import { events as securityEvents } from '../../../../../server/lib/telemetry/event_based/events';
import { telemetryEvents } from '../events/telemetry_events';
// uncomment and add to run the script, but do not commit as it creates a circular dependency
// import { telemetryEvents as serverlessEvents } from '@kbn/security-solution-serverless/server/telemetry/event_based_telemetry';

const logger = new ToolingLog({
  level: 'info',
  writeTo: process.stdout,
});

cli()
  .then(() => logger.success('End ebt data view update'))
  .catch((e) => logger.error(e));

async function cli(): Promise<void> {
  logger.info(`Begin ebt data view update`);
  const args = process.argv.slice(2); // Ignore first two arguments (path to node and script file)

  const namedArgs: { [key: string]: string } = {};

  // Parse named arguments
  args.forEach((arg) => {
    // this strategy ensures that the value can contain an equals sign
    const [key, ...valueParts] = arg.split('=');
    const value = valueParts.join('=');
    namedArgs[key.replace('--', '')] = value;
  });

  // Access named arguments
  const {
    api_key: apiKey,
    kibana_url: kibanaUrl,
    space_id: spaceId,
    telemetry_type: telemetryType,
  } = namedArgs;
  // writes to either the browser or server side security solution data view
  const dataViewName = `security-solution-ebt-kibana-${telemetryType}`;
  logger.info(`API key: ${apiKey}`);
  logger.info(`Kibana URL: ${kibanaUrl}`);
  logger.info(`Space ID: ${spaceId}`);
  logger.info(`Data view name: ${dataViewName}`);
  const requestHeaders = {
    Authorization: `ApiKey ${apiKey}`,
    'kbn-xsrf': 'xxx',
    'Content-Type': 'application/json',
  };
  const dataViewApiUrl = `${removeTrailingSlash(kibanaUrl)}/s/${spaceId}/api/data_views`;

  try {
    logger.info(`Fetching data view "${dataViewName}"...`);
    const {
      data: { data_view: dataViews },
    } = await axios.get(dataViewApiUrl, {
      headers: requestHeaders,
    });
    const ourDataView = dataViews.find(
      (dataView: { id: string; name: string }) => dataView.name === dataViewName
    );

    if (!ourDataView) {
      throw new Error(
        `Data view "${dataViewName}" not found, check your data view is spelled correctly and is defined in the ${spaceId} space`
      );
    }

    logger.info(`Data view "${dataViewName}" has been fetched`);
    const runtimeFields: Record<string, string> = {};
    const manualRuntimeFields: Record<string, string> = {};
    const valueMap: Record<string, string> = {
      // actual allowed values
      boolean: 'boolean',
      composite: 'composite',
      date: 'date',
      double: 'double',
      geo_point: 'geo_point',
      ip: 'ip',
      keyword: 'keyword',
      long: 'long',
      lookup: 'lookup',
      // custom mapped
      text: 'keyword',
      integer: 'long',
    };
    const allowedValues = Object.keys(valueMap);

    const events =
      telemetryType === 'browser'
        ? telemetryEvents
        : // serverside events, uncomment the serverlessEvents import above for all events
          [...genAiEvents, ...securityEvents]; // ...serverlessEvents

    events.forEach((event) => {
      const newProps = flattenSchema(event.schema);
      Object.entries(newProps).forEach(([key, value]) => {
        if (!runtimeFields[key] && allowedValues.includes(value)) {
          runtimeFields[key] = valueMap[value];
        } else if (!allowedValues.includes(value) && !manualRuntimeFields[key]) {
          manualRuntimeFields[key] = value;
        }
      });
    });

    const runtimeFieldUrl = `${dataViewApiUrl}/data_view/${ourDataView.id}/runtime_field`;
    await upsertRuntimeFields(runtimeFields, runtimeFieldUrl, requestHeaders);
    const manualFieldLength = Object.keys(manualRuntimeFields).length;
    const runtimeFieldLength = Object.keys(runtimeFields).length;
    if (runtimeFieldLength > 0) {
      logger.info(
        `Data view "${dataViewName}" has been updated with ${runtimeFieldLength} runtime fields`
      );
    }

    if (manualFieldLength > 0) {
      logger.info(
        `The following ${manualFieldLength} fields have non-standard types and will need to be manually updated: ${JSON.stringify(
          manualRuntimeFields,
          null,
          2
        )}`
      );
    }
  } catch (e) {
    logger.error(`Error updating data view "${dataViewName}" - ${e}`);
    throw e;
  }
}

function removeTrailingSlash(url: string) {
  if (url.endsWith('/')) {
    return url.slice(0, -1);
  } else {
    return url;
  }
}
interface NestedObject {
  [key: string]: { type?: string; properties?: NestedObject };
}

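// Flattens a (possibly nested) event schema into dot-delimited property names mapped to
// their declared types. Example: { result: { properties: { total: { type: 'long' } } } }
// flattens to { 'result.total': 'long' }.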
function flattenSchema(inputObj: NestedObject): { [key: string]: string } {
  const result: { [key: string]: string } = {};
  const queue: Array<{ obj: NestedObject; prefix: string }> = [{ obj: inputObj, prefix: '' }];
  while (queue.length > 0) {
    // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
    const { obj, prefix } = queue.shift()!;
    for (const key in obj) {
      if (typeof obj[key] === 'object' && obj[key] !== null) {
        if ('type' in obj[key]) {
          const newKey = `${prefix}${key}`;
          // @ts-ignore
          result[newKey] = obj[key].type;
        } else if (obj[key].properties) {
          const nestedObj = obj[key].properties;
          const nestedPrefix = `${prefix}${key}.`;
          // @ts-ignore
          queue.push({ obj: nestedObj, prefix: nestedPrefix });
        } else if (obj[key]) {
          const nestedObj = obj[key];
          const nestedPrefix = `${prefix}${key}.`;
          // @ts-ignore
          queue.push({ obj: nestedObj, prefix: nestedPrefix });
        }
      }
    }
  }
  return result;
}

async function upsertRuntimeFields(
  fields: { [key: string]: string },
  requestUrl: string,
  requestHeaders: { [key: string]: string }
) {
  for (const fieldName in fields) {
    if (typeof fields[fieldName] === 'string') {
      const fieldType = fields[fieldName];
      const payload = {
        name: `properties.${fieldName}`,
        runtimeField: {
          type: fieldType,
        },
      };

      try {
        await axios.put(requestUrl, payload, {
          headers: requestHeaders,
        });
      } catch (error) {
        throw new Error(
          `Error upserting field ${fieldName}: ${JSON.stringify(
            error.response ? error.response.data : error.message
          )}`
        );
      }
    }
  }
}
File diff suppressed because one or more lines are too long
@@ -0,0 +1,2 @@
{"attributes":{"allowHidden":false,"fieldAttrs":"{\"properties.model\":{},\"properties.resourceAccessed\":{},\"properties.resultCount\":{},\"properties.responseTime\":{},\"properties.errorMessage\":{},\"properties.isEnabledKnowledgeBase\":{},\"properties.isEnabledRAGAlerts\":{},\"properties.assistantStreamingEnabled\":{},\"properties.actionTypeId\":{},\"properties.message\":{},\"properties.productTier\":{},\"properties.failedToDeleteCount\":{},\"properties.totalInstalledCount\":{},\"properties.scoresWritten\":{},\"properties.taskDurationInSeconds\":{},\"properties.interval\":{},\"properties.alertSampleSizePerShard\":{},\"properties.status\":{},\"properties.processing.startTime\":{},\"properties.processing.endTime\":{},\"properties.processing.tookMs\":{},\"properties.result.successful\":{},\"properties.result.failed\":{},\"properties.result.total\":{}}","fieldFormatMap":"{}","fields":"[]","name":"security-solution-ebt-kibana-server","runtimeFieldMap":"{\"properties.model\":{\"type\":\"keyword\"},\"properties.resourceAccessed\":{\"type\":\"keyword\"},\"properties.resultCount\":{\"type\":\"long\"},\"properties.responseTime\":{\"type\":\"long\"},\"properties.errorMessage\":{\"type\":\"keyword\"},\"properties.isEnabledKnowledgeBase\":{\"type\":\"boolean\"},\"properties.isEnabledRAGAlerts\":{\"type\":\"boolean\"},\"properties.assistantStreamingEnabled\":{\"type\":\"boolean\"},\"properties.actionTypeId\":{\"type\":\"keyword\"},\"properties.message\":{\"type\":\"keyword\"},\"properties.productTier\":{\"type\":\"keyword\"},\"properties.failedToDeleteCount\":{\"type\":\"long\"},\"properties.totalInstalledCount\":{\"type\":\"long\"},\"properties.scoresWritten\":{\"type\":\"long\"},\"properties.taskDurationInSeconds\":{\"type\":\"long\"},\"properties.interval\":{\"type\":\"keyword\"},\"properties.alertSampleSizePerShard\":{\"type\":\"long\"},\"properties.status\":{\"type\":\"keyword\"},\"properties.processing.startTime\":{\"type\":\"date\"},\"properties.processing.endTime\":{\"type\":\"date\"},\"properties.processing.tookMs\":{\"type\":\"long\"},\"properties.result.successful\":{\"type\":\"long\"},\"properties.result.failed\":{\"type\":\"long\"},\"properties.result.total\":{\"type\":\"long\"}}","sourceFilters":"[]","timeFieldName":"timestamp","title":"ebt-kibana-server"},"coreMigrationVersion":"8.8.0","created_at":"2024-05-24T16:08:01.010Z","id":"1f69f020-6e7d-4d19-bbad-0b052bddf552","managed":false,"references":[],"type":"index-pattern","typeMigrationVersion":"8.0.0","updated_at":"2024-05-24T16:12:57.199Z","version":"WzI5NDY2LDVd"}
{"excludedObjects":[],"excludedObjectsCount":0,"exportedCount":1,"missingRefCount":0,"missingReferences":[]}
@@ -205,6 +205,6 @@
     "@kbn/langchain",
     "@kbn/core-analytics-browser",
     "@kbn/core-i18n-browser",
-    "@kbn/core-theme-browser"
+    "@kbn/core-theme-browser",
   ]
 }