[Security Solution] [Attack discovery] Display error messages for invalid Anonymization configurations (#214506)

This PR detects invalid Anonymization configurations when generating Attack discoveries.

It displays an error message when:

- The Security AI Anonymization settings are configured to not allow _any_ fields, as reported in <https://github.com/elastic/kibana/issues/214310>
- The Security AI Anonymization settings are configured to not allow the `_id` field, as reported by @aarju
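In short, Attack discovery generation now fails fast in both cases. The new `throwIfInvalidAnonymization` helper (added in the diff below) boils down to the two checks in this condensed sketch:

```typescript
import { AnonymizationFieldResponse } from '@kbn/elastic-assistant-common/impl/schemas/anonymization_fields/bulk_crud_anonymization_fields_route.gen';
import * as i18n from './translations';

export const throwIfInvalidAnonymization = (
  anonymizationFields: AnonymizationFieldResponse[]
): void => {
  // error when the settings allow NO fields at all:
  if (
    anonymizationFields.length === 0 ||
    anonymizationFields.every((field) => field.allowed === false)
  ) {
    throw new Error(i18n.NO_FIELDS_ALLOWED);
  }

  // error when the _id field is missing, or NOT allowed:
  const idField = anonymizationFields.find((field) => field.field === '_id');
  if (idField == null || idField.allowed === false) {
    throw new Error(i18n.ID_FIELD_REQUIRED);
  }
};
```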

### Out of scope: detecting configurations that don't include enough useful fields

The default fields in the Security AI Anonymization settings were chosen because they are most likely to provide relevant context for the AI Assistant and Attack discovery.

However, there isn't a well-defined threshold for the minimum set of useful fields.

For example, Attack discovery may still produce useful results (depending on the data) if the `user.name`, `host.name`, and `source.ip` fields are not allowed, but in most cases omitting such important fields will reduce the quality of the results.

Another example: a configuration that allows **only two** fields, for example `_id` and `user.name`, is _valid_, but NOT _useful_ (see the sketch after the list below).

- Detecting configurations that don't include enough _useful_ fields is beyond the scope of this PR
- Configurations that **only** allow the `_id` field are _valid_, but not _useful_
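To make that concrete, the configuration sketched below passes the new validation (no error is displayed), even though it allows too few fields to produce high-quality results. The entries are abbreviated; real `AnonymizationFieldResponse` records also carry `id`, `namespace`, and timestamp metadata, as shown in the unit tests later in this diff:

```typescript
// Valid (passes throwIfInvalidAnonymization), but NOT useful:
// only the _id and user.name fields are allowed; everything else is denied.
const sparseButValidConfiguration = [
  { field: '_id', allowed: true, anonymized: false },
  { field: 'user.name', allowed: true, anonymized: true },
  { field: 'host.name', allowed: false, anonymized: true },
  { field: 'source.ip', allowed: false, anonymized: true },
];
```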

#### Desk testing

1. Navigate to Stack Management > AI Assistants > Security

2. Configure the Security AI Anonymization settings to deny all fields.

Note: At the time of this writing, using the bulk actions to update all `102` default fields may result in an `Unable to load page` error below the table when the changes are saved; refreshing the page reveals that the settings were not actually updated. As a workaround for this separate, unrelated issue, apply bulk actions to one page of fields at a time, and refresh the page after saving to verify the update(s) before continuing to the next step.

3. Navigate to Security > Attack discovery

4. Click `Generate`

**Expected result**

- The following error message is displayed:

```
Your Security AI Anonymization settings are configured to not allow any fields. Fields must be allowed to generate Attack discoveries.
```

as illustrated by the screenshot below:

![no fields allowed error](https://github.com/user-attachments/assets/37933ffa-4ed1-4280-8b77-f2300c6736b2)

5. Once again, navigate to Stack Management > AI Assistants > Security

6. Allow all the (`102`) default fields

7. Once again, navigate to Security > Attack discovery

8. Click `Generate`

**Expected result**

- Attack discoveries are generated

9. Navigate back to Stack Management > AI Assistants > Security

10. Configure the `_id` field and another (arbitrary) field to NOT be allowed

11. Navigate back to Security > Attack discovery

12. Click `Generate`

**Expected result**

- The following error message is displayed:

```
Your Security AI Anonymization settings are configured to not allow the _id field. The _id field must be allowed to generate Attack discoveries.
```

as illustrated by the screenshot below:

![_id not allowed error](https://github.com/user-attachments/assets/5ac296b1-3380-4aa0-b791-2848f6a1103c)
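The desk-test scenarios above map directly onto the new unit tests (shown in full in the diff below). Reusing the fixtures and imports defined in that test file, the three outcomes are, in outline:

```typescript
// Steps 2-4: all fields are denied, so generation fails with the "no fields allowed" error
expect(() =>
  throwIfInvalidAnonymization([userNameFieldNotAllowed, idFieldNotAllowed])
).toThrowError(i18n.NO_FIELDS_ALLOWED);

// Steps 5-8: the defaults (including _id) are allowed, so no error is thrown
expect(() =>
  throwIfInvalidAnonymization([{ ...idFieldNotAllowed, allowed: true }, userNameFieldAllowed])
).not.toThrow();

// Steps 9-12: _id is denied while another field is still allowed, so generation fails with the _id error
expect(() =>
  throwIfInvalidAnonymization([userNameFieldAllowed, idFieldNotAllowed])
).toThrowError(i18n.ID_FIELD_REQUIRED);
```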
4 changed files with 152 additions and 0 deletions

@@ -22,6 +22,7 @@ import {
} from '../../../../../lib/attack_discovery/graphs/default_attack_discovery_graph/constants';
import { GraphState } from '../../../../../lib/attack_discovery/graphs/default_attack_discovery_graph/types';
import { throwIfErrorCountsExceeded } from '../throw_if_error_counts_exceeded';
import { throwIfInvalidAnonymization } from '../throw_if_invalid_anonymization';
import { getLlmType } from '../../../../utils';
import { getAttackDiscoveryPrompts } from '../../../../../lib/attack_discovery/graphs/default_attack_discovery_graph/nodes/helpers/prompts';
@@ -63,6 +64,8 @@ export const invokeAttackDiscoveryGraph = async ({
  anonymizedAlerts: Document[];
  attackDiscoveries: AttackDiscovery[] | null;
}> => {
  throwIfInvalidAnonymization(anonymizationFields);

  const llmType = getLlmType(apiConfig.actionTypeId);
  const model = apiConfig.model;
  const tags = [ATTACK_DISCOVERY_TAG, llmType, model].flatMap((tag) => tag ?? []);

@@ -0,0 +1,96 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
 */

import { AnonymizationFieldResponse } from '@kbn/elastic-assistant-common/impl/schemas/anonymization_fields/bulk_crud_anonymization_fields_route.gen';

import { throwIfInvalidAnonymization } from '.';
import * as i18n from './translations';

const userNameFieldNotAllowed: AnonymizationFieldResponse = {
  allowed: false, // <-- This field is NOT allowed
  anonymized: true,
  createdAt: '2025-03-13T06:07:34.493Z',
  field: 'user.name',
  id: 'kB8djpUBwtgi0OSKhvlf',
  namespace: 'default',
  timestamp: '2025-03-13T06:07:34.493Z',
  updatedAt: '2025-03-13T20:33:58.283Z',
};

const userNameFieldAllowed: AnonymizationFieldResponse = {
  ...userNameFieldNotAllowed,
  allowed: true, // <-- the user.name field IS allowed
};

const idFieldNotAllowed: AnonymizationFieldResponse = {
  allowed: false, // <-- This field is NOT allowed
  anonymized: false,
  createdAt: '2025-03-13T20:45:41.877Z',
  field: '_id',
  id: 'TShBkZUBT8Bn3CFdeOR3',
  namespace: 'default',
  timestamp: '2025-03-13T20:45:41.877Z',
  updatedAt: '2025-03-13T20:33:58.283Z',
};
describe('throwIfInvalidAnonymization', () => {
  it('throws when the anonymizationFields are empty', () => {
    const emptyAnonymizationFields: AnonymizationFieldResponse[] = [];

    expect(() => {
      throwIfInvalidAnonymization(emptyAnonymizationFields);
    }).toThrowError(i18n.NO_FIELDS_ALLOWED);
  });

  it('throws when all fields are NOT allowed', () => {
    const anonymizationFields: AnonymizationFieldResponse[] = [
      userNameFieldNotAllowed,
      idFieldNotAllowed,
    ];

    expect(() => {
      throwIfInvalidAnonymization(anonymizationFields);
    }).toThrowError(i18n.NO_FIELDS_ALLOWED);
  });

  it('throws when the _id field is NOT included', () => {
    const idFieldNotIncluded: AnonymizationFieldResponse[] = [
      userNameFieldAllowed, // <-- at least one field is allowed
    ];

    expect(() => {
      throwIfInvalidAnonymization(idFieldNotIncluded);
    }).toThrowError(i18n.ID_FIELD_REQUIRED);
  });

  it('throws when the _id field is NOT allowed', () => {
    const anonymizationFields: AnonymizationFieldResponse[] = [
      userNameFieldAllowed, // <-- at least one field is allowed
      idFieldNotAllowed,
    ];

    expect(() => {
      throwIfInvalidAnonymization(anonymizationFields);
    }).toThrowError(i18n.ID_FIELD_REQUIRED);
  });

  it('does NOT throw when the _id field is allowed', () => {
    const idFieldAllowed: AnonymizationFieldResponse = {
      ...idFieldNotAllowed,
      allowed: true, // <-- the _id field is allowed
    };

    const anonymizationFields: AnonymizationFieldResponse[] = [
      idFieldAllowed,
      userNameFieldNotAllowed,
    ];

    expect(() => {
      throwIfInvalidAnonymization(anonymizationFields);
    }).not.toThrow();
  });
});

@@ -0,0 +1,29 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
 */

import { AnonymizationFieldResponse } from '@kbn/elastic-assistant-common/impl/schemas/anonymization_fields/bulk_crud_anonymization_fields_route.gen';

import * as i18n from './translations';

export const throwIfInvalidAnonymization = (
  anonymizationFields: AnonymizationFieldResponse[]
): void => {
  const idField = anonymizationFields.find((field) => field.field === '_id');

  // no fields allowed:
  if (
    anonymizationFields.length === 0 ||
    anonymizationFields.every((field) => field.allowed === false)
  ) {
    throw new Error(i18n.NO_FIELDS_ALLOWED);
  }

  // _id field NOT included, or NOT allowed:
  if (idField == null || idField.allowed === false) {
    throw new Error(i18n.ID_FIELD_REQUIRED);
  }
};
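For completeness, a minimal usage sketch of the helper above. The field entries are abbreviated for readability (hence the cast); real records also include the `id`, `namespace`, and timestamp metadata shown in the unit tests:

```typescript
import { AnonymizationFieldResponse } from '@kbn/elastic-assistant-common/impl/schemas/anonymization_fields/bulk_crud_anonymization_fields_route.gen';
import { throwIfInvalidAnonymization } from '.';
import * as i18n from './translations';

// _id is present but NOT allowed, so validation fails before the graph is invoked:
const onlyUserNameAllowed = [
  { field: 'user.name', allowed: true, anonymized: true },
  { field: '_id', allowed: false, anonymized: false },
] as unknown as AnonymizationFieldResponse[]; // cast only because the entries are abbreviated

try {
  throwIfInvalidAnonymization(onlyUserNameAllowed);
} catch (e) {
  // (e as Error).message === i18n.ID_FIELD_REQUIRED
}
```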

@@ -0,0 +1,24 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
 */

import { i18n } from '@kbn/i18n';

export const NO_FIELDS_ALLOWED = i18n.translate(
  'xpack.elasticAssistantPlugin.attackDiscovery.defaultAttackDiscoveryGraph.nodes.retriever.helpers.throwIfInvalidAnonymization.noFieldsAllowedErrorMessage',
  {
    defaultMessage:
      'Your Security AI Anonymization settings are configured to not allow any fields. Fields must be allowed to generate Attack discoveries.',
  }
);

export const ID_FIELD_REQUIRED = i18n.translate(
  'xpack.elasticAssistantPlugin.attackDiscovery.defaultAttackDiscoveryGraph.nodes.retriever.helpers.throwIfInvalidAnonymization.idFieldRequiredErrorMessage',
  {
    defaultMessage:
      'Your Security AI Anonymization settings are configured to not allow the _id field. The _id field must be allowed to generate Attack discoveries.',
  }
);