[8.18] [Security AI] Move security-ai-prompt to package (#208787) (#209196)

# Backport

This will backport the following commits from `main` to `8.18`:
- [[Security AI] Move `security-ai-prompt` to package
(#208787)](https://github.com/elastic/kibana/pull/208787)

<!--- Backport version: 9.6.4 -->

### Questions?
Please refer to the [Backport tool
documentation](https://github.com/sorenlouv/backport)

<!--BACKPORT [{"author":{"name":"Steph
Milovic","email":"stephanie.milovic@elastic.co"},"sourceCommit":{"committedDate":"2025-01-30T16:34:30Z","message":"[Security
AI] Move `security-ai-prompt` to package
(#208787)","sha":"b0a72e2b9086728d98f4176d1692741f4df62333","branchLabelMapping":{"^v9.0.0$":"main","^v8.18.0$":"8.x","^v(\\d+).(\\d+).\\d+$":"$1.$2"}},"sourcePullRequest":{"labels":["release_note:skip","v9.0.0","Team:
SecuritySolution","backport:prev-minor","Team:Security Generative
AI","v8.18.0"],"title":"[Security AI] Move `security-ai-prompt` to
package","number":208787,"url":"https://github.com/elastic/kibana/pull/208787","mergeCommit":{"message":"[Security
AI] Move `security-ai-prompt` to package
(#208787)","sha":"b0a72e2b9086728d98f4176d1692741f4df62333"}},"sourceBranch":"main","suggestedTargetBranches":[],"targetPullRequestStates":[{"branch":"main","label":"v9.0.0","branchLabelMappingKey":"^v9.0.0$","isSourceBranch":true,"state":"MERGED","url":"https://github.com/elastic/kibana/pull/208787","number":208787,"mergeCommit":{"message":"[Security
AI] Move `security-ai-prompt` to package
(#208787)","sha":"b0a72e2b9086728d98f4176d1692741f4df62333"}},{"branch":"8.x","label":"v8.18.0","branchLabelMappingKey":"^v8.18.0$","isSourceBranch":false,"url":"https://github.com/elastic/kibana/pull/208988","number":208988,"state":"MERGED","mergeCommit":{"sha":"1a0b65c0a571024a07a99d6464e8484a274d2f16","message":"[8.x]
[Security AI] Move `security-ai-prompt` to package (#208787)
(#208988)\n\n# Backport\n\nThis will backport the following commits from
`main` to `8.x`:\n- [[Security AI] Move &#x60;security-ai-prompt&#x60;
to
package\n(#208787)](https://github.com/elastic/kibana/pull/208787)\n\n<!---
Backport version: 9.6.4 -->\n\n### Questions ?\nPlease refer to the
[Backport
tool\ndocumentation](https://github.com/sorenlouv/backport)\n\n<!--BACKPORT
[{\"author\":{\"name\":\"Steph\nMilovic\",\"email\":\"stephanie.milovic@elastic.co\"},\"sourceCommit\":{\"committedDate\":\"2025-01-30T16:34:30Z\",\"message\":\"[Security\nAI]
Move `security-ai-prompt` to
package\n(#208787)\",\"sha\":\"b0a72e2b9086728d98f4176d1692741f4df62333\",\"branchLabelMapping\":{\"^v9.0.0$\":\"main\",\"^v8.18.0$\":\"8.x\",\"^v(\\\\d+).(\\\\d+).\\\\d+$\":\"$1.$2\"}},\"sourcePullRequest\":{\"labels\":[\"release_note:skip\",\"v9.0.0\",\"Team:\nSecuritySolution\",\"backport:prev-minor\",\"Team:Security
Generative\nAI\",\"v8.18.0\"],\"title\":\"[Security AI] Move
`security-ai-prompt`
to\npackage\",\"number\":208787,\"url\":\"https://github.com/elastic/kibana/pull/208787\",\"mergeCommit\":{\"message\":\"[Security\nAI]
Move `security-ai-prompt` to
package\n(#208787)\",\"sha\":\"b0a72e2b9086728d98f4176d1692741f4df62333\"}},\"sourceBranch\":\"main\",\"suggestedTargetBranches\":[\"8.x\"],\"targetPullRequestStates\":[{\"branch\":\"main\",\"label\":\"v9.0.0\",\"branchLabelMappingKey\":\"^v9.0.0$\",\"isSourceBranch\":true,\"state\":\"MERGED\",\"url\":\"https://github.com/elastic/kibana/pull/208787\",\"number\":208787,\"mergeCommit\":{\"message\":\"[Security\nAI]
Move `security-ai-prompt` to
package\n(#208787)\",\"sha\":\"b0a72e2b9086728d98f4176d1692741f4df62333\"}},{\"branch\":\"8.x\",\"label\":\"v8.18.0\",\"branchLabelMappingKey\":\"^v8.18.0$\",\"isSourceBranch\":false,\"state\":\"NOT_CREATED\"}]}]\nBACKPORT-->\n\n---------\n\nCo-authored-by:
kibanamachine
<42973632+kibanamachine@users.noreply.github.com>\nCo-authored-by:
Elastic Machine <elasticmachine@users.noreply.github.com>"}}]}]
BACKPORT-->

---------

Co-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com>
Co-authored-by: Elastic Machine <elasticmachine@users.noreply.github.com>
This commit is contained in:
Steph Milovic 2025-02-02 17:02:13 -07:00 committed by GitHub
parent 93a7f478ab
commit 7a3dd3e2d9
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
25 changed files with 1210 additions and 318 deletions

778
.github/CODEOWNERS vendored

File diff suppressed because it is too large Load diff

View file

@ -818,6 +818,7 @@
"@kbn/search-synonyms": "link:x-pack/solutions/search/plugins/search_synonyms",
"@kbn/search-types": "link:src/platform/packages/shared/kbn-search-types",
"@kbn/searchprofiler-plugin": "link:x-pack/platform/plugins/shared/searchprofiler",
"@kbn/security-ai-prompts": "link:x-pack/solutions/security/packages/security-ai-prompts",
"@kbn/security-api-key-management": "link:x-pack/platform/packages/shared/security/api_key_management",
"@kbn/security-authorization-core": "link:x-pack/platform/packages/private/security/authorization_core",
"@kbn/security-authorization-core-common": "link:x-pack/platform/packages/private/security/authorization_core_common",

View file

@ -1624,6 +1624,8 @@
"@kbn/search-types/*": ["src/platform/packages/shared/kbn-search-types/*"],
"@kbn/searchprofiler-plugin": ["x-pack/platform/plugins/shared/searchprofiler"],
"@kbn/searchprofiler-plugin/*": ["x-pack/platform/plugins/shared/searchprofiler/*"],
"@kbn/security-ai-prompts": ["x-pack/solutions/security/packages/security-ai-prompts"],
"@kbn/security-ai-prompts/*": ["x-pack/solutions/security/packages/security-ai-prompts/*"],
"@kbn/security-api-integration-helpers": ["x-pack/test/security_api_integration/packages/helpers"],
"@kbn/security-api-integration-helpers/*": ["x-pack/test/security_api_integration/packages/helpers/*"],
"@kbn/security-api-key-management": ["x-pack/platform/packages/shared/security/api_key_management"],

View file

@ -0,0 +1,3 @@
# @kbn/security-ai-prompts
Utility library for Security AI Prompt management.

View file

@ -0,0 +1,15 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
// Public API of the @kbn/security-ai-prompts package.
// Saved-object type registration object for security AI prompts.
export { promptType } from './src/saved_object_mappings';
// Prompt lookup helpers: resolve one prompt, or several sharing a group id.
export { getPrompt, getPromptsByGroupId } from './src/get_prompt';
// Shared argument/result types used by the lookup helpers above.
export {
  type PromptArray,
  type Prompt,
  type GetPromptArgs,
  type GetPromptsByGroupIdArgs,
} from './src/types';

View file

@ -5,13 +5,8 @@
* 2.0.
*/
export interface Prompt {
promptId: string;
promptGroupId: string;
prompt: {
default: string;
};
provider?: string;
model?: string;
description?: string;
}
module.exports = {
preset: '@kbn/test',
rootDir: '../../../../..',
roots: ['<rootDir>/x-pack/solutions/security/packages/security-ai-prompts'],
};

View file

@ -0,0 +1,7 @@
{
"type": "shared-server",
"id": "@kbn/security-ai-prompts",
"owner": "@elastic/security-generative-ai",
"group": "security",
"visibility": "private"
}

View file

@ -0,0 +1,7 @@
{
"name": "@kbn/security-ai-prompts",
"version": "1.0.0",
"description": "Utility library for Elasticsearch index management",
"license": "Elastic License 2.0",
"private": true
}

View file

@ -8,8 +8,7 @@
import { getPrompt, getPromptsByGroupId } from './get_prompt';
import { SavedObjectsClientContract } from '@kbn/core-saved-objects-api-server';
import { ActionsClient } from '@kbn/actions-plugin/server';
import { BEDROCK_SYSTEM_PROMPT, DEFAULT_SYSTEM_PROMPT, GEMINI_USER_PROMPT } from './prompts';
import { promptDictionary, promptGroupId } from './local_prompt_object';
import { localPrompts, promptDictionary, promptGroupId } from './mock_prompts';
jest.mock('@kbn/core-saved-objects-api-server');
jest.mock('@kbn/actions-plugin/server');
@ -161,6 +160,7 @@ describe('get_prompt', () => {
it('returns the prompt matching provider and model', async () => {
const result = await getPrompt({
savedObjectsClient,
localPrompts,
promptId: promptDictionary.systemPrompt,
promptGroupId: promptGroupId.aiAssistant,
provider: 'openai',
@ -176,6 +176,7 @@ describe('get_prompt', () => {
it('returns the prompt matching provider when model does not have a match', async () => {
const result = await getPrompt({
savedObjectsClient,
localPrompts,
promptId: promptDictionary.systemPrompt,
promptGroupId: promptGroupId.aiAssistant,
provider: 'openai',
@ -191,6 +192,7 @@ describe('get_prompt', () => {
it('returns the prompt matching provider when model is not provided', async () => {
const result = await getPrompt({
savedObjectsClient,
localPrompts,
promptId: promptDictionary.systemPrompt,
promptGroupId: promptGroupId.aiAssistant,
provider: 'openai',
@ -205,6 +207,7 @@ describe('get_prompt', () => {
it('returns the default prompt when there is no match on provider', async () => {
const result = await getPrompt({
savedObjectsClient,
localPrompts,
promptId: promptDictionary.systemPrompt,
promptGroupId: promptGroupId.aiAssistant,
provider: 'badone',
@ -220,6 +223,7 @@ describe('get_prompt', () => {
const result = await getPrompt({
savedObjectsClient,
localPrompts,
promptId: promptDictionary.systemPrompt,
promptGroupId: promptGroupId.aiAssistant,
provider: 'inference',
@ -242,6 +246,7 @@ describe('get_prompt', () => {
const result = await getPrompt({
savedObjectsClient,
localPrompts,
promptId: promptDictionary.systemPrompt,
promptGroupId: promptGroupId.aiAssistant,
provider: 'inference',
@ -263,6 +268,7 @@ describe('get_prompt', () => {
const result = await getPrompt({
savedObjectsClient,
localPrompts,
promptId: promptDictionary.systemPrompt,
promptGroupId: promptGroupId.aiAssistant,
provider: 'inference',
@ -283,6 +289,7 @@ describe('get_prompt', () => {
const result = await getPrompt({
savedObjectsClient,
localPrompts,
promptId: promptDictionary.systemPrompt,
promptGroupId: promptGroupId.aiAssistant,
actionsClient,
@ -290,7 +297,7 @@ describe('get_prompt', () => {
connectorId: 'connector-123',
});
expect(result).toBe(BEDROCK_SYSTEM_PROMPT);
expect(result).toBe('provider:bedrock default system prompt');
});
it('returns the default prompt when no prompts are found', async () => {
@ -303,13 +310,14 @@ describe('get_prompt', () => {
const result = await getPrompt({
savedObjectsClient,
localPrompts,
promptId: promptDictionary.systemPrompt,
promptGroupId: promptGroupId.aiAssistant,
actionsClient,
connectorId: 'connector-123',
});
expect(result).toBe(DEFAULT_SYSTEM_PROMPT);
expect(result).toBe('default system prompt');
});
it('throws an error when no prompts are found', async () => {
@ -323,6 +331,7 @@ describe('get_prompt', () => {
await expect(
getPrompt({
savedObjectsClient,
localPrompts,
promptId: 'nonexistent-prompt',
promptGroupId: 'nonexistent-group',
actionsClient,
@ -340,6 +349,7 @@ describe('get_prompt', () => {
});
const result = await getPrompt({
savedObjectsClient,
localPrompts,
promptId: promptDictionary.systemPrompt,
promptGroupId: promptGroupId.aiAssistant,
provider: 'inference',
@ -360,6 +370,7 @@ describe('get_prompt', () => {
});
const result = await getPrompt({
savedObjectsClient,
localPrompts,
promptId: promptDictionary.systemPrompt,
promptGroupId: promptGroupId.aiAssistant,
actionsClient,
@ -380,6 +391,7 @@ describe('get_prompt', () => {
});
const result = await getPrompt({
savedObjectsClient,
localPrompts,
promptId: promptDictionary.systemPrompt,
promptGroupId: promptGroupId.aiAssistant,
provider: 'bedrock',
@ -396,6 +408,7 @@ describe('get_prompt', () => {
it('returns prompts matching the provided promptIds', async () => {
const result = await getPromptsByGroupId({
savedObjectsClient,
localPrompts,
promptIds: [promptDictionary.systemPrompt],
promptGroupId: promptGroupId.aiAssistant,
provider: 'openai',
@ -420,6 +433,7 @@ describe('get_prompt', () => {
it('returns prompts matching the provided promptIds for gemini', async () => {
const result = await getPromptsByGroupId({
savedObjectsClient,
localPrompts,
promptIds: [promptDictionary.systemPrompt, promptDictionary.userPrompt],
promptGroupId: promptGroupId.aiAssistant,
provider: 'gemini',
@ -434,7 +448,7 @@ describe('get_prompt', () => {
},
{
promptId: promptDictionary.userPrompt,
prompt: GEMINI_USER_PROMPT,
prompt: 'provider:gemini user prompt',
},
]);
});
@ -442,6 +456,7 @@ describe('get_prompt', () => {
it('returns prompts matching the provided promptIds when connector is given', async () => {
const result = await getPromptsByGroupId({
savedObjectsClient,
localPrompts,
promptIds: [promptDictionary.systemPrompt, promptDictionary.userPrompt],
promptGroupId: promptGroupId.aiAssistant,
connector: {
@ -466,13 +481,14 @@ describe('get_prompt', () => {
},
{
promptId: promptDictionary.userPrompt,
prompt: GEMINI_USER_PROMPT,
prompt: 'provider:gemini user prompt',
},
]);
});
it('returns prompts matching the provided promptIds when inference connector is given', async () => {
const result = await getPromptsByGroupId({
savedObjectsClient,
localPrompts,
promptIds: [promptDictionary.systemPrompt],
promptGroupId: promptGroupId.aiAssistant,
connector: {
@ -509,6 +525,7 @@ describe('get_prompt', () => {
await expect(
getPromptsByGroupId({
savedObjectsClient,
localPrompts,
promptIds: [promptDictionary.systemPrompt, 'fake-id'],
promptGroupId: promptGroupId.aiAssistant,
actionsClient,

View file

@ -0,0 +1,213 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { PublicMethodsOf } from '@kbn/utility-types';
import { ActionsClient } from '@kbn/actions-plugin/server';
import type { Connector } from '@kbn/actions-plugin/server/application/connector/types';
import { elasticModelDictionary } from '@kbn/inference-common';
import { PromptArray, Prompt, GetPromptArgs, GetPromptsByGroupIdArgs } from './types';
import { getProviderFromActionTypeId } from './utils';
import { promptSavedObjectType } from './saved_object_mappings';
/**
* Get prompts by feature (promptGroupId)
* provide either model + provider or connector to avoid additional calls to get connector
* @param actionsClient - actions client
* @param connector - connector, provide if available. No need to provide model and provider in this case
* @param connectorId - connector id
* @param localPrompts - local prompts object
* @param model - model. No need to provide if connector provided
* @param promptGroupId - feature id, should be common across promptIds
* @param promptIds - prompt ids with shared promptGroupId
* @param provider - provider. No need to provide if connector provided
* @param savedObjectsClient - saved objects client
*/
export const getPromptsByGroupId = async ({
  actionsClient,
  connector,
  connectorId,
  localPrompts,
  model: providedModel,
  promptGroupId,
  promptIds,
  provider: providedProvider,
  savedObjectsClient,
}: GetPromptsByGroupIdArgs): Promise<PromptArray> => {
  // Resolve the effective provider/model once; every promptId in the group
  // is then matched against the same scope.
  const { provider, model } = await resolveProviderAndModel({
    providedProvider,
    providedModel,
    connectorId,
    actionsClient,
    providedConnector: connector,
  });
  // Single saved-objects query fetches every stored prompt for this group.
  const found = await savedObjectsClient.find<Prompt>({
    type: promptSavedObjectType,
    searchFields: ['promptGroupId'],
    search: promptGroupId,
  });
  const storedPrompts = found?.saved_objects.map((so) => so.attributes) ?? [];
  return promptIds.map((promptId) => {
    // Narrow the stored prompts to this id, then pick the best scope match
    // (falling back to localPrompts inside findPromptEntry).
    const candidates = storedPrompts.filter((entry) => entry.promptId === promptId);
    const prompt = findPromptEntry({
      prompts: candidates,
      promptId,
      promptGroupId,
      provider,
      model,
      localPrompts,
    });
    if (!prompt) {
      throw new Error(
        `Prompt not found for promptId: ${promptId} and promptGroupId: ${promptGroupId}`
      );
    }
    return { promptId, prompt };
  });
};
/**
* Get prompt by promptId
* provide either model + provider or connector to avoid additional calls to get connector
* @param actionsClient - actions client
* @param connector - connector, provide if available. No need to provide model and provider in this case
* @param connectorId - connector id
* @param localPrompts - local prompts object
* @param model - model. No need to provide if connector provided
* @param promptId - prompt id
* @param promptGroupId - feature id, should be common across promptIds
* @param provider - provider. No need to provide if connector provided
* @param savedObjectsClient - saved objects client
*/
export const getPrompt = async ({
  actionsClient,
  connector,
  connectorId,
  localPrompts,
  model: providedModel,
  promptGroupId,
  promptId,
  provider: providedProvider,
  savedObjectsClient,
}: GetPromptArgs): Promise<string> => {
  // Work out which provider/model scope the lookup should use.
  const { provider, model } = await resolveProviderAndModel({
    providedProvider,
    providedModel,
    connectorId,
    actionsClient,
    providedConnector: connector,
  });
  // Query only saved objects matching this exact promptId + promptGroupId.
  const found = await savedObjectsClient.find<Prompt>({
    type: promptSavedObjectType,
    filter: `${promptSavedObjectType}.attributes.promptId: "${promptId}" AND ${promptSavedObjectType}.attributes.promptGroupId: "${promptGroupId}"`,
    fields: ['provider', 'model', 'prompt'],
  });
  const prompt = findPromptEntry({
    prompts: found?.saved_objects.map((so) => so.attributes) ?? [],
    promptId,
    promptGroupId,
    provider,
    model,
    localPrompts,
  });
  if (!prompt) {
    throw new Error(
      `Prompt not found for promptId: ${promptId} and promptGroupId: ${promptGroupId}`
    );
  }
  return prompt;
};
/**
 * Derive the effective provider and model for a prompt lookup.
 *
 * Uses the explicitly provided values when complete; otherwise (or when the
 * provider is 'inference') it inspects the connector — the given one, or one
 * fetched by id via the actions client — to fill in the gaps.
 *
 * @param providedProvider - provider passed by the caller, if any
 * @param providedModel - model passed by the caller, if any
 * @param connectorId - id used to fetch the connector when none is provided
 * @param actionsClient - client used to fetch the connector by id
 * @param providedConnector - connector, when the caller already has it (avoids a fetch)
 * @returns resolved `{ provider, model }`; either may be undefined
 */
const resolveProviderAndModel = async ({
  providedProvider,
  providedModel,
  connectorId,
  actionsClient,
  providedConnector,
}: {
  providedProvider: string | undefined;
  providedModel: string | undefined;
  connectorId: string;
  actionsClient: PublicMethodsOf<ActionsClient>;
  providedConnector?: Connector;
}): Promise<{ provider?: string; model?: string }> => {
  let model = providedModel;
  let provider = providedProvider;
  // Only touch the connector when something is missing or when the provider is
  // the generic 'inference', which must be resolved to a concrete provider.
  if (!provider || !model || provider === 'inference') {
    const connector = providedConnector ?? (await actionsClient.get({ id: connectorId }));
    if (provider === 'inference' && connector.config) {
      provider = connector.config.provider || provider;
      model = connector.config.providerConfig?.model_id || model;
      // 'elastic' provider is an alias resolved via the shared model dictionary;
      // unknown models fall back to 'inference' (mapped to 'bedrock' below).
      if (provider === 'elastic' && model) {
        provider = elasticModelDictionary[model]?.provider || 'inference';
        model = elasticModelDictionary[model]?.model;
      }
    } else if (connector.config) {
      // Non-inference connector: infer provider from the action type id and
      // model from the connector's configured default.
      provider = provider || getProviderFromActionTypeId(connector.actionTypeId);
      model = model || connector.config.defaultModel;
    }
  }
  // Anything still marked 'inference' defaults to 'bedrock' for prompt matching.
  return { provider: provider === 'inference' ? 'bedrock' : provider, model };
};
const findPrompt = ({
  prompts,
  conditions,
}: {
  prompts: Array<{ provider?: string; model?: string; prompt: { default: string } }>;
  conditions: Array<(prompt: { provider?: string; model?: string }) => boolean>;
}): string | undefined => {
  // Conditions are ordered by priority: return the default text of the first
  // prompt matched by the highest-priority condition that matches anything.
  const matched = conditions
    .map((condition) => prompts.find(condition))
    .find((candidate) => candidate !== undefined);
  return matched?.prompt.default;
};
const findPromptEntry = ({
  prompts,
  promptId,
  promptGroupId,
  provider,
  model,
  localPrompts,
}: {
  localPrompts: Prompt[];
  prompts: Prompt[];
  promptId: string;
  promptGroupId: string;
  provider?: string;
  model?: string;
}): string | undefined => {
  // Match precedence: exact provider+model, then provider alone, then an
  // unscoped default (no provider, no model).
  const conditions = [
    (entry: { provider?: string; model?: string }) =>
      entry.provider === provider && entry.model === model,
    (entry: { provider?: string; model?: string }) =>
      entry.provider === provider && !entry.model,
    (entry: { provider?: string; model?: string }) => !entry.provider && !entry.model,
  ];
  // Prefer a stored (saved-object) prompt when one matches.
  const stored = findPrompt({ prompts, conditions });
  if (stored !== undefined) {
    return stored;
  }
  // Otherwise fall back to the bundled local prompts for the same id + group.
  const fallback = localPrompts.filter(
    (entry) => entry.promptId === promptId && entry.promptGroupId === promptGroupId
  );
  return findPrompt({ prompts: fallback, conditions });
};

View file

@ -0,0 +1,85 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { Prompt } from './types';
// Feature groupings: every prompt belongs to exactly one group id.
export const promptGroupId = {
  attackDiscovery: 'attackDiscovery',
  aiAssistant: 'aiAssistant',
};
// Canonical prompt ids used across the groups above.
export const promptDictionary = {
  systemPrompt: `systemPrompt`,
  userPrompt: `userPrompt`,
  attackDiscoveryDefault: `default`,
  attackDiscoveryRefine: `refine`,
};
// Bundled prompts used as a fallback when no saved-object prompt matches.
// Entries may be scoped by provider (and optionally model); entries without a
// provider act as the unscoped default for their promptId + promptGroupId.
export const localPrompts: Prompt[] = [
  {
    promptId: promptDictionary.systemPrompt,
    promptGroupId: promptGroupId.aiAssistant,
    provider: 'openai',
    prompt: {
      default: 'provider:openai default system prompt',
    },
  },
  {
    // Unscoped default: used when no provider-specific entry matches.
    promptId: promptDictionary.systemPrompt,
    promptGroupId: promptGroupId.aiAssistant,
    prompt: {
      default: 'default system prompt',
    },
  },
  {
    promptId: promptDictionary.systemPrompt,
    promptGroupId: promptGroupId.aiAssistant,
    provider: 'bedrock',
    prompt: {
      default: 'provider:bedrock default system prompt',
    },
  },
  {
    promptId: promptDictionary.systemPrompt,
    promptGroupId: promptGroupId.aiAssistant,
    provider: 'gemini',
    prompt: {
      default: 'provider:gemini default system prompt',
    },
  },
  {
    // Most specific entry: scoped to both a provider and a model.
    promptId: promptDictionary.systemPrompt,
    promptGroupId: promptGroupId.aiAssistant,
    provider: 'openai',
    model: 'oss',
    prompt: {
      default: 'provider:openai model:oss default system prompt',
    },
  },
  {
    promptId: promptDictionary.userPrompt,
    promptGroupId: promptGroupId.aiAssistant,
    provider: 'gemini',
    prompt: {
      default: 'provider:gemini user prompt',
    },
  },
  {
    promptId: promptDictionary.attackDiscoveryDefault,
    promptGroupId: promptGroupId.attackDiscovery,
    prompt: {
      default: 'attack discovery default prompt',
    },
  },
  {
    promptId: promptDictionary.attackDiscoveryRefine,
    promptGroupId: promptGroupId.attackDiscovery,
    prompt: {
      default: 'attack discovery refine prompt',
    },
  },
];

View file

@ -6,8 +6,8 @@
*/
import type { SavedObjectsType } from '@kbn/core/server';
import { promptSavedObjectType } from '../../../common/constants';
export const promptSavedObjectType = 'security-ai-prompt';
export const promptSavedObjectMappings: SavedObjectsType['mappings'] = {
dynamic: false,
properties: {

View file

@ -0,0 +1,40 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { PublicMethodsOf } from '@kbn/utility-types';
import { ActionsClient } from '@kbn/actions-plugin/server';
import type { Connector } from '@kbn/actions-plugin/server/application/connector/types';
import { SavedObjectsClientContract } from '@kbn/core-saved-objects-api-server';
/**
 * A single prompt entry, as stored in saved objects or bundled locally.
 * `provider`/`model` optionally scope the entry; entries without either act
 * as the unscoped default for their promptId + promptGroupId.
 */
export interface Prompt {
  // Identifier of the prompt within its group.
  promptId: string;
  // Feature grouping the prompt belongs to.
  promptGroupId: string;
  prompt: {
    // The prompt text used when this entry is selected.
    default: string;
  };
  // Optional provider scope (e.g. 'openai', 'bedrock', 'gemini').
  provider?: string;
  // Optional model scope; only meaningful together with a provider.
  model?: string;
  // Optional human-readable description of the prompt.
  description?: string;
}
// Resolved prompts keyed by promptId, as returned by getPromptsByGroupId.
export type PromptArray = Array<{ promptId: string; prompt: string }>;
/**
 * Arguments for resolving a single prompt.
 * Provide either provider + model, or a connector (or connectorId), so the
 * implementation can avoid an extra connector fetch.
 */
export interface GetPromptArgs {
  actionsClient: PublicMethodsOf<ActionsClient>;
  // Connector, when the caller already has it; avoids a fetch by connectorId.
  connector?: Connector;
  connectorId: string;
  // Bundled fallback prompts used when no saved-object prompt matches.
  localPrompts: Prompt[];
  model?: string;
  promptId: string;
  promptGroupId: string;
  provider?: string;
  savedObjectsClient: SavedObjectsClientContract;
}
/**
 * Arguments for resolving several prompts sharing one promptGroupId;
 * identical to GetPromptArgs but takes promptIds instead of a single promptId.
 */
export interface GetPromptsByGroupIdArgs extends Omit<GetPromptArgs, 'promptId'> {
  promptGroupId: string;
  promptIds: string[];
}

View file

@ -0,0 +1,16 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
/**
 * Map a Kibana action type id to its AI provider name.
 * Returns undefined for action types with no known provider mapping.
 */
export const getProviderFromActionTypeId = (actionTypeId: string): string | undefined => {
  switch (actionTypeId) {
    case '.gen-ai':
      return 'openai';
    case '.bedrock':
      return 'bedrock';
    case '.gemini':
      return 'gemini';
    case '.inference':
      return 'inference';
    default:
      return undefined;
  }
};

View file

@ -0,0 +1,21 @@
{
"extends": "../../../../../tsconfig.base.json",
"compilerOptions": {
"outDir": "target/types",
"types": [
"jest",
"node",
]
},
"include": ["**/*.ts"],
"kbn_references": [
"@kbn/core",
"@kbn/actions-plugin",
"@kbn/core-saved-objects-api-server",
"@kbn/utility-types",
"@kbn/inference-common"
],
"exclude": [
"target/**/*"
],
}

View file

@ -31,6 +31,3 @@ export const CAPABILITIES = `${BASE_PATH}/capabilities`;
Licensing requirements
*/
export const MINIMUM_AI_ASSISTANT_LICENSE = 'enterprise' as const;
// Saved Objects
export const promptSavedObjectType = 'security-ai-prompt';

View file

@ -5,218 +5,21 @@
* 2.0.
*/
import { SavedObjectsClientContract } from '@kbn/core-saved-objects-api-server';
import { PublicMethodsOf } from '@kbn/utility-types';
import { ActionsClient } from '@kbn/actions-plugin/server';
import type { Connector } from '@kbn/actions-plugin/server/application/connector/types';
import { elasticModelDictionary } from '@kbn/inference-common';
import { Prompt } from './types';
import {
getPrompt as _getPrompt,
getPromptsByGroupId as _getPromptsByGroupId,
type GetPromptArgs,
type PromptArray,
type GetPromptsByGroupIdArgs,
} from '@kbn/security-ai-prompts';
import { localPrompts } from './local_prompt_object';
import { getLlmType } from '../../routes/utils';
import { promptSavedObjectType } from '../../../common/constants';
interface GetPromptArgs {
actionsClient: PublicMethodsOf<ActionsClient>;
connector?: Connector;
connectorId: string;
model?: string;
promptId: string;
promptGroupId: string;
provider?: string;
savedObjectsClient: SavedObjectsClientContract;
}
interface GetPromptsByGroupIdArgs extends Omit<GetPromptArgs, 'promptId'> {
promptGroupId: string;
promptIds: string[];
}
type PromptArray = Array<{ promptId: string; prompt: string }>;
/**
* Get prompts by feature (promptGroupId)
* provide either model + provider or connector to avoid additional calls to get connector
* @param actionsClient - actions client
* @param connector - connector, provide if available. No need to provide model and provider in this case
* @param connectorId - connector id
* @param model - model. No need to provide if connector provided
* @param promptGroupId - feature id, should be common across promptIds
* @param promptIds - prompt ids with shared promptGroupId
* @param provider - provider. No need to provide if connector provided
* @param savedObjectsClient - saved objects client
*/
export const getPromptsByGroupId = async ({
actionsClient,
connector,
connectorId,
model: providedModel,
promptGroupId,
promptIds,
provider: providedProvider,
savedObjectsClient,
}: GetPromptsByGroupIdArgs): Promise<PromptArray> => {
const { provider, model } = await resolveProviderAndModel({
providedProvider,
providedModel,
connectorId,
actionsClient,
providedConnector: connector,
});
const prompts = await savedObjectsClient.find<Prompt>({
type: promptSavedObjectType,
searchFields: ['promptGroupId'],
search: promptGroupId,
});
const promptsOnly = prompts?.saved_objects.map((p) => p.attributes) ?? [];
return promptIds.map((promptId) => {
const prompt = findPromptEntry({
prompts: promptsOnly.filter((p) => p.promptId === promptId) ?? [],
promptId,
promptGroupId,
provider,
model,
});
if (!prompt) {
throw new Error(
`Prompt not found for promptId: ${promptId} and promptGroupId: ${promptGroupId}`
);
}
return {
promptId,
prompt,
};
});
/**
 * Plugin-local wrapper: delegates to the packaged getPromptsByGroupId,
 * injecting this plugin's bundled localPrompts as the fallback set.
 */
export const getPromptsByGroupId = async (
  args: Omit<GetPromptsByGroupIdArgs, 'localPrompts'>
): Promise<PromptArray> => {
  return _getPromptsByGroupId({ ...args, localPrompts });
};
/**
* Get prompt by promptId
* provide either model + provider or connector to avoid additional calls to get connector
* @param actionsClient - actions client
* @param connector - connector, provide if available. No need to provide model and provider in this case
* @param connectorId - connector id
* @param model - model. No need to provide if connector provided
* @param promptId - prompt id
* @param promptGroupId - feature id, should be common across promptIds
* @param provider - provider. No need to provide if connector provided
* @param savedObjectsClient - saved objects client
*/
export const getPrompt = async ({
actionsClient,
connector,
connectorId,
model: providedModel,
promptGroupId,
promptId,
provider: providedProvider,
savedObjectsClient,
}: GetPromptArgs): Promise<string> => {
const { provider, model } = await resolveProviderAndModel({
providedProvider,
providedModel,
connectorId,
actionsClient,
providedConnector: connector,
});
const prompts = await savedObjectsClient.find<Prompt>({
type: promptSavedObjectType,
filter: `${promptSavedObjectType}.attributes.promptId: "${promptId}" AND ${promptSavedObjectType}.attributes.promptGroupId: "${promptGroupId}"`,
fields: ['provider', 'model', 'prompt'],
});
const prompt = findPromptEntry({
prompts: prompts?.saved_objects.map((p) => p.attributes) ?? [],
promptId,
promptGroupId,
provider,
model,
});
if (!prompt) {
throw new Error(
`Prompt not found for promptId: ${promptId} and promptGroupId: ${promptGroupId}`
);
}
return prompt;
};
const resolveProviderAndModel = async ({
providedProvider,
providedModel,
connectorId,
actionsClient,
providedConnector,
}: {
providedProvider: string | undefined;
providedModel: string | undefined;
connectorId: string;
actionsClient: PublicMethodsOf<ActionsClient>;
providedConnector?: Connector;
}): Promise<{ provider?: string; model?: string }> => {
let model = providedModel;
let provider = providedProvider;
if (!provider || !model || provider === 'inference') {
const connector = providedConnector ?? (await actionsClient.get({ id: connectorId }));
if (provider === 'inference' && connector.config) {
provider = connector.config.provider || provider;
model = connector.config.providerConfig?.model_id || model;
if (provider === 'elastic' && model) {
provider = elasticModelDictionary[model]?.provider || 'inference';
model = elasticModelDictionary[model]?.model;
}
} else if (connector.config) {
provider = provider || getLlmType(connector.actionTypeId);
model = model || connector.config.defaultModel;
}
}
return { provider: provider === 'inference' ? 'bedrock' : provider, model };
};
const findPrompt = ({
prompts,
conditions,
}: {
prompts: Array<{ provider?: string; model?: string; prompt: { default: string } }>;
conditions: Array<(prompt: { provider?: string; model?: string }) => boolean>;
}): string | undefined => {
for (const condition of conditions) {
const match = prompts.find(condition);
if (match) return match.prompt.default;
}
return undefined;
};
const findPromptEntry = ({
prompts,
promptId,
promptGroupId,
provider,
model,
}: {
prompts: Prompt[];
promptId: string;
promptGroupId: string;
provider?: string;
model?: string;
}): string | undefined => {
const conditions = [
(prompt: { provider?: string; model?: string }) =>
prompt.provider === provider && prompt.model === model,
(prompt: { provider?: string; model?: string }) =>
prompt.provider === provider && !prompt.model,
(prompt: { provider?: string; model?: string }) => !prompt.provider && !prompt.model,
];
return (
findPrompt({ prompts, conditions }) ??
findPrompt({
prompts: localPrompts.filter(
(p) => p.promptId === promptId && p.promptGroupId === promptGroupId
),
conditions,
})
);
/**
 * Plugin-local wrapper: delegates to the packaged getPrompt, injecting this
 * plugin's bundled localPrompts as the fallback set.
 */
export const getPrompt = async (args: Omit<GetPromptArgs, 'localPrompts'>): Promise<string> => {
  return _getPrompt({ ...args, localPrompts });
};

View file

@ -5,7 +5,7 @@
* 2.0.
*/
import { Prompt } from './types';
import { Prompt } from '@kbn/security-ai-prompts';
import {
ATTACK_DISCOVERY_GENERATION_DETAILS_MARKDOWN,
ATTACK_DISCOVERY_GENERATION_ENTITY_SUMMARY_MARKDOWN,

View file

@ -10,7 +10,6 @@ import { PluginInitializerContext, CoreStart, Plugin, Logger } from '@kbn/core/s
import { AssistantFeatures } from '@kbn/elastic-assistant-common';
import { ReplaySubject, type Subject } from 'rxjs';
import { MlPluginSetup } from '@kbn/ml-plugin/server';
import { initSavedObjects } from './saved_objects';
import { events } from './lib/telemetry/event_based_telemetry';
import {
AssistantTool,
@ -56,8 +55,6 @@ export class ElasticAssistantPlugin
) {
this.logger.debug('elasticAssistant: Setup');
initSavedObjects(core.savedObjects);
this.assistantService = new AIAssistantService({
logger: this.logger.get('service'),
ml: plugins.ml,

View file

@ -1,18 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import type { CoreSetup } from '@kbn/core/server';
import { promptType } from './lib/prompt/saved_object_mappings';
// Registers the security-ai-prompt saved object type with core's saved objects
// service. Registration is deliberately best-effort: if the service rejects
// the type, the error is swallowed so the plugin can still start and fall back
// to its built-in default prompts.
export const initSavedObjects = (savedObjects: CoreSetup['savedObjects']) => {
  try {
    savedObjects.registerType(promptType);
  } catch {
    // Intentionally ignored — startup must not be blocked; defaults are used.
  }
};

View file

@ -54,7 +54,7 @@
"@kbn/llm-tasks-plugin",
"@kbn/product-doc-base-plugin",
"@kbn/core-saved-objects-api-server-mocks",
"@kbn/inference-common"
"@kbn/security-ai-prompts"
],
"exclude": [
"target/**/*",

View file

@ -7,6 +7,7 @@
import type { CoreSetup } from '@kbn/core/server';
import { promptType } from '@kbn/security-ai-prompts';
import { protectionUpdatesNoteType } from './endpoint/lib/protection_updates_note/saved_object_mappings';
import { noteType, pinnedEventType, timelineType } from './lib/timeline/saved_object_mappings';
// eslint-disable-next-line no-restricted-imports
@ -29,6 +30,7 @@ const types = [
riskEngineConfigurationType,
entityEngineDescriptorType,
protectionUpdatesNoteType,
promptType,
];
export const savedObjectTypes = types.map((type) => type.name);

View file

@ -236,6 +236,7 @@
"@kbn/llm-tasks-plugin",
"@kbn/charts-theme",
"@kbn/product-doc-base-plugin",
"@kbn/shared-ux-error-boundary"
"@kbn/shared-ux-error-boundary",
"@kbn/security-ai-prompts",
]
}

View file

@ -377,6 +377,18 @@ export default function ({ getService }: FtrProviderContext) {
"saved_object:policy-settings-protection-updates-note/delete",
"saved_object:policy-settings-protection-updates-note/bulk_delete",
"saved_object:policy-settings-protection-updates-note/share_to_space",
"saved_object:security-ai-prompt/bulk_get",
"saved_object:security-ai-prompt/get",
"saved_object:security-ai-prompt/find",
"saved_object:security-ai-prompt/open_point_in_time",
"saved_object:security-ai-prompt/close_point_in_time",
"saved_object:security-ai-prompt/create",
"saved_object:security-ai-prompt/bulk_create",
"saved_object:security-ai-prompt/update",
"saved_object:security-ai-prompt/bulk_update",
"saved_object:security-ai-prompt/delete",
"saved_object:security-ai-prompt/bulk_delete",
"saved_object:security-ai-prompt/share_to_space",
"saved_object:csp_rule/bulk_get",
"saved_object:csp_rule/get",
"saved_object:csp_rule/find",
@ -1223,6 +1235,18 @@ export default function ({ getService }: FtrProviderContext) {
"saved_object:policy-settings-protection-updates-note/delete",
"saved_object:policy-settings-protection-updates-note/bulk_delete",
"saved_object:policy-settings-protection-updates-note/share_to_space",
"saved_object:security-ai-prompt/bulk_get",
"saved_object:security-ai-prompt/get",
"saved_object:security-ai-prompt/find",
"saved_object:security-ai-prompt/open_point_in_time",
"saved_object:security-ai-prompt/close_point_in_time",
"saved_object:security-ai-prompt/create",
"saved_object:security-ai-prompt/bulk_create",
"saved_object:security-ai-prompt/update",
"saved_object:security-ai-prompt/bulk_update",
"saved_object:security-ai-prompt/delete",
"saved_object:security-ai-prompt/bulk_delete",
"saved_object:security-ai-prompt/share_to_space",
"saved_object:csp_rule/bulk_get",
"saved_object:csp_rule/get",
"saved_object:csp_rule/find",
@ -1840,6 +1864,11 @@ export default function ({ getService }: FtrProviderContext) {
"saved_object:policy-settings-protection-updates-note/find",
"saved_object:policy-settings-protection-updates-note/open_point_in_time",
"saved_object:policy-settings-protection-updates-note/close_point_in_time",
"saved_object:security-ai-prompt/bulk_get",
"saved_object:security-ai-prompt/get",
"saved_object:security-ai-prompt/find",
"saved_object:security-ai-prompt/open_point_in_time",
"saved_object:security-ai-prompt/close_point_in_time",
"saved_object:csp_rule/bulk_get",
"saved_object:csp_rule/get",
"saved_object:csp_rule/find",
@ -2206,6 +2235,11 @@ export default function ({ getService }: FtrProviderContext) {
"saved_object:policy-settings-protection-updates-note/find",
"saved_object:policy-settings-protection-updates-note/open_point_in_time",
"saved_object:policy-settings-protection-updates-note/close_point_in_time",
"saved_object:security-ai-prompt/bulk_get",
"saved_object:security-ai-prompt/get",
"saved_object:security-ai-prompt/find",
"saved_object:security-ai-prompt/open_point_in_time",
"saved_object:security-ai-prompt/close_point_in_time",
"saved_object:csp_rule/bulk_get",
"saved_object:csp_rule/get",
"saved_object:csp_rule/find",

View file

@ -7093,6 +7093,10 @@
version "0.0.0"
uid ""
"@kbn/security-ai-prompts@link:x-pack/solutions/security/packages/security-ai-prompts":
version "0.0.0"
uid ""
"@kbn/security-api-integration-helpers@link:x-pack/test/security_api_integration/packages/helpers":
version "0.0.0"
uid ""