[Observability AI Assistant] rule connector - handle multiple prompt (#209221)

Intermediate releases to change rule or connector schemas in serverless
for #185032

## Summary

An Observability AI Assistant connector is available to be set as action
for Observability rules. When an alert is triggered, a conversation with
the AI assistant will be created sending the initial prompt set by the
user in the rule action. The conversation is then stored and can be
retrieved from the AI Assistant interface. The action is triggered on
any status change of the alert (active, recovered, untracked), creating
a new conversation for each of them using the same initial prompt, which
may not be suitable for all three cases.

Improvement:
The user can now choose when the action should run (active, recovered,
untracked, or all). This allows the user to specify more than one AI
Assistant action, each with a different, more suitable prompt for its
case.
This commit is contained in:
Arturo Lidueña 2025-02-05 10:16:42 +01:00 committed by GitHub
parent 05b1cf2962
commit 3924e095c5
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
7 changed files with 402 additions and 149 deletions

View file

@ -9540,30 +9540,73 @@ Object {
],
"type": "string",
},
"message": Object {
"prompts": Object {
"flags": Object {
"error": [Function],
},
"metas": Array [
"items": Array [
Object {
"x-oas-min-length": 1,
"flags": Object {
"default": Object {
"special": "deep",
},
"error": [Function],
"presence": "optional",
},
"keys": Object {
"message": Object {
"flags": Object {
"error": [Function],
},
"metas": Array [
Object {
"x-oas-min-length": 1,
},
],
"rules": Array [
Object {
"args": Object {
"method": [Function],
},
"name": "custom",
},
Object {
"args": Object {
"method": [Function],
},
"name": "custom",
},
],
"type": "string",
},
"statuses": Object {
"flags": Object {
"error": [Function],
},
"items": Array [
Object {
"flags": Object {
"error": [Function],
"presence": "optional",
},
"rules": Array [
Object {
"args": Object {
"method": [Function],
},
"name": "custom",
},
],
"type": "string",
},
],
"type": "array",
},
},
"type": "object",
},
],
"rules": Array [
Object {
"args": Object {
"method": [Function],
},
"name": "custom",
},
Object {
"args": Object {
"method": [Function],
},
"name": "custom",
},
],
"type": "string",
"type": "array",
},
"rule": Object {
"flags": Object {

View file

@ -0,0 +1,14 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import {
ALERT_STATUS_ACTIVE,
ALERT_STATUS_RECOVERED,
ALERT_STATUS_UNTRACKED,
} from '@kbn/rule-data-utils';
// All alert lifecycle statuses a rule-action prompt can be scoped to; used as the
// default `statuses` for legacy single-message actions so they fire on every change.
export const ALERT_STATUSES = [ALERT_STATUS_ACTIVE, ALERT_STATUS_RECOVERED, ALERT_STATUS_UNTRACKED];

View file

@ -36,7 +36,11 @@ export function getConnectorType(
actionParams: ObsAIAssistantActionParams
): Promise<GenericValidationResult<ObsAIAssistantActionParams>> => {
const validationResult = {
errors: { connector: new Array<string>(), message: new Array<string>() },
errors: {
connector: [] as string[],
message: [] as string[],
prompts: [] as string[],
},
};
if (!actionParams.connector) {

View file

@ -7,5 +7,9 @@
/**
 * Parameters of the Observability AI Assistant rule action.
 *
 * Fix: `message` was declared twice (once required, once optional) — a duplicate
 * identifier. Only the optional legacy field is kept; new configurations should
 * use `prompts`.
 */
export interface ObsAIAssistantActionParams {
  connector: string;
  // One prompt per set of alert statuses, so different statuses can get different messages.
  prompts?: Array<{
    message: string;
    statuses: string[];
  }>;
  // Legacy single-prompt field, kept for backwards compatibility with existing rules.
  message?: string;
}

View file

@ -8,7 +8,9 @@
import { AlertHit } from '@kbn/alerting-plugin/server/types';
import { ObservabilityAIAssistantRouteHandlerResources } from '@kbn/observability-ai-assistant-plugin/server/routes/types';
import { getFakeKibanaRequest } from '@kbn/security-plugin/server/authentication/api_keys/fake_kibana_request';
import { ALERT_STATUS_ACTIVE } from '@kbn/rule-data-utils';
import { OBSERVABILITY_AI_ASSISTANT_CONNECTOR_ID } from '../../common/rule_connector';
import { ALERT_STATUSES } from '../../common/constants';
import {
getObsAIAssistantConnectorAdapter,
getObsAIAssistantConnectorType,
@ -18,6 +20,63 @@ import { Observable } from 'rxjs';
import { MessageRole } from '@kbn/observability-ai-assistant-plugin/public';
import { AlertDetailsContextualInsightsService } from '@kbn/observability-plugin/server/services';
// Builds the expected conversation for a single prompt: system + user prompt,
// then the get_alerts_context and get_connectors function request/response pairs.
const buildConversation = (contentMessage: string) => {
  const anyTimestamp = () => ({ '@timestamp': expect.any(String) });
  const functionRequest = (name: string) => ({
    ...anyTimestamp(),
    message: {
      role: MessageRole.Assistant,
      content: '',
      function_call: {
        name,
        arguments: JSON.stringify({}),
        trigger: MessageRole.Assistant as const,
      },
    },
  });
  return [
    {
      ...anyTimestamp(),
      message: { role: MessageRole.System, content: '' },
    },
    {
      ...anyTimestamp(),
      message: { role: MessageRole.User, content: contentMessage },
    },
    functionRequest('get_alerts_context'),
    {
      ...anyTimestamp(),
      message: {
        role: MessageRole.User,
        name: 'get_alerts_context',
        content: expect.any(String),
      },
    },
    functionRequest('get_connectors'),
    {
      ...anyTimestamp(),
      message: {
        role: MessageRole.User,
        name: 'get_connectors',
        content: JSON.stringify({ connectors: [{ id: 'connector_1' }] }),
      },
    },
  ];
};
describe('observabilityAIAssistant rule_connector', () => {
describe('getObsAIAssistantConnectorAdapter', () => {
it('uses correct connector_id', () => {
@ -42,7 +101,7 @@ describe('observabilityAIAssistant rule_connector', () => {
expect(params).toEqual({
connector: '.azure',
message: 'hello',
prompts: [{ message: 'hello', statuses: ALERT_STATUSES }],
rule: { id: 'foo', name: 'bar', tags: [], ruleUrl: 'http://myrule.com' },
alerts: {
new: [{ _id: 'new_alert' }],
@ -52,11 +111,85 @@ describe('observabilityAIAssistant rule_connector', () => {
});
});
describe('getObsAIAssistantConnectorType', () => {
it('is correctly configured', () => {
const initResources = jest
.fn()
.mockResolvedValue({} as ObservabilityAIAssistantRouteHandlerResources);
describe('Connector Type - getObsAIAssistantConnectorType', () => {
const completeMock = jest.fn().mockReturnValue(new Observable());
const initResources = jest.fn().mockResolvedValue({
service: {
getClient: async () => ({ complete: completeMock }),
getFunctionClient: async () => ({
getFunctions: () => [],
getInstructions: () => [],
getAdhocInstructions: () => [],
}),
},
context: {},
plugins: {
core: {
start: () =>
Promise.resolve({
http: { basePath: { publicBaseUrl: 'http://kibana.com' } },
}),
},
actions: {
start: async () => {
return {
getActionsClientWithRequest: jest.fn().mockResolvedValue({
async getAll() {
return [{ id: 'connector_1' }];
},
}),
};
},
},
},
} as unknown as ObservabilityAIAssistantRouteHandlerResources);
const adapter = getObsAIAssistantConnectorAdapter();
const buildActionParams = (params: {
connector: string;
message?: string;
prompts?: Array<{ message: string; statuses: string[] }>;
}) => {
return adapter.buildActionParams({
params,
rule: { id: 'foo', name: 'bar', tags: [], consumer: '', producer: '' },
spaceId: 'default',
alerts: {
all: { count: 1, data: [] },
new: {
count: 1,
data: [
{
'@timestamp': new Date().toISOString(),
_id: 'new_alert',
_index: 'alert_index',
'kibana.alert.instance.id': 'instance_id',
'kibana.alert.rule.category': 'rule_category',
'kibana.alert.rule.consumer': 'rule_consumer',
'kibana.alert.rule.name': 'rule_name',
'kibana.alert.rule.producer': 'rule_producer',
'kibana.alert.rule.revision': 1,
'kibana.alert.rule.tags': [],
'kibana.alert.rule.rule_type_id': 'rule_type_id',
'kibana.alert.uuid': 'alert_uuid',
'kibana.alert.rule.uuid': 'rule_uuid',
'kibana.alert.start': new Date().toISOString(),
'kibana.alert.status': ALERT_STATUS_ACTIVE,
'kibana.space_ids': ['default'],
},
],
},
ongoing: { count: 1, data: [] },
recovered: { count: 0, data: [] },
},
});
};
beforeEach(() => {
jest.clearAllMocks();
});
it('should have the correct configuration', () => {
const connectorType = getObsAIAssistantConnectorType(
initResources,
new AlertDetailsContextualInsightsService()
@ -66,10 +199,7 @@ describe('observabilityAIAssistant rule_connector', () => {
expect(connectorType.minimumLicenseRequired).toEqual('enterprise');
});
it('does not execute when no new or recovered alerts', async () => {
const initResources = jest
.fn()
.mockResolvedValue({} as ObservabilityAIAssistantRouteHandlerResources);
it('should not execute when there are no new or recovered alerts', async () => {
const connectorType = getObsAIAssistantConnectorType(
initResources,
new AlertDetailsContextualInsightsService()
@ -83,39 +213,9 @@ describe('observabilityAIAssistant rule_connector', () => {
expect(initResources).not.toHaveBeenCalled();
});
it('calls complete api', async () => {
const completeMock = jest.fn().mockReturnValue(new Observable());
const initResources = jest.fn().mockResolvedValue({
service: {
getClient: async () => ({ complete: completeMock }),
getFunctionClient: async () => ({
getFunctions: () => [],
getInstructions: () => [],
getAdhocInstructions: () => [],
}),
},
context: {},
plugins: {
core: {
start: () =>
Promise.resolve({
http: { basePath: { publicBaseUrl: 'http://kibana.com' } },
}),
},
actions: {
start: async () => {
return {
getActionsClientWithRequest: jest.fn().mockResolvedValue({
async getAll() {
return [{ id: 'connector_1' }];
},
}),
};
},
},
},
} as unknown as ObservabilityAIAssistantRouteHandlerResources);
it('should call the complete API with a single message', async () => {
const message = 'hello';
const params = buildActionParams({ connector: 'azure-open-ai', message });
const connectorType = getObsAIAssistantConnectorType(
initResources,
new AlertDetailsContextualInsightsService()
@ -123,11 +223,7 @@ describe('observabilityAIAssistant rule_connector', () => {
const result = await connectorType.executor({
actionId: 'observability-ai-assistant',
request: getFakeKibanaRequest({ id: 'foo', api_key: 'bar' }),
params: {
message: 'hello',
connector: 'azure-open-ai',
alerts: { new: [{ _id: 'new_alert' }], recovered: [] },
},
params,
} as unknown as ObsAIAssistantConnectorTypeExecutorOptions);
expect(result).toEqual({ actionId: 'observability-ai-assistant', status: 'ok' });
@ -140,62 +236,84 @@ describe('observabilityAIAssistant rule_connector', () => {
isPublic: true,
connectorId: 'azure-open-ai',
kibanaPublicUrl: 'http://kibana.com',
messages: [
{
'@timestamp': expect.any(String),
message: {
role: MessageRole.System,
content: '',
},
},
{
'@timestamp': expect.any(String),
message: {
role: MessageRole.User,
content: 'hello',
},
},
{
'@timestamp': expect.any(String),
message: {
role: MessageRole.Assistant,
content: '',
function_call: {
name: 'get_alerts_context',
arguments: JSON.stringify({}),
trigger: MessageRole.Assistant as const,
},
},
},
{
'@timestamp': expect.any(String),
message: {
role: MessageRole.User,
name: 'get_alerts_context',
content: expect.any(String),
},
},
{
'@timestamp': expect.any(String),
message: {
role: MessageRole.Assistant,
content: '',
function_call: {
name: 'get_connectors',
arguments: JSON.stringify({}),
trigger: MessageRole.Assistant as const,
},
},
},
{
'@timestamp': expect.any(String),
message: {
role: MessageRole.User,
name: 'get_connectors',
content: JSON.stringify({ connectors: [{ id: 'connector_1' }] }),
},
},
],
messages: buildConversation(message),
})
);
});
it('executes the complete API with a single prompt', async () => {
const message = 'hello';
const params = buildActionParams({
connector: 'azure-open-ai',
prompts: [{ message, statuses: ALERT_STATUSES }],
});
const connectorType = getObsAIAssistantConnectorType(
initResources,
new AlertDetailsContextualInsightsService()
);
const result = await connectorType.executor({
actionId: 'observability-ai-assistant',
request: getFakeKibanaRequest({ id: 'foo', api_key: 'bar' }),
params,
} as unknown as ObsAIAssistantConnectorTypeExecutorOptions);
expect(result).toEqual({ actionId: 'observability-ai-assistant', status: 'ok' });
expect(initResources).toHaveBeenCalledTimes(1);
expect(completeMock).toHaveBeenCalledTimes(1);
expect(completeMock).toHaveBeenCalledWith(
expect.objectContaining({
persist: true,
isPublic: true,
connectorId: 'azure-open-ai',
kibanaPublicUrl: 'http://kibana.com',
messages: buildConversation(message),
})
);
});
it('should call the complete API with multiple prompts', async () => {
const message = 'hello';
const message2 = 'bye';
const params = buildActionParams({
connector: 'azure-open-ai',
prompts: [
{ message, statuses: ALERT_STATUSES },
{ message: message2, statuses: ALERT_STATUSES },
],
});
const connectorType = getObsAIAssistantConnectorType(
initResources,
new AlertDetailsContextualInsightsService()
);
const result = await connectorType.executor({
actionId: 'observability-ai-assistant',
request: getFakeKibanaRequest({ id: 'foo', api_key: 'bar' }),
params,
} as unknown as ObsAIAssistantConnectorTypeExecutorOptions);
expect(result).toEqual({ actionId: 'observability-ai-assistant', status: 'ok' });
expect(initResources).toHaveBeenCalledTimes(1);
expect(completeMock).toHaveBeenCalledTimes(2);
expect(completeMock).toHaveBeenCalledWith(
expect.objectContaining({
persist: true,
isPublic: true,
connectorId: 'azure-open-ai',
kibanaPublicUrl: 'http://kibana.com',
messages: buildConversation(message),
})
);
expect(completeMock).toHaveBeenCalledWith(
expect.objectContaining({
persist: true,
isPublic: true,
connectorId: 'azure-open-ai',
kibanaPublicUrl: 'http://kibana.com',
messages: buildConversation(message2),
})
);
});

View file

@ -37,8 +37,13 @@ import { AlertDetailsContextualInsightsService } from '@kbn/observability-plugin
import { getSystemMessageFromInstructions } from '@kbn/observability-ai-assistant-plugin/server/service/util/get_system_message_from_instructions';
import { AdHocInstruction } from '@kbn/observability-ai-assistant-plugin/common/types';
import { EXECUTE_CONNECTOR_FUNCTION_NAME } from '@kbn/observability-ai-assistant-plugin/server/functions/execute_connector';
import { ObservabilityAIAssistantClient } from '@kbn/observability-ai-assistant-plugin/server';
import { ChatFunctionClient } from '@kbn/observability-ai-assistant-plugin/server/service/chat_function_client';
import { ActionsClient } from '@kbn/actions-plugin/server';
import { PublicMethodsOf } from '@kbn/utility-types';
import { convertSchemaToOpenApi } from './convert_schema_to_open_api';
import { OBSERVABILITY_AI_ASSISTANT_CONNECTOR_ID } from '../../common/rule_connector';
import { ALERT_STATUSES } from '../../common/constants';
const CONNECTOR_PRIVILEGES = ['api:observabilityAIAssistant', 'app:observabilityAIAssistant'];
@ -64,7 +69,16 @@ const connectorParamsSchemas: Record<string, CompatibleJSONSchema> = {
// Action params as configured by the user on the rule.
// Fix: the `message` key appeared twice (a leftover required declaration next to
// the new optional one) — a duplicate key in the object literal. Only the
// optional legacy variant is kept.
const ParamsSchema = schema.object({
  connector: schema.string(),
  // One prompt per alert-status subset, so each status can get a tailored message.
  prompts: schema.maybe(
    schema.arrayOf(
      schema.object({
        statuses: schema.arrayOf(schema.string()),
        message: schema.string({ minLength: 1 }),
      })
    )
  ),
  status: schema.maybe(schema.string()),
  // Legacy single-message field, kept for backwards compatibility with existing rules.
  message: schema.maybe(schema.string({ minLength: 1 })),
});
const RuleSchema = schema.object({
@ -83,7 +97,12 @@ const AlertSummarySchema = schema.object({
// Params the connector executor receives after renderParameterTemplates.
// Fix: removed the stale required `message` field — renderParameterTemplates now
// emits `prompts` instead of `message`, so a required `message` would fail
// validation on every execution.
const ConnectorParamsSchema = schema.object({
  connector: schema.string(),
  // Legacy `message` actions are normalized into a single prompt covering all statuses.
  prompts: schema.arrayOf(
    schema.object({
      statuses: schema.arrayOf(schema.string()),
      message: schema.string({ minLength: 1 }),
    })
  ),
  rule: RuleSchema,
  alerts: AlertSummarySchema,
});
@ -140,7 +159,7 @@ function renderParameterTemplates(
): ConnectorParamsType {
return {
connector: params.connector,
message: params.message,
prompts: params.prompts,
rule: params.rule,
alerts: params.alerts,
};
@ -151,19 +170,18 @@ async function executor(
initResources: (request: KibanaRequest) => Promise<ObservabilityAIAssistantRouteHandlerResources>,
alertDetailsContextService: AlertDetailsContextualInsightsService
): Promise<ConnectorTypeExecutorResult<unknown>> {
const request = execOptions.request;
const alerts = execOptions.params.alerts;
const { request, params } = execOptions;
if (!request) {
throw new Error('AI Assistant connector requires a kibana request');
}
if (alerts.new.length === 0 && alerts.recovered.length === 0) {
if ((params.alerts?.new || []).length === 0 && (params.alerts?.recovered || []).length === 0) {
// The connector could be executed with only ongoing actions. We use this path as
// a dedup mechanism to prevent triggering the same workflow for an ongoing alert.
return { actionId: execOptions.actionId, status: 'ok' };
}
if (!request) {
throw new Error('AI Assistant connector requires a kibana request');
}
const resources = await initResources(request);
const client = await resources.service.getClient({ request, scopes: ['observability'] });
const functionClient = await resources.service.getFunctionClient({
@ -177,6 +195,52 @@ async function executor(
await resources.plugins.actions.start()
).getActionsClientWithRequest(request);
await Promise.all(
params.prompts.map((prompt) =>
executeAlertsChatCompletion(
resources,
prompt,
params,
alertDetailsContextService,
client,
functionClient,
actionsClient,
execOptions.logger
)
)
);
return { actionId: execOptions.actionId, status: 'ok' };
}
async function executeAlertsChatCompletion(
resources: ObservabilityAIAssistantRouteHandlerResources,
prompt: { statuses: string[]; message: string },
params: ConnectorParamsType,
alertDetailsContextService: AlertDetailsContextualInsightsService,
client: ObservabilityAIAssistantClient,
functionClient: ChatFunctionClient,
actionsClient: PublicMethodsOf<ActionsClient>,
logger: Logger
): Promise<void> {
const alerts = {
new: [...(params.alerts?.new || [])],
recovered: [...(params.alerts?.recovered || [])],
};
if (ALERT_STATUSES.some((status) => prompt.statuses.includes(status))) {
alerts.new = alerts.new.filter((alert) =>
prompt.statuses.includes(get(alert, 'kibana.alert.status'))
);
alerts.recovered = alerts.recovered.filter((alert) =>
prompt.statuses.includes(get(alert, 'kibana.alert.status'))
);
}
if (alerts.new.length === 0 && alerts.recovered.length === 0) {
return;
}
const connectorsList = await actionsClient.getAll().then((connectors) => {
return connectors.map((connector) => {
if (connector.actionTypeId in connectorParamsSchemas) {
@ -210,8 +274,8 @@ If available, include the link of the conversation at the end of your answer.`
text: dedent(
`The execute_connector function can be used to invoke Kibana connectors.
To send to the Slack connector, you need the following arguments:
- the "id" of the connector
- the "params" parameter that you will fill with the message
- the "id" of the connector
- the "params" parameter that you will fill with the message
Please include both "id" and "params.message" in the function arguments when executing the Slack connector..`
),
};
@ -219,10 +283,10 @@ If available, include the link of the conversation at the end of your answer.`
}
const alertsContext = await getAlertsContext(
execOptions.params.rule,
execOptions.params.alerts,
params.rule,
alerts,
async (alert: Record<string, any>) => {
const prompt = await alertDetailsContextService.getAlertDetailsContext(
const alertDetailsContext = await alertDetailsContextService.getAlertDetailsContext(
{
core: resources.context.core,
licensing: resources.context.licensing,
@ -235,7 +299,7 @@ If available, include the link of the conversation at the end of your answer.`
'host.name': get(alert, 'host.name'),
}
);
return prompt
return alertDetailsContext
.map(({ description, data }) => `${description}:\n${JSON.stringify(data, null, 2)}`)
.join('\n\n');
}
@ -246,7 +310,7 @@ If available, include the link of the conversation at the end of your answer.`
functionClient,
persist: true,
isPublic: true,
connectorId: execOptions.params.connector,
connectorId: params.connector,
signal: new AbortController().signal,
kibanaPublicUrl: (await resources.plugins.core.start()).http.basePath.publicBaseUrl,
instructions: [backgroundInstruction],
@ -267,7 +331,7 @@ If available, include the link of the conversation at the end of your answer.`
'@timestamp': new Date().toISOString(),
message: {
role: MessageRole.User,
content: execOptions.params.message,
content: prompt.message,
},
},
{
@ -323,11 +387,9 @@ If available, include the link of the conversation at the end of your answer.`
.pipe(concatenateChatCompletionChunks())
.subscribe({
error: (err) => {
execOptions.logger.error(err);
logger.error(err);
},
});
return { actionId: execOptions.actionId, status: 'ok' };
}
export const getObsAIAssistantConnectorAdapter = (): ConnectorAdapter<
@ -341,7 +403,15 @@ export const getObsAIAssistantConnectorAdapter = (): ConnectorAdapter<
buildActionParams: ({ params, rule, ruleUrl, alerts }) => {
return {
connector: params.connector,
message: params.message,
// Ensure backwards compatibility by using the message field as a prompt if prompts are missing
prompts: params.prompts
? params.prompts
: [
{
statuses: ALERT_STATUSES,
message: params.message || '',
},
],
rule: { id: rule.id, name: rule.name, tags: rule.tags, ruleUrl: ruleUrl ?? null },
alerts: {
new: alerts.new.data,

View file

@ -83,8 +83,8 @@
"@kbn/charts-theme",
"@kbn/ai-assistant-icon",
"@kbn/product-doc-base-plugin",
"@kbn/rule-data-utils",
"@kbn/utility-types",
],
"exclude": [
"target/**/*"
]
"exclude": ["target/**/*"]
}