[8.x] [Observability AI Assistant] add API title_conversation tests (#214721) (#215993)

# Backport

This will backport the following commits from `main` to `8.x`:
- [[Observability AI Assistant] add API title_conversation tests
(#214721)](https://github.com/elastic/kibana/pull/214721)

<!--- Backport version: 9.6.6 -->

### Questions?
Please refer to the [Backport tool
documentation](https://github.com/sorenlouv/backport)

<!--BACKPORT [{"author":{"name":"Arturo
Lidueña","email":"arturo.liduena@elastic.co"},"sourceCommit":{"committedDate":"2025-03-25T16:10:00Z","message":"[Observability
AI Assistant] add API title_conversation tests (#214721)\n\nRelated:
https://github.com/elastic/kibana/issues/180787\n\n- Adds test for
`title_conversation`
function","sha":"9e62a294ca62478cd6482ede711d422d2e975f86","branchLabelMapping":{"^v9.1.0$":"main","^v8.19.0$":"8.x","^v(\\d+).(\\d+).\\d+$":"$1.$2"}},"sourcePullRequest":{"labels":["release_note:skip","v9.0.0","Team:Obs
AI Assistant","backport:version","v9.1.0","v8.19.0"],"title":"add API
title_conversation
tests","number":214721,"url":"https://github.com/elastic/kibana/pull/214721","mergeCommit":{"message":"[Observability
AI Assistant] add API title_conversation tests (#214721)\n\nRelated:
https://github.com/elastic/kibana/issues/180787\n\n- Adds test for
`title_conversation`
function","sha":"9e62a294ca62478cd6482ede711d422d2e975f86"}},"sourceBranch":"main","suggestedTargetBranches":["9.0","8.x"],"targetPullRequestStates":[{"branch":"9.0","label":"v9.0.0","branchLabelMappingKey":"^v(\\d+).(\\d+).\\d+$","isSourceBranch":false,"state":"NOT_CREATED"},{"branch":"main","label":"v9.1.0","branchLabelMappingKey":"^v9.1.0$","isSourceBranch":true,"state":"MERGED","url":"https://github.com/elastic/kibana/pull/214721","number":214721,"mergeCommit":{"message":"[Observability
AI Assistant] add API title_conversation tests (#214721)\n\nRelated:
https://github.com/elastic/kibana/issues/180787\n\n- Adds test for
`title_conversation`
function","sha":"9e62a294ca62478cd6482ede711d422d2e975f86"}},{"branch":"8.x","label":"v8.19.0","branchLabelMappingKey":"^v8.19.0$","isSourceBranch":false,"state":"NOT_CREATED"}]}]
BACKPORT-->
This commit is contained in:
Arturo Lidueña 2025-03-26 11:03:03 +01:00 committed by GitHub
parent 3be9169960
commit 6ed54de5e5
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
4 changed files with 113 additions and 12 deletions

View file

@ -13,7 +13,8 @@ import { Message, MessageRole } from '../../../../common';
import { LangTracer } from '../instrumentation/lang_tracer';
export const TITLE_CONVERSATION_FUNCTION_NAME = 'title_conversation';
export const TITLE_SYSTEM_MESSAGE =
'You are a helpful assistant for Elastic Observability. Assume the following message is the start of a conversation between you and a user; give this conversation a title based on the content below. DO NOT UNDER ANY CIRCUMSTANCES wrap this title in single or double quotes. This title is shown in a list of conversations to the user, so title it for the user, not for you.';
type ChatFunctionWithoutConnectorAndTokenCount = (
name: string,
params: Omit<
@ -35,8 +36,7 @@ export function getGeneratedTitle({
}): Observable<string> {
return from(
chat('generate_title', {
systemMessage:
'You are a helpful assistant for Elastic Observability. Assume the following message is the start of a conversation between you and a user; give this conversation a title based on the content below. DO NOT UNDER ANY CIRCUMSTANCES wrap this title in single or double quotes. This title is shown in a list of conversations to the user, so title it for the user, not for you.',
systemMessage: TITLE_SYSTEM_MESSAGE,
messages: [
{
'@timestamp': new Date().toISOString(),

View file

@ -0,0 +1,103 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import expect from '@kbn/expect';
import { ChatCompletionStreamParams } from 'openai/lib/ChatCompletionStream';
import {
TITLE_CONVERSATION_FUNCTION_NAME,
TITLE_SYSTEM_MESSAGE,
} from '@kbn/observability-ai-assistant-plugin/server/service/client/operators/get_generated_title';
import {
LlmProxy,
createLlmProxy,
} from '../../../../../../../observability_ai_assistant_api_integration/common/create_llm_proxy';
import { chatComplete } from '../../utils/conversation';
import type { DeploymentAgnosticFtrProviderContext } from '../../../../../ftr_provider_context';
// FTR (Functional Test Runner) entry point: API-level tests verifying that the
// Observability AI Assistant asks the LLM for a conversation title via the
// `title_conversation` function and persists the generated title.
export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) {
  const log = getService('log');
  const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi');

  describe('when calling the title_conversation function', function () {
    // Fails on MKI: https://github.com/elastic/kibana/issues/205581
    this.tags(['failsOnMKI']);

    let llmProxy: LlmProxy;
    let connectorId: string;

    before(async () => {
      // Stand up a local proxy that plays the role of the LLM, and create an
      // action connector pointed at it so the assistant talks to the proxy
      // instead of a real model.
      llmProxy = await createLlmProxy(log);
      connectorId = await observabilityAIAssistantAPIClient.createProxyActionConnector({
        port: llmProxy.getPort(),
      });
    });

    after(async () => {
      // Tear down the proxy and the connector created in `before`.
      llmProxy.close();
      await observabilityAIAssistantAPIClient.deleteActionConnector({
        actionId: connectorId,
      });
    });

    // Calling `title_conversation` via the chat/complete endpoint
    describe('POST /internal/observability_ai_assistant/chat/complete', function () {
      let titleRequestBody: ChatCompletionStreamParams;
      const TITLE = 'Question about color of the sky';
      let conversationId: string;

      before(async () => {
        // Register canned proxy responses: one for the title request, one for
        // the conversation answer. `void` marks them as intentionally
        // un-awaited here — completion is awaited below via
        // waitForAllInterceptorsToHaveBeenCalled().
        void llmProxy.interceptTitle(TITLE);
        void llmProxy.interceptConversation('The sky is blue because of Rayleigh scattering.');

        // persist: true so the conversation (and its generated title) is
        // stored and can be fetched back by id in the last test below.
        const res = await chatComplete({
          userPrompt: 'Why the sky is blue?',
          connectorId,
          persist: true,
          observabilityAIAssistantAPIClient,
        });
        conversationId = res.conversationCreateEvent?.conversation.id || '';

        await llmProxy.waitForAllInterceptorsToHaveBeenCalled();

        // NOTE(review): assumes the title request is the FIRST request the
        // proxy receives — confirm that ordering is guaranteed by the
        // assistant, not incidental.
        titleRequestBody = llmProxy.interceptedRequests[0].requestBody;
      });

      // One LLM round-trip for the title plus one for the conversation itself.
      it('makes 2 requests to the LLM', () => {
        expect(llmProxy.interceptedRequests.length).to.be(2);
      });

      // The title request must carry the shared TITLE_SYSTEM_MESSAGE prompt.
      it('sends the correct system message to the LLM for the title', () => {
        expect(
          titleRequestBody.messages.find((message) => message.role === 'system')?.content
        ).to.be(TITLE_SYSTEM_MESSAGE);
      });

      // The user's original prompt is forwarded so the title reflects it.
      it('sends the correct user message to the LLM for the title', () => {
        expect(
          titleRequestBody.messages.find((message) => message.role === 'user')?.content
        ).to.contain('Why the sky is blue?');
      });

      // The LLM is offered the title_conversation tool by its exported name.
      it('sends the correct function call to the LLM for the title', () => {
        expect(titleRequestBody.tools?.[0].function.name).to.be(TITLE_CONVERSATION_FUNCTION_NAME);
      });

      // Round-trip: fetch the persisted conversation and check the stored title.
      it('stores the generated title in the conversation', async () => {
        const { status, body } = await observabilityAIAssistantAPIClient.editor({
          endpoint: 'GET /internal/observability_ai_assistant/conversation/{conversationId}',
          params: {
            path: {
              conversationId,
            },
          },
        });
        expect(status).to.be(200);
        expect(body.conversation.title).to.be(TITLE);
      });
    });
  });
}

View file

@ -24,6 +24,7 @@ export default function aiAssistantApiIntegrationTests({
loadTestFile(require.resolve('./complete/functions/summarize.spec.ts'));
loadTestFile(require.resolve('./complete/functions/recall.spec.ts'));
loadTestFile(require.resolve('./complete/functions/context.spec.ts'));
loadTestFile(require.resolve('./complete/functions/title_conversation.spec.ts'));
loadTestFile(require.resolve('./public_complete/public_complete.spec.ts'));
loadTestFile(require.resolve('./knowledge_base/knowledge_base_setup.spec.ts'));
loadTestFile(

View file

@ -84,11 +84,13 @@ export async function chatComplete({
userPrompt,
screenContexts = [],
connectorId,
persist = false,
observabilityAIAssistantAPIClient,
}: {
userPrompt: string;
screenContexts?: ObservabilityAIAssistantScreenContextRequest[];
connectorId: string;
persist?: boolean;
observabilityAIAssistantAPIClient: ObservabilityAIAssistantApiClient;
}) {
const { status, body } = await observabilityAIAssistantAPIClient.editor({
@ -105,7 +107,7 @@ export async function chatComplete({
},
],
connectorId,
persist: false,
persist,
screenContexts,
scopes: ['observability' as const],
},
@ -113,9 +115,10 @@ export async function chatComplete({
});
expect(status).to.be(200);
const messageEvents = decodeEvents(body);
const messageAddedEvents = getMessageAddedEvents(body);
return { messageAddedEvents, body, status };
const conversationCreateEvent = getConversationCreatedEvent(body);
return { messageAddedEvents, conversationCreateEvent, messageEvents, status };
}
// order of instructions can vary, so we sort to compare them
@ -160,12 +163,6 @@ export function getConversationCreatedEvent(body: Readable | string) {
(event) => event.type === StreamingChatResponseEventType.ConversationCreate
) as ConversationCreateEvent;
if (!conversationCreatedEvent) {
throw new Error(
`No conversation created event found: ${JSON.stringify(decodedEvents, null, 2)}`
);
}
return conversationCreatedEvent;
}