update retrieve_elastic_doc api test (#215237)

Related: https://github.com/elastic/kibana/issues/180787

- Update test for `retrieve_elastic_doc` function

Co-authored-by: Søren Louv-Jansen <sorenlouv@gmail.com>
Co-authored-by: Viduni Wickramarachchi <viduni.ushanka@gmail.com>

parent fba2d2b809
commit 50c2237757

1 changed file with 75 additions and 22 deletions
@@ -6,6 +6,10 @@
  */

 import expect from '@kbn/expect';
+import { ChatCompletionStreamParams } from 'openai/lib/ChatCompletionStream';
+import { ChatCompletionMessageParam } from 'openai/resources';
+import { last } from 'lodash';
+import { MessageAddEvent, MessageRole } from '@kbn/observability-ai-assistant-plugin/common';
 import {
   LlmProxy,
   createLlmProxy,
@@ -22,7 +26,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
     // Fails on MKI: https://github.com/elastic/kibana/issues/205581
     this.tags(['failsOnMKI']);
     const supertest = getService('supertest');
-    const USER_MESSAGE = 'What is Kibana Lens?';
+    const USER_PROMPT = 'What is Kibana Lens?';

     describe('POST /internal/observability_ai_assistant/chat/complete without product doc installed', function () {
       let llmProxy: LlmProxy;
@@ -36,7 +40,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
       void llmProxy.interceptConversation('Hello from LLM Proxy');

       await chatComplete({
-        userPrompt: USER_MESSAGE,
+        userPrompt: USER_PROMPT,
         connectorId,
         observabilityAIAssistantAPIClient,
       });
@@ -70,7 +74,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
       it('contains the original user message', () => {
         const everyRequestHasUserMessage = llmProxy.interceptedRequests.every(({ requestBody }) =>
           requestBody.messages.some(
-            (message) => message.role === 'user' && (message.content as string) === USER_MESSAGE
+            (message) => message.role === 'user' && (message.content as string) === USER_PROMPT
           )
         );
         expect(everyRequestHasUserMessage).to.be(true);
@@ -81,6 +85,9 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
     describe('POST /internal/observability_ai_assistant/chat/complete', function () {
       let llmProxy: LlmProxy;
       let connectorId: string;
+      let messageAddedEvents: MessageAddEvent[];
+      let firstRequestBody: ChatCompletionStreamParams;
+      let secondRequestBody: ChatCompletionStreamParams;

       before(async () => {
         llmProxy = await createLlmProxy(log);
         connectorId = await observabilityAIAssistantAPIClient.createProxyActionConnector({
@@ -90,18 +97,24 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon

         void llmProxy.interceptWithFunctionRequest({
           name: 'retrieve_elastic_doc',
-          arguments: () => JSON.stringify({}),
+          arguments: () =>
+            JSON.stringify({
+              query: USER_PROMPT,
+            }),
           when: () => true,
         });

         void llmProxy.interceptConversation('Hello from LLM Proxy');

-        await chatComplete({
-          userPrompt: USER_MESSAGE,
+        ({ messageAddedEvents } = await chatComplete({
+          userPrompt: USER_PROMPT,
           connectorId,
           observabilityAIAssistantAPIClient,
-        });
+        }));

         await llmProxy.waitForAllInterceptorsToHaveBeenCalled();
+        firstRequestBody = llmProxy.interceptedRequests[0].requestBody;
+        secondRequestBody = llmProxy.interceptedRequests[1].requestBody;
       });

       after(async () => {
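A note on the hunk above: the new `({ messageAddedEvents } = await chatComplete({ ... }));` form wraps the destructuring assignment in parentheses because `messageAddedEvents` is declared earlier in the suite. Without the parentheses, a statement starting with `{` is parsed as a block, and the assignment is a syntax error. A minimal standalone sketch (variable and values invented for illustration):

    // Destructuring assignment into an already-declared variable.
    let messageAddedEvents: string[] = [];

    // The parentheses are required: `{ messageAddedEvents } = ...` on its own
    // would be parsed as a block statement followed by an invalid assignment.
    ({ messageAddedEvents } = { messageAddedEvents: ['messageAdd #1', 'messageAdd #2'] });

    console.log(messageAddedEvents.length); // 2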
@@ -112,25 +125,65 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
         });
       });

-      it('makes 6 requests to the LLM', () => {
-        expect(llmProxy.interceptedRequests.length).to.be(6);
+      it('makes 2 requests to the LLM', () => {
+        expect(llmProxy.interceptedRequests.length).to.be(2);
       });

-      it('every request contain retrieve_elastic_doc function', () => {
-        const everyRequestHasRetrieveElasticDoc = llmProxy.interceptedRequests.every(
-          ({ requestBody }) =>
-            requestBody.tools?.some((t) => t.function.name === 'retrieve_elastic_doc')
-        );
-        expect(everyRequestHasRetrieveElasticDoc).to.be(true);
+      it('emits 5 messageAdded events', () => {
+        expect(messageAddedEvents.length).to.be(5);
       });
-      it('contains the original user message', () => {
-        const everyRequestHasUserMessage = llmProxy.interceptedRequests.every(({ requestBody }) =>
-          requestBody.messages.some(
-            (message) => message.role === 'user' && (message.content as string) === USER_MESSAGE
-          )
-        );
-        expect(everyRequestHasUserMessage).to.be(true);
-      });
+      describe('The first request', () => {
+        it('contains the retrieve_elastic_doc function', () => {
+          expect(firstRequestBody.tools?.map((t) => t.function.name)).to.contain(
+            'retrieve_elastic_doc'
+          );
+        });
+
+        it('leaves the LLM to choose the correct tool by leaving tool_choice as auto and passing tools', () => {
+          expect(firstRequestBody.tool_choice).to.be('auto');
+          expect(firstRequestBody.tools?.length).to.not.be(0);
+        });
+      });
+      describe('The second request - Sending the user prompt', () => {
+        let lastMessage: ChatCompletionMessageParam;
+        let parsedContent: { documents: Array<{ title: string; content: string; url: string }> };
+
+        before(() => {
+          lastMessage = last(secondRequestBody.messages) as ChatCompletionMessageParam;
+          parsedContent = JSON.parse(lastMessage.content as string);
+        });
+
+        it('includes the retrieve_elastic_doc function call', () => {
+          expect(secondRequestBody.messages[4].role).to.be(MessageRole.Assistant);
+          // @ts-expect-error
+          expect(secondRequestBody.messages[4].tool_calls[0].function.name).to.be(
+            'retrieve_elastic_doc'
+          );
+        });
+
+        it('responds with the correct tool message', () => {
+          expect(lastMessage?.role).to.be('tool');
+          // @ts-expect-error
+          expect(lastMessage?.tool_call_id).to.equal(
+            // @ts-expect-error
+            secondRequestBody.messages[4].tool_calls[0].id
+          );
+        });
+
+        it('sends the retrieved documents from Elastic docs to the LLM', () => {
+          expect(lastMessage.content).to.be.a('string');
+        });
+
+        it('should send 3 documents to the llm', () => {
+          expect(parsedContent.documents.length).to.be(3);
+        });
+
+        it('should contain the word "lens" in the document content', () => {
+          const document = parsedContent.documents.find(
+            (doc) => doc.title === 'Enhancements and bug fixes'
+          );
+          expect(document).to.not.be(undefined);
+          expect(document?.content).to.contain('lens');
+        });
+      });
     });
   });
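Two of the assertions in 'The first request' block above lean on the OpenAI chat completions request shape: `tools` carries function definitions and `tool_choice: 'auto'` leaves the decision of whether and what to call to the model. A rough, self-contained sketch of that shape, with all field values invented for illustration (they are not taken from the test fixture):

    // Rough shape of the intercepted request body. The test walks
    // `tools?.map((t) => t.function.name)` over exactly this structure.
    const firstRequestBody = {
      messages: [{ role: 'user' as const, content: 'What is Kibana Lens?' }],
      tool_choice: 'auto' as const, // the model decides whether/which tool to call
      tools: [
        {
          type: 'function' as const,
          function: {
            name: 'retrieve_elastic_doc',
            // description/parameters below are placeholders for illustration
            description: 'Retrieve relevant Elastic documentation.',
            parameters: { type: 'object', properties: { query: { type: 'string' } } },
          },
        },
      ],
    };

    console.log(firstRequestBody.tools.map((t) => t.function.name)); // ['retrieve_elastic_doc']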
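The 'The second request' assertions rely on the matching convention for tool calls in the same format: an assistant message carries `tool_calls`, and the reply comes back as a `role: 'tool'` message whose `tool_call_id` echoes the call's `id`; the test additionally parses that tool message's content as a `{ documents }` payload. A minimal sketch with invented ids and document values:

    // Assistant turn: the proxied 'LLM' asks for the retrieve_elastic_doc call.
    const assistantMessage = {
      role: 'assistant' as const,
      content: null,
      tool_calls: [
        {
          id: 'call_123', // id invented for illustration
          type: 'function' as const,
          function: {
            name: 'retrieve_elastic_doc',
            arguments: '{"query":"What is Kibana Lens?"}',
          },
        },
      ],
    };

    // Tool turn: the function result is sent back with a matching tool_call_id,
    // and its content is the JSON the test parses into `parsedContent`.
    const toolMessage = {
      role: 'tool' as const,
      tool_call_id: assistantMessage.tool_calls[0].id, // must match the call id
      content: JSON.stringify({
        documents: [
          { title: 'Enhancements and bug fixes', content: '... lens ...', url: 'https://example.invalid' },
        ],
      }),
    };

    console.log(toolMessage.tool_call_id === assistantMessage.tool_calls[0].id); // true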