mirror of
https://github.com/elastic/kibana.git
synced 2025-04-23 01:13:23 -04:00
[Obs Ai Assistant] Add system message (#209773)
Fix: System Message Missing in Inference Plugin
Closes #209548
## Summary
A regression was introduced in 8.18
([#199286](https://github.com/elastic/kibana/pull/199286)), where the
system message is no longer passed to the inference plugin and,
consequently, the LLM.
Currently, only user messages are being sent, which impacts conversation
guidance and guardrails. The system message is crucial for steering
responses and maintaining contextual integrity.
The filtering of the system message happens here:
771a080ffa/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/client/index.ts (L510-L512)
Fix Approach
- Ensure the `system` message is included as a parameter in
`inferenceClient.chatComplete`.
```typescript
const options = {
connectorId,
system,
messages: convertMessagesForInference(messages),
toolChoice,
tools,
functionCalling: (simulateFunctionCalling ? 'simulated' : 'native') as FunctionCallingMode,
};
if (stream) {
return defer(() =>
this.dependencies.inferenceClient.chatComplete({
...options,
stream: true,
})
).pipe(
convertInferenceEventsToStreamingEvents(),
instrumentAndCountTokens(name),
failOnNonExistingFunctionCall({ functions }),
tap((event) => {
if (
event.type === StreamingChatResponseEventType.ChatCompletionChunk &&
this.dependencies.logger.isLevelEnabled('trace')
) {
this.dependencies.logger.trace(`Received chunk: ${JSON.stringify(event.message)}`);
}
}),
shareReplay()
) as TStream extends true
? Observable<ChatCompletionChunkEvent | TokenCountEvent | ChatCompletionMessageEvent>
: never;
} else {
return this.dependencies.inferenceClient.chatComplete({
...options,
stream: false,
}) as TStream extends true ? never : Promise<ChatCompleteResponse>;
}
}
```
- Add an API test to verify that the system message is correctly passed to the LLM.
This commit is contained in:
parent
579388d03e
commit
0ae28aa8bc
41 changed files with 281 additions and 608 deletions
|
@ -374,7 +374,7 @@ export function ChatBody({
|
|||
paddingSize="m"
|
||||
className={animClassName(euiTheme)}
|
||||
>
|
||||
{connectors.connectors?.length === 0 || messages.length === 1 ? (
|
||||
{connectors.connectors?.length === 0 || messages.length === 0 ? (
|
||||
<WelcomeMessage
|
||||
connectors={connectors}
|
||||
knowledgeBase={knowledgeBase}
|
||||
|
|
|
@ -7,7 +7,7 @@
|
|||
|
||||
import React from 'react';
|
||||
import { UserAvatar } from '@kbn/user-profile-components';
|
||||
import { EuiAvatar, EuiLoadingSpinner } from '@elastic/eui';
|
||||
import { EuiLoadingSpinner } from '@elastic/eui';
|
||||
import type { AuthenticatedUser } from '@kbn/security-plugin/common';
|
||||
import { MessageRole } from '@kbn/observability-ai-assistant-plugin/public';
|
||||
import { AssistantAvatar } from '@kbn/ai-assistant-icon';
|
||||
|
@ -34,9 +34,6 @@ export function ChatItemAvatar({ currentUser, role, loading }: ChatAvatarProps)
|
|||
case MessageRole.Function:
|
||||
return <AssistantAvatar name="Elastic Assistant" color="subdued" size="m" />;
|
||||
|
||||
case MessageRole.System:
|
||||
return <EuiAvatar name="system" iconType="dot" color="subdued" />;
|
||||
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
|
|
|
@ -116,16 +116,8 @@ describe('useConversation', () => {
|
|||
});
|
||||
});
|
||||
|
||||
it('returns only the system message', () => {
|
||||
expect(hookResult.result.current.messages).toEqual([
|
||||
{
|
||||
'@timestamp': expect.any(String),
|
||||
message: {
|
||||
content: '',
|
||||
role: MessageRole.System,
|
||||
},
|
||||
},
|
||||
]);
|
||||
it('returns empty messages', () => {
|
||||
expect(hookResult.result.current.messages).toEqual([]);
|
||||
});
|
||||
|
||||
it('returns a ready state', () => {
|
||||
|
@ -157,15 +149,8 @@ describe('useConversation', () => {
|
|||
});
|
||||
});
|
||||
|
||||
it('returns the system message and the initial messages', () => {
|
||||
it('returns the initial messages', () => {
|
||||
expect(hookResult.result.current.messages).toEqual([
|
||||
{
|
||||
'@timestamp': expect.any(String),
|
||||
message: {
|
||||
content: '',
|
||||
role: MessageRole.System,
|
||||
},
|
||||
},
|
||||
{
|
||||
'@timestamp': expect.any(String),
|
||||
message: {
|
||||
|
@ -183,14 +168,8 @@ describe('useConversation', () => {
|
|||
conversation: {
|
||||
id: 'my-conversation-id',
|
||||
},
|
||||
systemMessage: 'System',
|
||||
messages: [
|
||||
{
|
||||
'@timestamp': new Date().toISOString(),
|
||||
message: {
|
||||
role: MessageRole.System,
|
||||
content: 'System',
|
||||
},
|
||||
},
|
||||
{
|
||||
'@timestamp': new Date().toISOString(),
|
||||
message: {
|
||||
|
@ -218,14 +197,8 @@ describe('useConversation', () => {
|
|||
conversation: {
|
||||
id: 'my-conversation-id',
|
||||
},
|
||||
systemMessage: 'System',
|
||||
messages: [
|
||||
{
|
||||
'@timestamp': expect.any(String),
|
||||
message: {
|
||||
content: 'System',
|
||||
role: MessageRole.System,
|
||||
},
|
||||
},
|
||||
{
|
||||
'@timestamp': expect.any(String),
|
||||
message: {
|
||||
|
@ -239,13 +212,6 @@ describe('useConversation', () => {
|
|||
|
||||
it('sets messages to the messages of the conversation', () => {
|
||||
expect(hookResult.result.current.messages).toEqual([
|
||||
{
|
||||
'@timestamp': expect.any(String),
|
||||
message: {
|
||||
content: expect.any(String),
|
||||
role: MessageRole.System,
|
||||
},
|
||||
},
|
||||
{
|
||||
'@timestamp': expect.any(String),
|
||||
message: {
|
||||
|
@ -255,10 +221,6 @@ describe('useConversation', () => {
|
|||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('overrides the system message', () => {
|
||||
expect(hookResult.result.current.messages[0].message.content).toBe('');
|
||||
});
|
||||
});
|
||||
|
||||
describe('with a conversation id that fails to load', () => {
|
||||
|
@ -282,7 +244,7 @@ describe('useConversation', () => {
|
|||
});
|
||||
|
||||
it('resets the messages', () => {
|
||||
expect(hookResult.result.current.messages.length).toBe(1);
|
||||
expect(hookResult.result.current.messages.length).toBe(0);
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -290,13 +252,6 @@ describe('useConversation', () => {
|
|||
const subject: Subject<StreamingChatResponseEventWithoutError> = new Subject();
|
||||
let onConversationUpdate: jest.Mock;
|
||||
const expectedMessages = [
|
||||
{
|
||||
'@timestamp': expect.any(String),
|
||||
message: {
|
||||
role: MessageRole.System,
|
||||
content: '',
|
||||
},
|
||||
},
|
||||
{
|
||||
'@timestamp': expect.any(String),
|
||||
message: {
|
||||
|
@ -333,6 +288,7 @@ describe('useConversation', () => {
|
|||
conversation: {
|
||||
id: 'my-conversation-id',
|
||||
},
|
||||
systemMessage: '',
|
||||
messages: expectedMessages,
|
||||
},
|
||||
(request as any).params.body
|
||||
|
|
|
@ -32,19 +32,6 @@ export function buildMessage(params: BuildMessageProps): Message {
|
|||
);
|
||||
}
|
||||
|
||||
export function buildSystemMessage(
|
||||
params?: Omit<BuildMessageProps, 'message'> & {
|
||||
message: DeepPartial<Omit<Message['message'], 'role'>>;
|
||||
}
|
||||
) {
|
||||
return buildMessage(
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
merge({}, params, {
|
||||
message: { role: MessageRole.System },
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
export function buildUserMessage(
|
||||
params?: Omit<BuildMessageProps, 'message'> & {
|
||||
message?: DeepPartial<Omit<Message['message'], 'role'>>;
|
||||
|
@ -117,7 +104,8 @@ export function buildConversation(params?: Partial<Conversation>): Conversation
|
|||
title: '',
|
||||
last_updated: '',
|
||||
},
|
||||
messages: [buildSystemMessage()],
|
||||
systemMessage: '',
|
||||
messages: [],
|
||||
labels: {},
|
||||
numeric_labels: {},
|
||||
namespace: '',
|
||||
|
|
|
@ -8,7 +8,6 @@
|
|||
import type { DeeplyMockedKeys } from '@kbn/utility-types-jest';
|
||||
import {
|
||||
FunctionDefinition,
|
||||
MessageRole,
|
||||
ObservabilityAIAssistantChatService,
|
||||
} from '@kbn/observability-ai-assistant-plugin/public';
|
||||
import { BehaviorSubject } from 'rxjs';
|
||||
|
@ -25,13 +24,7 @@ export const createMockChatService = (): MockedChatService => {
|
|||
hasFunction: jest.fn().mockReturnValue(false),
|
||||
hasRenderFunction: jest.fn().mockReturnValue(true),
|
||||
renderFunction: jest.fn(),
|
||||
getSystemMessage: jest.fn().mockReturnValue({
|
||||
'@timestamp': new Date().toISOString(),
|
||||
message: {
|
||||
role: MessageRole.System,
|
||||
content: '',
|
||||
},
|
||||
}),
|
||||
getSystemMessage: jest.fn().mockReturnValue('system message'),
|
||||
getScopes: jest.fn(),
|
||||
};
|
||||
return mockChatService;
|
||||
|
|
|
@ -15,12 +15,6 @@ export function getRoleTranslation(role: MessageRole) {
|
|||
});
|
||||
}
|
||||
|
||||
if (role === MessageRole.System) {
|
||||
return i18n.translate('xpack.aiAssistant.chatTimeline.messages.system.label', {
|
||||
defaultMessage: 'System',
|
||||
});
|
||||
}
|
||||
|
||||
return i18n.translate('xpack.aiAssistant.chatTimeline.messages.elasticAssistant.label', {
|
||||
defaultMessage: 'Elastic Assistant',
|
||||
});
|
||||
|
|
|
@ -59,13 +59,6 @@ describe('getTimelineItemsFromConversation', () => {
|
|||
},
|
||||
chatState: ChatState.Ready,
|
||||
messages: [
|
||||
{
|
||||
'@timestamp': new Date().toISOString(),
|
||||
message: {
|
||||
role: MessageRole.System,
|
||||
content: 'System',
|
||||
},
|
||||
},
|
||||
{
|
||||
'@timestamp': new Date().toISOString(),
|
||||
message: {
|
||||
|
@ -77,7 +70,7 @@ describe('getTimelineItemsFromConversation', () => {
|
|||
onActionClick: jest.fn(),
|
||||
});
|
||||
});
|
||||
it('excludes the system message', () => {
|
||||
it('includes the opening message and the user message', () => {
|
||||
expect(items.length).toBe(2);
|
||||
expect(items[0].title).toBe('started a conversation');
|
||||
});
|
||||
|
@ -113,13 +106,6 @@ describe('getTimelineItemsFromConversation', () => {
|
|||
hasConnector: true,
|
||||
chatState: ChatState.Ready,
|
||||
messages: [
|
||||
{
|
||||
'@timestamp': new Date().toISOString(),
|
||||
message: {
|
||||
role: MessageRole.System,
|
||||
content: 'System',
|
||||
},
|
||||
},
|
||||
{
|
||||
'@timestamp': new Date().toISOString(),
|
||||
message: {
|
||||
|
@ -204,13 +190,6 @@ describe('getTimelineItemsFromConversation', () => {
|
|||
hasConnector: true,
|
||||
chatState: ChatState.Ready,
|
||||
messages: [
|
||||
{
|
||||
'@timestamp': new Date().toISOString(),
|
||||
message: {
|
||||
role: MessageRole.System,
|
||||
content: 'System',
|
||||
},
|
||||
},
|
||||
{
|
||||
'@timestamp': new Date().toISOString(),
|
||||
message: {
|
||||
|
@ -282,13 +261,6 @@ describe('getTimelineItemsFromConversation', () => {
|
|||
hasConnector: true,
|
||||
chatState: ChatState.Ready,
|
||||
messages: [
|
||||
{
|
||||
'@timestamp': new Date().toISOString(),
|
||||
message: {
|
||||
role: MessageRole.System,
|
||||
content: 'System',
|
||||
},
|
||||
},
|
||||
{
|
||||
'@timestamp': new Date().toISOString(),
|
||||
message: {
|
||||
|
@ -364,13 +336,6 @@ describe('getTimelineItemsFromConversation', () => {
|
|||
},
|
||||
chatState: ChatState.Ready,
|
||||
messages: [
|
||||
{
|
||||
'@timestamp': new Date().toISOString(),
|
||||
message: {
|
||||
role: MessageRole.System,
|
||||
content: 'System',
|
||||
},
|
||||
},
|
||||
{
|
||||
'@timestamp': new Date().toISOString(),
|
||||
message: {
|
||||
|
@ -415,13 +380,6 @@ describe('getTimelineItemsFromConversation', () => {
|
|||
hasConnector: true,
|
||||
chatState: ChatState.Ready,
|
||||
messages: [
|
||||
{
|
||||
'@timestamp': new Date().toISOString(),
|
||||
message: {
|
||||
role: MessageRole.System,
|
||||
content: 'System',
|
||||
},
|
||||
},
|
||||
{
|
||||
'@timestamp': new Date().toISOString(),
|
||||
message: {
|
||||
|
@ -491,13 +449,6 @@ describe('getTimelineItemsFromConversation', () => {
|
|||
hasConnector: true,
|
||||
chatState: ChatState.Loading,
|
||||
messages: [
|
||||
{
|
||||
'@timestamp': new Date().toISOString(),
|
||||
message: {
|
||||
role: MessageRole.System,
|
||||
content: 'System',
|
||||
},
|
||||
},
|
||||
{
|
||||
'@timestamp': new Date().toISOString(),
|
||||
message: {
|
||||
|
|
|
@ -80,10 +80,6 @@ export function getTimelineItemsfromConversation({
|
|||
payload: ChatActionClickPayload;
|
||||
}) => void;
|
||||
}): ChatTimelineItem[] {
|
||||
const messagesWithoutSystem = messages.filter(
|
||||
(message) => message.message.role !== MessageRole.System
|
||||
);
|
||||
|
||||
const items: ChatTimelineItem[] = [
|
||||
{
|
||||
id: v4(),
|
||||
|
@ -100,7 +96,7 @@ export function getTimelineItemsfromConversation({
|
|||
}),
|
||||
role: MessageRole.User,
|
||||
},
|
||||
...messagesWithoutSystem.map((message, index) => {
|
||||
...messages.map((message, index) => {
|
||||
const id = v4();
|
||||
|
||||
let title: React.ReactNode = '';
|
||||
|
@ -108,10 +104,8 @@ export function getTimelineItemsfromConversation({
|
|||
let element: React.ReactNode | undefined;
|
||||
|
||||
const prevFunctionCall =
|
||||
message.message.name &&
|
||||
messagesWithoutSystem[index - 1] &&
|
||||
messagesWithoutSystem[index - 1].message.function_call
|
||||
? messagesWithoutSystem[index - 1].message.function_call
|
||||
message.message.name && messages[index - 1] && messages[index - 1].message.function_call
|
||||
? messages[index - 1].message.function_call
|
||||
: undefined;
|
||||
|
||||
let role = message.message.function_call?.trigger || message.message.role;
|
||||
|
|
|
@ -9747,7 +9747,6 @@
|
|||
"xpack.aiAssistant.chatTimeline.actions.editPrompt": "Modifier l'invite",
|
||||
"xpack.aiAssistant.chatTimeline.actions.inspectPrompt": "Inspecter l'invite",
|
||||
"xpack.aiAssistant.chatTimeline.messages.elasticAssistant.label": "Assistant d'Elastic",
|
||||
"xpack.aiAssistant.chatTimeline.messages.system.label": "Système",
|
||||
"xpack.aiAssistant.chatTimeline.messages.user.label": "Vous",
|
||||
"xpack.aiAssistant.checkingKbAvailability": "Vérification de la disponibilité de la base de connaissances",
|
||||
"xpack.aiAssistant.conversationList.dateGroupTitle.lastMonth": "Le mois dernier",
|
||||
|
|
|
@ -9623,7 +9623,6 @@
|
|||
"xpack.aiAssistant.chatTimeline.actions.editPrompt": "プロンプトを編集",
|
||||
"xpack.aiAssistant.chatTimeline.actions.inspectPrompt": "プロンプトを検査",
|
||||
"xpack.aiAssistant.chatTimeline.messages.elasticAssistant.label": "Elastic Assistant",
|
||||
"xpack.aiAssistant.chatTimeline.messages.system.label": "システム",
|
||||
"xpack.aiAssistant.chatTimeline.messages.user.label": "あなた",
|
||||
"xpack.aiAssistant.checkingKbAvailability": "ナレッジベースの利用可能性を確認中",
|
||||
"xpack.aiAssistant.conversationList.dateGroupTitle.lastMonth": "先月",
|
||||
|
|
|
@ -9469,7 +9469,6 @@
|
|||
"xpack.aiAssistant.chatTimeline.actions.editPrompt": "编辑提示",
|
||||
"xpack.aiAssistant.chatTimeline.actions.inspectPrompt": "检查提示",
|
||||
"xpack.aiAssistant.chatTimeline.messages.elasticAssistant.label": "Elastic 助手",
|
||||
"xpack.aiAssistant.chatTimeline.messages.system.label": "系统",
|
||||
"xpack.aiAssistant.chatTimeline.messages.user.label": "您",
|
||||
"xpack.aiAssistant.checkingKbAvailability": "正在检查知识库的可用性",
|
||||
"xpack.aiAssistant.conversationList.dateGroupTitle.lastMonth": "上月",
|
||||
|
|
|
@ -63,6 +63,7 @@ export interface Conversation {
|
|||
last_updated: string;
|
||||
token_count?: TokenCount;
|
||||
};
|
||||
systemMessage?: string;
|
||||
messages: Message[];
|
||||
labels: Record<string, string>;
|
||||
numeric_labels: Record<string, number>;
|
||||
|
|
|
@ -12,7 +12,7 @@ import { ObservabilityAIAssistantTelemetryEventType } from '../telemetry_event_t
|
|||
|
||||
export interface ChatFeedback {
|
||||
feedback: Feedback;
|
||||
conversation: Omit<Omit<Conversation, 'messages'>, 'conversation'> & {
|
||||
conversation: Omit<Omit<Conversation, 'messages' | 'systemMessage'>, 'conversation'> & {
|
||||
conversation: Omit<Conversation['conversation'], 'title'>;
|
||||
};
|
||||
}
|
||||
|
|
|
@ -32,13 +32,7 @@ const mockChatService: MockedChatService = {
|
|||
hasFunction: jest.fn().mockReturnValue(false),
|
||||
hasRenderFunction: jest.fn().mockReturnValue(true),
|
||||
renderFunction: jest.fn(),
|
||||
getSystemMessage: jest.fn().mockReturnValue({
|
||||
'@timestamp': new Date().toISOString(),
|
||||
message: {
|
||||
content: 'system',
|
||||
role: MessageRole.System,
|
||||
},
|
||||
}),
|
||||
getSystemMessage: jest.fn().mockReturnValue('system'),
|
||||
getScopes: jest.fn(),
|
||||
};
|
||||
|
||||
|
@ -88,11 +82,11 @@ describe('useChat', () => {
|
|||
});
|
||||
});
|
||||
|
||||
it('returns the initial messages including the system message', () => {
|
||||
it('returns the initial messages', () => {
|
||||
const { messages } = hookResult.result.current;
|
||||
expect(messages.length).toBe(2);
|
||||
expect(messages[0].message.role).toBe('system');
|
||||
expect(messages[1].message.content).toBe('hello');
|
||||
expect(messages.length).toBe(1);
|
||||
expect(messages[0].message.role).toBe(MessageRole.User);
|
||||
expect(messages[0].message.content).toBe('hello');
|
||||
});
|
||||
|
||||
it('sets chatState to ready', () => {
|
||||
|
@ -166,8 +160,7 @@ describe('useChat', () => {
|
|||
});
|
||||
|
||||
it('shows an empty list of messages', () => {
|
||||
expect(hookResult.result.current.messages.length).toBe(1);
|
||||
expect(hookResult.result.current.messages[0].message.role).toBe(MessageRole.System);
|
||||
expect(hookResult.result.current.messages.length).toBe(0);
|
||||
});
|
||||
|
||||
it('aborts the running request', () => {
|
||||
|
@ -187,7 +180,7 @@ describe('useChat', () => {
|
|||
});
|
||||
});
|
||||
|
||||
expect(hookResult.result.current.messages[2].message.content).toBe('good');
|
||||
expect(hookResult.result.current.messages[1].message.content).toBe('good');
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -222,7 +215,7 @@ describe('useChat', () => {
|
|||
subject.complete();
|
||||
});
|
||||
|
||||
expect(hookResult.result.current.messages[2].message.content).toBe('goodbye');
|
||||
expect(hookResult.result.current.messages[1].message.content).toBe('goodbye');
|
||||
expect(hookResult.result.current.state).toBe(ChatState.Ready);
|
||||
});
|
||||
});
|
||||
|
@ -242,7 +235,7 @@ describe('useChat', () => {
|
|||
});
|
||||
|
||||
it('shows the partial message and sets chatState to aborted', () => {
|
||||
expect(hookResult.result.current.messages[2].message.content).toBe('good');
|
||||
expect(hookResult.result.current.messages[1].message.content).toBe('good');
|
||||
expect(hookResult.result.current.state).toBe(ChatState.Aborted);
|
||||
});
|
||||
|
||||
|
@ -285,7 +278,7 @@ describe('useChat', () => {
|
|||
});
|
||||
|
||||
it('shows the partial message and sets chatState to error', () => {
|
||||
expect(hookResult.result.current.messages[2].message.content).toBe('good');
|
||||
expect(hookResult.result.current.messages[1].message.content).toBe('good');
|
||||
expect(hookResult.result.current.state).toBe(ChatState.Error);
|
||||
});
|
||||
|
||||
|
|
|
@ -30,13 +30,6 @@ export enum ChatState {
|
|||
Aborted = 'aborted',
|
||||
}
|
||||
|
||||
function getWithSystemMessage(messages: Message[], systemMessage: Message) {
|
||||
return [
|
||||
systemMessage,
|
||||
...messages.filter((message) => message.message.role !== MessageRole.System),
|
||||
];
|
||||
}
|
||||
|
||||
export interface UseChatResult {
|
||||
messages: Message[];
|
||||
setMessages: (messages: Message[]) => void;
|
||||
|
@ -160,7 +153,8 @@ function useChatWithoutContext({
|
|||
const next$ = chatService.complete({
|
||||
getScreenContexts: () => service.getScreenContexts(),
|
||||
connectorId,
|
||||
messages: getWithSystemMessage(nextMessages, systemMessage),
|
||||
messages: nextMessages,
|
||||
systemMessage,
|
||||
persist,
|
||||
disableFunctions: disableFunctions ?? false,
|
||||
signal: abortControllerRef.current.signal,
|
||||
|
@ -275,8 +269,8 @@ function useChatWithoutContext({
|
|||
}, []);
|
||||
|
||||
const memoizedMessages = useMemo(() => {
|
||||
return getWithSystemMessage(messages.concat(pendingMessages ?? []), systemMessage);
|
||||
}, [systemMessage, messages, pendingMessages]);
|
||||
return messages.concat(pendingMessages ?? []);
|
||||
}, [messages, pendingMessages]);
|
||||
|
||||
const setMessagesWithAbort = useCallback((nextMessages: Message[]) => {
|
||||
abortControllerRef.current.abort();
|
||||
|
|
|
@ -13,7 +13,7 @@ import type {
|
|||
ChatCompletionChunkEvent,
|
||||
StreamingChatResponseEventWithoutError,
|
||||
} from '../common/conversation_complete';
|
||||
import { MessageRole, ScreenContextActionDefinition } from '../common/types';
|
||||
import { ScreenContextActionDefinition } from '../common/types';
|
||||
import type { ObservabilityAIAssistantAPIClient } from './api';
|
||||
import type {
|
||||
ObservabilityAIAssistantChatService,
|
||||
|
@ -40,13 +40,7 @@ export const mockChatService: ObservabilityAIAssistantChatService = {
|
|||
),
|
||||
hasFunction: () => true,
|
||||
hasRenderFunction: () => true,
|
||||
getSystemMessage: () => ({
|
||||
'@timestamp': new Date().toISOString(),
|
||||
message: {
|
||||
role: MessageRole.System,
|
||||
content: 'System',
|
||||
},
|
||||
}),
|
||||
getSystemMessage: () => 'System',
|
||||
getScopes: jest.fn(),
|
||||
};
|
||||
|
||||
|
|
|
@ -28,14 +28,8 @@ const client = {
|
|||
|
||||
const connectorId = 'foo';
|
||||
|
||||
const systemMessage = 'System message';
|
||||
const messages: Message[] = [
|
||||
{
|
||||
'@timestamp': new Date().toISOString(),
|
||||
message: {
|
||||
role: MessageRole.System,
|
||||
content: 'System message',
|
||||
},
|
||||
},
|
||||
{
|
||||
'@timestamp': new Date().toISOString(),
|
||||
message: {
|
||||
|
@ -97,6 +91,7 @@ describe('complete', () => {
|
|||
client,
|
||||
connectorId,
|
||||
getScreenContexts: () => [],
|
||||
systemMessage,
|
||||
messages,
|
||||
persist: false,
|
||||
disableFunctions: false,
|
||||
|
|
|
@ -83,6 +83,7 @@ describe('createChatService', () => {
|
|||
function chat({ signal }: { signal: AbortSignal } = { signal: new AbortController().signal }) {
|
||||
return service.chat('my_test', {
|
||||
signal,
|
||||
systemMessage: 'system',
|
||||
messages: [],
|
||||
connectorId: '',
|
||||
scopes: ['observability'],
|
||||
|
|
|
@ -25,7 +25,7 @@ import {
|
|||
} from 'rxjs';
|
||||
import { BehaviorSubject } from 'rxjs';
|
||||
import type { AssistantScope } from '@kbn/ai-assistant-common';
|
||||
import { ChatCompletionChunkEvent, Message, MessageRole } from '../../common';
|
||||
import { ChatCompletionChunkEvent } from '../../common';
|
||||
import {
|
||||
StreamingChatResponseEventType,
|
||||
type BufferFlushEvent,
|
||||
|
@ -186,7 +186,6 @@ class ChatService {
|
|||
|
||||
async initialize() {
|
||||
this.functionRegistry = new Map();
|
||||
const systemMessages: string[] = [];
|
||||
const scopePromise = this.apiClient('GET /internal/observability_ai_assistant/functions', {
|
||||
signal: this.abortSignal,
|
||||
params: {
|
||||
|
@ -196,7 +195,7 @@ class ChatService {
|
|||
},
|
||||
}).then(({ functionDefinitions, systemMessage }) => {
|
||||
functionDefinitions.forEach((fn) => this.functionRegistry.set(fn.name, fn));
|
||||
systemMessages.push(systemMessage);
|
||||
this.systemMessage = systemMessage;
|
||||
});
|
||||
|
||||
await Promise.all([
|
||||
|
@ -210,8 +209,6 @@ class ChatService {
|
|||
}),
|
||||
]);
|
||||
|
||||
this.systemMessage = systemMessages.join('\n');
|
||||
|
||||
this.functions$.next(this.getFunctions());
|
||||
}
|
||||
|
||||
|
@ -278,24 +275,17 @@ class ChatService {
|
|||
return this.renderFunctionRegistry.has(name);
|
||||
};
|
||||
|
||||
public getSystemMessage = (): Message => {
|
||||
return {
|
||||
'@timestamp': new Date().toISOString(),
|
||||
message: {
|
||||
role: MessageRole.System,
|
||||
content: this.systemMessage,
|
||||
},
|
||||
};
|
||||
};
|
||||
public getSystemMessage = (): string => this.systemMessage;
|
||||
|
||||
public chat: ObservabilityAIAssistantChatService['chat'] = (
|
||||
name: string,
|
||||
{ connectorId, messages, functionCall, functions, signal }
|
||||
{ connectorId, systemMessage, messages, functionCall, functions, signal }
|
||||
) => {
|
||||
return this.callStreamingApi('POST /internal/observability_ai_assistant/chat', {
|
||||
params: {
|
||||
body: {
|
||||
name,
|
||||
systemMessage,
|
||||
messages,
|
||||
connectorId,
|
||||
functionCall,
|
||||
|
@ -316,6 +306,7 @@ class ChatService {
|
|||
getScreenContexts,
|
||||
connectorId,
|
||||
conversationId,
|
||||
systemMessage,
|
||||
messages,
|
||||
persist,
|
||||
disableFunctions,
|
||||
|
@ -327,6 +318,7 @@ class ChatService {
|
|||
getScreenContexts,
|
||||
connectorId,
|
||||
conversationId,
|
||||
systemMessage,
|
||||
messages,
|
||||
persist,
|
||||
disableFunctions,
|
||||
|
|
|
@ -7,7 +7,7 @@
|
|||
|
||||
import type { DeeplyMockedKeys } from '@kbn/utility-types-jest';
|
||||
import { BehaviorSubject } from 'rxjs';
|
||||
import { FunctionDefinition, MessageRole } from '../../common';
|
||||
import { FunctionDefinition } from '../../common';
|
||||
import type { ObservabilityAIAssistantChatService } from '../types';
|
||||
|
||||
type MockedChatService = DeeplyMockedKeys<ObservabilityAIAssistantChatService>;
|
||||
|
@ -22,13 +22,7 @@ export const createMockChatService = (): MockedChatService => {
|
|||
hasFunction: jest.fn().mockReturnValue(false),
|
||||
hasRenderFunction: jest.fn().mockReturnValue(true),
|
||||
renderFunction: jest.fn(),
|
||||
getSystemMessage: jest.fn().mockReturnValue({
|
||||
'@timestamp': new Date().toISOString(),
|
||||
message: {
|
||||
role: MessageRole.System,
|
||||
content: 'system',
|
||||
},
|
||||
}),
|
||||
getSystemMessage: jest.fn().mockReturnValue('system'),
|
||||
getScopes: jest.fn(),
|
||||
};
|
||||
return mockChatService;
|
||||
|
|
|
@ -9,7 +9,7 @@ import { noop } from 'lodash';
|
|||
import React from 'react';
|
||||
import { BehaviorSubject, Observable, of } from 'rxjs';
|
||||
import { AssistantScope } from '@kbn/ai-assistant-common';
|
||||
import { ChatCompletionChunkEvent, FunctionDefinition, MessageRole } from '.';
|
||||
import { ChatCompletionChunkEvent, FunctionDefinition } from '.';
|
||||
import type { StreamingChatResponseEventWithoutError } from '../common/conversation_complete';
|
||||
import type { ObservabilityAIAssistantAPIClient } from './api';
|
||||
import type { ObservabilityAIAssistantChatService, ObservabilityAIAssistantService } from './types';
|
||||
|
@ -30,13 +30,7 @@ export const createStorybookChatService = (): ObservabilityAIAssistantChatServic
|
|||
),
|
||||
hasFunction: () => true,
|
||||
hasRenderFunction: () => true,
|
||||
getSystemMessage: () => ({
|
||||
'@timestamp': new Date().toISOString(),
|
||||
message: {
|
||||
role: MessageRole.System,
|
||||
content: 'System',
|
||||
},
|
||||
}),
|
||||
getSystemMessage: () => 'System',
|
||||
functions$: new BehaviorSubject<FunctionDefinition[]>(
|
||||
[]
|
||||
) as ObservabilityAIAssistantChatService['functions$'],
|
||||
|
|
|
@ -49,6 +49,7 @@ export interface ObservabilityAIAssistantChatService {
|
|||
chat: (
|
||||
name: string,
|
||||
options: {
|
||||
systemMessage: string;
|
||||
messages: Message[];
|
||||
connectorId: string;
|
||||
functions?: Array<Pick<FunctionDefinition, 'name' | 'description' | 'parameters'>>;
|
||||
|
@ -61,6 +62,7 @@ export interface ObservabilityAIAssistantChatService {
|
|||
getScreenContexts: () => ObservabilityAIAssistantScreenContext[];
|
||||
conversationId?: string;
|
||||
connectorId: string;
|
||||
systemMessage?: string;
|
||||
messages: Message[];
|
||||
persist: boolean;
|
||||
disableFunctions:
|
||||
|
@ -79,7 +81,7 @@ export interface ObservabilityAIAssistantChatService {
|
|||
}) => FunctionDefinition[];
|
||||
functions$: BehaviorSubject<FunctionDefinition[]>;
|
||||
hasFunction: (name: string) => boolean;
|
||||
getSystemMessage: () => Message;
|
||||
getSystemMessage: () => string;
|
||||
hasRenderFunction: (name: string) => boolean;
|
||||
renderFunction: (
|
||||
name: string,
|
||||
|
|
|
@ -100,20 +100,14 @@ export async function getRelevantFieldNames({
|
|||
await chat('get_relevant_dataset_names', {
|
||||
signal,
|
||||
stream: true,
|
||||
messages: [
|
||||
{
|
||||
'@timestamp': new Date().toISOString(),
|
||||
message: {
|
||||
role: MessageRole.System,
|
||||
content: `You are a helpful assistant for Elastic Observability.
|
||||
systemMessage: `You are a helpful assistant for Elastic Observability.
|
||||
Your task is to create a list of field names that are relevant
|
||||
to the conversation, using ONLY the list of fields and
|
||||
types provided in the last user message. DO NOT UNDER ANY
|
||||
CIRCUMSTANCES include fields not mentioned in this list.`,
|
||||
},
|
||||
},
|
||||
// remove the system message and the function request
|
||||
...messages.slice(1, -1),
|
||||
messages: [
|
||||
// remove the function request
|
||||
...messages.filter((msg) => !msg.message?.function_call),
|
||||
{
|
||||
'@timestamp': new Date().toISOString(),
|
||||
message: {
|
||||
|
|
|
@ -135,6 +135,7 @@ const chatRoute = createObservabilityAIAssistantServerRoute({
|
|||
body: t.intersection([
|
||||
t.type({
|
||||
name: t.string,
|
||||
systemMessage: t.string,
|
||||
messages: t.array(messageRt),
|
||||
connectorId: t.string,
|
||||
functions: t.array(functionRt),
|
||||
|
@ -149,7 +150,7 @@ const chatRoute = createObservabilityAIAssistantServerRoute({
|
|||
const { params } = resources;
|
||||
|
||||
const {
|
||||
body: { name, messages, connectorId, functions, functionCall },
|
||||
body: { name, systemMessage, messages, connectorId, functions, functionCall },
|
||||
} = params;
|
||||
|
||||
const { client, simulateFunctionCalling, signal, isCloudEnabled } = await initializeChatRequest(
|
||||
|
@ -158,6 +159,7 @@ const chatRoute = createObservabilityAIAssistantServerRoute({
|
|||
|
||||
const response$ = client.chat(name, {
|
||||
stream: true,
|
||||
systemMessage,
|
||||
messages,
|
||||
connectorId,
|
||||
signal,
|
||||
|
|
|
@ -6,10 +6,23 @@
|
|||
*/
|
||||
import { notImplemented } from '@hapi/boom';
|
||||
import * as t from 'io-ts';
|
||||
import { Conversation } from '../../../common/types';
|
||||
import { Conversation, MessageRole } from '../../../common/types';
|
||||
import { createObservabilityAIAssistantServerRoute } from '../create_observability_ai_assistant_server_route';
|
||||
import { conversationCreateRt, conversationUpdateRt } from '../runtime_types';
|
||||
|
||||
// backwards compatibility for messages with system role
|
||||
const getConversationWithoutSystemMessages = (conversation: Conversation) => {
|
||||
if (!conversation.systemMessage) {
|
||||
conversation.systemMessage =
|
||||
conversation.messages.find((message) => message.message.role === 'system')?.message
|
||||
?.content ?? '';
|
||||
}
|
||||
conversation.messages = conversation.messages.filter(
|
||||
(message) => message.message.role !== MessageRole.System
|
||||
);
|
||||
return conversation;
|
||||
};
|
||||
|
||||
const getConversationRoute = createObservabilityAIAssistantServerRoute({
|
||||
endpoint: 'GET /internal/observability_ai_assistant/conversation/{conversationId}',
|
||||
params: t.type({
|
||||
|
@ -31,7 +44,9 @@ const getConversationRoute = createObservabilityAIAssistantServerRoute({
|
|||
throw notImplemented();
|
||||
}
|
||||
|
||||
return client.get(params.path.conversationId);
|
||||
const conversation = await client.get(params.path.conversationId);
|
||||
// conversation without system messages
|
||||
return getConversationWithoutSystemMessages(conversation);
|
||||
},
|
||||
});
|
||||
|
||||
|
@ -56,7 +71,12 @@ const findConversationsRoute = createObservabilityAIAssistantServerRoute({
|
|||
throw notImplemented();
|
||||
}
|
||||
|
||||
return client.find({ query: params?.body?.query });
|
||||
const conversations = await client.find({ query: params?.body?.query });
|
||||
|
||||
return {
|
||||
// conversations without system messages
|
||||
conversations: conversations.map(getConversationWithoutSystemMessages),
|
||||
};
|
||||
},
|
||||
});
|
||||
|
||||
|
|
|
@ -57,21 +57,26 @@ const tokenCountRt = t.type({
|
|||
total: t.number,
|
||||
});
|
||||
|
||||
export const baseConversationRt: t.Type<ConversationRequestBase> = t.type({
|
||||
'@timestamp': t.string,
|
||||
conversation: t.intersection([
|
||||
t.type({
|
||||
title: t.string,
|
||||
}),
|
||||
t.partial({
|
||||
token_count: tokenCountRt,
|
||||
}),
|
||||
]),
|
||||
messages: t.array(messageRt),
|
||||
labels: t.record(t.string, t.string),
|
||||
numeric_labels: t.record(t.string, t.number),
|
||||
public: toBooleanRt,
|
||||
});
|
||||
export const baseConversationRt: t.Type<ConversationRequestBase> = t.intersection([
|
||||
t.type({
|
||||
'@timestamp': t.string,
|
||||
conversation: t.intersection([
|
||||
t.type({
|
||||
title: t.string,
|
||||
}),
|
||||
t.partial({
|
||||
token_count: tokenCountRt,
|
||||
}),
|
||||
]),
|
||||
messages: t.array(messageRt),
|
||||
labels: t.record(t.string, t.string),
|
||||
numeric_labels: t.record(t.string, t.number),
|
||||
public: toBooleanRt,
|
||||
}),
|
||||
t.partial({
|
||||
systemMessage: t.string,
|
||||
}),
|
||||
]);
|
||||
|
||||
export const assistantScopeType = t.union([
|
||||
t.literal('observability'),
|
||||
|
|
|
@ -44,7 +44,7 @@ interface ChunkDelta {
|
|||
|
||||
type LlmSimulator = ReturnType<typeof createLlmSimulator>;
|
||||
|
||||
const EXPECTED_STORED_SYSTEM_MESSAGE = `system`;
|
||||
const EXPECTED_STORED_SYSTEM_MESSAGE = `this is a system message`;
|
||||
|
||||
const nextTick = () => {
|
||||
return new Promise(process.nextTick);
|
||||
|
@ -185,7 +185,7 @@ describe('Observability AI Assistant client', () => {
|
|||
|
||||
knowledgeBaseServiceMock.getUserInstructions.mockResolvedValue([]);
|
||||
|
||||
functionClientMock.getInstructions.mockReturnValue(['system']);
|
||||
functionClientMock.getInstructions.mockReturnValue([EXPECTED_STORED_SYSTEM_MESSAGE]);
|
||||
functionClientMock.getAdhocInstructions.mockReturnValue([]);
|
||||
|
||||
return new ObservabilityAIAssistantClient({
|
||||
|
@ -208,18 +208,6 @@ describe('Observability AI Assistant client', () => {
|
|||
});
|
||||
}
|
||||
|
||||
function system(content: string | Omit<Message['message'], 'role'>): Message {
|
||||
return merge(
|
||||
{
|
||||
'@timestamp': new Date().toString(),
|
||||
message: {
|
||||
role: MessageRole.System,
|
||||
},
|
||||
},
|
||||
typeof content === 'string' ? { message: { content } } : content
|
||||
);
|
||||
}
|
||||
|
||||
function user(content: string | Omit<Message['message'], 'role'>): Message {
|
||||
return merge(
|
||||
{
|
||||
|
@ -286,7 +274,7 @@ describe('Observability AI Assistant client', () => {
|
|||
stream = observableIntoStream(
|
||||
client.complete({
|
||||
connectorId: 'foo',
|
||||
messages: [system('This is a system message'), user('How many alerts do I have?')],
|
||||
messages: [user('How many alerts do I have?')],
|
||||
functionClient: functionClientMock,
|
||||
signal: new AbortController().signal,
|
||||
persist: true,
|
||||
|
@ -313,6 +301,8 @@ describe('Observability AI Assistant client', () => {
|
|||
expect.objectContaining({
|
||||
connectorId: 'foo',
|
||||
stream: false,
|
||||
system:
|
||||
'You are a helpful assistant for Elastic Observability. Assume the following message is the start of a conversation between you and a user; give this conversation a title based on the content below. DO NOT UNDER ANY CIRCUMSTANCES wrap this title in single or double quotes. This title is shown in a list of conversations to the user, so title it for the user, not for you.',
|
||||
functionCalling: 'auto',
|
||||
toolChoice: expect.objectContaining({
|
||||
function: 'title_conversation',
|
||||
|
@ -498,14 +488,8 @@ describe('Observability AI Assistant client', () => {
|
|||
user: {
|
||||
name: 'johndoe',
|
||||
},
|
||||
systemMessage: EXPECTED_STORED_SYSTEM_MESSAGE,
|
||||
messages: [
|
||||
{
|
||||
'@timestamp': expect.any(String),
|
||||
message: {
|
||||
content: EXPECTED_STORED_SYSTEM_MESSAGE,
|
||||
role: MessageRole.System,
|
||||
},
|
||||
},
|
||||
{
|
||||
'@timestamp': expect.any(String),
|
||||
message: {
|
||||
|
@ -568,10 +552,7 @@ describe('Observability AI Assistant client', () => {
|
|||
labels: {},
|
||||
numeric_labels: {},
|
||||
public: false,
|
||||
messages: [
|
||||
system('This is a system message'),
|
||||
user('How many alerts do I have?'),
|
||||
],
|
||||
messages: [user('How many alerts do I have?')],
|
||||
},
|
||||
},
|
||||
],
|
||||
|
@ -586,7 +567,7 @@ describe('Observability AI Assistant client', () => {
|
|||
stream = observableIntoStream(
|
||||
await client.complete({
|
||||
connectorId: 'foo',
|
||||
messages: [system('This is a system message'), user('How many alerts do I have?')],
|
||||
messages: [user('How many alerts do I have?')],
|
||||
functionClient: functionClientMock,
|
||||
signal: new AbortController().signal,
|
||||
conversationId: 'my-conversation-id',
|
||||
|
@ -646,14 +627,8 @@ describe('Observability AI Assistant client', () => {
|
|||
user: {
|
||||
name: 'johndoe',
|
||||
},
|
||||
systemMessage: EXPECTED_STORED_SYSTEM_MESSAGE,
|
||||
messages: [
|
||||
{
|
||||
'@timestamp': expect.any(String),
|
||||
message: {
|
||||
content: EXPECTED_STORED_SYSTEM_MESSAGE,
|
||||
role: MessageRole.System,
|
||||
},
|
||||
},
|
||||
{
|
||||
'@timestamp': expect.any(String),
|
||||
message: {
|
||||
|
@ -695,7 +670,7 @@ describe('Observability AI Assistant client', () => {
|
|||
stream = observableIntoStream(
|
||||
await client.complete({
|
||||
connectorId: 'foo',
|
||||
messages: [system('This is a system message'), user('How many alerts do I have?')],
|
||||
messages: [user('How many alerts do I have?')],
|
||||
functionClient: functionClientMock,
|
||||
signal: new AbortController().signal,
|
||||
title: 'My predefined title',
|
||||
|
@ -785,7 +760,7 @@ describe('Observability AI Assistant client', () => {
|
|||
stream = observableIntoStream(
|
||||
await client.complete({
|
||||
connectorId: 'foo',
|
||||
messages: [system('This is a system message'), user('How many alerts do I have?')],
|
||||
messages: [user('How many alerts do I have?')],
|
||||
functionClient: functionClientMock,
|
||||
signal: new AbortController().signal,
|
||||
title: 'My predefined title',
|
||||
|
@ -846,13 +821,6 @@ describe('Observability AI Assistant client', () => {
|
|||
signal: expect.any(AbortSignal),
|
||||
connectorId: 'foo',
|
||||
messages: [
|
||||
{
|
||||
'@timestamp': expect.any(String),
|
||||
message: {
|
||||
role: MessageRole.System,
|
||||
content: EXPECTED_STORED_SYSTEM_MESSAGE,
|
||||
},
|
||||
},
|
||||
{
|
||||
'@timestamp': expect.any(String),
|
||||
message: {
|
||||
|
@ -985,13 +953,6 @@ describe('Observability AI Assistant client', () => {
|
|||
expect(
|
||||
(internalUserEsClientMock.index.mock.lastCall![0] as any).document.messages
|
||||
).toEqual([
|
||||
{
|
||||
'@timestamp': expect.any(String),
|
||||
message: {
|
||||
content: EXPECTED_STORED_SYSTEM_MESSAGE,
|
||||
role: MessageRole.System,
|
||||
},
|
||||
},
|
||||
{
|
||||
'@timestamp': expect.any(String),
|
||||
message: {
|
||||
|
@ -1224,7 +1185,7 @@ describe('Observability AI Assistant client', () => {
|
|||
stream = observableIntoStream(
|
||||
await client.complete({
|
||||
connectorId: 'foo',
|
||||
messages: [system('This is a system message'), user('How many alerts do I have?')],
|
||||
messages: [user('How many alerts do I have?')],
|
||||
functionClient: functionClientMock,
|
||||
signal: new AbortController().signal,
|
||||
persist: false,
|
||||
|
@ -1349,7 +1310,7 @@ describe('Observability AI Assistant client', () => {
|
|||
stream = observableIntoStream(
|
||||
await client.complete({
|
||||
connectorId: 'foo',
|
||||
messages: [system('This is a system message'), user('How many alerts do I have?')],
|
||||
messages: [user('How many alerts do I have?')],
|
||||
functionClient: functionClientMock,
|
||||
signal: new AbortController().signal,
|
||||
title: 'My predefined title',
|
||||
|
@ -1427,7 +1388,7 @@ describe('Observability AI Assistant client', () => {
|
|||
const stream = observableIntoStream(
|
||||
await client.complete({
|
||||
connectorId: 'foo',
|
||||
messages: [system('This is a system message'), user('How many alerts do I have?')],
|
||||
messages: [user('How many alerts do I have?')],
|
||||
functionClient: functionClientMock,
|
||||
signal: new AbortController().signal,
|
||||
persist: false,
|
||||
|
@ -1515,7 +1476,7 @@ describe('Observability AI Assistant client', () => {
|
|||
stream = observableIntoStream(
|
||||
await client.complete({
|
||||
connectorId: 'foo',
|
||||
messages: [system('This is a system message'), user('How many alerts do I have?')],
|
||||
messages: [user('How many alerts do I have?')],
|
||||
functionClient: functionClientMock,
|
||||
signal: new AbortController().signal,
|
||||
title: 'My predefined title',
|
||||
|
@ -1580,7 +1541,7 @@ describe('Observability AI Assistant client', () => {
|
|||
client
|
||||
.complete({
|
||||
connectorId: 'foo',
|
||||
messages: [system('This is a system message'), user('A user message to cause completion')],
|
||||
messages: [user('A user message to cause completion')],
|
||||
functionClient: functionClientMock,
|
||||
signal: new AbortController().signal,
|
||||
title: 'My predefined title',
|
||||
|
@ -1589,9 +1550,7 @@ describe('Observability AI Assistant client', () => {
|
|||
.subscribe(() => {}); // To trigger call to chat
|
||||
await nextTick();
|
||||
|
||||
expect(chatSpy.mock.calls[0][1].messages[0].message.content).toEqual(
|
||||
EXPECTED_STORED_SYSTEM_MESSAGE
|
||||
);
|
||||
expect(chatSpy.mock.calls[0][1].systemMessage).toEqual(EXPECTED_STORED_SYSTEM_MESSAGE);
|
||||
});
|
||||
|
||||
describe('when executing an action', () => {
|
||||
|
@ -1608,10 +1567,7 @@ describe('Observability AI Assistant client', () => {
|
|||
|
||||
const complete$ = await client.complete({
|
||||
connectorId: 'foo',
|
||||
messages: [
|
||||
system('This is a system message'),
|
||||
user('Can you call the my_action function?'),
|
||||
],
|
||||
messages: [user('Can you call the my_action function?')],
|
||||
functionClient: new ChatFunctionClient([
|
||||
{
|
||||
actions: [
|
||||
|
|
|
@ -14,7 +14,6 @@ import { context } from '@opentelemetry/api';
|
|||
import { last, merge, omit } from 'lodash';
|
||||
import {
|
||||
catchError,
|
||||
combineLatest,
|
||||
defer,
|
||||
filter,
|
||||
forkJoin,
|
||||
|
@ -56,7 +55,6 @@ import {
|
|||
type Message,
|
||||
KnowledgeBaseType,
|
||||
KnowledgeBaseEntryRole,
|
||||
MessageRole,
|
||||
} from '../../../common/types';
|
||||
import { withoutTokenCountEvents } from '../../../common/utils/without_token_count_events';
|
||||
import { CONTEXT_FUNCTION_NAME } from '../../functions/context';
|
||||
|
@ -64,7 +62,6 @@ import type { ChatFunctionClient } from '../chat_function_client';
|
|||
import { KnowledgeBaseService, RecalledEntry } from '../knowledge_base_service';
|
||||
import { getAccessQuery } from '../util/get_access_query';
|
||||
import { getSystemMessageFromInstructions } from '../util/get_system_message_from_instructions';
|
||||
import { replaceSystemMessage } from '../util/replace_system_message';
|
||||
import { failOnNonExistingFunctionCall } from './operators/fail_on_non_existing_function_call';
|
||||
import { getContextFunctionRequestIfNeeded } from './get_context_function_request_if_needed';
|
||||
import { LangTracer } from './instrumentation/lang_tracer';
|
||||
|
@ -215,28 +212,6 @@ export class ObservabilityAIAssistantClient {
|
|||
const registeredAdhocInstructions = functionClient.getAdhocInstructions();
|
||||
const allAdHocInstructions = adHocInstructions.concat(registeredAdhocInstructions);
|
||||
|
||||
// from the initial messages, override any system message with
|
||||
// the one that is based on the instructions (registered, request, kb)
|
||||
const messagesWithUpdatedSystemMessage$ = userInstructions$.pipe(
|
||||
map((userInstructions) => {
|
||||
// this is what we eventually store in the conversation
|
||||
const messagesWithUpdatedSystemMessage = replaceSystemMessage(
|
||||
getSystemMessageFromInstructions({
|
||||
applicationInstructions: functionClient.getInstructions(),
|
||||
userInstructions,
|
||||
adHocInstructions: allAdHocInstructions,
|
||||
availableFunctionNames: functionClient
|
||||
.getFunctions()
|
||||
.map((fn) => fn.definition.name),
|
||||
}),
|
||||
initialMessages
|
||||
);
|
||||
|
||||
return messagesWithUpdatedSystemMessage;
|
||||
}),
|
||||
shareReplay()
|
||||
);
|
||||
|
||||
// if it is:
|
||||
// - a new conversation
|
||||
// - no predefined title is given
|
||||
|
@ -246,35 +221,39 @@ export class ObservabilityAIAssistantClient {
|
|||
const title$ =
|
||||
predefinedTitle || isConversationUpdate || !persist
|
||||
? of(predefinedTitle || '').pipe(shareReplay())
|
||||
: messagesWithUpdatedSystemMessage$.pipe(
|
||||
switchMap((messages) =>
|
||||
getGeneratedTitle({
|
||||
messages,
|
||||
logger: this.dependencies.logger,
|
||||
chat: (name, chatParams) =>
|
||||
this.chat(name, {
|
||||
...chatParams,
|
||||
simulateFunctionCalling,
|
||||
connectorId,
|
||||
signal,
|
||||
stream: false,
|
||||
}),
|
||||
tracer: completeTracer,
|
||||
})
|
||||
),
|
||||
shareReplay()
|
||||
);
|
||||
: getGeneratedTitle({
|
||||
messages: initialMessages,
|
||||
logger: this.dependencies.logger,
|
||||
chat: (name, chatParams) =>
|
||||
this.chat(name, {
|
||||
...chatParams,
|
||||
simulateFunctionCalling,
|
||||
connectorId,
|
||||
signal,
|
||||
stream: false,
|
||||
}),
|
||||
tracer: completeTracer,
|
||||
}).pipe(shareReplay());
|
||||
|
||||
const systemMessage$ = userInstructions$.pipe(
|
||||
map((userInstructions) => {
|
||||
return getSystemMessageFromInstructions({
|
||||
applicationInstructions: functionClient.getInstructions(),
|
||||
userInstructions,
|
||||
adHocInstructions: allAdHocInstructions,
|
||||
availableFunctionNames: functionClient.getFunctions().map((fn) => fn.definition.name),
|
||||
});
|
||||
}),
|
||||
shareReplay()
|
||||
);
|
||||
|
||||
// we continue the conversation here, after resolving both the materialized
|
||||
// messages and the knowledge base instructions
|
||||
const nextEvents$ = combineLatest([
|
||||
messagesWithUpdatedSystemMessage$,
|
||||
userInstructions$,
|
||||
]).pipe(
|
||||
switchMap(([messagesWithUpdatedSystemMessage, userInstructions]) => {
|
||||
const nextEvents$ = forkJoin([systemMessage$, userInstructions$]).pipe(
|
||||
switchMap(([systemMessage, userInstructions]) => {
|
||||
// if needed, inject a context function request here
|
||||
const contextRequest = functionClient.hasFunction(CONTEXT_FUNCTION_NAME)
|
||||
? getContextFunctionRequestIfNeeded(messagesWithUpdatedSystemMessage)
|
||||
? getContextFunctionRequestIfNeeded(initialMessages)
|
||||
: undefined;
|
||||
|
||||
return mergeOperator(
|
||||
|
@ -283,14 +262,12 @@ export class ObservabilityAIAssistantClient {
|
|||
// and add it to the conversation
|
||||
...(contextRequest ? [of(contextRequest)] : []),
|
||||
continueConversation({
|
||||
messages: [
|
||||
...messagesWithUpdatedSystemMessage,
|
||||
...(contextRequest ? [contextRequest.message] : []),
|
||||
],
|
||||
messages: [...initialMessages, ...(contextRequest ? [contextRequest.message] : [])],
|
||||
chat: (name, chatParams) => {
|
||||
// inject a chat function with predefined parameters
|
||||
return this.chat(name, {
|
||||
...chatParams,
|
||||
systemMessage,
|
||||
signal,
|
||||
simulateFunctionCalling,
|
||||
connectorId,
|
||||
|
@ -319,7 +296,6 @@ export class ObservabilityAIAssistantClient {
|
|||
nextEvents$,
|
||||
// wait until all dependencies have completed
|
||||
forkJoin([
|
||||
messagesWithUpdatedSystemMessage$,
|
||||
// get just the new messages
|
||||
nextEvents$.pipe(withoutTokenCountEvents(), extractMessages()),
|
||||
// count all the token count events emitted during completion
|
||||
|
@ -329,101 +305,100 @@ export class ObservabilityAIAssistantClient {
|
|||
).pipe(extractTokenCount()),
|
||||
// get just the title, and drop the token count events
|
||||
title$.pipe(filter((value): value is string => typeof value === 'string')),
|
||||
systemMessage$,
|
||||
]).pipe(
|
||||
switchMap(
|
||||
([messagesWithUpdatedSystemMessage, addedMessages, tokenCountResult, title]) => {
|
||||
const initialMessagesWithAddedMessages =
|
||||
messagesWithUpdatedSystemMessage.concat(addedMessages);
|
||||
switchMap(([addedMessages, tokenCountResult, title, systemMessage]) => {
|
||||
const initialMessagesWithAddedMessages = initialMessages.concat(addedMessages);
|
||||
|
||||
const lastMessage = last(initialMessagesWithAddedMessages);
|
||||
const lastMessage = last(initialMessagesWithAddedMessages);
|
||||
|
||||
// if a function request is at the very end, close the stream to consumer
|
||||
// without persisting or updating the conversation. we need to wait
|
||||
// on the function response to have a valid conversation
|
||||
const isFunctionRequest = !!lastMessage?.message.function_call?.name;
|
||||
// if a function request is at the very end, close the stream to consumer
|
||||
// without persisting or updating the conversation. we need to wait
|
||||
// on the function response to have a valid conversation
|
||||
const isFunctionRequest = !!lastMessage?.message.function_call?.name;
|
||||
|
||||
if (!persist || isFunctionRequest) {
|
||||
return of();
|
||||
}
|
||||
|
||||
if (isConversationUpdate) {
|
||||
return from(this.getConversationWithMetaFields(conversationId))
|
||||
.pipe(
|
||||
switchMap((conversation) => {
|
||||
if (!conversation) {
|
||||
return throwError(() => createConversationNotFoundError());
|
||||
}
|
||||
|
||||
const persistedTokenCount = conversation._source?.conversation
|
||||
.token_count ?? {
|
||||
prompt: 0,
|
||||
completion: 0,
|
||||
total: 0,
|
||||
};
|
||||
|
||||
return from(
|
||||
this.update(
|
||||
conversationId,
|
||||
|
||||
merge(
|
||||
{},
|
||||
|
||||
// base conversation without messages
|
||||
omit(conversation._source, 'messages'),
|
||||
|
||||
// update messages
|
||||
{ messages: initialMessagesWithAddedMessages },
|
||||
|
||||
// update token count
|
||||
{
|
||||
conversation: {
|
||||
title: title || conversation._source?.conversation.title,
|
||||
token_count: {
|
||||
prompt: persistedTokenCount.prompt + tokenCountResult.prompt,
|
||||
completion:
|
||||
persistedTokenCount.completion + tokenCountResult.completion,
|
||||
total: persistedTokenCount.total + tokenCountResult.total,
|
||||
},
|
||||
},
|
||||
}
|
||||
)
|
||||
)
|
||||
);
|
||||
})
|
||||
)
|
||||
.pipe(
|
||||
map((conversation): ConversationUpdateEvent => {
|
||||
return {
|
||||
conversation: conversation.conversation,
|
||||
type: StreamingChatResponseEventType.ConversationUpdate,
|
||||
};
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
return from(
|
||||
this.create({
|
||||
'@timestamp': new Date().toISOString(),
|
||||
conversation: {
|
||||
title,
|
||||
id: conversationId,
|
||||
token_count: tokenCountResult,
|
||||
},
|
||||
public: !!isPublic,
|
||||
labels: {},
|
||||
numeric_labels: {},
|
||||
messages: initialMessagesWithAddedMessages,
|
||||
})
|
||||
).pipe(
|
||||
map((conversation): ConversationCreateEvent => {
|
||||
return {
|
||||
conversation: conversation.conversation,
|
||||
type: StreamingChatResponseEventType.ConversationCreate,
|
||||
};
|
||||
})
|
||||
);
|
||||
if (!persist || isFunctionRequest) {
|
||||
return of();
|
||||
}
|
||||
)
|
||||
|
||||
if (isConversationUpdate) {
|
||||
return from(this.getConversationWithMetaFields(conversationId))
|
||||
.pipe(
|
||||
switchMap((conversation) => {
|
||||
if (!conversation) {
|
||||
return throwError(() => createConversationNotFoundError());
|
||||
}
|
||||
|
||||
const persistedTokenCount = conversation._source?.conversation
|
||||
.token_count ?? {
|
||||
prompt: 0,
|
||||
completion: 0,
|
||||
total: 0,
|
||||
};
|
||||
|
||||
return from(
|
||||
this.update(
|
||||
conversationId,
|
||||
|
||||
merge(
|
||||
{},
|
||||
|
||||
// base conversation without messages
|
||||
omit(conversation._source, 'messages'),
|
||||
|
||||
// update messages and system message
|
||||
{ messages: initialMessagesWithAddedMessages, systemMessage },
|
||||
|
||||
// update token count
|
||||
{
|
||||
conversation: {
|
||||
title: title || conversation._source?.conversation.title,
|
||||
token_count: {
|
||||
prompt: persistedTokenCount.prompt + tokenCountResult.prompt,
|
||||
completion:
|
||||
persistedTokenCount.completion + tokenCountResult.completion,
|
||||
total: persistedTokenCount.total + tokenCountResult.total,
|
||||
},
|
||||
},
|
||||
}
|
||||
)
|
||||
)
|
||||
);
|
||||
})
|
||||
)
|
||||
.pipe(
|
||||
map((conversation): ConversationUpdateEvent => {
|
||||
return {
|
||||
conversation: conversation.conversation,
|
||||
type: StreamingChatResponseEventType.ConversationUpdate,
|
||||
};
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
return from(
|
||||
this.create({
|
||||
'@timestamp': new Date().toISOString(),
|
||||
conversation: {
|
||||
title,
|
||||
id: conversationId,
|
||||
token_count: tokenCountResult,
|
||||
},
|
||||
public: !!isPublic,
|
||||
labels: {},
|
||||
numeric_labels: {},
|
||||
systemMessage,
|
||||
messages: initialMessagesWithAddedMessages,
|
||||
})
|
||||
).pipe(
|
||||
map((conversation): ConversationCreateEvent => {
|
||||
return {
|
||||
conversation: conversation.conversation,
|
||||
type: StreamingChatResponseEventType.ConversationCreate,
|
||||
};
|
||||
})
|
||||
);
|
||||
})
|
||||
)
|
||||
);
|
||||
|
||||
|
@ -466,6 +441,7 @@ export class ObservabilityAIAssistantClient {
|
|||
chat<TStream extends boolean>(
|
||||
name: string,
|
||||
{
|
||||
systemMessage,
|
||||
messages,
|
||||
connectorId,
|
||||
functions,
|
||||
|
@ -475,6 +451,7 @@ export class ObservabilityAIAssistantClient {
|
|||
tracer,
|
||||
stream,
|
||||
}: {
|
||||
systemMessage?: string;
|
||||
messages: Message[];
|
||||
connectorId: string;
|
||||
functions?: Array<{ name: string; description: string; parameters?: CompatibleJSONSchema }>;
|
||||
|
@ -508,11 +485,8 @@ export class ObservabilityAIAssistantClient {
|
|||
|
||||
const options = {
|
||||
connectorId,
|
||||
system: messages.find((message) => message.message.role === MessageRole.System)?.message
|
||||
.content,
|
||||
messages: convertMessagesForInference(
|
||||
messages.filter((message) => message.message.role !== MessageRole.System)
|
||||
),
|
||||
system: systemMessage,
|
||||
messages: convertMessagesForInference(messages),
|
||||
toolChoice,
|
||||
tools,
|
||||
functionCalling: (simulateFunctionCalling ? 'simulated' : 'auto') as FunctionCallingMode,
|
||||
|
@ -548,7 +522,7 @@ export class ObservabilityAIAssistantClient {
|
|||
}
|
||||
}
|
||||
|
||||
find = async (options?: { query?: string }): Promise<{ conversations: Conversation[] }> => {
|
||||
find = async (options?: { query?: string }): Promise<Conversation[]> => {
|
||||
const response = await this.dependencies.esClient.asInternalUser.search<Conversation>({
|
||||
index: resourceNames.aliases.conversations,
|
||||
allow_no_indices: true,
|
||||
|
@ -568,9 +542,7 @@ export class ObservabilityAIAssistantClient {
|
|||
size: 100,
|
||||
});
|
||||
|
||||
return {
|
||||
conversations: response.hits.hits.map((hit) => hit._source!),
|
||||
};
|
||||
return response.hits.hits.map((hit) => hit._source!);
|
||||
};
|
||||
|
||||
update = async (
|
||||
|
|
|
@ -35,8 +35,6 @@ import { withoutTokenCountEvents } from '../../../../common/utils/without_token_
|
|||
import type { ChatFunctionClient } from '../../chat_function_client';
|
||||
import type { AutoAbortedChatFunction } from '../../types';
|
||||
import { createServerSideFunctionResponseError } from '../../util/create_server_side_function_response_error';
|
||||
import { getSystemMessageFromInstructions } from '../../util/get_system_message_from_instructions';
|
||||
import { replaceSystemMessage } from '../../util/replace_system_message';
|
||||
import { LangTracer } from '../instrumentation/lang_tracer';
|
||||
import { catchFunctionNotFoundError } from './catch_function_not_found_error';
|
||||
import { extractMessages } from './extract_messages';
|
||||
|
@ -213,20 +211,7 @@ export function continueConversation({
|
|||
disableFunctions,
|
||||
});
|
||||
|
||||
const registeredAdhocInstructions = functionClient.getAdhocInstructions();
|
||||
const allAdHocInstructions = adHocInstructions.concat(registeredAdhocInstructions);
|
||||
|
||||
const messagesWithUpdatedSystemMessage = replaceSystemMessage(
|
||||
getSystemMessageFromInstructions({
|
||||
applicationInstructions: functionClient.getInstructions(),
|
||||
userInstructions,
|
||||
adHocInstructions: allAdHocInstructions,
|
||||
availableFunctionNames: definitions.map((def) => def.name),
|
||||
}),
|
||||
initialMessages
|
||||
);
|
||||
|
||||
const lastMessage = last(messagesWithUpdatedSystemMessage)?.message;
|
||||
const lastMessage = last(initialMessages)?.message;
|
||||
const isUserMessage = lastMessage?.role === MessageRole.User;
|
||||
|
||||
return executeNextStep().pipe(handleEvents());
|
||||
|
@ -239,7 +224,7 @@ export function continueConversation({
|
|||
: 'user_message';
|
||||
|
||||
return chat(operationName, {
|
||||
messages: messagesWithUpdatedSystemMessage,
|
||||
messages: initialMessages,
|
||||
functions: definitions,
|
||||
tracer,
|
||||
connectorId,
|
||||
|
@ -314,7 +299,7 @@ export function continueConversation({
|
|||
args: lastMessage.function_call!.arguments,
|
||||
chat,
|
||||
functionClient,
|
||||
messages: messagesWithUpdatedSystemMessage,
|
||||
messages: initialMessages,
|
||||
signal,
|
||||
logger,
|
||||
tracer,
|
||||
|
@ -337,7 +322,7 @@ export function continueConversation({
|
|||
return EMPTY;
|
||||
}
|
||||
return continueConversation({
|
||||
messages: messagesWithUpdatedSystemMessage.concat(extractedMessages),
|
||||
messages: initialMessages.concat(extractedMessages),
|
||||
chat,
|
||||
functionCallsLeft: nextFunctionCallsLeft,
|
||||
functionClient,
|
||||
|
|
|
@ -92,10 +92,9 @@ describe('getGeneratedTitle', () => {
|
|||
await lastValueFrom(title$);
|
||||
|
||||
const [name, params] = chatSpy.mock.calls[0];
|
||||
|
||||
expect(name).toEqual('generate_title');
|
||||
expect(params.messages.length).toBe(2);
|
||||
expect(params.messages[1].message.content).toContain('A message');
|
||||
expect(params.messages.length).toBe(1);
|
||||
expect(params.messages[0].message.content).toContain('A message');
|
||||
});
|
||||
|
||||
it('strips quotes from the title', async () => {
|
||||
|
|
|
@ -36,23 +36,16 @@ export function getGeneratedTitle({
|
|||
}): Observable<string | TokenCountEvent> {
|
||||
return from(
|
||||
chat('generate_title', {
|
||||
systemMessage:
|
||||
'You are a helpful assistant for Elastic Observability. Assume the following message is the start of a conversation between you and a user; give this conversation a title based on the content below. DO NOT UNDER ANY CIRCUMSTANCES wrap this title in single or double quotes. This title is shown in a list of conversations to the user, so title it for the user, not for you.',
|
||||
messages: [
|
||||
{
|
||||
'@timestamp': new Date().toString(),
|
||||
message: {
|
||||
role: MessageRole.System,
|
||||
content: `You are a helpful assistant for Elastic Observability. Assume the following message is the start of a conversation between you and a user; give this conversation a title based on the content below. DO NOT UNDER ANY CIRCUMSTANCES wrap this title in single or double quotes. This title is shown in a list of conversations to the user, so title it for the user, not for you.`,
|
||||
},
|
||||
},
|
||||
{
|
||||
'@timestamp': new Date().toISOString(),
|
||||
message: {
|
||||
role: MessageRole.User,
|
||||
content: messages
|
||||
.filter((msg) => msg.message.role !== MessageRole.System)
|
||||
.reduce((acc, curr) => {
|
||||
return `${acc} ${curr.message.role}: ${curr.message.content}`;
|
||||
}, 'Generate a title, using the title_conversation_function, based on the following conversation:\n\n'),
|
||||
content: messages.reduce((acc, curr) => {
|
||||
return `${acc} ${curr.message.role}: ${curr.message.content}`;
|
||||
}, 'Generate a title, using the title_conversation_function, based on the following conversation:\n\n'),
|
||||
},
|
||||
},
|
||||
],
|
||||
|
|
|
@ -1,21 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { Message, MessageRole } from '../../../common';
|
||||
|
||||
export function replaceSystemMessage(systemMessage: string, messages: Message[]): Message[] {
|
||||
return [
|
||||
{
|
||||
'@timestamp': new Date().toISOString(),
|
||||
message: {
|
||||
role: MessageRole.System,
|
||||
content: systemMessage,
|
||||
},
|
||||
},
|
||||
...messages.filter((msg) => msg.message.role !== MessageRole.System),
|
||||
];
|
||||
}
|
|
@ -73,7 +73,7 @@ type CompleteFunction = (params: CompleteFunctionParams) => Promise<{
|
|||
}>;
|
||||
|
||||
export interface ChatClient {
|
||||
chat: (message: StringOrMessageList) => Promise<InnerMessage>;
|
||||
chat: (message: StringOrMessageList, system: string) => Promise<InnerMessage>;
|
||||
complete: CompleteFunction;
|
||||
evaluate: (
|
||||
{}: { conversationId?: string; messages: InnerMessage[]; errors: ChatCompletionErrorEvent[] },
|
||||
|
@ -349,11 +349,13 @@ export class KibanaClient {
|
|||
async function chat(
|
||||
name: string,
|
||||
{
|
||||
systemMessage,
|
||||
messages,
|
||||
functions,
|
||||
functionCall,
|
||||
connectorIdOverride,
|
||||
}: {
|
||||
systemMessage: string;
|
||||
messages: Message[];
|
||||
functions: FunctionDefinition[];
|
||||
functionCall?: string;
|
||||
|
@ -367,6 +369,7 @@ export class KibanaClient {
|
|||
const params: ObservabilityAIAssistantAPIClientRequestParamsOf<'POST /internal/observability_ai_assistant/chat'>['params']['body'] =
|
||||
{
|
||||
name,
|
||||
systemMessage,
|
||||
messages,
|
||||
connectorId: connectorIdOverride || connectorId,
|
||||
functions: functions.map((fn) => pick(fn, 'name', 'description', 'parameters')),
|
||||
|
@ -403,14 +406,14 @@ export class KibanaClient {
|
|||
const results: EvaluationResult[] = [];
|
||||
|
||||
return {
|
||||
chat: async (message) => {
|
||||
chat: async (message, systemMessage) => {
|
||||
const messages = [
|
||||
...this.getMessages(message).map((msg) => ({
|
||||
message: msg,
|
||||
'@timestamp': new Date().toISOString(),
|
||||
})),
|
||||
];
|
||||
return chat('chat', { messages, functions: [] });
|
||||
return chat('chat', { systemMessage, messages, functions: [] });
|
||||
},
|
||||
complete: async ({
|
||||
messages: messagesArg,
|
||||
|
@ -515,20 +518,14 @@ export class KibanaClient {
|
|||
evaluate: async ({ messages, conversationId, errors }, criteria) => {
|
||||
const message = await chat('evaluate', {
|
||||
connectorIdOverride: evaluationConnectorId,
|
||||
messages: [
|
||||
{
|
||||
'@timestamp': new Date().toISOString(),
|
||||
message: {
|
||||
role: MessageRole.System,
|
||||
content: `You are a critical assistant for evaluating conversations with the Elastic Observability AI Assistant,
|
||||
systemMessage: `You are a critical assistant for evaluating conversations with the Elastic Observability AI Assistant,
|
||||
which helps our users make sense of their Observability data.
|
||||
|
||||
Your goal is to verify whether a conversation between the user and the assistant matches the given criteria.
|
||||
|
||||
For each criterion, calculate a score. Explain your score, by describing what the assistant did right, and describing and quoting what the
|
||||
assistant did wrong, where it could improve, and what the root cause was in case of a failure.`,
|
||||
},
|
||||
},
|
||||
messages: [
|
||||
{
|
||||
'@timestamp': new Date().toString(),
|
||||
message: {
|
||||
|
|
|
@ -21,13 +21,6 @@ import { MessageRole } from '@kbn/observability-ai-assistant-plugin/public';
|
|||
import { AlertDetailsContextualInsightsService } from '@kbn/observability-plugin/server/services';
|
||||
|
||||
const buildConversation = (contentMessage: string) => [
|
||||
{
|
||||
'@timestamp': expect.any(String),
|
||||
message: {
|
||||
role: MessageRole.System,
|
||||
content: '',
|
||||
},
|
||||
},
|
||||
{
|
||||
'@timestamp': expect.any(String),
|
||||
message: {
|
||||
|
|
|
@ -34,7 +34,6 @@ import {
|
|||
import { concatenateChatCompletionChunks } from '@kbn/observability-ai-assistant-plugin/common/utils/concatenate_chat_completion_chunks';
|
||||
import { CompatibleJSONSchema } from '@kbn/observability-ai-assistant-plugin/common/functions/types';
|
||||
import { AlertDetailsContextualInsightsService } from '@kbn/observability-plugin/server/services';
|
||||
import { getSystemMessageFromInstructions } from '@kbn/observability-ai-assistant-plugin/server/service/util/get_system_message_from_instructions';
|
||||
import { AdHocInstruction } from '@kbn/observability-ai-assistant-plugin/common/types';
|
||||
import { EXECUTE_CONNECTOR_FUNCTION_NAME } from '@kbn/observability-ai-assistant-plugin/server/functions/execute_connector';
|
||||
import { ObservabilityAIAssistantClient } from '@kbn/observability-ai-assistant-plugin/server';
|
||||
|
@ -315,18 +314,6 @@ If available, include the link of the conversation at the end of your answer.`
|
|||
kibanaPublicUrl: (await resources.plugins.core.start()).http.basePath.publicBaseUrl,
|
||||
instructions: [backgroundInstruction],
|
||||
messages: [
|
||||
{
|
||||
'@timestamp': new Date().toISOString(),
|
||||
message: {
|
||||
role: MessageRole.System,
|
||||
content: getSystemMessageFromInstructions({
|
||||
availableFunctionNames: functionClient.getFunctions().map((fn) => fn.definition.name),
|
||||
applicationInstructions: functionClient.getInstructions(),
|
||||
userInstructions: [],
|
||||
adHocInstructions: functionClient.getAdhocInstructions(),
|
||||
}),
|
||||
},
|
||||
},
|
||||
{
|
||||
'@timestamp': new Date().toISOString(),
|
||||
message: {
|
||||
|
|
|
@ -20,13 +20,6 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
|
|||
const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi');
|
||||
|
||||
const messages: Message[] = [
|
||||
{
|
||||
'@timestamp': new Date().toISOString(),
|
||||
message: {
|
||||
role: MessageRole.System,
|
||||
content: 'You are a helpful assistant',
|
||||
},
|
||||
},
|
||||
{
|
||||
'@timestamp': new Date().toISOString(),
|
||||
message: {
|
||||
|
@ -63,6 +56,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
|
|||
params: {
|
||||
body: {
|
||||
name: 'my_api_call',
|
||||
systemMessage: 'You are a helpful assistant',
|
||||
messages,
|
||||
connectorId: 'does not exist',
|
||||
functions: [],
|
||||
|
@ -98,6 +92,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
|
|||
.on('error', reject)
|
||||
.send({
|
||||
name: 'my_api_call',
|
||||
systemMessage: 'You are a helpful assistant',
|
||||
messages,
|
||||
connectorId,
|
||||
functions: [],
|
||||
|
@ -154,6 +149,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
|
|||
params: {
|
||||
body: {
|
||||
name: 'my_api_call',
|
||||
systemMessage: 'You are a helpful assistant',
|
||||
messages,
|
||||
connectorId,
|
||||
functions: [],
|
||||
|
|
|
@ -36,13 +36,6 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
|
|||
const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi');
|
||||
|
||||
const messages: Message[] = [
|
||||
{
|
||||
'@timestamp': new Date().toISOString(),
|
||||
message: {
|
||||
role: MessageRole.System,
|
||||
content: 'You are a helpful assistant',
|
||||
},
|
||||
},
|
||||
{
|
||||
'@timestamp': new Date().toISOString(),
|
||||
message: {
|
||||
|
|
|
@ -25,6 +25,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
|
|||
},
|
||||
labels: {},
|
||||
numeric_labels: {},
|
||||
systemMessage: 'this is a system message',
|
||||
messages: [
|
||||
{
|
||||
'@timestamp': new Date().toISOString(),
|
||||
|
@ -135,6 +136,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
|
|||
},
|
||||
labels: conversationCreate.labels,
|
||||
numeric_labels: conversationCreate.numeric_labels,
|
||||
systemMessage: conversationCreate.systemMessage,
|
||||
messages: conversationCreate.messages,
|
||||
namespace: 'default',
|
||||
public: conversationCreate.public,
|
||||
|
|
|
@ -286,13 +286,6 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
|
|||
.completeAfterIntercept();
|
||||
|
||||
const messages: Message[] = [
|
||||
{
|
||||
'@timestamp': new Date().toISOString(),
|
||||
message: {
|
||||
role: MessageRole.System,
|
||||
content: 'You are a helpful assistant',
|
||||
},
|
||||
},
|
||||
{
|
||||
'@timestamp': new Date().toISOString(),
|
||||
message: {
|
||||
|
@ -353,10 +346,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
|
|||
|
||||
it('adds the instruction to the system prompt', async () => {
|
||||
const conversation = await getConversationForUser('editor');
|
||||
const systemMessage = conversation.messages.find(
|
||||
(message) => message.message.role === MessageRole.System
|
||||
)!;
|
||||
expect(systemMessage.message.content).to.contain(userInstructionText);
|
||||
expect(conversation.systemMessage).to.contain(userInstructionText);
|
||||
});
|
||||
|
||||
it('does not add the instruction to the context', async () => {
|
||||
|
@ -375,12 +365,9 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
|
|||
|
||||
it('does not add the instruction conversation for other users', async () => {
|
||||
const conversation = await getConversationForUser('admin');
|
||||
const systemMessage = conversation.messages.find(
|
||||
(message) => message.message.role === MessageRole.System
|
||||
)!;
|
||||
|
||||
expect(systemMessage.message.content).to.not.contain(userInstructionText);
|
||||
expect(conversation.messages.length).to.be(5);
|
||||
expect(conversation.systemMessage).to.not.contain(userInstructionText);
|
||||
expect(conversation.messages.length).to.be(4);
|
||||
});
|
||||
});
|
||||
|
||||
|
|
|
@ -27,13 +27,6 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
|
|||
const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi');
|
||||
|
||||
const messages: Message[] = [
|
||||
{
|
||||
'@timestamp': new Date().toISOString(),
|
||||
message: {
|
||||
role: MessageRole.System,
|
||||
content: 'You are a helpful assistant',
|
||||
},
|
||||
},
|
||||
{
|
||||
'@timestamp': new Date().toISOString(),
|
||||
message: {
|
||||
|
|
|
@ -93,14 +93,6 @@ export default function ApiTest({ getService, getPageObjects }: FtrProviderConte
|
|||
body: {
|
||||
conversation: {
|
||||
messages: [
|
||||
{
|
||||
'@timestamp': '2024-04-18T14:28:50.118Z',
|
||||
message: {
|
||||
role: MessageRole.System,
|
||||
content:
|
||||
'You are a helpful assistant for Elastic Observability. Your goal is to help the Elastic Observability users to quickly assess what is happening in their observed systems. You can help them visualise and analyze data, investigate their systems, perform root cause analysis or identify optimisation opportunities.\n\nIt\'s very important to not assume what the user is meaning. Ask them for clarification if needed.\n\nIf you are unsure about which function should be used and with what arguments, ask the user for clarification or confirmation.\n\nIn KQL ("kqlFilter")) escaping happens with double quotes, not single quotes. Some characters that need escaping are: \':()\\ /". Always put a field value in double quotes. Best: service.name:"opbeans-go". Wrong: service.name:opbeans-go. This is very important!\n\nYou can use Github-flavored Markdown in your responses. If a function returns an array, consider using a Markdown table to format the response.\n\nNote that ES|QL (the Elasticsearch Query Language which is a new piped language) is the preferred query language.\n\nYou MUST use the "query" function when the user wants to:\n- visualize data\n- run any arbitrary query\n- breakdown or filter ES|QL queries that are displayed on the current page\n- convert queries from another language to ES|QL\n- asks general questions about ES|QL\n\nDO NOT UNDER ANY CIRCUMSTANCES generate ES|QL queries or explain anything about the ES|QL query language yourself.\nDO NOT UNDER ANY CIRCUMSTANCES try to correct an ES|QL query yourself - always use the "query" function for this.\n\nDO NOT UNDER ANY CIRCUMSTANCES USE ES|QL syntax (`service.name == "foo"`) with "kqlFilter" (`service.name:"foo"`).\n\nEven if the "context" function was used before that, follow it up with the "query" function. If a query fails, do not attempt to correct it yourself. 
Again you should call the "query" function,\neven if it has been called before.\n\nWhen the "visualize_query" function has been called, a visualization has been displayed to the user. DO NOT UNDER ANY CIRCUMSTANCES follow up a "visualize_query" function call with your own visualization attempt.\nIf the "execute_query" function has been called, summarize these results for the user. The user does not see a visualization in this case.\n\nYou MUST use the get_dataset_info function function before calling the "query" or "changes" function.\n\nIf a function requires an index, you MUST use the results from the dataset info functions.\n\n\n\nThe user is able to change the language which they want you to reply in on the settings page of the AI Assistant for Observability, which can be found in the Stack Management app under the option AI Assistants.\nIf the user asks how to change the language, reply in the same language the user asked in.You do not have a working memory. If the user expects you to remember the previous conversations, tell them they can set up the knowledge base.',
|
||||
},
|
||||
},
|
||||
{
|
||||
'@timestamp': '2024-04-18T14:29:01.615Z',
|
||||
message: {
|
||||
|
@ -308,19 +300,17 @@ export default function ApiTest({ getService, getPageObjects }: FtrProviderConte
|
|||
|
||||
expect(response.body.conversations[0].conversation.title).to.be('My title');
|
||||
|
||||
const { messages } = response.body.conversations[0];
|
||||
const { messages, systemMessage } = response.body.conversations[0];
|
||||
|
||||
expect(messages.length).to.eql(5);
|
||||
expect(messages.length).to.eql(4);
|
||||
|
||||
const [
|
||||
systemMessage,
|
||||
firstUserMessage,
|
||||
contextRequest,
|
||||
contextResponse,
|
||||
assistantResponse,
|
||||
] = messages.map((msg) => msg.message);
|
||||
const [firstUserMessage, contextRequest, contextResponse, assistantResponse] =
|
||||
messages.map((msg) => msg.message);
|
||||
|
||||
expect(systemMessage.role).to.eql('system');
|
||||
const systemMessageContent =
|
||||
'You are a helpful assistant for Elastic Observability. Your goal is to help the Elastic Observability users to quickly assess what is happening in their observed systems. You can help them visualise and analyze data, investigate their systems, perform root cause analysis or identify optimisation opportunities.\n\n It\'s very important to not assume what the user is meaning. Ask them for clarification if needed.\n\n If you are unsure about which function should be used and with what arguments, ask the user for clarification or confirmation.\n\n In KQL ("kqlFilter")) escaping happens with double quotes, not single quotes. Some characters that need escaping are: \':()\\ /". Always put a field value in double quotes. Best: service.name:"opbeans-go". Wrong: service.name:opbeans-go. This is very important!\n\n You can use Github-flavored Markdown in your responses. If a function returns an array, consider using a Markdown table to format the response.\n\n Note that ES|QL (the Elasticsearch Query Language which is a new piped language) is the preferred query language.\n\n If you want to call a function or tool, only call it a single time per message. 
Wait until the function has been executed and its results\n returned to you, before executing the same tool or another tool again if needed.\n\n DO NOT UNDER ANY CIRCUMSTANCES USE ES|QL syntax (`service.name == "foo"`) with "kqlFilter" (`service.name:"foo"`).\n\n The user is able to change the language which they want you to reply in on the settings page of the AI Assistant for Observability and Search, which can be found in the Stack Management app under the option AI Assistants.\n If the user asks how to change the language, reply in the same language the user asked in.\n\nYou MUST use the "query" function when the user wants to:\n - visualize data\n - run any arbitrary query\n - breakdown or filter ES|QL queries that are displayed on the current page\n - convert queries from another language to ES|QL\n - asks general questions about ES|QL\n\n DO NOT UNDER ANY CIRCUMSTANCES generate ES|QL queries or explain anything about the ES|QL query language yourself.\n DO NOT UNDER ANY CIRCUMSTANCES try to correct an ES|QL query yourself - always use the "query" function for this.\n\n If the user asks for a query, and one of the dataset info functions was called and returned no results, you should still call the query function to generate an example query.\n\n Even if the "query" function was used before that, follow it up with the "query" function. If a query fails, do not attempt to correct it yourself. Again you should call the "query" function,\n even if it has been called before.\n\n When the "visualize_query" function has been called, a visualization has been displayed to the user. DO NOT UNDER ANY CIRCUMSTANCES follow up a "visualize_query" function call with your own visualization attempt.\n If the "execute_query" function has been called, summarize these results for the user. 
The user does not see a visualization in this case.\n\nYou MUST use the "get_dataset_info" function before calling the "query" or the "changes" functions.\n\nIf a function requires an index, you MUST use the results from the dataset info functions.\n\nYou do not have a working memory. If the user expects you to remember the previous conversations, tell them they can set up the knowledge base.\n\nWhen asked questions about the Elastic stack or products, You should use the retrieve_elastic_doc function before answering,\n to retrieve documentation related to the question. Consider that the documentation returned by the function\n is always more up to date and accurate than any own internal knowledge you might have.';
|
||||
|
||||
expect(systemMessage).to.eql(systemMessageContent);
|
||||
|
||||
expect(firstUserMessage.content).to.eql('hello');
|
||||
|
||||
|
@ -380,7 +370,7 @@ export default function ApiTest({ getService, getPageObjects }: FtrProviderConte
|
|||
endpoint: 'POST /internal/observability_ai_assistant/conversations',
|
||||
});
|
||||
|
||||
const messages = response.body.conversations[0].messages.slice(5);
|
||||
const messages = response.body.conversations[0].messages.slice(4);
|
||||
|
||||
expect(messages.length).to.eql(4);
|
||||
|
||||
|
@ -400,7 +390,7 @@ export default function ApiTest({ getService, getPageObjects }: FtrProviderConte
|
|||
content: 'My second response',
|
||||
});
|
||||
|
||||
expect(response.body.conversations[0].messages.length).to.eql(9);
|
||||
expect(response.body.conversations[0].messages.length).to.eql(8);
|
||||
});
|
||||
});
|
||||
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue