[O11y AI Ass] Consolidate system message & recall for insights (#168007)

This commit is contained in:
Dario Gieselaar 2023-10-06 10:35:23 +02:00 committed by GitHub
parent 591efd4271
commit 866edd206d
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
14 changed files with 183 additions and 138 deletions

View file

@ -40,14 +40,6 @@ export function ErrorSampleContextualInsight({
const transactionName = transaction?.transaction.name ?? '';
return [
{
'@timestamp': now,
message: {
role: MessageRole.System,
content: `You are apm-gpt, a helpful assistant for performance analysis, optimisation and
root cause analysis of software. Answer as concisely as possible.`,
},
},
{
'@timestamp': now,
message: {

View file

@ -233,14 +233,6 @@ export const LogRateAnalysis: FC<AlertDetailsLogRateAnalysisSectionProps> = ({ r
const now = new Date().toISOString();
return [
{
'@timestamp': now,
message: {
role: MessageRole.System,
content: `You are logs-gpt, a helpful assistant for logs-based observability. Answer as
concisely as possible.`,
},
},
{
'@timestamp': now,
message: {

View file

@ -45,14 +45,6 @@ export const ContextualInsightProcessRow = ({ command }: { command: string }) =>
}
const now = new Date().toISOString();
return [
{
'@timestamp': now,
message: {
role: MessageRole.System,
content: `You are infra-gpt, a helpful assistant for metrics-based infrastructure observability. Answer as
concisely as possible.`,
},
},
{
'@timestamp': now,
message: {

View file

@ -39,12 +39,6 @@ import { DataSearchProgress } from '../../data_search_progress';
import { LogEntryActionsMenu } from './log_entry_actions_menu';
import { LogEntryFieldsTable } from './log_entry_fields_table';
// Shared system prompt for the log-entry contextual-insight requests in this
// file; hoisted to a module-level constant so both message builders below
// (see the two `'@timestamp': now, message: LOGS_SYSTEM_MESSAGE` call sites)
// send the identical system message.
const LOGS_SYSTEM_MESSAGE = {
content: `You are logs-gpt, a helpful assistant for logs-based observability. Answer as
concisely as possible.`,
role: MessageRole.System,
};
export interface LogEntryFlyoutProps {
logEntryId: string | null | undefined;
onCloseFlyout: () => void;
@ -144,10 +138,6 @@ export const LogEntryFlyout = ({
const now = new Date().toISOString();
return [
{
'@timestamp': now,
message: LOGS_SYSTEM_MESSAGE,
},
{
'@timestamp': now,
message: {
@ -170,10 +160,6 @@ export const LogEntryFlyout = ({
const message = logEntry.fields.find((field) => field.field === 'message')?.value[0];
return [
{
'@timestamp': now,
message: LOGS_SYSTEM_MESSAGE,
},
{
'@timestamp': now,
message: {

View file

@ -4,11 +4,11 @@
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import React, { useCallback, useEffect, useMemo, useState } from 'react';
import React, { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { last } from 'lodash';
import { EuiFlexGroup, EuiFlexItem } from '@elastic/eui';
import { AbortError } from '@kbn/kibana-utils-plugin/common';
import type { Subscription } from 'rxjs';
import { isObservable, Subscription } from 'rxjs';
import { MessageRole, type Message } from '../../../common/types';
import { ObservabilityAIAssistantChatServiceProvider } from '../../context/observability_ai_assistant_chat_service_provider';
import { useKibana } from '../../hooks/use_kibana';
@ -41,6 +41,9 @@ function ChatContent({
const chatService = useObservabilityAIAssistantChatService();
const [pendingMessage, setPendingMessage] = useState<PendingMessage | undefined>();
const [recalledMessages, setRecalledMessages] = useState<Message[] | undefined>(undefined);
const [loading, setLoading] = useState(false);
const [subscription, setSubscription] = useState<Subscription | undefined>();
@ -56,37 +59,101 @@ function ChatContent({
const conversationTitle = conversationId
? conversation.value?.conversation.title || ''
: defaultTitle;
const reloadReply = useCallback(() => {
const controllerRef = useRef(new AbortController());
const reloadRecalledMessages = useCallback(async () => {
setLoading(true);
setDisplayedMessages(initialMessages);
setRecalledMessages(undefined);
controllerRef.current.abort();
const controller = (controllerRef.current = new AbortController());
let appendedMessages: Message[] = [];
if (chatService.hasFunction('recall')) {
try {
// manually execute recall function and append to list of
// messages
const functionCall = {
name: 'recall',
args: JSON.stringify({ queries: [], contexts: [] }),
};
const response = await chatService.executeFunction({
...functionCall,
messages: initialMessages,
signal: controller.signal,
connectorId,
});
if (isObservable(response)) {
throw new Error('Recall function unexpectedly returned an Observable');
}
appendedMessages = [
{
'@timestamp': new Date().toISOString(),
message: {
role: MessageRole.Assistant,
content: '',
function_call: {
name: functionCall.name,
arguments: functionCall.args,
trigger: MessageRole.User as const,
},
},
},
{
'@timestamp': new Date().toISOString(),
message: {
role: MessageRole.User,
name: functionCall.name,
content: JSON.stringify(response.content),
},
},
];
setRecalledMessages(appendedMessages);
} catch (err) {
// eslint-disable-next-line no-console
console.error(err);
setRecalledMessages([]);
}
}
}, [chatService, connectorId, initialMessages, setDisplayedMessages]);
useEffect(() => {
let lastPendingMessage: PendingMessage | undefined;
if (recalledMessages === undefined) {
// don't do anything, it's loading
return;
}
const nextSubscription = chatService
.chat({ messages: initialMessages, connectorId, function: 'none' })
.chat({ messages: displayedMessages.concat(recalledMessages), connectorId, function: 'none' })
.subscribe({
next: (msg) => {
lastPendingMessage = msg;
setPendingMessage(() => msg);
},
complete: () => {
setDisplayedMessages((prevMessages) =>
prevMessages.concat({
'@timestamp': new Date().toISOString(),
message: {
...lastPendingMessage!.message,
},
})
);
setPendingMessage(lastPendingMessage);
setLoading(false);
},
});
setSubscription(nextSubscription);
}, [initialMessages, setDisplayedMessages, connectorId, chatService]);
}, [chatService, connectorId, displayedMessages, setDisplayedMessages, recalledMessages]);
useEffect(() => {
reloadReply();
}, [reloadReply]);
reloadRecalledMessages();
}, [reloadRecalledMessages]);
useEffect(() => {
setDisplayedMessages(initialMessages);
@ -96,23 +163,25 @@ function ChatContent({
const messagesWithPending = useMemo(() => {
return pendingMessage
? displayedMessages.concat({
? displayedMessages.concat(recalledMessages || []).concat({
'@timestamp': new Date().toISOString(),
message: {
...pendingMessage.message,
},
})
: displayedMessages;
}, [pendingMessage, displayedMessages]);
: displayedMessages.concat(recalledMessages || []);
}, [pendingMessage, displayedMessages, recalledMessages]);
const lastMessage = last(messagesWithPending);
const lastAssistantMessage = last(
messagesWithPending.filter((message) => message.message.role === MessageRole.Assistant)
);
return (
<>
<MessagePanel
body={
<MessageText
content={lastMessage?.message.content ?? ''}
content={lastAssistantMessage?.message.content ?? ''}
loading={loading}
onActionClick={async () => {}}
/>
@ -147,7 +216,7 @@ function ChatContent({
<EuiFlexItem grow={false}>
<RegenerateResponseButton
onClick={() => {
reloadReply();
reloadRecalledMessages();
}}
/>
</EuiFlexItem>
@ -168,7 +237,7 @@ function ChatContent({
onClose={() => {
setIsOpen(() => false);
}}
messages={displayedMessages}
messages={messagesWithPending}
conversationId={conversationId}
startedFrom="contextualInsight"
onChatComplete={(nextMessages) => {

View file

@ -6,9 +6,10 @@
*/
import { i18n } from '@kbn/i18n';
import { merge, omit } from 'lodash';
import { Dispatch, SetStateAction, useState } from 'react';
import { Dispatch, SetStateAction, useMemo, useState } from 'react';
import { type Conversation, type Message } from '../../common';
import type { ConversationCreateRequest } from '../../common/types';
import { ConversationCreateRequest, MessageRole } from '../../common/types';
import { getAssistantSetupMessage } from '../service/get_assistant_setup_message';
import { ObservabilityAIAssistantChatService } from '../types';
import { useAbortableAsync, type AbortableAsyncState } from './use_abortable_async';
import { useKibana } from './use_kibana';
@ -21,7 +22,7 @@ export function useConversation({
connectorId,
}: {
conversationId?: string;
chatService?: ObservabilityAIAssistantChatService;
chatService?: ObservabilityAIAssistantChatService; // will eventually resolve to a non-nullish value
connectorId: string | undefined;
}): {
conversation: AbortableAsyncState<ConversationCreateRequest | Conversation | undefined>;
@ -41,6 +42,19 @@ export function useConversation({
const [displayedMessages, setDisplayedMessages] = useState<Message[]>([]);
const displayedMessagesWithHardcodedSystemMessage = useMemo(() => {
if (!chatService) {
return displayedMessages;
}
const systemMessage = getAssistantSetupMessage({ contexts: chatService?.getContexts() || [] });
if (displayedMessages[0]?.message.role === MessageRole.User) {
return [systemMessage, ...displayedMessages];
}
return [systemMessage, ...displayedMessages.slice(1)];
}, [displayedMessages, chatService]);
const conversation: AbortableAsyncState<ConversationCreateRequest | Conversation | undefined> =
useAbortableAsync(
({ signal }) => {
@ -71,7 +85,7 @@ export function useConversation({
return {
conversation,
displayedMessages,
displayedMessages: displayedMessagesWithHardcodedSystemMessage,
setDisplayedMessages,
save: (messages: Message[], handleRefreshConversations?: () => void) => {
const conversationObject = conversation.value!;

View file

@ -171,6 +171,8 @@ describe('useTimeline', () => {
return subject;
}),
executeFunction: jest.fn(),
hasFunction: jest.fn(),
hasRenderFunction: jest.fn(),
},
onChatUpdate: jest.fn().mockImplementation((messages) => {
props = { ...props, messages };

View file

@ -5,13 +5,13 @@
* 2.0.
*/
import { i18n } from '@kbn/i18n';
import { AbortError } from '@kbn/kibana-utils-plugin/common';
import type { AuthenticatedUser } from '@kbn/security-plugin/common';
import { last } from 'lodash';
import { useEffect, useMemo, useRef, useState } from 'react';
import { isObservable, Observable, Subscription } from 'rxjs';
import usePrevious from 'react-use/lib/usePrevious';
import { i18n } from '@kbn/i18n';
import { isObservable, Observable, Subscription } from 'rxjs';
import {
ContextDefinition,
MessageRole,
@ -20,8 +20,8 @@ import {
} from '../../common/types';
import type { ChatPromptEditorProps } from '../components/chat/chat_prompt_editor';
import type { ChatTimelineProps } from '../components/chat/chat_timeline';
import { ChatActionClickType } from '../components/chat/types';
import { EMPTY_CONVERSATION_TITLE } from '../i18n';
import { getAssistantSetupMessage } from '../service/get_assistant_setup_message';
import type { ObservabilityAIAssistantChatService, PendingMessage } from '../types';
import {
getTimelineItemsfromConversation,
@ -29,7 +29,6 @@ import {
} from '../utils/get_timeline_items_from_conversation';
import type { UseGenAIConnectorsResult } from './use_genai_connectors';
import { useKibana } from './use_kibana';
import { ChatActionClickType } from '../components/chat/types';
export function createNewConversation({
contexts,
@ -38,7 +37,7 @@ export function createNewConversation({
}): ConversationCreateRequest {
return {
'@timestamp': new Date().toISOString(),
messages: [getAssistantSetupMessage({ contexts })],
messages: [],
conversation: {
title: EMPTY_CONVERSATION_TITLE,
},
@ -114,55 +113,71 @@ export function useTimeline({
): Promise<Message[]> {
const controller = new AbortController();
return new Promise<PendingMessage | undefined>((resolve, reject) => {
if (!connectorId) {
reject(new Error('Can not add a message without a connector'));
return;
}
return new Promise<PendingMessage | undefined>(async (resolve, reject) => {
try {
if (!connectorId) {
reject(new Error('Can not add a message without a connector'));
return;
}
onChatUpdate(nextMessages);
const isStartOfConversation =
nextMessages.some((message) => message.message.role === MessageRole.Assistant) === false;
const lastMessage = last(nextMessages);
if (isStartOfConversation && chatService.hasFunction('recall')) {
nextMessages = nextMessages.concat({
'@timestamp': new Date().toISOString(),
message: {
role: MessageRole.Assistant,
content: '',
function_call: {
name: 'recall',
arguments: JSON.stringify({ queries: [], contexts: [] }),
trigger: MessageRole.User,
},
},
});
}
if (lastMessage?.message.function_call?.name) {
// the user has edited a function suggestion, no need to talk to the LLM
resolve(undefined);
return;
}
onChatUpdate(nextMessages);
const lastMessage = last(nextMessages);
if (lastMessage?.message.function_call?.name) {
// the user has edited a function suggestion, no need to talk to the LLM
resolve(undefined);
return;
}
response$ =
response$ ||
chatService!.chat({
messages: nextMessages,
connectorId,
response$ =
response$ ||
chatService!.chat({
messages: nextMessages,
connectorId,
});
let pendingMessageLocal = pendingMessage;
const nextSubscription = response$.subscribe({
next: (nextPendingMessage) => {
pendingMessageLocal = nextPendingMessage;
setPendingMessage(() => nextPendingMessage);
},
error: reject,
complete: () => {
const error = pendingMessageLocal?.error;
if (error) {
notifications.toasts.addError(error, {
title: i18n.translate('xpack.observabilityAiAssistant.failedToLoadResponse', {
defaultMessage: 'Failed to load response from the AI Assistant',
}),
});
}
resolve(pendingMessageLocal!);
},
});
let pendingMessageLocal = pendingMessage;
const nextSubscription = response$.subscribe({
next: (nextPendingMessage) => {
pendingMessageLocal = nextPendingMessage;
setPendingMessage(() => nextPendingMessage);
},
error: reject,
complete: () => {
const error = pendingMessageLocal?.error;
if (error) {
notifications.toasts.addError(error, {
title: i18n.translate('xpack.observabilityAiAssistant.failedToLoadResponse', {
defaultMessage: 'Failed to load response from the AI Assistant',
}),
});
}
resolve(pendingMessageLocal!);
},
});
setSubscription(() => {
controllerRef.current = controller;
return nextSubscription;
});
setSubscription(() => {
controllerRef.current = controller;
return nextSubscription;
});
} catch (error) {
reject(error);
}
}).then(async (reply) => {
if (reply?.error) {
return nextMessages;

View file

@ -145,6 +145,9 @@ export async function createChatService({
},
getContexts,
getFunctions,
hasFunction: (name: string) => {
return !!getFunctions().find((fn) => fn.options.name === name);
},
hasRenderFunction: (name: string) => {
return !!getFunctions().find((fn) => fn.options.name === name)?.render;
},

View file

@ -7,9 +7,9 @@
import { without } from 'lodash';
import { MessageRole } from '../../common';
import { ContextDefinition } from '../../common/types';
import type { ContextDefinition, Message } from '../../common/types';
export function getAssistantSetupMessage({ contexts }: { contexts: ContextDefinition[] }) {
export function getAssistantSetupMessage({ contexts }: { contexts: ContextDefinition[] }): Message {
const coreContext = contexts.find((context) => context.name === 'core')!;
const otherContexts = without(contexts.concat(), coreContext);

View file

@ -59,6 +59,7 @@ export interface ObservabilityAIAssistantChatService {
}) => Observable<PendingMessage>;
getContexts: () => ContextDefinition[];
getFunctions: (options?: { contexts?: string[]; filter?: string }) => FunctionDefinition[];
hasFunction: (name: string) => boolean;
hasRenderFunction: (name: string) => boolean;
executeFunction: ({}: {
name: string;

View file

@ -34,6 +34,7 @@ const chatService: ObservabilityAIAssistantChatService = {
renderFunction: (name: string, args: string | undefined, response: {}) => (
<div>Hello! {name}</div>
),
hasFunction: () => true,
hasRenderFunction: () => true,
};

View file

@ -9,7 +9,6 @@ import { IncomingMessage } from 'http';
import * as t from 'io-ts';
import { toBooleanRt } from '@kbn/io-ts-utils';
import type { CreateChatCompletionResponse } from 'openai';
import { MessageRole } from '../../../common';
import { createObservabilityAIAssistantServerRoute } from '../create_observability_ai_assistant_server_route';
import { messageRt } from '../runtime_types';
@ -49,25 +48,12 @@ const chatRoute = createObservabilityAIAssistantServerRoute({
}
const {
body: { messages, connectorId, functions, functionCall: givenFunctionCall },
body: { messages, connectorId, functions, functionCall },
query = { stream: true },
} = params;
const stream = query.stream;
let functionCall = givenFunctionCall;
if (!functionCall) {
const isStartOfConversation =
messages.some((message) => message.message.role === MessageRole.Assistant) === false;
const isRecallFunctionAvailable = functions.some((fn) => fn.name === 'recall') === true;
const willUseRecall = isStartOfConversation && isRecallFunctionAvailable;
functionCall = willUseRecall ? 'recall' : undefined;
}
return client.chat({
messages,
connectorId,

View file

@ -30,14 +30,6 @@ export function FrameInformationAIAssistant({ frame }: Props) {
const now = new Date().toISOString();
return [
{
'@timestamp': now,
message: {
role: MessageRole.System,
content: `You are perf-gpt, a helpful assistant for performance analysis and optimisation
of software. Answer as concisely as possible.`,
},
},
{
'@timestamp': now,
message: {