[8.10] [Observability AI Assistant] Prompt tweaks (#165591) (#165611)

# Backport

This will backport the following commits from `main` to `8.10`:
- [[Observability AI Assistant] Prompt tweaks
(#165591)](https://github.com/elastic/kibana/pull/165591)

<!--- Backport version: 8.9.7 -->

### Questions?
Please refer to the [Backport tool
documentation](https://github.com/sqren/backport)

<!--BACKPORT [{"author":{"name":"Dario
Gieselaar","email":"dario.gieselaar@elastic.co"},"sourceCommit":{"committedDate":"2023-09-04T15:20:23Z","message":"[Observability
AI Assistant] Prompt tweaks
(#165591)","sha":"b7fe71d6a1437819d7763383756b13c6a6baaa74","branchLabelMapping":{"^v8.11.0$":"main","^v(\\d+).(\\d+).\\d+$":"$1.$2"}},"sourcePullRequest":{"labels":["Team:APM","release_note:skip","v8.10.0","v8.11.0"],"number":165591,"url":"https://github.com/elastic/kibana/pull/165591","mergeCommit":{"message":"[Observability
AI Assistant] Prompt tweaks
(#165591)","sha":"b7fe71d6a1437819d7763383756b13c6a6baaa74"}},"sourceBranch":"main","suggestedTargetBranches":["8.10"],"targetPullRequestStates":[{"branch":"8.10","label":"v8.10.0","labelRegex":"^v(\\d+).(\\d+).\\d+$","isSourceBranch":false,"state":"NOT_CREATED"},{"branch":"main","label":"v8.11.0","labelRegex":"^v8.11.0$","isSourceBranch":true,"state":"MERGED","url":"https://github.com/elastic/kibana/pull/165591","number":165591,"mergeCommit":{"message":"[Observability
AI Assistant] Prompt tweaks
(#165591)","sha":"b7fe71d6a1437819d7763383756b13c6a6baaa74"}}]}]
BACKPORT-->

Co-authored-by: Dario Gieselaar <dario.gieselaar@elastic.co>
Kibana Machine 2023-09-04 12:56:57 -04:00 committed by GitHub
parent 3e3f82ddea
commit 01d249fff0
13 changed files with 81 additions and 59 deletions

View file

@@ -140,9 +140,8 @@ export function registerGetApmTimeseriesFunction({
description: 'The name of the service',
},
'service.environment': {
...NON_EMPTY_STRING,
description:
'The environment that the service is running in.',
'The environment that the service is running in. If undefined, all environments will be included. Only use this if you have confirmed the environment that the service is running in.',
},
filter: {
type: 'string',
@@ -160,12 +159,7 @@ export function registerGetApmTimeseriesFunction({
'The offset. Right: 15m. 8h. 1d. Wrong: -15m. -8h. -1d.',
},
},
required: [
'service.name',
'service.environment',
'timeseries',
'title',
],
required: ['service.name', 'timeseries', 'title'],
},
},
},
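
Since `service.environment` was dropped from the `required` list here (and, in the runtime type below, moved into the `t.partial` block), the assistant can now call `get_apm_timeseries` with or without an environment. A minimal sketch of what the function-call arguments could look like after this change; the values are made up for illustration:

```ts
// Hypothetical arguments for get_apm_timeseries after this change.
// 'service.environment' can simply be omitted, in which case all
// environments are included.
const args = {
  'service.name': 'opbeans-go',
  title: 'Average latency for opbeans-go',
  timeseries: [], // timeseries definitions elided for brevity
};

// When the environment has been confirmed, it can still be passed explicitly:
const argsWithEnvironment = { ...args, 'service.environment': 'production' };
```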

View file

@@ -14,7 +14,6 @@ import { environmentQuery } from '../../../../common/utils/environment_query';
import { getBucketSize } from '../../../../common/utils/get_bucket_size';
import { termQuery } from '../../../../common/utils/term_query';
import { APMEventClient } from '../../../lib/helpers/create_es_client/create_apm_event_client';
import { environmentRt } from '../../default_api_types';
import { getErrorEventRate } from './get_error_event_rate';
import { getExitSpanFailureRate } from './get_exit_span_failure_rate';
import { getExitSpanLatency } from './get_exit_span_latency';
@@ -37,7 +36,6 @@ export const getApmTimeseriesRt = t.type({
stats: t.array(
t.intersection([
t.type({
'service.environment': environmentRt.props.environment,
'service.name': t.string,
title: t.string,
timeseries: t.union([
@@ -85,6 +83,7 @@ export const getApmTimeseriesRt = t.type({
t.partial({
filter: t.string,
offset: t.string,
'service.environment': t.string,
}),
])
),

View file

@@ -88,7 +88,7 @@ interface FunctionOptions<TParameters extends CompatibleJSONSchema = CompatibleJ
}
type RespondFunction<TArguments, TResponse extends FunctionResponse> = (
options: { arguments: TArguments },
options: { arguments: TArguments; messages: Message[] },
signal: AbortSignal
) => Promise<TResponse>;
@@ -99,7 +99,10 @@ type RenderFunction<TArguments, TResponse extends FunctionResponse> = (options:
export interface FunctionDefinition {
options: FunctionOptions;
respond: (options: { arguments: any }, signal: AbortSignal) => Promise<FunctionResponse>;
respond: (
options: { arguments: any; messages: Message[] },
signal: AbortSignal
) => Promise<FunctionResponse>;
render?: RenderFunction<any, any>;
}
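
With the widened `RespondFunction` signature, a registered function's `respond` handler now receives the conversation messages alongside its parsed arguments. A rough sketch of a handler under the new signature, assuming a `registerFunction` of type `RegisterFunctionDefinition` is in scope as in the registration code further down; the function name, parameters, and behaviour are purely illustrative:

```ts
// Sketch only: 'example_function' is hypothetical and not part of this change.
registerFunction(
  {
    name: 'example_function',
    contexts: ['core'],
    description: 'Echoes its arguments together with the role of the last message.',
    descriptionForUser: 'Example function.',
    parameters: {
      type: 'object',
      properties: {},
    },
  },
  async ({ arguments: args, messages }) => {
    // Handlers can now inspect the conversation, e.g. the most recent
    // message, in addition to their own arguments.
    const lastMessage = messages[messages.length - 1];
    return {
      content: { args, lastMessageRole: lastMessage?.message.role },
    };
  }
);
```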

View file

@@ -49,7 +49,10 @@ export async function registerFunctions({
In KQL, escaping happens with double quotes, not single quotes. Some characters that need escaping are: ':()\\\
/\". Always put a field value in double quotes. Best: service.name:\"opbeans-go\". Wrong: service.name:opbeans-go. This is very important!
You can use Github-flavored Markdown in your responses. If a function returns an array, consider using a Markdown table to format the response.`
You can use Github-flavored Markdown in your responses. If a function returns an array, consider using a Markdown table to format the response.
If multiple functions are suitable, use the most specific and easy one. E.g., when the user asks to visualise APM data, use the APM functions (if available) rather than Lens.
`
);
if (isReady) {

View file

@@ -148,7 +148,7 @@ export function registerLensFunction({
name: 'lens',
contexts: ['core'],
description:
"Use this function to create custom visualizations, using Lens, that can be saved to dashboards. When using this function, make sure to use the recall function to get more information about how to use it, with how you want to use it. Make sure the query also contains information about the user's request. The visualisation is displayed to the user above your reply, DO NOT try to generate or display an image yourself.",
"Use this function to create custom visualizations, using Lens, that can be saved to dashboards. This function does not return data to the assistant, it only shows it to the user. When using this function, make sure to use the recall function to get more information about how to use it, with how you want to use it. Make sure the query also contains information about the user's request. The visualisation is displayed to the user above your reply, DO NOT try to generate or display an image yourself.",
descriptionForUser:
'Use this function to create custom visualizations, using Lens, that can be saved to dashboards.',
parameters: {

View file

@@ -6,7 +6,7 @@
*/
import type { Serializable } from '@kbn/utility-types';
import type { RegisterFunctionDefinition } from '../../common/types';
import { MessageRole, RegisterFunctionDefinition } from '../../common/types';
import type { ObservabilityAIAssistantService } from '../types';
export function registerRecallFunction({
@@ -20,24 +20,19 @@ export function registerRecallFunction({
{
name: 'recall',
contexts: ['core'],
description: `Use this function to recall earlier learnings. Anything you will summarize can be retrieved again later via this function. This is semantic/vector search so there's no need for an exact match.
description: `Use this function to recall earlier learnings. Anything you will summarize can be retrieved again later via this function.
Make sure the query covers the following aspects:
- The user's prompt, verbatim
- Anything you've inferred from the user's request, but is not mentioned in the user's request
- The functions you think might be suitable for answering the user's request. If there are multiple functions that seem suitable, create multiple queries. Use the function name in the query.
DO NOT include the user's request. It will be added internally.
Q: "can you visualise the average request duration for opbeans-go over the last 7 days?"
A: -"can you visualise the average request duration for opbeans-go over the last 7 days?"
The user asks: "can you visualise the average request duration for opbeans-go over the last 7 days?"
You recall:
- "APM service"
- "lens function usage"
- "get_apm_timeseries function usage"
Q: "what alerts are active?"
A: - "what alerts are active?"
- "alerts function usage"
`,
- "get_apm_timeseries function usage"`,
descriptionForUser: 'This function allows the assistant to recall previous learnings.',
parameters: {
type: 'object',
@@ -53,15 +48,21 @@ export function registerRecallFunction({
},
},
},
required: ['queries' as const],
},
required: ['queries'],
} as const,
},
({ arguments: { queries } }, signal) => {
({ arguments: { queries }, messages }, signal) => {
const userMessages = messages.filter((message) => message.message.role === MessageRole.User);
const userPrompt = userMessages[userMessages.length - 1]?.message.content;
const queriesWithUserPrompt = userPrompt ? [userPrompt, ...queries] : queries;
return service
.callApi('POST /internal/observability_ai_assistant/functions/recall', {
params: {
body: {
queries,
queries: queriesWithUserPrompt,
},
},
signal,
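
The new handler takes the most recent user message from the conversation and puts it in front of the queries suggested by the model, so the user's own wording is always part of the semantic search. A worked example of how the query list ends up being assembled, reusing the prompt and queries from the function description above:

```ts
// The last user message in the conversation (taken from `messages`):
const userPrompt =
  'can you visualise the average request duration for opbeans-go over the last 7 days?';

// Queries suggested by the model:
const queries = ['APM service', 'lens function usage', 'get_apm_timeseries function usage'];

// Same logic as in the handler above: the user's prompt is prepended.
const queriesWithUserPrompt = userPrompt ? [userPrompt, ...queries] : queries;
// => [
//      'can you visualise the average request duration for opbeans-go over the last 7 days?',
//      'APM service',
//      'lens function usage',
//      'get_apm_timeseries function usage',
//    ]
```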

View file

@@ -434,11 +434,20 @@ describe('useTimeline', () => {
expect(props.onChatComplete).not.toHaveBeenCalled();
expect(props.chatService.executeFunction).toHaveBeenCalledWith(
'my_function',
'{}',
expect.any(Object)
);
expect(props.chatService.executeFunction).toHaveBeenCalledWith({
name: 'my_function',
args: '{}',
messages: [
{
'@timestamp': expect.any(String),
message: {
content: 'Hello',
role: 'user',
},
},
],
signal: expect.any(Object),
});
act(() => {
subject.next({

View file

@@ -182,11 +182,12 @@ export function useTimeline({
const name = lastMessage.message.function_call.name;
try {
const message = await chatService!.executeFunction(
const message = await chatService!.executeFunction({
name,
lastMessage.message.function_call.arguments,
controller.signal
);
args: lastMessage.message.function_call.arguments,
messages: messagesAfterChat.slice(0, -1),
signal: controller.signal,
});
return await chat(
messagesAfterChat.concat({

View file

@@ -74,7 +74,7 @@ export async function createChatService({
};
const registerFunction: RegisterFunctionDefinition = (def, respond, render) => {
validators.set(def.name, new Validator(def.parameters as Schema, '2020-12', false));
validators.set(def.name, new Validator(def.parameters as Schema, '2020-12', true));
functionRegistry.set(def.name, { options: def, respond, render });
};
@@ -112,7 +112,7 @@ export async function createChatService({
}
return {
executeFunction: async (name, args, signal) => {
executeFunction: async ({ name, args, signal, messages }) => {
const fn = functionRegistry.get(name);
if (!fn) {
@@ -123,7 +123,7 @@ export async function createChatService({
validate(name, parsedArguments);
return await fn.respond({ arguments: parsedArguments }, signal);
return await fn.respond({ arguments: parsedArguments, messages }, signal);
},
renderFunction: (name, args, response) => {
const fn = functionRegistry.get(name);

View file

@@ -65,11 +65,12 @@ export interface ObservabilityAIAssistantChatService {
getContexts: () => ContextDefinition[];
getFunctions: (options?: { contexts?: string[]; filter?: string }) => FunctionDefinition[];
hasRenderFunction: (name: string) => boolean;
executeFunction: (
name: string,
args: string | undefined,
signal: AbortSignal
) => Promise<{ content?: Serializable; data?: Serializable }>;
executeFunction: ({}: {
name: string;
args: string | undefined;
messages: Message[];
signal: AbortSignal;
}) => Promise<{ content?: Serializable; data?: Serializable }>;
renderFunction: (
name: string,
args: string | undefined,

View file

@@ -25,11 +25,12 @@ const chatService: ObservabilityAIAssistantChatService = {
chat: (options: { messages: Message[]; connectorId: string }) => new Observable<PendingMessage>(),
getContexts: () => [],
getFunctions: () => [buildFunctionElasticsearch(), buildFunctionServiceSummary()],
executeFunction: async (
name: string,
args: string | undefined,
signal: AbortSignal
): Promise<{ content?: Serializable; data?: Serializable }> => ({}),
executeFunction: async ({}: {
name: string;
args: string | undefined;
messages: Message[];
signal: AbortSignal;
}): Promise<{ content?: Serializable; data?: Serializable }> => ({}),
renderFunction: (name: string, args: string | undefined, response: {}) => (
<div>Hello! {name}</div>
),

View file

@@ -4,12 +4,12 @@
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import * as t from 'io-ts';
import { IncomingMessage } from 'http';
import { notImplemented } from '@hapi/boom';
import { IncomingMessage } from 'http';
import * as t from 'io-ts';
import { MessageRole } from '../../../common';
import { createObservabilityAIAssistantServerRoute } from '../create_observability_ai_assistant_server_route';
import { messageRt } from '../runtime_types';
import { MessageRole } from '../../../common';
const chatRoute = createObservabilityAIAssistantServerRoute({
endpoint: 'POST /internal/observability_ai_assistant/chat',
@@ -47,13 +47,15 @@ const chatRoute = createObservabilityAIAssistantServerRoute({
const isRecallFunctionAvailable = functions.some((fn) => fn.name === 'recall') === true;
const willUseRecall = isStartOfConversation && isRecallFunctionAvailable;
return client.chat({
messages,
connectorId,
...(functions.length
? {
functions,
functionCall: isStartOfConversation && isRecallFunctionAvailable ? 'recall' : undefined,
functionCall: willUseRecall ? 'recall' : undefined,
}
: {}),
});

View file

@@ -246,9 +246,17 @@ export class ObservabilityAIAssistantClient {
});
if ('object' in response && response.object === 'chat.completion') {
const title =
response.choices[0].message?.content?.slice(1, -1) ||
`Conversation on ${conversation['@timestamp']}`;
const input =
response.choices[0].message?.content || `Conversation on ${conversation['@timestamp']}`;
// This regular expression captures a string enclosed in single or double quotes.
// It extracts the string content without the quotes.
// Example matches:
// - "Hello, World!" => Captures: Hello, World!
// - 'Another Example' => Captures: Another Example
// - JustTextWithoutQuotes => Captures: JustTextWithoutQuotes
const match = input.match(/^["']?([^"']+)["']?$/);
const title = match ? match[1] : input;
const updatedConversation: Conversation = merge(
{},
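
A quick worked example of the new title handling, using the regular expression from this change; the sample inputs mirror the ones in the code comment:

```ts
// Strips a single pair of surrounding quotes, if present; otherwise the
// input is returned unchanged.
function extractTitle(input: string): string {
  const match = input.match(/^["']?([^"']+)["']?$/);
  return match ? match[1] : input;
}

extractTitle('"Hello, World!"');       // => 'Hello, World!'
extractTitle("'Another Example'");     // => 'Another Example'
extractTitle('JustTextWithoutQuotes'); // => 'JustTextWithoutQuotes'
```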