Mirror of https://github.com/elastic/kibana.git (synced 2025-04-24 01:38:56 -04:00)
[8.x] [Obs AI Assistant] Update the simulate function calling setting to support "auto" (#209628) (#210277)
# Backport

This will backport the following commits from `main` to `8.x`:

- [[Obs AI Assistant] Update the simulate function calling setting to support "auto" (#209628)](https://github.com/elastic/kibana/pull/209628)

### Questions?

Please refer to the [Backport tool documentation](https://github.com/sqren/backport)

---

**[Obs AI Assistant] Update the simulate function calling setting to support "auto" (#209628)**

Closes https://github.com/elastic/obs-ai-assistant-team/issues/198

## Summary

The simulated function calling setting is currently a boolean. It needs to be updated to support the option `auto`:

`export type FunctionCallingMode = 'native' | 'simulated' | 'auto';`

If the setting is set to `false`, `auto` will be passed to the inference client. If the setting is `true`, `simulated` will be passed to it.

Relates to https://github.com/elastic/kibana/pull/208144

### Checklist

- [x] Any text added follows [EUI's writing guidelines](https://elastic.github.io/eui/#/guidelines/writing), uses sentence case text and includes [i18n support](https://github.com/elastic/kibana/blob/main/src/platform/packages/shared/kbn-i18n/README.md)
- [x] [Unit or functional tests](https://www.elastic.co/guide/en/kibana/master/development-tests.html) were updated or added to match the most common scenarios
- [x] The PR description includes the appropriate Release Notes section, and the correct `release_note:*` label is applied per the [guidelines](https://www.elastic.co/guide/en/kibana/master/contributing.html#kibana-release-notes-process)

Co-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com>
Co-authored-by: Viduni Wickramarachchi <viduni.wickramarachchi@elastic.co>
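As a minimal TypeScript sketch of the mapping described above: the `FunctionCallingMode` type is quoted from the summary, while the `toFunctionCallingMode` helper name is illustrative only and not part of this change.

```ts
// The mode accepted by the inference client, as quoted in the summary above.
export type FunctionCallingMode = 'native' | 'simulated' | 'auto';

// Illustrative helper (not part of this PR): translates the boolean
// simulated-function-calling advanced setting into the mode passed to
// the inference client.
//   setting === true  -> 'simulated'
//   setting === false -> 'auto'   (previously 'native')
export function toFunctionCallingMode(simulateFunctionCalling: boolean): FunctionCallingMode {
  return simulateFunctionCalling ? 'simulated' : 'auto';
}
```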
This commit is contained in:
parent 532304a9af
commit b8c2230bee
11 changed files with 38 additions and 50 deletions
@@ -26,6 +26,7 @@ import {
   VisualizeESQLUserIntention,
   type ChatActionClickPayload,
   type Feedback,
+  aiAssistantSimulatedFunctionCalling,
 } from '@kbn/observability-ai-assistant-plugin/public';
 import type { AuthenticatedUser } from '@kbn/security-plugin/common';
 import { euiThemeVars } from '@kbn/ui-theme';
@@ -34,7 +35,6 @@ import React, { useCallback, useEffect, useRef, useState } from 'react';
 import type { UseKnowledgeBaseResult } from '../hooks/use_knowledge_base';
 import { ASSISTANT_SETUP_TITLE, EMPTY_CONVERSATION_TITLE, UPGRADE_LICENSE_TITLE } from '../i18n';
 import { useAIAssistantChatService } from '../hooks/use_ai_assistant_chat_service';
-import { useSimulatedFunctionCalling } from '../hooks/use_simulated_function_calling';
 import { useGenAIConnectors } from '../hooks/use_genai_connectors';
 import { useConversation } from '../hooks/use_conversation';
 import { FlyoutPositionMode } from './chat_flyout';
@@ -46,6 +46,7 @@ import { WelcomeMessage } from './welcome_message';
 import { useLicense } from '../hooks/use_license';
 import { PromptEditor } from '../prompt_editor/prompt_editor';
 import { deserializeMessage } from '../utils/deserialize_message';
+import { useKibana } from '../hooks/use_kibana';

 const fullHeightClassName = css`
   height: 100%;
@@ -132,7 +133,14 @@ export function ChatBody({

   const chatService = useAIAssistantChatService();

-  const { simulatedFunctionCallingEnabled } = useSimulatedFunctionCalling();
+  const {
+    services: { uiSettings },
+  } = useKibana();
+
+  const simulateFunctionCalling = uiSettings!.get<boolean>(
+    aiAssistantSimulatedFunctionCalling,
+    false
+  );

   const { conversation, messages, next, state, stop, saveTitle } = useConversation({
     initialConversationId,
@@ -403,7 +411,7 @@ export function ChatBody({
           </div>
         </EuiFlexItem>

-        {simulatedFunctionCallingEnabled ? (
+        {simulateFunctionCalling ? (
           <EuiFlexItem grow={false}>
             <SimulatedFunctionCallingCallout />
           </EuiFlexItem>

@@ -1,22 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
- */
-
-import { aiAssistantSimulatedFunctionCalling } from '@kbn/observability-ai-assistant-plugin/public';
-import { useKibana } from './use_kibana';
-
-export function useSimulatedFunctionCalling() {
-  const {
-    services: { uiSettings },
-  } = useKibana();
-
-  const simulatedFunctionCallingEnabled = uiSettings!.get<boolean>(
-    aiAssistantSimulatedFunctionCalling,
-    false
-  );
-
-  return { simulatedFunctionCallingEnabled };
-}

@@ -50,7 +50,7 @@ describe('chatFunctionClient', () => {
         messages: [],
         signal: new AbortController().signal,
         connectorId: 'foo',
-        useSimulatedFunctionCalling: false,
+        simulateFunctionCalling: false,
       });
     }).rejects.toThrowError(`Function arguments are invalid`);

@@ -112,7 +112,7 @@ describe('chatFunctionClient', () => {
       messages: [],
       signal: new AbortController().signal,
       connectorId: 'foo',
-      useSimulatedFunctionCalling: false,
+      simulateFunctionCalling: false,
     });

     expect(result).toEqual({

@@ -167,7 +167,7 @@ export class ChatFunctionClient {
     messages,
     signal,
     connectorId,
-    useSimulatedFunctionCalling,
+    simulateFunctionCalling,
   }: {
     chat: FunctionCallChatFunction;
     name: string;
@@ -175,7 +175,7 @@ export class ChatFunctionClient {
     messages: Message[];
     signal: AbortSignal;
     connectorId: string;
-    useSimulatedFunctionCalling: boolean;
+    simulateFunctionCalling: boolean;
   }): Promise<FunctionResponse> {
     const fn = this.functionRegistry.get(name);

@@ -194,7 +194,7 @@ export class ChatFunctionClient {
        screenContexts: this.screenContexts,
        chat,
        connectorId,
-       useSimulatedFunctionCalling,
+       simulateFunctionCalling,
      },
      signal
    );

@@ -313,7 +313,7 @@ describe('Observability AI Assistant client', () => {
        expect.objectContaining({
          connectorId: 'foo',
          stream: false,
-         functionCalling: 'native',
+         functionCalling: 'auto',
          toolChoice: expect.objectContaining({
            function: 'title_conversation',
          }),
@@ -349,7 +349,7 @@ describe('Observability AI Assistant client', () => {
        messages: expect.arrayContaining([
          { role: 'user', content: 'How many alerts do I have?' },
        ]),
-       functionCalling: 'native',
+       functionCalling: 'auto',
        toolChoice: undefined,
        tools: undefined,
      },
@@ -872,7 +872,7 @@ describe('Observability AI Assistant client', () => {
          },
        },
      ],
-     useSimulatedFunctionCalling: false,
+     simulateFunctionCalling: false,
    });
  });

@@ -919,7 +919,7 @@ describe('Observability AI Assistant client', () => {
        messages: expect.arrayContaining([
          { role: 'user', content: 'How many alerts do I have?' },
        ]),
-       functionCalling: 'native',
+       functionCalling: 'auto',
        toolChoice: 'auto',
        tools: expect.any(Object),
      },
@@ -1080,7 +1080,7 @@ describe('Observability AI Assistant client', () => {
        messages: expect.arrayContaining([
          { role: 'user', content: 'How many alerts do I have?' },
        ]),
-       functionCalling: 'native',
+       functionCalling: 'auto',
        toolChoice: 'auto',
        tools: expect.any(Object),
      },

@@ -307,7 +307,7 @@ export class ObservabilityAIAssistantClient {
            disableFunctions,
            tracer: completeTracer,
            connectorId,
-           useSimulatedFunctionCalling: simulateFunctionCalling === true,
+           simulateFunctionCalling,
          })
        );
      }),
@@ -505,6 +505,7 @@ export class ObservabilityAIAssistantClient {
          }
        : ToolChoiceType.auto;
    }

    const options = {
      connectorId,
      messages: convertMessagesForInference(
@@ -512,8 +513,9 @@ export class ObservabilityAIAssistantClient {
      ),
      toolChoice,
      tools,
-     functionCalling: (simulateFunctionCalling ? 'simulated' : 'native') as FunctionCallingMode,
+     functionCalling: (simulateFunctionCalling ? 'simulated' : 'auto') as FunctionCallingMode,
    };

    if (stream) {
      return defer(() =>
        this.dependencies.inferenceClient.chatComplete({

@@ -54,7 +54,7 @@ function executeFunctionAndCatchError({
  logger,
  tracer,
  connectorId,
- useSimulatedFunctionCalling,
+ simulateFunctionCalling,
}: {
  name: string;
  args: string | undefined;
@@ -65,7 +65,7 @@ function executeFunctionAndCatchError({
  logger: Logger;
  tracer: LangTracer;
  connectorId: string;
- useSimulatedFunctionCalling: boolean;
+ simulateFunctionCalling: boolean;
}): Observable<MessageOrChatEvent> {
  // hide token count events from functions to prevent them from
  // having to deal with it as well
@@ -86,7 +86,7 @@ function executeFunctionAndCatchError({
      signal,
      messages,
      connectorId,
-     useSimulatedFunctionCalling,
+     simulateFunctionCalling,
    })
  );

@@ -184,7 +184,7 @@ export function continueConversation({
  disableFunctions,
  tracer,
  connectorId,
- useSimulatedFunctionCalling,
+ simulateFunctionCalling,
}: {
  messages: Message[];
  functionClient: ChatFunctionClient;
@@ -201,7 +201,7 @@ export function continueConversation({
  };
  tracer: LangTracer;
  connectorId: string;
- useSimulatedFunctionCalling: boolean;
+ simulateFunctionCalling: boolean;
}): Observable<MessageOrChatEvent> {
  let nextFunctionCallsLeft = functionCallsLeft;

@@ -319,7 +319,7 @@ export function continueConversation({
        logger,
        tracer,
        connectorId,
-       useSimulatedFunctionCalling,
+       simulateFunctionCalling,
      });
    }

@@ -348,7 +348,7 @@ export function continueConversation({
        disableFunctions,
        tracer,
        connectorId,
-       useSimulatedFunctionCalling,
+       simulateFunctionCalling,
      });
    })
  )

@@ -56,7 +56,7 @@ type RespondFunction<TArguments, TResponse extends FunctionResponse> = (
    screenContexts: ObservabilityAIAssistantScreenContextRequest[];
    chat: FunctionCallChatFunction;
    connectorId: string;
-   useSimulatedFunctionCalling: boolean;
+   simulateFunctionCalling: boolean;
  },
  signal: AbortSignal
) => Promise<TResponse>;

@@ -62,14 +62,14 @@ export async function registerDocumentationFunction({
      required: ['query'],
    } as const,
  },
- async ({ arguments: { query, product }, connectorId, useSimulatedFunctionCalling }) => {
+ async ({ arguments: { query, product }, connectorId, simulateFunctionCalling }) => {
    const response = await llmTasks!.retrieveDocumentation({
      searchTerm: query,
      products: product ? [product] : undefined,
      max: 3,
      connectorId,
      request: resources.request,
-     functionCalling: useSimulatedFunctionCalling ? 'simulated' : 'native',
+     functionCalling: simulateFunctionCalling ? 'simulated' : 'auto',
    });

    return {

@@ -112,7 +112,7 @@ export function registerQueryFunction({
      function takes no input.`,
      visibility: FunctionVisibility.AssistantOnly,
    },
-   async ({ messages, connectorId, useSimulatedFunctionCalling }, signal) => {
+   async ({ messages, connectorId, simulateFunctionCalling }, signal) => {
      const esqlFunctions = functions
        .getFunctions()
        .filter(
@@ -137,7 +137,7 @@ export function registerQueryFunction({
          { description: fn.description, schema: fn.parameters } as ToolDefinition,
        ])
      ),
-     functionCalling: useSimulatedFunctionCalling ? 'simulated' : 'native',
+     functionCalling: simulateFunctionCalling ? 'simulated' : 'auto',
    });

    const chatMessageId = v4();

@@ -27,7 +27,7 @@ export const uiSettings: Record<string, UiSettingsParams> = {
      'xpack.observabilityAiAssistantManagement.settingsPage.simulatedFunctionCallingDescription',
      {
        defaultMessage:
-         '<em>[technical preview]</em> Use simulated function calling. Simulated function calling does not need API support for functions or tools, but it may decrease performance. Simulated function calling is currently always enabled for non-OpenAI connector, regardless of this setting.',
+         '<em>[technical preview]</em> Simulated function calling does not need API support for functions or tools, but it may decrease performance. It is currently always enabled for connectors that do not have API support for Native function calling, regardless of this setting.',
        values: {
          em: (chunks) => `<em>${chunks}</em>`,
        },