Add connectors support for playground (#179676)
## Summary

- Add connectors flyout to playground
- Add set up gen-ai panel
- Create feature connector id
- Use encrypted objects on the server
- Use management Locator for navigating to connectors management
- Pass dependencies to embeddable app

<img width="397" alt="image" src="499797f9-1dfa-4806-a364-32d2533945cd">
<img width="1004" alt="image" src="3171be7c-b3f7-4c8e-99ff-0d81c7b2b9c9">

---------

Co-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com>
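The connectors flyout is scoped to the new `GenerativeAIForSearchPlaygroundConnectorFeatureId`, so only Gen AI connector types are offered inside the playground. A condensed sketch of that wiring, based on the `SetUpConnectorPanelForStartChat` component added in this PR (the component name below is illustrative; `useKibana` and `useLoadConnectors` are the playground hooks used in the diff):

```tsx
import React, { useState } from 'react';
import { EuiButton } from '@elastic/eui';
import { GenerativeAIForSearchPlaygroundConnectorFeatureId } from '@kbn/actions-plugin/common';
import { useKibana } from '../hooks/use_kibana';
import { useLoadConnectors } from '../hooks/use_load_connectors';

// Sketch: open the triggersActionsUi connector flyout scoped to the new
// Search Playground feature id, then refresh the connector list on creation.
export const ConnectorSetupSketch: React.FC = () => {
  const [isFlyoutOpen, setIsFlyoutOpen] = useState(false);
  const {
    services: {
      triggersActionsUi: { getAddConnectorFlyout: ConnectorFlyout },
    },
  } = useKibana();
  const { refetch: refetchConnectors } = useLoadConnectors();

  return (
    <>
      <EuiButton onClick={() => setIsFlyoutOpen(true)}>Set up GenAI connector</EuiButton>
      {isFlyoutOpen && (
        <ConnectorFlyout
          featureId={GenerativeAIForSearchPlaygroundConnectorFeatureId}
          onConnectorCreated={() => {
            refetchConnectors();
            setIsFlyoutOpen(false);
          }}
          onClose={() => setIsFlyoutOpen(false)}
        />
      )}
    </>
  );
};
```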
This commit is contained in:
parent 0b450739b2
commit 56fe25ddc7

69 changed files with 1295 additions and 702 deletions
@@ -10,8 +10,7 @@ import type { PluginStartContract as ActionsPluginStart } from '@kbn/actions-plu
 import { loggerMock } from '@kbn/logging-mocks';

 import { ActionsClientLlm } from './actions_client_llm';
-import { mockActionResponse } from '../../../__mocks__/action_result_data';
-import { ExecuteConnectorRequestBody } from '@kbn/elastic-assistant-common';
+import { mockActionResponse } from '../mock/mock_action_response';

 const connectorId = 'mock-connector-id';

@@ -30,14 +29,14 @@ const mockActions = {

 const prompt = 'Do you know my name?';

-const mockRequest: KibanaRequest<unknown, unknown, ExecuteConnectorRequestBody> = {
+const mockRequest: KibanaRequest = {
   params: { connectorId },
   body: {
     message: prompt,
     subAction: 'invokeAI',
     isEnabledKnowledgeBase: true,
   },
-} as KibanaRequest<unknown, unknown, ExecuteConnectorRequestBody>;
+} as KibanaRequest;

 describe('ActionsClientLlm', () => {
   beforeEach(() => {

@@ -11,8 +11,7 @@ import type { PluginStartContract as ActionsPluginStart } from '@kbn/actions-plu
 import { LLM } from '@langchain/core/language_models/llms';
 import { get } from 'lodash/fp';

-import { ExecuteConnectorRequestBody } from '@kbn/elastic-assistant-common';
-import { getMessageContentAndRole } from '../helpers';
+import { getMessageContentAndRole } from './helpers';

 const LLM_TYPE = 'ActionsClientLlm';

@@ -21,7 +20,8 @@ interface ActionsClientLlmParams {
   connectorId: string;
   llmType?: string;
   logger: Logger;
-  request: KibanaRequest<unknown, unknown, ExecuteConnectorRequestBody>;
+  request: KibanaRequest;
+  model?: string;
   traceId?: string;
 }

@@ -29,19 +29,22 @@ export class ActionsClientLlm extends LLM {
   #actions: ActionsPluginStart;
   #connectorId: string;
   #logger: Logger;
-  #request: KibanaRequest<unknown, unknown, ExecuteConnectorRequestBody>;
+  #request: KibanaRequest;
   #traceId: string;

   // Local `llmType` as it can change and needs to be accessed by abstract `_llmType()` method
   // Not using getter as `this._llmType()` is called in the constructor via `super({})`
   protected llmType: string;

+  model?: string;
+
   constructor({
     actions,
     connectorId,
     traceId = uuidv4(),
     llmType,
     logger,
+    model,
     request,
   }: ActionsClientLlmParams) {
     super({});

@@ -52,6 +55,7 @@ export class ActionsClientLlm extends LLM {
     this.llmType = llmType ?? LLM_TYPE;
     this.#logger = logger;
     this.#request = request;
+    this.model = model;
   }

   _llmType() {

@@ -80,7 +84,7 @@ export class ActionsClientLlm extends LLM {
         // hard code to non-streaming subaction as this class only supports non-streaming
         subAction: 'invokeAI',
         subActionParams: {
-          model: this.#request.body.model,
+          model: this.model,
           messages: [assistantMessage], // the assistant message
           ...(this.llmType === 'openai'
             ? { n: 1, stop: null, temperature: 0.2 }
@@ -0,0 +1,27 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { Message } from '../schemas';
+import { getMessageContentAndRole } from './helpers';
+
+describe('helpers', () => {
+  describe('getMessageContentAndRole', () => {
+    const testCases: Array<[string, Pick<Message, 'content' | 'role'>]> = [
+      ['Prompt 1', { content: 'Prompt 1', role: 'user' }],
+      ['Prompt 2', { content: 'Prompt 2', role: 'user' }],
+      ['', { content: '', role: 'user' }],
+    ];
+
+    testCases.forEach(([prompt, expectedOutput]) => {
+      test(`Given the prompt "${prompt}", it returns the prompt as content with a "user" role`, () => {
+        const result = getMessageContentAndRole(prompt);
+
+        expect(result).toEqual(expectedOutput);
+      });
+    });
+  });
+});

@@ -0,0 +1,13 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { Message } from '../schemas';
+
+export const getMessageContentAndRole = (prompt: string): Pick<Message, 'content' | 'role'> => ({
+  content: prompt,
+  role: 'user',
+});

@@ -0,0 +1,9 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+export { ActionsClientChatOpenAI } from './openai';
+export { ActionsClientLlm } from './actions_client_llm';
@@ -4,6 +4,7 @@
  * 2.0; you may not use this file except in compliance with the Elastic License
  * 2.0.
  */

 import { v4 as uuidv4 } from 'uuid';
 import { KibanaRequest, Logger } from '@kbn/core/server';
 import type { PluginStartContract as ActionsPluginStart } from '@kbn/actions-plugin/server';

@@ -17,8 +18,7 @@ import {
   ChatCompletionCreateParamsStreaming,
   ChatCompletionCreateParamsNonStreaming,
 } from 'openai/resources/chat/completions';
-import { ExecuteConnectorRequestBody } from '@kbn/elastic-assistant-common';
-import { InvokeAIActionParamsSchema } from '../types';
+import { InvokeAIActionParamsSchema } from './types';

 const LLM_TYPE = 'ActionsClientChatOpenAI';

@@ -27,10 +27,11 @@ interface ActionsClientChatOpenAIParams {
   connectorId: string;
   llmType?: string;
   logger: Logger;
-  request: KibanaRequest<unknown, unknown, ExecuteConnectorRequestBody>;
+  request: KibanaRequest;
   streaming?: boolean;
   traceId?: string;
   maxRetries?: number;
+  model?: string;
   signal?: AbortSignal;
 }

@@ -51,12 +52,13 @@ export class ActionsClientChatOpenAI extends ChatOpenAI {
   // ChatOpenAI class needs these, but they do not matter as we override the openai client with the actions client
   azureOpenAIApiKey = '';
   openAIApiKey = '';
+  model?: string;

   // Kibana variables
   #actions: ActionsPluginStart;
   #connectorId: string;
   #logger: Logger;
-  #request: KibanaRequest<unknown, unknown, ExecuteConnectorRequestBody>;
+  #request: KibanaRequest;
   #actionResultData: string;
   #traceId: string;
   #signal?: AbortSignal;

@@ -68,6 +70,7 @@ export class ActionsClientChatOpenAI extends ChatOpenAI {
     logger,
     request,
     maxRetries,
+    model,
     signal,
   }: ActionsClientChatOpenAIParams) {
     super({

@@ -90,6 +93,7 @@ export class ActionsClientChatOpenAI extends ChatOpenAI {
     this.#actionResultData = '';
     this.streaming = true;
     this.#signal = signal;
+    this.model = model;
   }

   getActionResultData(): string {

@@ -173,7 +177,7 @@ export class ActionsClientChatOpenAI extends ChatOpenAI {
       temperature: completionRequest.temperature,
       functions: completionRequest.functions,
       // possible client model override
-      model: this.#request.body.model ?? completionRequest.model,
+      model: this.model ?? completionRequest.model,
       // ensure we take the messages from the completion request, not the client request
       messages: completionRequest.messages.map((message) => ({
         role: message.role,
@@ -0,0 +1,40 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import {
+  ChatCompletionContentPart,
+  ChatCompletionCreateParamsNonStreaming,
+} from 'openai/resources/chat/completions';
+
+export interface InvokeAIActionParamsSchema {
+  messages: Array<{
+    role: string;
+    content: string | ChatCompletionContentPart[];
+    name?: string;
+    function_call?: {
+      arguments: string;
+      name: string;
+    };
+    tool_calls?: Array<{
+      id: string;
+
+      function: {
+        arguments: string;
+        name: string;
+      };
+
+      type: string;
+    }>;
+    tool_call_id?: string;
+  }>;
+  model?: ChatCompletionCreateParamsNonStreaming['model'];
+  n?: ChatCompletionCreateParamsNonStreaming['n'];
+  stop?: ChatCompletionCreateParamsNonStreaming['stop'];
+  temperature?: ChatCompletionCreateParamsNonStreaming['temperature'];
+  functions?: ChatCompletionCreateParamsNonStreaming['functions'];
+  signal?: AbortSignal;
+}

@@ -0,0 +1,11 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+export const mockActionResponse = {
+  message: 'Yes, your name is Andrew. How can I assist you further, Andrew?',
+  usage: { prompt_tokens: 4, completion_tokens: 10, total_tokens: 14 },
+};

@@ -19,5 +19,7 @@
     "@kbn/zod-helpers",
     "@kbn/securitysolution-io-ts-utils",
     "@kbn/core",
     "@kbn/actions-plugin",
     "@kbn/logging-mocks",
   ]
 }
@@ -13,7 +13,7 @@ import {
   ApiConfig,
   Replacements,
 } from '@kbn/elastic-assistant-common';
-import { Conversation, Message } from '../../../assistant_context/types';
+import { Conversation, ClientMessage } from '../../../assistant_context/types';

 export interface GetConversationByIdParams {
   http: HttpSetup;

@@ -149,7 +149,7 @@ export interface PutConversationMessageParams {
   toasts?: IToasts;
   conversationId: string;
   title?: string;
-  messages?: Message[];
+  messages?: ClientMessage[];
   apiConfig?: ApiConfig;
   replacements?: Replacements;
   excludeFromLastConversationStorage?: boolean;

@@ -12,7 +12,7 @@ import {
   ELASTIC_AI_ASSISTANT_API_CURRENT_VERSION,
   ApiConfig,
 } from '@kbn/elastic-assistant-common';
-import { Conversation, Message } from '../../../assistant_context/types';
+import { Conversation, ClientMessage } from '../../../assistant_context/types';

 export interface BulkActionSummary {
   failed: number;

@@ -52,7 +52,7 @@ export interface BulkActionResponse {
 export interface ConversationUpdateParams {
   id?: string;
   title?: string;
-  messages?: Message[];
+  messages?: ClientMessage[];
   apiConfig?: ApiConfig;
 }
@@ -8,11 +8,12 @@
 import React, { useCallback } from 'react';
 import { HttpSetup } from '@kbn/core-http-browser';
 import { i18n } from '@kbn/i18n';
+import type { ClientMessage } from '../../assistant_context/types';
 import { SelectedPromptContext } from '../prompt_context/types';
 import { useSendMessage } from '../use_send_message';
 import { useConversation } from '../use_conversation';
 import { getCombinedMessage } from '../prompt/helpers';
-import { Conversation, Message, Prompt, useAssistantContext } from '../../..';
+import { Conversation, Prompt, useAssistantContext } from '../../..';
 import { getMessageFromRawResponse } from '../helpers';
 import { getDefaultSystemPrompt } from '../use_conversation/helpers';

@@ -126,7 +127,7 @@ export const useChatSend = ({
      isEnabledRAGAlerts,
    });

-    const responseMessage: Message = getMessageFromRawResponse(rawResponse);
+    const responseMessage: ClientMessage = getMessageFromRawResponse(rawResponse);

    setCurrentConversation({
      ...currentConversation,

@@ -184,7 +185,7 @@ export const useChatSend = ({
      replacements: {},
    });

-    const responseMessage: Message = getMessageFromRawResponse(rawResponse);
+    const responseMessage: ClientMessage = getMessageFromRawResponse(rawResponse);
    setCurrentConversation({
      ...currentConversation,
      messages: [...updatedMessages, responseMessage],
@@ -9,10 +9,12 @@ import { merge } from 'lodash/fp';
 import { AIConnector } from '../connectorland/connector_selector';
 import { FetchConnectorExecuteResponse, FetchConversationsResponse } from './api';
 import { Conversation } from '../..';
-import type { Message } from '../assistant_context/types';
+import type { ClientMessage } from '../assistant_context/types';
 import { enterpriseMessaging, WELCOME_CONVERSATION } from './use_conversation/sample_conversations';

-export const getMessageFromRawResponse = (rawResponse: FetchConnectorExecuteResponse): Message => {
+export const getMessageFromRawResponse = (
+  rawResponse: FetchConnectorExecuteResponse
+): ClientMessage => {
   const { response, isStream, isError } = rawResponse;
   const dateTimeString = new Date().toLocaleString(); // TODO: Pull from response
   if (rawResponse) {
@@ -5,7 +5,7 @@
  * 2.0.
  */

-import type { Message } from '../../assistant_context/types';
+import type { ClientMessage } from '../../assistant_context/types';
 import { getCombinedMessage, getSystemMessages } from './helpers';
 import { mockGetAnonymizedValue } from '../../mock/get_anonymized_value';
 import { mockSystemPrompt } from '../../mock/system_prompt';

@@ -39,7 +39,7 @@ describe('helpers', () => {
   });

   describe('when isNewChat is true and selectedSystemPrompt is defined', () => {
-    let result: Message[];
+    let result: ClientMessage[];

     beforeEach(() => {
       result = getSystemMessages({ isNewChat: true, selectedSystemPrompt: mockSystemPrompt });

@@ -63,7 +63,7 @@ describe('helpers', () => {

   describe('getCombinedMessage', () => {
     it('returns correct content for a new chat with a system prompt', async () => {
-      const message: Message = await getCombinedMessage({
+      const message: ClientMessage = await getCombinedMessage({
         currentReplacements: {},
         isNewChat: true,
         promptText: 'User prompt text',

@@ -85,7 +85,7 @@ User prompt text`);
     });

     it('returns correct content for a new chat WITHOUT a system prompt', async () => {
-      const message: Message = await getCombinedMessage({
+      const message: ClientMessage = await getCombinedMessage({
         currentReplacements: {},
         isNewChat: true,
         promptText: 'User prompt text',

@@ -106,7 +106,7 @@ User prompt text`);
     });

     it('returns the correct content for an existing chat', async () => {
-      const message: Message = await getCombinedMessage({
+      const message: ClientMessage = await getCombinedMessage({
         currentReplacements: {},
         isNewChat: false,
         promptText: 'User prompt text',

@@ -125,7 +125,7 @@ User prompt text`);
     });

     it('returns the expected role', async () => {
-      const message: Message = await getCombinedMessage({
+      const message: ClientMessage = await getCombinedMessage({
         currentReplacements: {},
         isNewChat: true,
         promptText: 'User prompt text',

@@ -139,7 +139,7 @@ User prompt text`);
     });

     it('returns a valid timestamp', async () => {
-      const message: Message = await getCombinedMessage({
+      const message: ClientMessage = await getCombinedMessage({
         currentReplacements: {},
         isNewChat: true,
         promptText: 'User prompt text',

@@ -185,7 +185,7 @@ User prompt text`);
     it('returns the expected content when `isNewChat` is false', async () => {
       const isNewChat = false; // <-- not a new chat

-      const message: Message = await getCombinedMessage({
+      const message: ClientMessage = await getCombinedMessage({
         currentReplacements: {},
         getAnonymizedValue: mockGetAnonymizedValue,
         isNewChat,
@@ -6,8 +6,8 @@
  */

 import { Replacements, transformRawData } from '@kbn/elastic-assistant-common';
+import type { ClientMessage } from '../../assistant_context/types';
 import { getAnonymizedValue as defaultGetAnonymizedValue } from '../get_anonymized_value';
-import type { Message } from '../../assistant_context/types';
 import type { SelectedPromptContext } from '../prompt_context/types';
 import type { Prompt } from '../types';
 import { SYSTEM_PROMPT_CONTEXT_NON_I18N } from '../../content/prompts/system/translations';

@@ -18,12 +18,12 @@ export const getSystemMessages = ({
 }: {
   isNewChat: boolean;
   selectedSystemPrompt: Prompt | undefined;
-}): Message[] => {
+}): ClientMessage[] => {
   if (!isNewChat || selectedSystemPrompt == null) {
     return [];
   }

-  const message: Message = {
+  const message: ClientMessage = {
     content: selectedSystemPrompt.content,
     role: 'system',
     timestamp: new Date().toLocaleString(),

@@ -31,7 +31,9 @@ export const getSystemMessages = ({

   return [message];
 };

+interface ClientMessageWithReplacements extends ClientMessage {
+  replacements: Replacements;
+}
 export function getCombinedMessage({
   currentReplacements,
   getAnonymizedValue = defaultGetAnonymizedValue,

@@ -52,7 +54,7 @@ export function getCombinedMessage({
   promptText: string;
   selectedPromptContexts: Record<string, SelectedPromptContext>;
   selectedSystemPrompt: Prompt | undefined;
-}): Message {
+}): ClientMessageWithReplacements {
   let replacements: Replacements = currentReplacements ?? {};
   const onNewReplacements = (newReplacements: Replacements) => {
     replacements = { ...replacements, ...newReplacements };
@@ -9,7 +9,7 @@ import { useConversation } from '.';
 import { act, renderHook } from '@testing-library/react-hooks';
 import { TestProviders } from '../../mock/test_providers/test_providers';
 import React from 'react';
-import { ConversationRole } from '../../assistant_context/types';
+import { MessageRole } from '@kbn/elastic-assistant-common';
 import { httpServiceMock } from '@kbn/core/public/mocks';
 import { WELCOME_CONVERSATION } from './sample_conversations';
 import {

@@ -21,12 +21,12 @@ import {
 jest.mock('../api/conversations');
 const message = {
   content: 'You are a robot',
-  role: 'user' as ConversationRole,
+  role: 'user' as MessageRole,
   timestamp: '10/04/2023, 1:00:36 PM',
 };
 const anotherMessage = {
   content: 'I am a robot',
-  role: 'assistant' as ConversationRole,
+  role: 'assistant' as MessageRole,
   timestamp: '10/04/2023, 1:00:46 PM',
 };
@@ -9,7 +9,7 @@ import { useCallback } from 'react';

 import { ApiConfig } from '@kbn/elastic-assistant-common';
 import { useAssistantContext } from '../../assistant_context';
-import { Conversation, Message } from '../../assistant_context/types';
+import { Conversation, ClientMessage } from '../../assistant_context/types';
 import * as i18n from './translations';
 import { getDefaultSystemPrompt } from './helpers';
 import {

@@ -30,7 +30,7 @@ export const DEFAULT_CONVERSATION_STATE: Conversation = {

 interface CreateConversationProps {
   cTitle: string;
-  messages?: Message[];
+  messages?: ClientMessage[];
 }

 interface SetApiConfigProps {

@@ -42,7 +42,7 @@ interface UseConversation {
   clearConversation: (conversation: Conversation) => Promise<Conversation | undefined>;
   getDefaultConversation: ({ cTitle, messages }: CreateConversationProps) => Conversation;
   deleteConversation: (conversationId: string) => void;
-  removeLastMessage: (conversationId: string) => Promise<Message[] | undefined>;
+  removeLastMessage: (conversationId: string) => Promise<ClientMessage[] | undefined>;
   setApiConfig: ({
     conversation,
     apiConfig,

@@ -66,7 +66,7 @@ export const useConversation = (): UseConversation => {
   */
  const removeLastMessage = useCallback(
    async (conversationId: string) => {
-      let messages: Message[] = [];
+      let messages: ClientMessage[] = [];
      const prevConversation = await getConversationById({ http, id: conversationId, toasts });
      if (prevConversation != null) {
        messages = prevConversation.messages.slice(0, prevConversation.messages.length - 1);
@@ -5,7 +5,7 @@
  * 2.0.
  */

-import { Conversation, Message } from '../../assistant_context/types';
+import { Conversation, ClientMessage } from '../../assistant_context/types';
 import * as i18n from '../../content/prompts/welcome/translations';
 import { WELCOME_CONVERSATION_TITLE } from './translations';

@@ -45,7 +45,7 @@ export const WELCOME_CONVERSATION: Conversation = {
   replacements: {},
 };

-export const enterpriseMessaging: Message[] = [
+export const enterpriseMessaging: ClientMessage[] = [
   {
     role: 'assistant',
     content: i18n.ENTERPRISE,
@@ -5,27 +5,19 @@
  * 2.0.
  */

-import { ApiConfig, Replacements } from '@kbn/elastic-assistant-common';
-
-export type ConversationRole = 'system' | 'user' | 'assistant';
+import { ApiConfig, Message, Replacements } from '@kbn/elastic-assistant-common';

 export interface MessagePresentation {
   delay?: number;
   stream?: boolean;
 }

-export interface Message {
-  role: ConversationRole;
+// The ClientMessage is different from the Message in that it content
+// can be undefined and reader is the correct type which is unavailable in Zod
+export interface ClientMessage extends Omit<Message, 'content' | 'reader'> {
   reader?: ReadableStreamDefaultReader<Uint8Array>;
   replacements?: Replacements;
   content?: string;
-  timestamp: string;
-  isError?: boolean;
-  presentation?: MessagePresentation;
-  traceData?: {
-    transactionId: string;
-    traceId: string;
-  };
 }

 export interface ConversationTheme {

@@ -59,7 +51,7 @@ export interface Conversation {
   category: string;
   id: string;
   title: string;
-  messages: Message[];
+  messages: ClientMessage[];
   updatedAt?: Date;
   createdAt?: Date;
   replacements: Replacements;
@@ -15,7 +15,7 @@ import { ActionConnector } from '@kbn/triggers-actions-ui-plugin/public/common/c
 import { ActionType } from '@kbn/triggers-actions-ui-plugin/public';
 import { AddConnectorModal } from '../add_connector_modal';
 import { WELCOME_CONVERSATION } from '../../assistant/use_conversation/sample_conversations';
-import { Conversation, Message } from '../../..';
+import { Conversation, ClientMessage } from '../../..';
 import { useLoadActionTypes } from '../use_load_action_types';
 import { StreamingText } from '../../assistant/streaming_text';
 import { ConnectorButton } from '../connector_button';

@@ -111,7 +111,7 @@ export const useConnectorSetup = ({

   // Create EuiCommentProps[] from conversation messages
   const commentBody = useCallback(
-    (message: Message, index: number, length: number) => {
+    (message: ClientMessage, index: number, length: number) => {
       // If timestamp is not set, set it to current time (will update conversation at end of setup)
       if (
         conversation.messages[index].timestamp == null ||
@@ -99,8 +99,8 @@ export type {
   AssistantTelemetry,
   /** Conversation Interface */
   Conversation,
-  /** Message Interface */
-  Message,
+  /** Message interface on the client */
+  ClientMessage,
 } from './impl/assistant_context/types';

 /** Interface for defining system/user prompts */
@@ -27,6 +27,7 @@ export const UptimeConnectorFeatureId = 'uptime';
 export const SecurityConnectorFeatureId = 'siem';
 export const GenerativeAIForSecurityConnectorFeatureId = 'generativeAIForSecurity';
 export const GenerativeAIForObservabilityConnectorFeatureId = 'generativeAIForObservability';
+export const GenerativeAIForSearchPlaygroundConnectorFeatureId = 'generativeAIForSearchPlayground';

 const compatibilityGenerativeAIForSecurity = i18n.translate(
   'xpack.actions.availableConnectorFeatures.compatibility.generativeAIForSecurity',

@@ -42,6 +43,13 @@ const compatibilityGenerativeAIForObservability = i18n.translate(
   }
 );

+const compatibilityGenerativeAIForSearchPlayground = i18n.translate(
+  'xpack.actions.availableConnectorFeatures.compatibility.generativeAIForSearchPlayground',
+  {
+    defaultMessage: 'Generative AI for Search Playground',
+  }
+);
+
 const compatibilityAlertingRules = i18n.translate(
   'xpack.actions.availableConnectorFeatures.compatibility.alertingRules',
   {

@@ -100,6 +108,12 @@ export const GenerativeAIForObservabilityFeature: ConnectorFeatureConfig = {
   compatibility: compatibilityGenerativeAIForObservability,
 };

+export const GenerativeAIForSearchPlaygroundFeature: ConnectorFeatureConfig = {
+  id: GenerativeAIForSearchPlaygroundConnectorFeatureId,
+  name: compatibilityGenerativeAIForSearchPlayground,
+  compatibility: compatibilityGenerativeAIForSearchPlayground,
+};
+
 const AllAvailableConnectorFeatures = {
   [AlertingConnectorFeature.id]: AlertingConnectorFeature,
   [CasesConnectorFeature.id]: CasesConnectorFeature,

@@ -107,6 +121,7 @@ const AllAvailableConnectorFeatures = {
   [SecuritySolutionFeature.id]: SecuritySolutionFeature,
   [GenerativeAIForSecurityFeature.id]: GenerativeAIForSecurityFeature,
   [GenerativeAIForObservabilityFeature.id]: GenerativeAIForObservabilityFeature,
+  [GenerativeAIForSearchPlaygroundFeature.id]: GenerativeAIForSearchPlaygroundFeature,
 };

 export function areValidFeatures(ids: string[]) {
@@ -11,16 +11,17 @@ import { coreMock } from '@kbn/core/server/mocks';
 import { KibanaRequest } from '@kbn/core/server';
 import { loggerMock } from '@kbn/logging-mocks';

-import { ActionsClientLlm } from '../llm/actions_client_llm';
-import { ActionsClientChatOpenAI } from '../llm/openai';
 import { mockActionResponse } from '../../../__mocks__/action_result_data';
 import { langChainMessages } from '../../../__mocks__/lang_chain_messages';
 import { ESQL_RESOURCE } from '../../../routes/knowledge_base/constants';
 import { callAgentExecutor } from '.';
 import { Stream } from 'stream';
+import { ActionsClientChatOpenAI, ActionsClientLlm } from '@kbn/elastic-assistant-common/impl/llm';

-jest.mock('../llm/actions_client_llm');
-jest.mock('../llm/openai');
+jest.mock('@kbn/elastic-assistant-common/impl/llm', () => ({
+  ActionsClientChatOpenAI: jest.fn(),
+  ActionsClientLlm: jest.fn(),
+}));

 const mockConversationChain = {
   call: jest.fn(),

@@ -57,7 +58,7 @@ jest.mock('../elasticsearch_store/elasticsearch_store', () => ({
 const mockConnectorId = 'mock-connector-id';

 // eslint-disable-next-line @typescript-eslint/no-explicit-any
-const mockRequest: KibanaRequest<unknown, unknown, any, any> = {} as KibanaRequest<
+const mockRequest: KibanaRequest<unknown, unknown, any, any> = { body: {} } as KibanaRequest<
   unknown,
   unknown,
   any, // eslint-disable-line @typescript-eslint/no-explicit-any
@@ -12,9 +12,8 @@ import { ToolInterface } from '@langchain/core/tools';
 import { streamFactory } from '@kbn/ml-response-stream/server';
 import { transformError } from '@kbn/securitysolution-es-utils';
 import { RetrievalQAChain } from 'langchain/chains';
+import { ActionsClientChatOpenAI, ActionsClientLlm } from '@kbn/elastic-assistant-common/impl/llm';
 import { ElasticsearchStore } from '../elasticsearch_store/elasticsearch_store';
-import { ActionsClientChatOpenAI } from '../llm/openai';
-import { ActionsClientLlm } from '../llm/actions_client_llm';
 import { KNOWLEDGE_BASE_INDEX_PATTERN } from '../../../routes/knowledge_base/constants';
 import { AgentExecutor } from '../executors/types';
 import { withAssistantSpan } from '../tracers/with_assistant_span';

@@ -61,6 +60,7 @@ export const callAgentExecutor: AgentExecutor<true | false> = async ({
     request,
     llmType,
     logger,
+    model: request.body.model,
     signal: abortSignal,
     streaming: isStream,
     // prevents the agent from retrying on failure
@@ -10,8 +10,8 @@ import { RetrievalQAChain } from 'langchain/chains';
 import { BufferMemory, ChatMessageHistory } from 'langchain/memory';
 import { ChainTool } from 'langchain/tools/chain';

+import { ActionsClientLlm } from '@kbn/elastic-assistant-common/impl/llm';
 import { ElasticsearchStore } from '../elasticsearch_store/elasticsearch_store';
-import { ActionsClientLlm } from '../llm/actions_client_llm';
 import { KNOWLEDGE_BASE_INDEX_PATTERN } from '../../../routes/knowledge_base/constants';
 import { AgentExecutor } from './types';
 import { withAssistantSpan } from '../tracers/with_assistant_span';

@@ -39,7 +39,14 @@ export const callOpenAIFunctionsExecutor: AgentExecutor<false> = async ({
   telemetry,
   traceOptions,
 }) => {
-  const llm = new ActionsClientLlm({ actions, connectorId, request, llmType, logger });
+  const llm = new ActionsClientLlm({
+    actions,
+    connectorId,
+    request,
+    llmType,
+    logger,
+    model: request.body.model,
+  });

   const pastMessages = langChainMessages.slice(0, -1); // all but the last message
   const latestMessage = langChainMessages.slice(-1); // the last message
@@ -6,14 +6,13 @@
  */

 import { KibanaRequest } from '@kbn/core-http-server';
-import type { Message } from '@kbn/elastic-assistant';
+import type { Message } from '@kbn/elastic-assistant-common';
 import { AIMessage, BaseMessage, HumanMessage, SystemMessage } from '@langchain/core/messages';
 import { ExecuteConnectorRequestBody } from '@kbn/elastic-assistant-common';

 import {
   getLangChainMessage,
   getLangChainMessages,
-  getMessageContentAndRole,
   requestHasRequiredAnonymizationParams,
 } from './helpers';
 import { langChainMessages } from '../../__mocks__/lang_chain_messages';

@@ -97,22 +96,6 @@ describe('helpers', () => {
     });
   });

-  describe('getMessageContentAndRole', () => {
-    const testCases: Array<[string, Pick<Message, 'content' | 'role'>]> = [
-      ['Prompt 1', { content: 'Prompt 1', role: 'user' }],
-      ['Prompt 2', { content: 'Prompt 2', role: 'user' }],
-      ['', { content: '', role: 'user' }],
-    ];
-
-    testCases.forEach(([prompt, expectedOutput]) => {
-      test(`Given the prompt "${prompt}", it returns the prompt as content with a "user" role`, () => {
-        const result = getMessageContentAndRole(prompt);
-
-        expect(result).toEqual(expectedOutput);
-      });
-    });
-  });
-
   describe('requestHasRequiredAnonymizationParams', () => {
     it('returns true if the request has valid anonymization params', () => {
       const request = {
@@ -6,7 +6,7 @@
  */

 import { KibanaRequest } from '@kbn/core-http-server';
-import type { Message } from '@kbn/elastic-assistant';
+import type { Message } from '@kbn/elastic-assistant-common';
 import { AIMessage, BaseMessage, HumanMessage, SystemMessage } from '@langchain/core/messages';

 import { ExecuteConnectorRequestBody } from '@kbn/elastic-assistant-common/impl/schemas/actions_connector/post_actions_connector_execute_route.gen';

@@ -30,11 +30,6 @@ export const getLangChainMessages = (
   assistantMessages: Array<Pick<Message, 'content' | 'role'>>
 ): BaseMessage[] => assistantMessages.map(getLangChainMessage);

-export const getMessageContentAndRole = (prompt: string): Pick<Message, 'content' | 'role'> => ({
-  content: prompt,
-  role: 'user',
-});
-
 export const requestHasRequiredAnonymizationParams = (
   request: KibanaRequest<unknown, unknown, ExecuteConnectorRequestBody>
 ): boolean => {
@@ -5,39 +5,6 @@
  * 2.0.
  */

-import {
-  ChatCompletionContentPart,
-  ChatCompletionCreateParamsNonStreaming,
-} from 'openai/resources/chat/completions';
 import { ExecuteConnectorResponse } from '@kbn/elastic-assistant-common';

 export type ResponseBody = ExecuteConnectorResponse;

-export interface InvokeAIActionParamsSchema {
-  messages: Array<{
-    role: string;
-    content: string | ChatCompletionContentPart[];
-    name?: string;
-    function_call?: {
-      arguments: string;
-      name: string;
-    };
-    tool_calls?: Array<{
-      id: string;
-
-      function: {
-        arguments: string;
-        name: string;
-      };
-
-      type: string;
-    }>;
-    tool_call_id?: string;
-  }>;
-  model?: ChatCompletionCreateParamsNonStreaming['model'];
-  n?: ChatCompletionCreateParamsNonStreaming['n'];
-  stop?: ChatCompletionCreateParamsNonStreaming['stop'];
-  temperature?: ChatCompletionCreateParamsNonStreaming['temperature'];
-  functions?: ChatCompletionCreateParamsNonStreaming['functions'];
-  signal?: AbortSignal;
-}
@@ -17,6 +17,7 @@ import {
   PostEvaluateResponse,
   ExecuteConnectorRequestBody,
 } from '@kbn/elastic-assistant-common';
+import { ActionsClientLlm } from '@kbn/elastic-assistant-common/impl/llm';
 import { buildRouteValidationWithZod } from '@kbn/elastic-assistant-common/impl/schemas/common';
 import { ESQL_RESOURCE } from '../knowledge_base/constants';
 import { buildResponse } from '../../lib/build_response';

@@ -24,7 +25,6 @@ import { ElasticAssistantRequestHandlerContext, GetElser } from '../../types';
 import { EVALUATE } from '../../../common/constants';
 import { performEvaluation } from '../../lib/model_evaluator/evaluation';
 import { AgentExecutorEvaluatorWithMetadata } from '../../lib/langchain/executors/types';
-import { ActionsClientLlm } from '../../lib/langchain/llm/actions_client_llm';
 import {
   indexEvaluations,
   setupEvaluationIndex,

@@ -221,6 +221,7 @@ export const postEvaluateRoute = (
         connectorId: evalModel,
         request: skeletonRequest,
         logger,
+        model: skeletonRequest.body.model,
       });

       const { evaluationResults, evaluationSummary } = await performEvaluation({
@@ -19,7 +19,6 @@
     "@kbn/licensing-plugin",
     "@kbn/securitysolution-es-utils",
     "@kbn/actions-plugin",
-    "@kbn/elastic-assistant",
     "@kbn/logging-mocks",
     "@kbn/core-elasticsearch-server-mocks",
     "@kbn/core-logging-server-mocks",
@@ -25,3 +25,19 @@ export enum APIRoutes {
   POST_CHAT_MESSAGE = '/internal/search_playground/chat',
   POST_QUERY_SOURCE_FIELDS = '/internal/search_playground/query_source_fields',
 }
+
+export enum LLMs {
+  openai = 'openai',
+  openai_azure = 'openai_azure',
+}
+
+export interface ChatRequestData {
+  connector_id: string;
+  prompt: string;
+  indices: string;
+  citations: boolean;
+  elasticsearch_query: string;
+  summarization_model?: string;
+  source_fields: string;
+  doc_size: number;
+}
@@ -11,8 +11,12 @@
     "searchPlayground"
   ],
   "requiredPlugins": [
+    "actions",
+    "encryptedSavedObjects",
     "navigation",
-    "security"
+    "security",
+    "stackConnectors",
+    "triggersActionsUi",
   ],
   "optionalPlugins": [
     "cloud"
@@ -25,7 +25,7 @@ import { FormattedMessage } from '@kbn/i18n-react';
 import { useAutoBottomScroll } from '../hooks/use_auto_bottom_scroll';
 import { ChatSidebar } from './chat_sidebar';
 import { useChat } from '../hooks/use_chat';
-import { ChatForm, ChatFormFields, MessageRole } from '../types';
+import { ChatForm, ChatFormFields, ChatRequestData, MessageRole } from '../types';

 import { MessageList } from './message_list/message_list';
 import { QuestionInput } from './question_input';

@@ -34,15 +34,15 @@ import { StartNewChat } from './start_new_chat';
 import { TelegramIcon } from './telegram_icon';
 import { transformFromChatMessages } from '../utils/transform_to_messages';

-const buildFormData = (formData: ChatForm) => ({
+const buildFormData = (formData: ChatForm): ChatRequestData => ({
+  connector_id: formData[ChatFormFields.summarizationModel].connectorId!,
   prompt: formData[ChatFormFields.prompt],
   indices: formData[ChatFormFields.indices].join(),
-  api_key: formData[ChatFormFields.openAIKey],
   citations: formData[ChatFormFields.citations],
-  elasticsearchQuery: JSON.stringify(formData[ChatFormFields.elasticsearchQuery]),
-  summarization_model: formData[ChatFormFields.summarizationModel],
+  elasticsearch_query: JSON.stringify(formData[ChatFormFields.elasticsearchQuery]),
+  summarization_model: formData[ChatFormFields.summarizationModel].value,
   source_fields: JSON.stringify(formData[ChatFormFields.sourceFields]),
-  docSize: formData[ChatFormFields.docSize],
+  doc_size: formData[ChatFormFields.docSize],
 });

 export const Chat = () => {
@@ -88,9 +88,9 @@ export const AssistantMessage: React.FC<AssistantMessageProps> = ({
       timestamp={
         createdAt &&
         i18n.translate('xpack.searchPlayground.chat.message.assistant.createdAt', {
-          defaultMessage: 'on {date}',
+          defaultMessage: 'at {time}',
           values: {
-            date: moment(createdAt).format('MMM DD, YYYY'),
+            time: moment(createdAt).format('HH:mm'),
           },
         })
       }
@@ -5,41 +5,38 @@
  * 2.0.
  */

-import React, { useEffect, useState } from 'react';
+import React from 'react';

 import moment from 'moment';

 import { EuiComment, EuiText } from '@elastic/eui';
 import { i18n } from '@kbn/i18n';
-import { UserAvatar, UserProfileWithAvatar } from '@kbn/user-profile-components';
+import { UserAvatar } from '@kbn/user-profile-components';

+import { useUserProfile } from '../../hooks/use_user_profile';
 import type { Message as MessageType } from '../../types';

 import { CopyActionButton } from './copy_action_button';
-import { useKibana } from '../../hooks/use_kibana';

 type UserMessageProps = Pick<MessageType, 'content' | 'createdAt'>;

 export const UserMessage: React.FC<UserMessageProps> = ({ content, createdAt }) => {
-  const { services } = useKibana();
-  const [currentUserProfile, setCurrentUserProfile] = useState<UserProfileWithAvatar>();
-
-  useEffect(() => {
-    services.security?.userProfiles.getCurrent({ dataPath: 'avatar' }).then(setCurrentUserProfile);
-  }, [services]);
+  const currentUserProfile = useUserProfile();

   return (
     <EuiComment
-      username={currentUserProfile?.user.username}
+      username={i18n.translate('xpack.searchPlayground.chat.message.user.name', {
+        defaultMessage: 'You',
+      })}
       event={i18n.translate('xpack.searchPlayground.chat.message.user.event', {
        defaultMessage: 'asked',
       })}
       timestamp={
         createdAt &&
         i18n.translate('xpack.searchPlayground.chat.message.user.createdAt', {
-          defaultMessage: 'on {date}',
+          defaultMessage: 'at {time}',
           values: {
-            date: moment(createdAt).format('MMM DD, YYYY'),
+            time: moment(createdAt).format('HH:mm'),
           },
         })
       }
@@ -0,0 +1,72 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React from 'react';
+import { fireEvent, render as testingLibraryRender, waitFor } from '@testing-library/react';
+import { SetUpConnectorPanelForStartChat } from './set_up_connector_panel_for_start_chat';
+import { useKibana } from '../hooks/use_kibana';
+import { useLoadConnectors } from '../hooks/use_load_connectors';
+import { __IntlProvider as IntlProvider } from '@kbn/i18n-react';
+
+const render = (children: React.ReactNode) =>
+  testingLibraryRender(<IntlProvider locale="en">{children}</IntlProvider>);
+
+jest.mock('../hooks/use_kibana');
+jest.mock('../hooks/use_load_connectors');
+
+const mockConnectors = {
+  '1': { title: 'Connector 1' },
+  '2': { title: 'Connector 2' },
+};
+
+describe('SetUpConnectorPanelForStartChat', () => {
+  beforeEach(() => {
+    (useKibana as jest.Mock).mockReturnValue({
+      services: {
+        triggersActionsUi: {
+          getAddConnectorFlyout: () => (
+            <div data-test-subj="addConnectorFlyout">Add Connector Flyout</div>
+          ),
+        },
+      },
+    });
+    (useLoadConnectors as jest.Mock).mockReturnValue({
+      data: mockConnectors,
+      refetch: jest.fn(),
+      isLoading: false,
+      isSuccess: true,
+    });
+  });
+
+  afterEach(() => {
+    jest.resetAllMocks();
+  });
+
+  it('renders the empty state when there are no connectors', () => {
+    (useLoadConnectors as jest.Mock).mockReturnValueOnce({
+      data: {},
+      isLoading: false,
+      isSuccess: true,
+    });
+    const { getByTestId } = render(<SetUpConnectorPanelForStartChat />);
+    expect(getByTestId('setupGenAIConnectorButton')).toBeInTheDocument();
+  });
+
+  it('show the flyout when the button is clicked', async () => {
+    (useLoadConnectors as jest.Mock).mockReturnValue({
+      data: {},
+      isLoading: false,
+      isSuccess: true,
+    });
+    const { getByTestId, queryByTestId } = render(<SetUpConnectorPanelForStartChat />);
+
+    expect(queryByTestId('addConnectorFlyout')).not.toBeInTheDocument();
+
+    fireEvent.click(getByTestId('setupGenAIConnectorButton'));
+    await waitFor(() => expect(getByTestId('addConnectorFlyout')).toBeInTheDocument());
+  });
+});
@@ -0,0 +1,94 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React, { useState } from 'react';
+import { i18n } from '@kbn/i18n';
+import { FormattedMessage } from '@kbn/i18n-react';
+import { EuiButton, EuiCallOut, EuiFlexGroup, EuiFlexItem } from '@elastic/eui';
+import { GenerativeAIForSearchPlaygroundConnectorFeatureId } from '@kbn/actions-plugin/common';
+import { useKibana } from '../hooks/use_kibana';
+import { useLoadConnectors } from '../hooks/use_load_connectors';
+import { StartChatPanel } from './start_chat_panel';
+
+export const SetUpConnectorPanelForStartChat: React.FC = () => {
+  const [connectorFlyoutOpen, setConnectorFlyoutOpen] = useState(false);
+  const [showCallout, setShowAddedCallout] = useState(false);
+  const {
+    services: {
+      triggersActionsUi: { getAddConnectorFlyout: ConnectorFlyout },
+    },
+  } = useKibana();
+  const {
+    data: connectors,
+    refetch: refetchConnectors,
+    isLoading: isConnectorListLoading,
+  } = useLoadConnectors();
+  const handleConnectorCreated = () => {
+    refetchConnectors();
+    setShowAddedCallout(true);
+    setConnectorFlyoutOpen(false);
+  };
+
+  return connectors && !isConnectorListLoading ? (
+    <>
+      {!!Object.keys(connectors).length && showCallout && (
+        <EuiCallOut
+          title={i18n.translate('xpack.searchPlayground.emptyPrompts.setUpConnector.settled', {
+            defaultMessage:
+              '{connectorsNames} {count, plural, one {connector} other {connectors}} added',
+            values: {
+              connectorsNames: Object.values(connectors)
+                .map((connector) => connector.title)
+                .join(', '),
+              count: Object.values(connectors).length,
+            },
+          })}
+          iconType="check"
+          color="success"
+          data-test-subj="addedConnectorCallout"
+        />
+      )}
+      {!Object.keys(connectors).length && (
+        <>
+          <StartChatPanel
+            title={i18n.translate('xpack.searchPlayground.emptyPrompts.setUpConnector.title', {
+              defaultMessage: 'Set up a Gen AI connector',
+            })}
+            description={
+              <FormattedMessage
+                id="xpack.searchPlayground.emptyPrompts.setUpConnector.description"
+                defaultMessage="A large-language model is required to use a chat bot. Set up a connection to your LLM provider to continue."
+              />
+            }
+          >
+            <EuiFlexGroup>
+              <EuiFlexItem grow={false}>
+                <EuiButton
+                  color="primary"
+                  data-test-subj="setupGenAIConnectorButton"
+                  onClick={() => setConnectorFlyoutOpen(true)}
+                >
+                  <FormattedMessage
+                    id="xpack.searchPlayground.emptyPrompts.setUpConnector.btn"
+                    defaultMessage="Set up GenAI connector"
+                  />
+                </EuiButton>
+              </EuiFlexItem>
+            </EuiFlexGroup>
+          </StartChatPanel>
+          {connectorFlyoutOpen && (
+            <ConnectorFlyout
+              featureId={GenerativeAIForSearchPlaygroundConnectorFeatureId}
+              onConnectorCreated={handleConnectorCreated}
+              onClose={() => setConnectorFlyoutOpen(false)}
+            />
+          )}
+        </>
+      )}
+    </>
+  ) : null;
+};
@@ -55,7 +55,7 @@ export const StartChatPanel: React.FC<StartChatPanelProps> = ({

       <EuiSpacer size="s" />

-      <EuiFlexGroup direction="column" gutterSize="xl">
+      <EuiFlexGroup direction="column" gutterSize="l">
         <EuiText size="s">
           <p>{description}</p>
         </EuiText>
@@ -9,8 +9,9 @@ import { EuiButton, EuiFlexGroup, EuiFlexItem, EuiIcon, EuiTitle, useEuiTheme }
 import { FormattedMessage } from '@kbn/i18n-react';
 import React from 'react';
 import { useFormContext } from 'react-hook-form';
+import { useLoadConnectors } from '../hooks/use_load_connectors';
 import { SourcesPanelForStartChat } from './sources_panel/sources_panel_for_start_chat';
-import { SummarizationPanelForStartChat } from './summarization_panel/summarization_panel_for_start_chat';
+import { SetUpConnectorPanelForStartChat } from './set_up_connector_panel_for_start_chat';
 import { ChatFormFields } from '../types';

 const maxWidthPage = 640;

@@ -21,6 +22,7 @@ interface StartNewChatProps {

 export const StartNewChat: React.FC<StartNewChatProps> = ({ onStartClick }) => {
   const { euiTheme } = useEuiTheme();
+  const { data: connectors } = useLoadConnectors();
   const { watch } = useFormContext();

   return (

@@ -51,7 +53,7 @@ export const StartNewChat: React.FC<StartNewChatProps> = ({ onStartClick }) => {
         </EuiFlexItem>

         <EuiFlexItem grow={false}>
-          <SummarizationPanelForStartChat />
+          <SetUpConnectorPanelForStartChat />
         </EuiFlexItem>

         <EuiFlexItem grow={false}>

@@ -63,7 +65,9 @@ export const StartNewChat: React.FC<StartNewChatProps> = ({ onStartClick }) => {
             fill
             iconType="arrowRight"
             iconSide="right"
-            disabled={!watch(ChatFormFields.openAIKey) || !watch(ChatFormFields.indices, []).length}
+            disabled={
+              !watch(ChatFormFields.indices, []).length || !Object.keys(connectors || {}).length
+            }
             onClick={onStartClick}
           >
             <FormattedMessage
@@ -1,17 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
- */
-
-import React from 'react';
-
-export const OpenAIIcon = () => (
-  <svg width="20" height="20" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
-    <path
-      d="M22.282 9.821a5.985 5.985 0 0 0-.516-4.91 6.046 6.046 0 0 0-6.51-2.9A6.065 6.065 0 0 0 4.981 4.18a5.985 5.985 0 0 0-3.998 2.9 6.046 6.046 0 0 0 .743 7.097 5.98 5.98 0 0 0 .51 4.911 6.051 6.051 0 0 0 6.515 2.9A5.985 5.985 0 0 0 13.26 24a6.056 6.056 0 0 0 5.772-4.206 5.99 5.99 0 0 0 3.997-2.9 6.056 6.056 0 0 0-.747-7.073zM13.26 22.43a4.476 4.476 0 0 1-2.876-1.04l.141-.081 4.779-2.758a.795.795 0 0 0 .392-.681v-6.737l2.02 1.168a.071.071 0 0 1 .038.052v5.583a4.504 4.504 0 0 1-4.494 4.494zM3.6 18.304a4.47 4.47 0 0 1-.535-3.014l.142.085 4.783 2.759a.771.771 0 0 0 .78 0l5.843-3.369v2.332a.08.08 0 0 1-.033.062L9.74 19.95a4.5 4.5 0 0 1-6.14-1.646zM2.34 7.896a4.485 4.485 0 0 1 2.366-1.973V11.6a.766.766 0 0 0 .388.676l5.815 3.355-2.02 1.168a.076.076 0 0 1-.071 0l-4.83-2.786A4.504 4.504 0 0 1 2.34 7.872zm16.597 3.855l-5.833-3.387L15.119 7.2a.076.076 0 0 1 .071 0l4.83 2.791a4.494 4.494 0 0 1-.676 8.105v-5.678a.79.79 0 0 0-.407-.667zm2.01-3.023l-.141-.085-4.774-2.782a.776.776 0 0 0-.785 0L9.409 9.23V6.897a.066.066 0 0 1 .028-.061l4.83-2.787a4.5 4.5 0 0 1 6.68 4.66zm-12.64 4.135l-2.02-1.164a.08.08 0 0 1-.038-.057V6.075a4.5 4.5 0 0 1 7.375-3.453l-.142.08L8.704 5.46a.795.795 0 0 0-.393.681zm1.097-2.365l2.602-1.5 2.607 1.5v2.999l-2.597 1.5-2.607-1.5z"
-      fill="currentColor"
-    />
-  </svg>
-);
@@ -1,40 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
- */
-
-import React from 'react';
-
-import { EuiButton, EuiCallOut } from '@elastic/eui';
-
-import { i18n } from '@kbn/i18n';
-
-interface OpenAIKeyCalloutProps {
-  openAIFlyOutOpen: () => void;
-}
-
-export const OpenAIKeyCallout: React.FC<OpenAIKeyCalloutProps> = ({ openAIFlyOutOpen }) => {
-  return (
-    <EuiCallOut
-      title={i18n.translate('xpack.searchPlayground.sidebar.openAICallout.headerText', {
-        defaultMessage: 'Add OpenAI API Key',
-      })}
-      color="warning"
-      iconType="warning"
-    >
-      <p>
-        {i18n.translate('xpack.searchPlayground.sidebar.openAICallout.description', {
-          defaultMessage:
-            'The AI Playground uses OpenAl models for summarization. Add your OpenAI API key to continue.',
-        })}
-      </p>
-      <EuiButton onClick={openAIFlyOutOpen} color="warning" fill data-test-subj="openaiflyout-open">
-        {i18n.translate('xpack.searchPlayground.sidebar.openAICallout.buttonLabelText', {
-          defaultMessage: 'Add OpenAI API Key',
-        })}
-      </EuiButton>
-    </EuiCallOut>
-  );
-};
@@ -1,65 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
- */
-
-import { EuiButton, EuiFieldPassword, EuiFlexGroup, EuiFormRow, keys } from '@elastic/eui';
-import React from 'react';
-import { i18n } from '@kbn/i18n';
-import { useFormContext } from 'react-hook-form';
-import { FormattedMessage } from '@kbn/i18n-react';
-import { ChatFormFields } from '../../types';
-
-export const OpenAIKeyField: React.FC = () => {
-  const [openAITempValue, setOpenAITempValue] = React.useState('');
-  const { setValue, watch } = useFormContext();
-  const openAIKey = watch(ChatFormFields.openAIKey);
-  const handleSaveValue = () => {
-    if (openAITempValue) {
-      setValue(ChatFormFields.openAIKey, openAITempValue);
-    }
-  };
-
-  return (
-    <EuiFormRow
-      label={i18n.translate('xpack.searchPlayground.summarization.openAI.labelTitle', {
-        defaultMessage: 'OpenAI API Key',
-      })}
-      fullWidth
-    >
-      <EuiFlexGroup>
-        <EuiFieldPassword
-          fullWidth
-          placeholder={i18n.translate('xpack.searchPlayground.sidebar.openAIFlyOut.placeholder', {
-            defaultMessage: 'Enter API Key here',
-          })}
-          value={openAITempValue}
-          onKeyUp={({ key }) => {
-            if (keys.ENTER === key) {
-              handleSaveValue();
-            }
-          }}
-          onChange={(e) => setOpenAITempValue(e.target.value)}
-        />
-
-        {openAIKey && openAIKey === openAITempValue ? (
-          <EuiButton color="success" iconType="check">
-            <FormattedMessage
-              id="xpack.searchPlayground.summarization.openAI.savedButton"
-              defaultMessage="Saved"
-            />
-          </EuiButton>
-        ) : (
-          <EuiButton type="submit" disabled={!openAITempValue} onClick={handleSaveValue}>
-            <FormattedMessage
-              id="xpack.searchPlayground.summarization.openAI.saveButton"
-              defaultMessage="Save"
-            />
-          </EuiButton>
-        )}
-      </EuiFlexGroup>
-    </EuiFormRow>
-  );
-};
@@ -1,123 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
- */
-
-import {
-  EuiButton,
-  EuiButtonEmpty,
-  EuiFieldPassword,
-  EuiFlexGroup,
-  EuiFlexItem,
-  EuiFlyout,
-  EuiFlyoutBody,
-  EuiFlyoutFooter,
-  EuiFlyoutHeader,
-  EuiFormRow,
-  EuiLink,
-  EuiSpacer,
-  EuiText,
-  EuiTitle,
-} from '@elastic/eui';
-import { i18n } from '@kbn/i18n';
-import { FormattedMessage } from '@kbn/i18n-react';
-import React, { useState } from 'react';
-
-export interface OpenAIKeyFlyOutProps {
-  openAPIKey: string;
-  onClose: () => void;
-  onSave: (key: string) => void;
-}
-
-export const OpenAIKeyFlyOut: React.FC<OpenAIKeyFlyOutProps> = ({
-  openAPIKey,
-  onClose,
-  onSave,
-}) => {
-  const [apiKey, setApiKey] = useState<string>(openAPIKey);
-
-  const handleSave = () => {
-    onSave(apiKey);
-    onClose();
-  };
-
-  return (
-    <EuiFlyout onClose={onClose} size="m">
-      <EuiFlyoutHeader hasBorder>
-        <EuiTitle size="m">
-          <h3>
-            {i18n.translate('xpack.searchPlayground.sidebar.openAIFlyOut.headerTitle', {
-              defaultMessage: 'OpenAI API Key',
-            })}
-          </h3>
-        </EuiTitle>
-        <EuiSpacer size="s" />
-      </EuiFlyoutHeader>
-      <EuiFlyoutBody>
-        <EuiFlexGroup direction="column" gutterSize="l">
-          <EuiFormRow
-            fullWidth
-            label={i18n.translate('xpack.searchPlayground.sidebar.openAIFlyOut.labelTitle', {
-              defaultMessage: 'OpenAI API Key',
-            })}
-            labelAppend={
-              <EuiText size="xs">
-                <EuiLink target="_blank" href="https://platform.openai.com/api-keys">
-                  <FormattedMessage
-                    id="xpack.searchPlayground.sidebar.openAIFlyOut.linkTitle"
-                    defaultMessage="OpenAI API Keys"
-                  />
-                </EuiLink>
-              </EuiText>
-            }
-          >
-            <EuiFlexItem grow>
-              <EuiFieldPassword
-                fullWidth
-                placeholder={i18n.translate(
-                  'xpack.searchPlayground.sidebar.openAIFlyOut.placeholder',
-                  {
-                    defaultMessage: 'Enter API Key here',
-                  }
-                )}
-                value={apiKey}
-                onChange={(e) => setApiKey(e.target.value)}
-              />
-            </EuiFlexItem>
-          </EuiFormRow>
-        </EuiFlexGroup>
-      </EuiFlyoutBody>
-      <EuiFlyoutFooter>
-        <EuiFlexGroup>
-          <EuiFlexItem grow={false}>
-            <EuiButtonEmpty
-              data-telemetry-id="entSearchAIPlayground-addingOpenAIKey-cancel"
-              onClick={onClose}
-            >
-              <FormattedMessage
-                id="xpack.searchPlayground.sidebar.openAIFlyOut.cancelButtonLabel"
-                defaultMessage="Cancel"
-              />
-            </EuiButtonEmpty>
-          </EuiFlexItem>
-          <EuiFlexItem />
-          <EuiFlexItem grow={false}>
-            <EuiButton
-              isDisabled={!apiKey.trim()}
-              data-telemetry-id="entSearchAIPlayground-addingOpenAIKey-save"
-              fill
-              onClick={handleSave}
-            >
-              <FormattedMessage
-                id="xpack.searchPlayground.sidebar.openAIFlyOut.saveButtonLabel"
-                defaultMessage="Save"
-              />
-            </EuiButton>
-          </EuiFlexItem>
-        </EuiFlexGroup>
-      </EuiFlyoutFooter>
-    </EuiFlyout>
-  );
-};
@ -1,102 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import React from 'react';
|
||||
|
||||
import {
|
||||
EuiButtonEmpty,
|
||||
EuiFlexGroup,
|
||||
EuiFlexItem,
|
||||
EuiFormRow,
|
||||
EuiIcon,
|
||||
EuiSuperSelect,
|
||||
EuiSuperSelectOption,
|
||||
EuiToolTip,
|
||||
} from '@elastic/eui';
|
||||
|
||||
import { i18n } from '@kbn/i18n';
|
||||
import { SummarizationModelName } from '../../types';
|
||||
|
||||
import { OpenAIIcon } from './open_ai_icon';
|
||||
|
||||
const renderSelectOptions = (label: string) => (
|
||||
<EuiFlexGroup alignItems="center">
|
||||
<EuiFlexItem grow={false}>
|
||||
<EuiIcon type={OpenAIIcon} />
|
||||
</EuiFlexItem>
|
||||
<EuiFlexItem>{label}</EuiFlexItem>
|
||||
</EuiFlexGroup>
|
||||
);
|
||||
|
||||
const SummarizationModel: Array<EuiSuperSelectOption<string>> = [
|
||||
{
|
||||
value: SummarizationModelName.gpt3_5,
|
||||
inputDisplay: renderSelectOptions(SummarizationModelName.gpt3_5),
|
||||
},
|
||||
{
|
||||
value: SummarizationModelName.gpt3_5_turbo_1106,
|
||||
inputDisplay: renderSelectOptions(SummarizationModelName.gpt3_5_turbo_1106),
|
||||
},
|
||||
{
|
||||
value: SummarizationModelName.gpt3_5_turbo_16k,
|
||||
inputDisplay: renderSelectOptions(SummarizationModelName.gpt3_5_turbo_16k),
|
||||
},
|
||||
{
|
||||
value: SummarizationModelName.gpt3_5_turbo_16k_0613,
|
||||
inputDisplay: renderSelectOptions(SummarizationModelName.gpt3_5_turbo_16k_0613),
|
||||
},
|
||||
{
|
||||
value: SummarizationModelName.gpt3_5_turbo,
|
||||
inputDisplay: renderSelectOptions(SummarizationModelName.gpt3_5_turbo),
|
||||
},
|
||||
];
|
||||
|
||||
interface OpenAISummarizationModelProps {
|
||||
openAIFlyOutOpen: () => void;
|
||||
model: string;
|
||||
onSelect: (key: string) => void;
|
||||
}
|
||||
|
||||
export const OpenAISummarizationModel: React.FC<OpenAISummarizationModelProps> = ({
|
||||
model = SummarizationModelName.gpt3_5_turbo_1106,
|
||||
onSelect,
|
||||
openAIFlyOutOpen,
|
||||
}) => {
|
||||
const onChange = (value: string) => {
|
||||
onSelect(value);
|
||||
};
|
||||
|
||||
return (
|
||||
<EuiFormRow
|
||||
label={
|
||||
<EuiToolTip
|
||||
content={i18n.translate('xpack.searchPlayground.sidebar.summarizationModel.help', {
|
||||
defaultMessage: 'The large language model used to summarize your documents.',
|
||||
})}
|
||||
>
|
||||
<>
|
||||
<span>
|
||||
{i18n.translate('xpack.searchPlayground.sidebar.summarizationModel.label', {
|
||||
defaultMessage: 'Summarization Model',
|
||||
})}
|
||||
</span>
|
||||
<EuiIcon type="questionInCircle" color="subdued" />
|
||||
</>
|
||||
</EuiToolTip>
|
||||
}
|
||||
labelAppend={
|
||||
<EuiButtonEmpty flush="both" size="xs" onClick={() => openAIFlyOutOpen()}>
|
||||
{i18n.translate('xpack.searchPlayground.sidebar.summarizationModel.editLabel', {
|
||||
defaultMessage: 'Edit OpenAI API key',
|
||||
})}
|
||||
</EuiButtonEmpty>
|
||||
}
|
||||
>
|
||||
<EuiSuperSelect options={SummarizationModel} valueOfSelected={model} onChange={onChange} />
|
||||
</EuiFormRow>
|
||||
);
|
||||
};
|
|
@ -0,0 +1,46 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import React from 'react';
|
||||
import { render as testingLibraryRender } from '@testing-library/react';
|
||||
import { SummarizationModel } from './summarization_model';
|
||||
import { useManagementLink } from '../../hooks/use_management_link';
|
||||
import { __IntlProvider as IntlProvider } from '@kbn/i18n-react';
|
||||
|
||||
const render = (children: React.ReactNode) =>
|
||||
testingLibraryRender(<IntlProvider locale="en">{children}</IntlProvider>);
|
||||
const MockIcon = () => <span />;
|
||||
|
||||
jest.mock('../../hooks/use_management_link');
|
||||
|
||||
const mockUseManagementLink = useManagementLink as jest.Mock;
|
||||
|
||||
describe('SummarizationModel', () => {
|
||||
beforeEach(() => {
|
||||
mockUseManagementLink.mockReturnValue('http://example.com/manage-connectors');
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
jest.resetAllMocks();
|
||||
});
|
||||
|
||||
it('renders correctly with models', () => {
|
||||
const models = [
|
||||
{ name: 'Model1', disabled: false, icon: MockIcon, connectorId: 'connector1' },
|
||||
{ name: 'Model2', disabled: true, icon: MockIcon, connectorId: 'connector2' },
|
||||
];
|
||||
const { getByTestId } = render(
|
||||
<SummarizationModel selectedModel={models[1]} models={models} onSelect={jest.fn()} />
|
||||
);
|
||||
|
||||
expect(getByTestId('summarizationModelSelect')).toBeInTheDocument();
|
||||
expect(getByTestId('manageConnectorsLink')).toHaveAttribute(
|
||||
'href',
|
||||
'http://example.com/manage-connectors'
|
||||
);
|
||||
});
|
||||
});
|
|
@ -0,0 +1,98 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import React, { useMemo } from 'react';
|
||||
|
||||
import {
|
||||
EuiFlexGroup,
|
||||
EuiFlexItem,
|
||||
EuiFormRow,
|
||||
EuiIcon,
|
||||
EuiIconTip,
|
||||
EuiLink,
|
||||
EuiSuperSelect,
|
||||
EuiText,
|
||||
} from '@elastic/eui';
|
||||
|
||||
import { i18n } from '@kbn/i18n';
|
||||
import { FormattedMessage } from '@kbn/i18n-react';
|
||||
import { EuiSuperSelectOption } from '@elastic/eui/src/components/form/super_select/super_select_control';
|
||||
import type { LLMModel } from '../../types';
|
||||
import { useManagementLink } from '../../hooks/use_management_link';
|
||||
|
||||
interface SummarizationModelProps {
|
||||
selectedModel: LLMModel;
|
||||
onSelect: (model: LLMModel) => void;
|
||||
models: LLMModel[];
|
||||
}
|
||||
|
||||
export const SummarizationModel: React.FC<SummarizationModelProps> = ({
|
||||
selectedModel,
|
||||
models,
|
||||
onSelect,
|
||||
}) => {
|
||||
const managementLink = useManagementLink();
|
||||
const onChange = (modelName: string) => {
|
||||
const model = models.find(({ name }) => name === modelName);
|
||||
|
||||
if (model) {
|
||||
onSelect(model);
|
||||
}
|
||||
};
|
||||
|
||||
const modelsOption: Array<EuiSuperSelectOption<string>> = useMemo(
|
||||
() =>
|
||||
models.map(({ name, disabled, icon, connectorId }) => ({
|
||||
value: name,
|
||||
disabled,
|
||||
inputDisplay: (
|
||||
<EuiFlexGroup alignItems="center">
|
||||
<EuiFlexItem grow={false}>
|
||||
<EuiIcon type={icon} />
|
||||
</EuiFlexItem>
|
||||
<EuiFlexItem>{name}</EuiFlexItem>
|
||||
</EuiFlexGroup>
|
||||
),
|
||||
})),
|
||||
[models]
|
||||
);
|
||||
|
||||
return (
|
||||
<EuiFormRow
|
||||
label={
|
||||
<>
|
||||
<FormattedMessage
|
||||
id="xpack.searchPlayground.sidebar.summarizationModel.label"
|
||||
defaultMessage="Summarization Model"
|
||||
/>{' '}
|
||||
<EuiIconTip
|
||||
content={i18n.translate('xpack.searchPlayground.sidebar.summarizationModel.help', {
|
||||
defaultMessage: 'The large language model used to summarize your documents.',
|
||||
})}
|
||||
/>
|
||||
</>
|
||||
}
|
||||
labelAppend={
|
||||
<EuiText size="xs">
|
||||
<EuiLink target="_blank" href={managementLink} data-test-subj="manageConnectorsLink">
|
||||
<FormattedMessage
|
||||
id="xpack.searchPlayground.sidebar.summarizationModel.manageConnectors"
|
||||
defaultMessage="Manage GenAI connectors"
|
||||
/>
|
||||
</EuiLink>
|
||||
</EuiText>
|
||||
}
|
||||
>
|
||||
<EuiSuperSelect
|
||||
data-test-subj="summarizationModelSelect"
|
||||
options={modelsOption}
|
||||
valueOfSelected={selectedModel.name}
|
||||
onChange={onChange}
|
||||
/>
|
||||
</EuiFormRow>
|
||||
);
|
||||
};
|
|
@ -5,51 +5,32 @@
|
|||
* 2.0.
|
||||
*/
|
||||
|
||||
import React, { useState } from 'react';
|
||||
import React from 'react';
|
||||
import { Controller, useFormContext } from 'react-hook-form';
|
||||
|
||||
import { useLLMsModels } from '../../hooks/use_llms_models';
|
||||
import { IncludeCitationsField } from './include_citations_field';
|
||||
import { InstructionsField } from './instructions_field';
|
||||
import { ChatFormFields } from '../../types';
|
||||
import { OpenAIKeyFlyOut } from './open_ai_key_flyout';
|
||||
import { OpenAISummarizationModel } from './open_ai_summarization_model';
|
||||
import { ChatForm, ChatFormFields } from '../../types';
|
||||
import { SummarizationModel } from './summarization_model';
|
||||
|
||||
export const SummarizationPanel: React.FC = () => {
|
||||
const { control } = useFormContext();
|
||||
const [isOpenAIFlyOutOpen, setIsOpenAIFlyOutOpen] = useState<boolean>(false);
|
||||
|
||||
const onCloseOpenAIFlyOut = () => {
|
||||
setIsOpenAIFlyOutOpen(!isOpenAIFlyOutOpen);
|
||||
};
|
||||
const handleOpenAIFlyOut = () => {
|
||||
setIsOpenAIFlyOutOpen(true);
|
||||
};
|
||||
const { control } = useFormContext<ChatForm>();
|
||||
const models = useLLMsModels();
|
||||
const defaultModel = models.find((model) => !model.disabled);
|
||||
|
||||
return (
|
||||
<>
|
||||
{isOpenAIFlyOutOpen && (
|
||||
<Controller
|
||||
name={ChatFormFields.openAIKey}
|
||||
control={control}
|
||||
defaultValue=""
|
||||
render={({ field }) => (
|
||||
<OpenAIKeyFlyOut
|
||||
openAPIKey={field.value}
|
||||
onSave={field.onChange}
|
||||
onClose={onCloseOpenAIFlyOut}
|
||||
/>
|
||||
)}
|
||||
/>
|
||||
)}
|
||||
|
||||
<Controller
|
||||
name={ChatFormFields.summarizationModel}
|
||||
defaultValue={defaultModel}
|
||||
rules={{ required: true }}
|
||||
control={control}
|
||||
render={({ field }) => (
|
||||
<OpenAISummarizationModel
|
||||
model={field.value}
|
||||
onSelect={field.onChange}
|
||||
openAIFlyOutOpen={handleOpenAIFlyOut}
|
||||
<SummarizationModel
|
||||
selectedModel={field.value}
|
||||
onSelect={(model) => field.onChange(model)}
|
||||
models={models}
|
||||
/>
|
||||
)}
|
||||
/>
|
||||
|
@ -57,7 +38,8 @@ export const SummarizationPanel: React.FC = () => {
|
|||
<Controller
|
||||
name={ChatFormFields.prompt}
|
||||
control={control}
|
||||
defaultValue=""
|
||||
rules={{ required: true }}
|
||||
defaultValue="You are an assistant for question-answering tasks."
|
||||
render={({ field }) => <InstructionsField value={field.value} onChange={field.onChange} />}
|
||||
/>
|
||||
|
||||
|
|
|
@ -1,48 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import React from 'react';
|
||||
import { i18n } from '@kbn/i18n';
|
||||
import { EuiLink } from '@elastic/eui';
|
||||
import { FormattedMessage } from '@kbn/i18n-react';
|
||||
import { useFormContext } from 'react-hook-form';
|
||||
import { OpenAIKeyField } from './open_ai_key_field';
|
||||
import { StartChatPanel } from '../start_chat_panel';
|
||||
import { ChatFormFields } from '../../types';
|
||||
|
||||
const openAIApiKeyLink = 'https://platform.openai.com/api-keys';
|
||||
|
||||
export const SummarizationPanelForStartChat: React.FC = () => {
|
||||
const { watch } = useFormContext();
|
||||
|
||||
return (
|
||||
<StartChatPanel
|
||||
title={i18n.translate('xpack.searchPlayground.emptyPrompts.summarization.title', {
|
||||
defaultMessage: 'Enable summarization models',
|
||||
})}
|
||||
description={
|
||||
<FormattedMessage
|
||||
id="xpack.searchPlayground.emptyPrompts.summarization.description"
|
||||
defaultMessage="The AI Playground uses OpenAl models for summarization. Find or create your api key in OpenAI’s {link}"
|
||||
values={{
|
||||
link: (
|
||||
<EuiLink href={openAIApiKeyLink} target="_blank" external>
|
||||
<FormattedMessage
|
||||
id="xpack.searchPlayground.emptyPrompts.summarization.description.linkText"
|
||||
defaultMessage="API keys dashboard"
|
||||
/>
|
||||
</EuiLink>
|
||||
),
|
||||
}}
|
||||
/>
|
||||
}
|
||||
isValid={watch(ChatFormFields.openAIKey)}
|
||||
>
|
||||
<OpenAIKeyField />
|
||||
</StartChatPanel>
|
||||
);
|
||||
};
|
|
@ -17,7 +17,7 @@ describe('PY_LANG_CLIENT function', () => {
|
|||
const formValues = {
|
||||
elasticsearch_query: { query: {} },
|
||||
indices: ['index1', 'index2'],
|
||||
docSize: 10,
|
||||
doc_size: 10,
|
||||
source_fields: { index1: ['field1'], index2: ['field2'] },
|
||||
prompt: 'Your prompt',
|
||||
citations: true,
|
||||
|
|
|
@ -39,7 +39,7 @@ def get_elasticsearch_results(query):
|
|||
es_query = ${getESQuery(formValues.elasticsearch_query.query)}
|
||||
|
||||
result = es.search(index="${formValues.indices.join(',')}", query=es_query, size=${
|
||||
formValues.docSize
|
||||
formValues.doc_size
|
||||
})
|
||||
return result["hits"]["hits"]
|
||||
|
||||
|
|
|
@ -5,36 +5,29 @@
|
|||
* 2.0.
|
||||
*/
|
||||
|
||||
import React, { ComponentType, lazy, LazyExoticComponent, Suspense } from 'react';
|
||||
import type { App } from './components/app';
|
||||
import { PlaygroundProviderProps } from './providers/playground_provider';
|
||||
import type { Toolbar } from './components/toolbar';
|
||||
import React from 'react';
|
||||
import { dynamic } from '@kbn/shared-ux-utility';
|
||||
import { KibanaContextProvider } from '@kbn/kibana-react-plugin/public';
|
||||
import { CoreStart } from '@kbn/core-lifecycle-browser';
|
||||
import { AppPluginStartDependencies } from './types';
|
||||
|
||||
const lazyRender =
|
||||
<P extends {}>(
|
||||
Component: LazyExoticComponent<ComponentType<P>>
|
||||
): React.FC<React.ComponentProps<typeof Component>> =>
|
||||
(props) =>
|
||||
export const Playground = dynamic(async () => ({
|
||||
default: (await import('./components/app')).App,
|
||||
}));
|
||||
|
||||
export const PlaygroundToolbar = dynamic(async () => ({
|
||||
default: (await import('./components/toolbar')).Toolbar,
|
||||
}));
|
||||
|
||||
export const PlaygroundProvider = dynamic(async () => ({
|
||||
default: (await import('./providers/playground_provider')).PlaygroundProvider,
|
||||
}));
|
||||
|
||||
export const getPlaygroundProvider =
|
||||
(core: CoreStart, services: AppPluginStartDependencies) =>
|
||||
(props: React.ComponentProps<typeof PlaygroundProvider>) =>
|
||||
(
|
||||
<Suspense fallback={null}>
|
||||
<Component {...props} />
|
||||
</Suspense>
|
||||
<KibanaContextProvider services={{ ...core, ...services }}>
|
||||
<PlaygroundProvider {...props} />
|
||||
</KibanaContextProvider>
|
||||
);
|
||||
|
||||
export const Playground = lazyRender<React.ComponentProps<typeof App>>(
|
||||
lazy<typeof App>(async () => ({
|
||||
default: (await import('./components/app')).App,
|
||||
}))
|
||||
);
|
||||
|
||||
export const PlaygroundToolbar = lazyRender<React.ComponentProps<typeof Toolbar>>(
|
||||
lazy<typeof Toolbar>(async () => ({
|
||||
default: (await import('./components/toolbar')).Toolbar,
|
||||
}))
|
||||
);
|
||||
|
||||
export const PlaygroundProvider = lazyRender<PlaygroundProviderProps>(
|
||||
lazy(async () => ({
|
||||
default: (await import('./providers/playground_provider')).PlaygroundProvider,
|
||||
}))
|
||||
);
|
||||
|
|
|
@ -0,0 +1,130 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { renderHook } from '@testing-library/react-hooks';
|
||||
import { useLoadConnectors } from './use_load_connectors';
|
||||
import { useLLMsModels } from './use_llms_models';
|
||||
import { LLMs } from '../types';
|
||||
|
||||
jest.mock('./use_load_connectors', () => ({
|
||||
useLoadConnectors: jest.fn(),
|
||||
}));
|
||||
|
||||
const mockConnectors = {
|
||||
[LLMs.openai]: { id: 'connectorId1', title: 'OpenAI Connector' },
|
||||
};
|
||||
const mockUseLoadConnectors = (data: any) => {
|
||||
(useLoadConnectors as jest.Mock).mockReturnValue({ data });
|
||||
};
|
||||
|
||||
describe('useLLMsModels Hook', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('returns LLMModels with connectors available', () => {
|
||||
mockUseLoadConnectors(mockConnectors);
|
||||
|
||||
const { result } = renderHook(() => useLLMsModels());
|
||||
|
||||
expect(result.current).toEqual([
|
||||
{
|
||||
connectorId: undefined,
|
||||
disabled: true,
|
||||
icon: expect.any(Function),
|
||||
name: 'Azure OpenAI',
|
||||
value: undefined,
|
||||
},
|
||||
{
|
||||
connectorId: 'connectorId1',
|
||||
disabled: false,
|
||||
icon: expect.any(Function),
|
||||
name: 'gpt-3.5-turbo',
|
||||
value: 'gpt-3.5-turbo',
|
||||
},
|
||||
{
|
||||
connectorId: 'connectorId1',
|
||||
disabled: false,
|
||||
icon: expect.any(Function),
|
||||
name: 'gpt-3.5-turbo-1106',
|
||||
value: 'gpt-3.5-turbo-1106',
|
||||
},
|
||||
{
|
||||
connectorId: 'connectorId1',
|
||||
disabled: false,
|
||||
icon: expect.any(Function),
|
||||
name: 'gpt-3.5-turbo-16k',
|
||||
value: 'gpt-3.5-turbo-16k',
|
||||
},
|
||||
{
|
||||
connectorId: 'connectorId1',
|
||||
disabled: false,
|
||||
icon: expect.any(Function),
|
||||
name: 'gpt-3.5-turbo-16k-0613',
|
||||
value: 'gpt-3.5-turbo-16k-0613',
|
||||
},
|
||||
{
|
||||
connectorId: 'connectorId1',
|
||||
disabled: false,
|
||||
icon: expect.any(Function),
|
||||
name: 'gpt-3.5-turbo-instruct',
|
||||
value: 'gpt-3.5-turbo-instruct',
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('returns LLMModels as disabled when no connectors are available', () => {
|
||||
mockUseLoadConnectors({});
|
||||
|
||||
const { result } = renderHook(() => useLLMsModels());
|
||||
|
||||
expect(result.current).toEqual([
|
||||
{
|
||||
connectorId: undefined,
|
||||
disabled: true,
|
||||
icon: expect.any(Function),
|
||||
name: 'Azure OpenAI',
|
||||
value: undefined,
|
||||
},
|
||||
{
|
||||
connectorId: undefined,
|
||||
disabled: true,
|
||||
icon: expect.any(Function),
|
||||
name: 'gpt-3.5-turbo',
|
||||
value: 'gpt-3.5-turbo',
|
||||
},
|
||||
{
|
||||
connectorId: undefined,
|
||||
disabled: true,
|
||||
icon: expect.any(Function),
|
||||
name: 'gpt-3.5-turbo-1106',
|
||||
value: 'gpt-3.5-turbo-1106',
|
||||
},
|
||||
{
|
||||
connectorId: undefined,
|
||||
disabled: true,
|
||||
icon: expect.any(Function),
|
||||
name: 'gpt-3.5-turbo-16k',
|
||||
value: 'gpt-3.5-turbo-16k',
|
||||
},
|
||||
{
|
||||
connectorId: undefined,
|
||||
disabled: true,
|
||||
icon: expect.any(Function),
|
||||
name: 'gpt-3.5-turbo-16k-0613',
|
||||
value: 'gpt-3.5-turbo-16k-0613',
|
||||
},
|
||||
{
|
||||
connectorId: undefined,
|
||||
disabled: true,
|
||||
icon: expect.any(Function),
|
||||
name: 'gpt-3.5-turbo-instruct',
|
||||
value: 'gpt-3.5-turbo-instruct',
|
||||
},
|
||||
]);
|
||||
});
|
||||
});
|
|
@ -0,0 +1,54 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { i18n } from '@kbn/i18n';
|
||||
import { OpenAILogo } from '@kbn/stack-connectors-plugin/public/common';
|
||||
import { ComponentType } from 'react';
|
||||
import { LLMs } from '../../common/types';
|
||||
import { LLMModel, SummarizationModelName } from '../types';
|
||||
import { useLoadConnectors } from './use_load_connectors';
|
||||
|
||||
const llmModels: Array<{
|
||||
llm: LLMs;
|
||||
icon: ComponentType;
|
||||
models: Array<{ label: string; value?: string }>;
|
||||
}> = [
|
||||
{
|
||||
llm: LLMs.openai_azure,
|
||||
icon: OpenAILogo,
|
||||
models: [
|
||||
{
|
||||
label: i18n.translate('xpack.searchPlayground.openAIAzureModel', {
|
||||
defaultMessage: 'Azure OpenAI',
|
||||
}),
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
llm: LLMs.openai,
|
||||
icon: OpenAILogo,
|
||||
models: Object.values(SummarizationModelName).map((model) => ({ label: model, value: model })),
|
||||
},
|
||||
];
|
||||
|
||||
export const useLLMsModels = (): LLMModel[] => {
|
||||
const { data: connectors } = useLoadConnectors();
|
||||
|
||||
return llmModels.reduce<LLMModel[]>(
|
||||
(result, { llm, icon, models }) => [
|
||||
...result,
|
||||
...models.map(({ label, value }) => ({
|
||||
name: label,
|
||||
value,
|
||||
icon,
|
||||
disabled: !connectors?.[llm],
|
||||
connectorId: connectors?.[llm]?.id,
|
||||
})),
|
||||
],
|
||||
[]
|
||||
);
|
||||
};
|
|
@ -0,0 +1,113 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { loadAllActions as loadConnectors } from '@kbn/triggers-actions-ui-plugin/public/common/constants';
|
||||
import { useLoadConnectors } from './use_load_connectors';
|
||||
import { useKibana } from './use_kibana';
|
||||
import { act, renderHook } from '@testing-library/react-hooks';
|
||||
import { OpenAiProviderType } from '@kbn/stack-connectors-plugin/common/openai/constants';
|
||||
|
||||
const mockedLoadConnectors = loadConnectors as jest.Mock;
|
||||
const mockedUseKibana = useKibana as jest.Mock;
|
||||
|
||||
jest.mock('@tanstack/react-query', () => ({
|
||||
useQuery: jest.fn().mockImplementation(async (queryKey, fn, opts) => {
|
||||
try {
|
||||
const res = await fn();
|
||||
return Promise.resolve(res);
|
||||
} catch (e) {
|
||||
opts.onError(e);
|
||||
}
|
||||
}),
|
||||
}));
|
||||
|
||||
jest.mock('@kbn/triggers-actions-ui-plugin/public/common/constants', () => ({
|
||||
loadAllActions: jest.fn(),
|
||||
}));
|
||||
|
||||
jest.mock('./use_kibana', () => ({
|
||||
useKibana: jest.fn().mockReturnValue({
|
||||
services: {
|
||||
http: {},
|
||||
notifications: {
|
||||
toasts: {
|
||||
addError: jest.fn(),
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
}));
|
||||
|
||||
describe('useLoadConnectors', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('successfully loads and transforms connectors', async () => {
|
||||
const connectors = [
|
||||
{
|
||||
id: '1',
|
||||
actionTypeId: '.gen-ai',
|
||||
isMissingSecrets: false,
|
||||
config: { apiProvider: OpenAiProviderType.OpenAi },
|
||||
},
|
||||
{
|
||||
id: '2',
|
||||
actionTypeId: 'slack',
|
||||
isMissingSecrets: false,
|
||||
},
|
||||
{
|
||||
id: '3',
|
||||
actionTypeId: '.gen-ai',
|
||||
isMissingSecrets: false,
|
||||
config: { apiProvider: OpenAiProviderType.AzureAi },
|
||||
},
|
||||
];
|
||||
mockedLoadConnectors.mockResolvedValue(connectors);
|
||||
|
||||
await act(async () => {
|
||||
const { result, waitForNextUpdate } = renderHook(() => useLoadConnectors());
|
||||
await waitForNextUpdate();
|
||||
|
||||
await expect(result.current).resolves.toStrictEqual({
|
||||
openai: {
|
||||
actionTypeId: '.gen-ai',
|
||||
config: {
|
||||
apiProvider: 'OpenAI',
|
||||
},
|
||||
id: '1',
|
||||
isMissingSecrets: false,
|
||||
title: 'OpenAI',
|
||||
},
|
||||
openai_azure: {
|
||||
actionTypeId: '.gen-ai',
|
||||
config: {
|
||||
apiProvider: 'Azure OpenAI',
|
||||
},
|
||||
id: '3',
|
||||
isMissingSecrets: false,
|
||||
title: 'OpenAI Azure',
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('handles errors correctly', async () => {
|
||||
const error = new Error('Test Error');
|
||||
mockedLoadConnectors.mockRejectedValue(error);
|
||||
|
||||
await act(async () => {
|
||||
const { waitForNextUpdate } = renderHook(() => useLoadConnectors());
|
||||
await waitForNextUpdate();
|
||||
|
||||
expect(mockedUseKibana().services.notifications.toasts.addError).toHaveBeenCalledWith(
|
||||
error,
|
||||
expect.any(Object)
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
|
@ -0,0 +1,111 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import type { UseQueryResult } from '@tanstack/react-query';
|
||||
import { useQuery } from '@tanstack/react-query';
|
||||
import type { ServerError } from '@kbn/cases-plugin/public/types';
|
||||
import { ActionConnector } from '@kbn/triggers-actions-ui-plugin/public';
|
||||
import { loadAllActions as loadConnectors } from '@kbn/triggers-actions-ui-plugin/public/common/constants';
|
||||
import type { IHttpFetchError } from '@kbn/core-http-browser';
|
||||
import { i18n } from '@kbn/i18n';
|
||||
import {
|
||||
OPENAI_CONNECTOR_ID,
|
||||
OpenAiProviderType,
|
||||
} from '@kbn/stack-connectors-plugin/public/common';
|
||||
import { UserConfiguredActionConnector } from '@kbn/triggers-actions-ui-plugin/public/types';
|
||||
import { useKibana } from './use_kibana';
|
||||
import { LLMs } from '../types';
|
||||
|
||||
const QUERY_KEY = ['search-playground, load-connectors'];
|
||||
|
||||
type OpenAIConnector = UserConfiguredActionConnector<
|
||||
{ apiProvider: OpenAiProviderType },
|
||||
Record<string, unknown>
|
||||
>;
|
||||
|
||||
const mapLLMToActionParam: Record<
|
||||
LLMs,
|
||||
{
|
||||
actionId: string;
|
||||
actionProvider?: string;
|
||||
match: (connector: ActionConnector) => boolean;
|
||||
transform: (connector: ActionConnector) => PlaygroundConnector;
|
||||
}
|
||||
> = {
|
||||
[LLMs.openai_azure]: {
|
||||
actionId: OPENAI_CONNECTOR_ID,
|
||||
actionProvider: OpenAiProviderType.AzureAi,
|
||||
match: (connector) =>
|
||||
(connector as OpenAIConnector).config.apiProvider === OpenAiProviderType.AzureAi,
|
||||
transform: (connector) => ({
|
||||
...connector,
|
||||
title: i18n.translate('xpack.searchPlayground.openAIAzureConnectorTitle', {
|
||||
defaultMessage: 'OpenAI Azure',
|
||||
}),
|
||||
}),
|
||||
},
|
||||
[LLMs.openai]: {
|
||||
actionId: OPENAI_CONNECTOR_ID,
|
||||
match: (connector) =>
|
||||
(connector as OpenAIConnector).config.apiProvider === OpenAiProviderType.OpenAi,
|
||||
transform: (connector) => ({
|
||||
...connector,
|
||||
title: i18n.translate('xpack.searchPlayground.openAIConnectorTitle', {
|
||||
defaultMessage: 'OpenAI',
|
||||
}),
|
||||
}),
|
||||
},
|
||||
};
|
||||
|
||||
type PlaygroundConnector = ActionConnector & { title: string };
|
||||
|
||||
export const useLoadConnectors = (): UseQueryResult<
|
||||
Record<LLMs, PlaygroundConnector>,
|
||||
IHttpFetchError
|
||||
> => {
|
||||
const {
|
||||
services: { http, notifications },
|
||||
} = useKibana();
|
||||
|
||||
return useQuery(
|
||||
QUERY_KEY,
|
||||
async () => {
|
||||
const queryResult = await loadConnectors({ http });
|
||||
|
||||
return Object.entries(mapLLMToActionParam).reduce<Partial<Record<LLMs, PlaygroundConnector>>>(
|
||||
(result, [llm, { actionId, match, transform }]) => {
|
||||
const targetConnector = queryResult.find(
|
||||
(connector) =>
|
||||
!connector.isMissingSecrets &&
|
||||
connector.actionTypeId === actionId &&
|
||||
(match?.(connector) ?? true)
|
||||
);
|
||||
|
||||
return targetConnector ? { ...result, [llm]: transform(targetConnector) } : result;
|
||||
},
|
||||
{}
|
||||
);
|
||||
},
|
||||
{
|
||||
retry: false,
|
||||
keepPreviousData: true,
|
||||
onError: (error: ServerError) => {
|
||||
if (error.name !== 'AbortError') {
|
||||
notifications?.toasts?.addError(
|
||||
error.body && error.body.message ? new Error(error.body.message) : error,
|
||||
{
|
||||
title: i18n.translate('xpack.searchPlayground.loadConnectorsError', {
|
||||
defaultMessage:
|
||||
'Error loading connectors. Please check your configuration and try again.',
|
||||
}),
|
||||
}
|
||||
);
|
||||
}
|
||||
},
|
||||
}
|
||||
);
|
||||
};
|
|
@ -0,0 +1,70 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { renderHook } from '@testing-library/react-hooks';
|
||||
import { useManagementLink } from './use_management_link';
|
||||
import { useKibana } from './use_kibana';
|
||||
|
||||
jest.mock('./use_kibana', () => ({
|
||||
useKibana: jest.fn(),
|
||||
}));
|
||||
|
||||
describe('useManagementLink Hook', () => {
|
||||
const mockGetUrl = jest.fn();
|
||||
|
||||
beforeEach(() => {
|
||||
(useKibana as jest.Mock).mockReturnValue({
|
||||
services: {
|
||||
share: {
|
||||
url: {
|
||||
locators: {
|
||||
get: jest.fn().mockReturnValue({
|
||||
getUrl: mockGetUrl,
|
||||
}),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('generates the management link successfully', async () => {
|
||||
const expectedUrl =
|
||||
'http://localhost:5601/app/management/insightsAndAlerting/triggersActionsConnectors';
|
||||
mockGetUrl.mockResolvedValue(expectedUrl);
|
||||
const { result, waitForNextUpdate } = renderHook(() => useManagementLink());
|
||||
await waitForNextUpdate();
|
||||
|
||||
expect(result.current).toBe(expectedUrl);
|
||||
expect(mockGetUrl).toHaveBeenCalledWith({
|
||||
sectionId: 'insightsAndAlerting',
|
||||
appId: 'triggersActionsConnectors',
|
||||
});
|
||||
});
|
||||
|
||||
it('return empty link when management locator is not found', async () => {
|
||||
(useKibana as jest.Mock).mockReturnValueOnce({
|
||||
services: {
|
||||
share: {
|
||||
url: {
|
||||
locators: {
|
||||
get: jest.fn().mockReturnValue(undefined),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const { result } = renderHook(() => useManagementLink());
|
||||
|
||||
expect(result.current).toBe('');
|
||||
});
|
||||
});
|
|
@ -0,0 +1,32 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { useEffect, useMemo, useState } from 'react';
|
||||
import { useKibana } from './use_kibana';
|
||||
|
||||
export const useManagementLink = () => {
|
||||
const {
|
||||
services: { share },
|
||||
} = useKibana();
|
||||
const managementLocator = useMemo(
|
||||
() => share.url.locators.get('MANAGEMENT_APP_LOCATOR'),
|
||||
[share]
|
||||
);
|
||||
const [managementLink, setManagementLink] = useState('');
|
||||
useEffect(() => {
|
||||
const getLink = async () => {
|
||||
const link = await managementLocator?.getUrl({
|
||||
sectionId: 'insightsAndAlerting',
|
||||
appId: 'triggersActionsConnectors',
|
||||
});
|
||||
setManagementLink(link || '');
|
||||
};
|
||||
getLink();
|
||||
}, [managementLocator]);
|
||||
|
||||
return managementLink;
|
||||
};
|
|
@ -0,0 +1,25 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { UserProfileWithAvatar } from '@kbn/user-profile-components';
|
||||
import { useQuery } from '@tanstack/react-query';
|
||||
import { useKibana } from './use_kibana';
|
||||
|
||||
export const useUserProfile = (): UserProfileWithAvatar | undefined => {
|
||||
const { security } = useKibana().services;
|
||||
|
||||
return useQuery<UserProfileWithAvatar>(
|
||||
['useGetCurrentUserProfile'],
|
||||
async () => {
|
||||
return security.userProfiles.getCurrent({ dataPath: 'avatar' });
|
||||
},
|
||||
{
|
||||
retry: false,
|
||||
staleTime: Infinity,
|
||||
}
|
||||
).data;
|
||||
};
|
|
@ -7,7 +7,7 @@
|
|||
|
||||
import { CoreSetup, Plugin, CoreStart, AppMountParameters } from '@kbn/core/public';
|
||||
import { PLUGIN_ID, PLUGIN_NAME } from '../common';
|
||||
import { PlaygroundToolbar, PlaygroundProvider, Playground } from './embeddable';
|
||||
import { PlaygroundToolbar, Playground, getPlaygroundProvider } from './embeddable';
|
||||
import {
|
||||
AppPluginStartDependencies,
|
||||
SearchPlaygroundPluginSetup,
|
||||
|
@ -35,7 +35,7 @@ export class SearchPlaygroundPlugin
|
|||
|
||||
public start(core: CoreStart, deps: AppPluginStartDependencies): SearchPlaygroundPluginStart {
|
||||
return {
|
||||
PlaygroundProvider,
|
||||
PlaygroundProvider: getPlaygroundProvider(core, deps),
|
||||
PlaygroundToolbar,
|
||||
Playground,
|
||||
};
|
||||
|
|
|
@ -8,15 +8,12 @@
|
|||
import React from 'react';
|
||||
import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
|
||||
import { FormProvider, useForm } from 'react-hook-form';
|
||||
import { ChatForm, SummarizationModelName } from '../types';
|
||||
import { ChatForm, ChatFormFields } from '../types';
|
||||
|
||||
const queryClient = new QueryClient({});
|
||||
|
||||
export interface PlaygroundProviderProps {
|
||||
children: React.ReactNode;
|
||||
defaultValues: {
|
||||
indices: string[];
|
||||
};
|
||||
defaultValues?: Partial<Pick<ChatForm, ChatFormFields.indices>>;
|
||||
}
|
||||
|
||||
export const PlaygroundProvider: React.FC<PlaygroundProviderProps> = ({
|
||||
|
@ -26,10 +23,9 @@ export const PlaygroundProvider: React.FC<PlaygroundProviderProps> = ({
|
|||
const form = useForm<ChatForm>({
|
||||
defaultValues: {
|
||||
prompt: 'You are an assistant for question-answering tasks.',
|
||||
docSize: 5,
|
||||
doc_size: 5,
|
||||
source_fields: [],
|
||||
summarization_model: SummarizationModelName.gpt3_5_turbo,
|
||||
indices: defaultValues.indices,
|
||||
indices: defaultValues?.indices || [],
|
||||
},
|
||||
});
|
||||
|
||||
|
|
|
@ -15,9 +15,11 @@ import {
|
|||
import { NavigationPublicPluginStart } from '@kbn/navigation-plugin/public';
|
||||
import { SecurityPluginStart } from '@kbn/security-plugin/public';
|
||||
import { HttpStart } from '@kbn/core-http-browser';
|
||||
import React from 'react';
|
||||
import React, { ComponentType } from 'react';
|
||||
import { SharePluginStart } from '@kbn/share-plugin/public';
|
||||
import { CloudSetup } from '@kbn/cloud-plugin/public';
|
||||
import { TriggersAndActionsUIPublicPluginStart } from '@kbn/triggers-actions-ui-plugin/public';
|
||||
import { ChatRequestData } from '../common/types';
|
||||
import type { App } from './components/app';
|
||||
import type { PlaygroundProvider as PlaygroundProviderComponent } from './providers/playground_provider';
|
||||
import type { Toolbar } from './components/toolbar';
|
||||
|
@ -34,6 +36,7 @@ export interface SearchPlaygroundPluginStart {
|
|||
|
||||
export interface AppPluginStartDependencies {
|
||||
navigation: NavigationPublicPluginStart;
|
||||
triggersActionsUi: TriggersAndActionsUIPublicPluginStart;
|
||||
}
|
||||
|
||||
export interface AppServicesContext {
|
||||
|
@ -41,27 +44,26 @@ export interface AppServicesContext {
|
|||
security: SecurityPluginStart;
|
||||
share: SharePluginStart;
|
||||
cloud?: CloudSetup;
|
||||
triggersActionsUi: TriggersAndActionsUIPublicPluginStart;
|
||||
}
|
||||
|
||||
export enum ChatFormFields {
|
||||
question = 'question',
|
||||
citations = 'citations',
|
||||
prompt = 'prompt',
|
||||
openAIKey = 'api_key',
|
||||
indices = 'indices',
|
||||
elasticsearchQuery = 'elasticsearch_query',
|
||||
summarizationModel = 'summarization_model',
|
||||
sourceFields = 'source_fields',
|
||||
docSize = 'docSize',
|
||||
docSize = 'doc_size',
|
||||
}
|
||||
|
||||
export interface ChatForm {
|
||||
[ChatFormFields.question]: string;
|
||||
[ChatFormFields.prompt]: string;
|
||||
[ChatFormFields.citations]: boolean;
|
||||
[ChatFormFields.openAIKey]: string;
|
||||
[ChatFormFields.indices]: string[];
|
||||
[ChatFormFields.summarizationModel]: string;
|
||||
[ChatFormFields.summarizationModel]: LLMModel;
|
||||
[ChatFormFields.elasticsearchQuery]: { query: QueryDslQueryContainer };
|
||||
[ChatFormFields.sourceFields]: string[];
|
||||
[ChatFormFields.docSize]: number;
|
||||
|
@ -103,11 +105,11 @@ export interface AIMessage extends Message {
|
|||
}
|
||||
|
||||
export enum SummarizationModelName {
|
||||
gpt3_5 = 'gpt-3.5-turbo',
|
||||
gpt3_5_turbo = 'gpt-3.5-turbo',
|
||||
gpt3_5_turbo_1106 = 'gpt-3.5-turbo-1106',
|
||||
gpt3_5_turbo_16k = 'gpt-3.5-turbo-16k',
|
||||
gpt3_5_turbo_16k_0613 = 'gpt-3.5-turbo-16k-0613',
|
||||
gpt3_5_turbo = 'gpt-3.5-turbo-instruct',
|
||||
gpt3_5_turbo_instruct = 'gpt-3.5-turbo-instruct',
|
||||
}
|
||||
|
||||
export interface ElasticsearchIndex {
|
||||
|
@ -134,7 +136,7 @@ export type JSONValue = null | string | number | boolean | { [x: string]: JSONVa
|
|||
|
||||
export interface ChatRequestOptions {
|
||||
options?: RequestOptions;
|
||||
data?: Record<string, string | number | boolean>;
|
||||
data?: ChatRequestData;
|
||||
}
|
||||
|
||||
export type CreateMessage = Omit<Message, 'id'> & {
|
||||
|
@ -191,3 +193,11 @@ export interface UseChatHelpers {
|
|||
) => void;
|
||||
isLoading: boolean;
|
||||
}
|
||||
|
||||
export interface LLMModel {
|
||||
name: string;
|
||||
value?: string;
|
||||
icon: ComponentType;
|
||||
disabled: boolean;
|
||||
connectorId?: string;
|
||||
}
|
||||
|
|
|
@ -7,11 +7,21 @@
|
|||
|
||||
import { PluginInitializerContext, CoreSetup, CoreStart, Plugin, Logger } from '@kbn/core/server';
|
||||
|
||||
import { SearchPlaygroundPluginSetup, SearchPlaygroundPluginStart } from './types';
|
||||
import {
|
||||
SearchPlaygroundPluginSetup,
|
||||
SearchPlaygroundPluginStart,
|
||||
SearchPlaygroundPluginStartDependencies,
|
||||
} from './types';
|
||||
import { defineRoutes } from './routes';
|
||||
|
||||
export class SearchPlaygroundPlugin
|
||||
implements Plugin<SearchPlaygroundPluginSetup, SearchPlaygroundPluginStart>
|
||||
implements
|
||||
Plugin<
|
||||
SearchPlaygroundPluginSetup,
|
||||
SearchPlaygroundPluginStart,
|
||||
{},
|
||||
SearchPlaygroundPluginStartDependencies
|
||||
>
|
||||
{
|
||||
private readonly logger: Logger;
|
||||
|
||||
|
@ -19,11 +29,13 @@ export class SearchPlaygroundPlugin
|
|||
this.logger = initializerContext.logger.get();
|
||||
}
|
||||
|
||||
public setup(core: CoreSetup) {
|
||||
public setup(
|
||||
core: CoreSetup<SearchPlaygroundPluginStartDependencies, SearchPlaygroundPluginStart>
|
||||
) {
|
||||
this.logger.debug('searchPlayground: Setup');
|
||||
const router = core.http.createRouter();
|
||||
|
||||
defineRoutes({ router, log: this.logger });
|
||||
defineRoutes({ router, log: this.logger, getStartServices: core.getStartServices });
|
||||
|
||||
return {};
|
||||
}
|
||||
|
|
|
@ -6,16 +6,21 @@
|
|||
*/
|
||||
|
||||
import { schema } from '@kbn/config-schema';
|
||||
import { ChatOpenAI } from '@langchain/openai';
|
||||
import { streamFactory } from '@kbn/ml-response-stream/server';
|
||||
import { Logger } from '@kbn/logging';
|
||||
import { IRouter } from '@kbn/core/server';
|
||||
import { IRouter, StartServicesAccessor } from '@kbn/core/server';
|
||||
import { v4 as uuidv4 } from 'uuid';
|
||||
import { ActionsClientChatOpenAI } from '@kbn/elastic-assistant-common/impl/llm';
|
||||
import { fetchFields } from './utils/fetch_query_source_fields';
|
||||
import { AssistClientOptionsWithClient, createAssist as Assist } from './utils/assist';
|
||||
import { ConversationalChain } from './utils/conversational_chain';
|
||||
import { Prompt } from '../common/prompt';
|
||||
import { errorHandler } from './utils/error_handler';
|
||||
import { APIRoutes } from './types';
|
||||
import {
|
||||
APIRoutes,
|
||||
SearchPlaygroundPluginStart,
|
||||
SearchPlaygroundPluginStartDependencies,
|
||||
} from './types';
|
||||
|
||||
export function createRetriever(esQuery: string) {
|
||||
return (question: string) => {
|
||||
|
@ -28,7 +33,18 @@ export function createRetriever(esQuery: string) {
|
|||
};
|
||||
}
|
||||
|
||||
export function defineRoutes({ log, router }: { log: Logger; router: IRouter }) {
|
||||
export function defineRoutes({
|
||||
log,
|
||||
router,
|
||||
getStartServices,
|
||||
}: {
|
||||
log: Logger;
|
||||
router: IRouter;
|
||||
getStartServices: StartServicesAccessor<
|
||||
SearchPlaygroundPluginStartDependencies,
|
||||
SearchPlaygroundPluginStart
|
||||
>;
|
||||
}) {
|
||||
router.post(
|
||||
{
|
||||
path: APIRoutes.POST_QUERY_SOURCE_FIELDS,
|
||||
|
@ -56,69 +72,93 @@ export function defineRoutes({ log, router }: { log: Logger; router: IRouter })
|
|||
path: APIRoutes.POST_CHAT_MESSAGE,
|
||||
validate: {
|
||||
body: schema.object({
|
||||
data: schema.any(),
|
||||
data: schema.object({
|
||||
connector_id: schema.string(),
|
||||
indices: schema.string(),
|
||||
prompt: schema.string(),
|
||||
citations: schema.boolean(),
|
||||
elasticsearch_query: schema.string(),
|
||||
summarization_model: schema.maybe(schema.string()),
|
||||
doc_size: schema.number(),
|
||||
source_fields: schema.string(),
|
||||
}),
|
||||
messages: schema.any(),
|
||||
}),
|
||||
},
|
||||
},
|
||||
errorHandler(async (context, request, response) => {
|
||||
const [, { actions }] = await getStartServices();
|
||||
const { client } = (await context.core).elasticsearch;
|
||||
|
||||
const aiClient = Assist({
|
||||
es_client: client.asCurrentUser,
|
||||
} as AssistClientOptionsWithClient);
|
||||
|
||||
const { messages, data } = await request.body;
|
||||
|
||||
const model = new ChatOpenAI({
|
||||
openAIApiKey: data.api_key,
|
||||
});
|
||||
|
||||
let sourceFields = {};
|
||||
|
||||
try {
|
||||
sourceFields = JSON.parse(data.source_fields);
|
||||
const aiClient = Assist({
|
||||
es_client: client.asCurrentUser,
|
||||
} as AssistClientOptionsWithClient);
|
||||
const { messages, data } = await request.body;
|
||||
const abortController = new AbortController();
|
||||
const abortSignal = abortController.signal;
|
||||
const model = new ActionsClientChatOpenAI({
|
||||
actions,
|
||||
logger: log,
|
||||
request,
|
||||
connectorId: data.connector_id,
|
||||
model: data.summarization_model,
|
||||
traceId: uuidv4(),
|
||||
signal: abortSignal,
|
||||
// prevents the agent from retrying on failure
|
||||
// failure could be due to bad connector, we should deliver that result to the client asap
|
||||
maxRetries: 0,
|
||||
});
|
||||
|
||||
let sourceFields = {};
|
||||
|
||||
try {
|
||||
sourceFields = JSON.parse(data.source_fields);
|
||||
} catch (e) {
|
||||
log.error('Failed to parse the source fields', e);
|
||||
throw Error(e);
|
||||
}
|
||||
|
||||
const chain = ConversationalChain({
|
||||
model,
|
||||
rag: {
|
||||
index: data.indices,
|
||||
retriever: createRetriever(data.elasticsearch_query),
|
||||
content_field: sourceFields,
|
||||
size: Number(data.doc_size),
|
||||
},
|
||||
prompt: Prompt(data.prompt, {
|
||||
citations: data.citations,
|
||||
context: true,
|
||||
type: 'openai',
|
||||
}),
|
||||
});
|
||||
|
||||
const stream = await chain.stream(aiClient, messages);
|
||||
|
||||
const { end, push, responseWithHeaders } = streamFactory(request.headers, log);
|
||||
|
||||
const reader = (stream as ReadableStream).getReader();
|
||||
const textDecoder = new TextDecoder();
|
||||
|
||||
async function pushStreamUpdate() {
|
||||
reader.read().then(({ done, value }: { done: boolean; value?: Uint8Array }) => {
|
||||
if (done) {
|
||||
end();
|
||||
return;
|
||||
}
|
||||
push(textDecoder.decode(value));
|
||||
pushStreamUpdate();
|
||||
});
|
||||
}
|
||||
|
||||
pushStreamUpdate();
|
||||
|
||||
return response.ok(responseWithHeaders);
|
||||
} catch (e) {
|
||||
log.error('Failed to parse the source fields', e);
|
||||
log.error('Failed to create the chat stream', e);
|
||||
|
||||
throw Error(e);
|
||||
}
|
||||
|
||||
const chain = ConversationalChain({
|
||||
model,
|
||||
rag: {
|
||||
index: data.indices,
|
||||
retriever: createRetriever(data.elasticsearchQuery),
|
||||
content_field: sourceFields,
|
||||
size: Number(data.docSize),
|
||||
},
|
||||
prompt: Prompt(data.prompt, {
|
||||
citations: data.citations,
|
||||
context: true,
|
||||
type: 'openai',
|
||||
}),
|
||||
});
|
||||
|
||||
const stream = await chain.stream(aiClient, messages);
|
||||
|
||||
const { end, push, responseWithHeaders } = streamFactory(request.headers, log);
|
||||
|
||||
const reader = (stream as ReadableStream).getReader();
|
||||
const textDecoder = new TextDecoder();
|
||||
|
||||
async function pushStreamUpdate() {
|
||||
reader.read().then(({ done, value }: { done: boolean; value?: Uint8Array }) => {
|
||||
if (done) {
|
||||
end();
|
||||
return;
|
||||
}
|
||||
push(textDecoder.decode(value));
|
||||
pushStreamUpdate();
|
||||
});
|
||||
}
|
||||
|
||||
pushStreamUpdate();
|
||||
|
||||
return response.ok(responseWithHeaders);
|
||||
})
|
||||
);
|
||||
|
||||
|
|
|
@ -5,9 +5,15 @@
|
|||
* 2.0.
|
||||
*/
|
||||
|
||||
import { PluginStartContract as ActionsPluginStartContract } from '@kbn/actions-plugin/server';
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-empty-interface
|
||||
export interface SearchPlaygroundPluginSetup {}
|
||||
// eslint-disable-next-line @typescript-eslint/no-empty-interface
|
||||
export interface SearchPlaygroundPluginStart {}
|
||||
|
||||
export interface SearchPlaygroundPluginStartDependencies {
|
||||
actions: ActionsPluginStartContract;
|
||||
}
|
||||
|
||||
export * from '../common/types';
|
||||
|
|
|
@ -27,7 +27,14 @@
|
|||
"@kbn/navigation-plugin",
|
||||
"@kbn/core-http-server",
|
||||
"@kbn/share-plugin",
|
||||
"@kbn/cloud-plugin"
|
||||
"@kbn/cloud-plugin",
|
||||
"@kbn/actions-plugin",
|
||||
"@kbn/shared-ux-utility",
|
||||
"@kbn/core-lifecycle-browser",
|
||||
"@kbn/stack-connectors-plugin",
|
||||
"@kbn/cases-plugin",
|
||||
"@kbn/triggers-actions-ui-plugin",
|
||||
"@kbn/elastic-assistant-common"
|
||||
],
|
||||
"exclude": [
|
||||
"target/**/*",
|
||||
|
|
|
@ -7,7 +7,7 @@
|
|||
|
||||
import { EuiButtonIcon, EuiCopy, EuiFlexGroup, EuiFlexItem, EuiToolTip } from '@elastic/eui';
|
||||
import { AttachmentType } from '@kbn/cases-plugin/common';
|
||||
import type { Message } from '@kbn/elastic-assistant';
|
||||
import type { ClientMessage } from '@kbn/elastic-assistant';
|
||||
import React, { useCallback } from 'react';
|
||||
import { useDispatch } from 'react-redux';
|
||||
|
||||
|
@ -22,7 +22,7 @@ import * as i18n from './translations';
|
|||
import { useIsExperimentalFeatureEnabled } from '../../common/hooks/use_experimental_features';
|
||||
|
||||
interface Props {
|
||||
message: Message;
|
||||
message: ClientMessage;
|
||||
}
|
||||
|
||||
const CommentActionsComponent: React.FC<Props> = ({ message }) => {
|
||||
|
|
|
@ -5,11 +5,11 @@
|
|||
* 2.0.
|
||||
*/
|
||||
|
||||
import type { MessageRole } from '@kbn/elastic-assistant-common';
|
||||
import { OpenAiProviderType } from '@kbn/stack-connectors-plugin/public/common';
|
||||
import { getComments } from '.';
|
||||
import type { ConversationRole } from '@kbn/elastic-assistant/impl/assistant_context/types';
|
||||
|
||||
const user: ConversationRole = 'user';
|
||||
const user: MessageRole = 'user';
|
||||
const currentConversation = {
|
||||
apiConfig: {
|
||||
actionTypeId: '.gen-ai',
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
*/
|
||||
|
||||
import type { EuiCommentProps } from '@elastic/eui';
|
||||
import type { Conversation, Message } from '@kbn/elastic-assistant';
|
||||
import type { Conversation, ClientMessage } from '@kbn/elastic-assistant';
|
||||
import { EuiAvatar, EuiLoadingSpinner } from '@elastic/eui';
|
||||
import React from 'react';
|
||||
|
||||
|
@ -17,7 +17,7 @@ import { StreamComment } from './stream';
|
|||
import { CommentActions } from '../comment_actions';
|
||||
import * as i18n from './translations';
|
||||
|
||||
export interface ContentMessage extends Message {
|
||||
export interface ContentMessage extends ClientMessage {
|
||||
content: string;
|
||||
}
|
||||
const transformMessageWithReplacements = ({
|
||||
|
@ -26,7 +26,7 @@ const transformMessageWithReplacements = ({
|
|||
showAnonymizedValues,
|
||||
replacements,
|
||||
}: {
|
||||
message: Message;
|
||||
message: ClientMessage;
|
||||
content: string;
|
||||
showAnonymizedValues: boolean;
|
||||
replacements: Replacements;
|
||||
|
|
|
@ -13,6 +13,7 @@ import {
|
|||
import {
|
||||
GenerativeAIForSecurityConnectorFeatureId,
|
||||
GenerativeAIForObservabilityConnectorFeatureId,
|
||||
GenerativeAIForSearchPlaygroundConnectorFeatureId,
|
||||
} from '@kbn/actions-plugin/common';
|
||||
import { urlAllowListValidator } from '@kbn/actions-plugin/server';
|
||||
import { ValidatorServices } from '@kbn/actions-plugin/server/types';
|
||||
|
@ -39,6 +40,7 @@ export const getConnectorType = (): SubActionConnectorType<Config, Secrets> => (
|
|||
supportedFeatureIds: [
|
||||
GenerativeAIForSecurityConnectorFeatureId,
|
||||
GenerativeAIForObservabilityConnectorFeatureId,
|
||||
GenerativeAIForSearchPlaygroundConnectorFeatureId,
|
||||
],
|
||||
minimumLicenseRequired: 'enterprise' as const,
|
||||
renderParameterTemplates,
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue