[Security AI Assistant] Removed connectorTypeTitle from the Conversation API required params. Replaced usage with actionsClient on the server and an API call on the client (#179117)

This PR fixes the bug mentioned
[here](https://github.com/elastic/kibana/pull/179007/files#pullrequestreview-1947890025)
and removes the `connectorTypeTitle`/`llmType` params from the AI assistant
APIs.
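
At a high level, the client now sends only the `connectorId`; the server looks the connector up and uses its `actionTypeId` as the `llmType`. A minimal sketch of that resolution, assembled from the route and `executeAction` diffs below (the structural type is illustrative, not the real Kibana contract):

```typescript
// Illustrative subset of the ActionsClient surface used by the route.
interface MinimalActionsClient {
  getBulk(opts: {
    ids: string[];
    throwIfSystemAction: boolean;
  }): Promise<Array<{ actionTypeId: string }>>;
}

// '.gen-ai' covers OpenAI / Azure OpenAI connectors; '.bedrock' is Amazon Bedrock.
async function resolveLlmType(
  actionsClient: MinimalActionsClient,
  connectorId: string
): Promise<string | undefined> {
  const connectors = await actionsClient.getBulk({
    ids: [connectorId],
    throwIfSystemAction: false,
  });
  return connectors[0]?.actionTypeId;
}
```

That id is then threaded explicitly into `executeAction` and `handleStreamStorage` in place of the old `request.body.llmType`.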

Streaming is working as it was before.


Yuliia Naumenko 2024-03-22 10:17:13 -07:00 committed by GitHub
parent b41d423820
commit 175b59bf9b
61 changed files with 123 additions and 213 deletions

View file

@ -40,7 +40,6 @@ export const ExecuteConnectorRequestBody = z.object({
isEnabledRAGAlerts: z.boolean().optional(),
replacements: z.array(Replacement),
size: z.number().optional(),
llmType: z.enum(['bedrock', 'openai']),
});
export type ExecuteConnectorRequestBodyInput = z.input<typeof ExecuteConnectorRequestBody>;
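
For illustration, a request body without the removed `llmType` field, mirroring the payloads asserted in the API tests further down (the import path is assumed):

```typescript
import type { ExecuteConnectorRequestBodyInput } from '@kbn/elastic-assistant-common';

// Example payload copied from the API tests below; note: no llmType field.
const body: ExecuteConnectorRequestBodyInput = {
  model: 'gpt-4',
  message: 'This is a test',
  subAction: 'invokeAI',
  conversationId: 'test',
  replacements: [],
  isEnabledKnowledgeBase: true,
  isEnabledRAGAlerts: false,
};
```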

View file

@ -26,7 +26,6 @@ paths:
type: object
required:
- params
- llmType
- replacements
- subAction
properties:
@ -61,11 +60,6 @@ paths:
$ref: '../conversations/common_attributes.schema.yaml#/components/schemas/Replacement'
size:
type: number
llmType:
type: string
enum:
- bedrock
- openai
responses:
'200':
description: Successful response

View file

@ -145,10 +145,6 @@ export const ApiConfig = z.object({
* connector Id
*/
connectorId: z.string(),
/**
* connector Type Title
*/
connectorTypeTitle: z.string(),
/**
* defaultSystemPromptId
*/

View file

@ -117,14 +117,10 @@ components:
type: object
required:
- connectorId
- connectorTypeTitle
properties:
connectorId:
type: string
description: connector Id
connectorTypeTitle:
type: string
description: connector Type Title
defaultSystemPromptId:
type: string
description: defaultSystemPromptId

View file

@ -16,7 +16,7 @@ import { IToasts } from '@kbn/core-notifications-browser';
const conversation1 = {
id: 'conversation1',
title: 'Conversation 1',
apiConfig: { connectorId: '123', connectorTypeTitle: 'OpenAI' },
apiConfig: { connectorId: '123' },
replacements: [],
category: 'default',
messages: [

View file

@ -26,7 +26,6 @@ const mockHttp = {
const apiConfig: Conversation['apiConfig'] = {
connectorId: 'foo',
connectorTypeTitle: 'OpenAI',
model: 'gpt-4',
provider: OpenAiProviderType.OpenAi,
};
@ -53,7 +52,7 @@ describe('API tests', () => {
expect(mockHttp.fetch).toHaveBeenCalledWith(
'/internal/elastic_assistant/actions/connector/foo/_execute',
{
body: '{"model":"gpt-4","message":"This is a test","subAction":"invokeAI","conversationId":"test","replacements":[],"isEnabledKnowledgeBase":true,"isEnabledRAGAlerts":false,"llmType":"openai"}',
body: '{"model":"gpt-4","message":"This is a test","subAction":"invokeAI","conversationId":"test","replacements":[],"isEnabledKnowledgeBase":true,"isEnabledRAGAlerts":false}',
headers: { 'Content-Type': 'application/json' },
method: 'POST',
signal: undefined,
@ -73,7 +72,7 @@ describe('API tests', () => {
expect(mockHttp.fetch).toHaveBeenCalledWith(
'/internal/elastic_assistant/actions/connector/foo/_execute',
{
body: '{"model":"gpt-4","message":"This is a test","subAction":"invokeStream","conversationId":"test","replacements":[],"isEnabledKnowledgeBase":false,"isEnabledRAGAlerts":false,"llmType":"openai"}',
body: '{"model":"gpt-4","message":"This is a test","subAction":"invokeStream","conversationId":"test","replacements":[],"isEnabledKnowledgeBase":false,"isEnabledRAGAlerts":false}',
method: 'POST',
asResponse: true,
rawResponse: true,
@ -99,7 +98,7 @@ describe('API tests', () => {
expect(mockHttp.fetch).toHaveBeenCalledWith(
'/internal/elastic_assistant/actions/connector/foo/_execute',
{
body: '{"model":"gpt-4","message":"This is a test","subAction":"invokeAI","conversationId":"test","replacements":[{"uuid":"auuid","value":"real.hostname"}],"isEnabledKnowledgeBase":true,"isEnabledRAGAlerts":true,"llmType":"openai","alertsIndexPattern":".alerts-security.alerts-default","allow":["a","b","c"],"allowReplacement":["b","c"],"size":30}',
body: '{"model":"gpt-4","message":"This is a test","subAction":"invokeAI","conversationId":"test","replacements":[{"uuid":"auuid","value":"real.hostname"}],"isEnabledKnowledgeBase":true,"isEnabledRAGAlerts":true,"alertsIndexPattern":".alerts-security.alerts-default","allow":["a","b","c"],"allowReplacement":["b","c"],"size":30}',
headers: {
'Content-Type': 'application/json',
},
@ -122,7 +121,7 @@ describe('API tests', () => {
expect(mockHttp.fetch).toHaveBeenCalledWith(
'/internal/elastic_assistant/actions/connector/foo/_execute',
{
body: '{"model":"gpt-4","message":"This is a test","subAction":"invokeAI","conversationId":"test","replacements":[],"isEnabledKnowledgeBase":false,"isEnabledRAGAlerts":false,"llmType":"openai"}',
body: '{"model":"gpt-4","message":"This is a test","subAction":"invokeAI","conversationId":"test","replacements":[],"isEnabledKnowledgeBase":false,"isEnabledRAGAlerts":false}',
method: 'POST',
headers: {
'Content-Type': 'application/json',
@ -145,7 +144,7 @@ describe('API tests', () => {
expect(mockHttp.fetch).toHaveBeenCalledWith(
'/internal/elastic_assistant/actions/connector/foo/_execute',
{
body: '{"model":"gpt-4","message":"This is a test","subAction":"invokeAI","conversationId":"test","replacements":[],"isEnabledKnowledgeBase":false,"isEnabledRAGAlerts":true,"llmType":"openai"}',
body: '{"model":"gpt-4","message":"This is a test","subAction":"invokeAI","conversationId":"test","replacements":[],"isEnabledKnowledgeBase":false,"isEnabledRAGAlerts":true}',
method: 'POST',
headers: {
'Content-Type': 'application/json',

View file

@ -9,7 +9,7 @@ import { HttpSetup } from '@kbn/core/public';
import { IHttpFetchError } from '@kbn/core-http-browser';
import { ApiConfig, Replacement } from '@kbn/elastic-assistant-common';
import { API_ERROR } from '../translations';
import { getOptionalRequestParams, llmTypeDictionary } from '../helpers';
import { getOptionalRequestParams } from '../helpers';
export * from './conversations';
export interface FetchConnectorExecuteAction {
@ -53,7 +53,6 @@ export const fetchConnectorExecuteAction = async ({
signal,
size,
}: FetchConnectorExecuteAction): Promise<FetchConnectorExecuteResponse> => {
const llmType = llmTypeDictionary[apiConfig.connectorTypeTitle];
// TODO: Remove in part 3 of streaming work for security solution
// tracked here: https://github.com/elastic/security-team/issues/7363
// In part 3 I will make enhancements to langchain to introduce streaming
@ -76,7 +75,6 @@ export const fetchConnectorExecuteAction = async ({
replacements,
isEnabledKnowledgeBase,
isEnabledRAGAlerts,
llmType,
...optionalRequestParams,
};

View file

@ -118,7 +118,6 @@ export const ConversationSelector: React.FC<Props> = React.memo(
? {
apiConfig: {
connectorId: defaultConnector.id,
connectorTypeTitle: defaultConnector.connectorTypeTitle,
provider: defaultConnector.apiProvider,
defaultSystemPromptId: defaultSystemPrompt?.id,
},

View file

@ -102,7 +102,6 @@ const mockConnector = {
id: 'cool-id-bro',
actionTypeId: '.gen-ai',
name: 'cool name',
connectorTypeTitle: 'OpenAI',
};
jest.mock('../../../connectorland/connector_selector', () => ({
// @ts-ignore
@ -231,7 +230,6 @@ describe('ConversationSettings', () => {
...mockConvos[welcomeConvo.title],
apiConfig: {
connectorId: mockConnector.id,
connectorTypeTitle: 'OpenAI',
model: undefined,
provider: undefined,
},
@ -243,7 +241,6 @@ describe('ConversationSettings', () => {
...mockConvos[welcomeConvo.title],
apiConfig: {
connectorId: mockConnector.id,
connectorTypeTitle: 'OpenAI',
model: undefined,
provider: undefined,
},
@ -331,7 +328,6 @@ describe('ConversationSettings', () => {
id: 'not-the-right-id',
apiConfig: {
connectorId: mockConnector.id,
connectorTypeTitle: 'OpenAI',
model: undefined,
provider: undefined,
},

View file

@ -47,7 +47,6 @@ export interface ConversationSettingsProps {
*/
export const ConversationSettings: React.FC<ConversationSettingsProps> = React.memo(
({
actionTypeRegistry,
allSystemPrompts,
defaultConnector,
selectedConversation,
@ -68,7 +67,6 @@ export const ConversationSettings: React.FC<ConversationSettingsProps> = React.m
}, [allSystemPrompts, selectedConversation]);
const { data: connectors, isSuccess: areConnectorsFetched } = useLoadConnectors({
actionTypeRegistry,
http,
});
@ -89,7 +87,6 @@ export const ConversationSettings: React.FC<ConversationSettingsProps> = React.m
? {
apiConfig: {
connectorId: defaultConnector.id,
connectorTypeTitle: defaultConnector.connectorTypeTitle,
provider: defaultConnector.apiProvider,
defaultSystemPromptId: defaultSystemPrompt?.id,
},
@ -231,7 +228,6 @@ export const ConversationSettings: React.FC<ConversationSettingsProps> = React.m
apiConfig: {
...selectedConversation.apiConfig,
connectorId: connector.id,
connectorTypeTitle: connector.connectorTypeTitle,
provider: config?.apiProvider,
model: config?.defaultModel,
},
@ -257,7 +253,6 @@ export const ConversationSettings: React.FC<ConversationSettingsProps> = React.m
: {}
).apiConfig ?? {}),
connectorId: connector?.id,
connectorTypeTitle: connector?.connectorTypeTitle,
provider: config?.apiProvider,
model: config?.defaultModel,
},

View file

@ -23,7 +23,7 @@ describe('helpers', () => {
category: 'assistant',
theme: {},
messages: [],
apiConfig: { connectorId: '123', connectorTypeTitle: 'OpenAI' },
apiConfig: { connectorId: '123' },
replacements: [],
title: 'conversation_id',
};
@ -46,7 +46,7 @@ describe('helpers', () => {
},
},
],
apiConfig: { connectorId: '123', connectorTypeTitle: 'OpenAI' },
apiConfig: { connectorId: '123' },
replacements: [],
category: 'assistant',
title: 'conversation_id',
@ -60,7 +60,7 @@ describe('helpers', () => {
id: 'conversation_id',
title: 'conversation_id',
messages: enterpriseMessaging,
apiConfig: { connectorId: '123', connectorTypeTitle: 'OpenAI' },
apiConfig: { connectorId: '123' },
replacements: [],
category: 'assistant',
};
@ -86,7 +86,7 @@ describe('helpers', () => {
},
},
],
apiConfig: { connectorId: '123', connectorTypeTitle: 'OpenAI' },
apiConfig: { connectorId: '123' },
replacements: [],
};
const result = getBlockBotConversation(conversation, isAssistantEnabled);
@ -102,7 +102,7 @@ describe('helpers', () => {
title: 'conversation_id',
category: 'assistant',
messages: [],
apiConfig: { connectorId: '123', connectorTypeTitle: 'OpenAI' },
apiConfig: { connectorId: '123' },
replacements: [],
};
const result = getBlockBotConversation(conversation, isAssistantEnabled);
@ -124,7 +124,7 @@ describe('helpers', () => {
},
},
],
apiConfig: { connectorId: '123', connectorTypeTitle: 'OpenAI' },
apiConfig: { connectorId: '123' },
replacements: [],
};
const result = getBlockBotConversation(conversation, isAssistantEnabled);
@ -151,7 +151,6 @@ describe('helpers', () => {
const connectors: AIConnector[] = [
{
actionTypeId: '.gen-ai',
connectorTypeTitle: 'OpenAI',
isPreconfigured: false,
isDeprecated: false,
referencedByCount: 0,
@ -175,7 +174,6 @@ describe('helpers', () => {
const connectors: AIConnector[] = [
{
actionTypeId: '.gen-ai',
connectorTypeTitle: 'OpenAI',
isPreconfigured: false,
isDeprecated: false,
referencedByCount: 0,
@ -191,7 +189,6 @@ describe('helpers', () => {
},
{
actionTypeId: '.gen-ai',
connectorTypeTitle: 'OpenAI',
isPreconfigured: false,
isDeprecated: false,
referencedByCount: 0,
@ -268,7 +265,7 @@ describe('helpers', () => {
messages,
category: 'assistant',
theme: {},
apiConfig: { connectorId: '123', connectorTypeTitle: 'OpenAI' },
apiConfig: { connectorId: '123' },
replacements: [],
};
const baseConversations = {

View file

@ -118,9 +118,3 @@ export const getOptionalRequestParams = ({
...optionalSize,
};
};
export const llmTypeDictionary: Record<string, string> = {
'Amazon Bedrock': 'bedrock',
'Azure OpenAI': 'openai',
OpenAI: 'openai',
};
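
The deleted `llmTypeDictionary` existed only to map display titles ('Amazon Bedrock', 'OpenAI', 'Azure OpenAI') to llm types; since the `actionTypeId` is now resolved from the connector itself, the mapping collapses to the ids. A sketch of the equivalence (the type alias and guard are illustrative):

```typescript
// The action type id itself now identifies the LLM; '.gen-ai' covers both
// OpenAI and Azure OpenAI connectors, so the two title entries become one id.
type LlmActionTypeId = '.gen-ai' | '.bedrock';

const isSupportedLlm = (actionTypeId: string): actionTypeId is LlmActionTypeId =>
  actionTypeId === '.gen-ai' || actionTypeId === '.bedrock';
```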

View file

@ -50,7 +50,7 @@ const mockData = {
title: 'Welcome',
category: 'assistant',
messages: [],
apiConfig: { connectorId: '123', connectorTypeTitle: 'OpenAI' },
apiConfig: { connectorId: '123' },
replacements: [],
},
'electric sheep': {
@ -58,7 +58,7 @@ const mockData = {
category: 'assistant',
title: 'electric sheep',
messages: [],
apiConfig: { connectorId: '123', connectorTypeTitle: 'OpenAI' },
apiConfig: { connectorId: '123' },
replacements: [],
},
};
@ -169,7 +169,7 @@ describe('Assistant', () => {
expect(chatSendSpy).toHaveBeenLastCalledWith(
expect.objectContaining({
currentConversation: {
apiConfig: { connectorId: '123', connectorTypeTitle: 'OpenAI' },
apiConfig: { connectorId: '123' },
replacements: [],
category: 'assistant',
id: 'Welcome Id',

View file

@ -80,7 +80,6 @@ const AssistantComponent: React.FC<Props> = ({
setConversationTitle,
}) => {
const {
actionTypeRegistry,
assistantTelemetry,
augmentMessageCodeBlocks,
assistantAvailability: { isAssistantEnabled },
@ -136,7 +135,6 @@ const AssistantComponent: React.FC<Props> = ({
// Connector details
const { data: connectors, isSuccess: areConnectorsFetched } = useLoadConnectors({
actionTypeRegistry,
http,
});
const defaultConnector = useMemo(() => getDefaultConnector(connectors), [connectors]);

View file

@ -23,7 +23,6 @@ const BASE_CONVERSATION: Conversation = {
...WELCOME_CONVERSATION,
apiConfig: {
connectorId: '123',
connectorTypeTitle: 'OpenAI',
defaultSystemPromptId: mockSystemPrompt.id,
},
};
@ -377,7 +376,6 @@ describe('SystemPrompt', () => {
category: 'assistant',
apiConfig: {
connectorId: '123',
connectorTypeTitle: 'OpenAI',
defaultSystemPromptId: undefined,
},
title: 'second',
@ -462,7 +460,6 @@ describe('SystemPrompt', () => {
...secondMockConversation,
apiConfig: {
connectorId: '123',
connectorTypeTitle: 'OpenAI',
defaultSystemPromptId: mockSystemPrompt.id,
},
},

View file

@ -144,7 +144,6 @@ export const SystemPromptSettings: React.FC<Props> = React.memo(
apiConfig: {
defaultSystemPromptId: getDefaultSystemPromptId(convo),
connectorId: defaultConnector?.id ?? '',
connectorTypeTitle: defaultConnector?.connectorTypeTitle ?? '',
},
}),
}))
@ -211,7 +210,6 @@ export const SystemPromptSettings: React.FC<Props> = React.memo(
[
conversationSettings,
conversationsSettingsBulkActions,
defaultConnector?.connectorTypeTitle,
defaultConnector?.id,
selectedSystemPrompt,
setConversationSettings,

View file

@ -53,7 +53,7 @@ interface Props {
*/
export const EvaluationSettings: React.FC<Props> = React.memo(({ onEvaluationSettingsChange }) => {
const { actionTypeRegistry, basePath, http } = useAssistantContext();
const { data: connectors } = useLoadConnectors({ actionTypeRegistry, http });
const { data: connectors } = useLoadConnectors({ http });
const {
data: evalResponse,
mutate: performEvaluation,

View file

@ -93,7 +93,6 @@ describe('useConversation helpers', () => {
const conversation: Conversation = {
apiConfig: {
connectorId: '123',
connectorTypeTitle: 'OpenAI',
defaultSystemPromptId: '3',
},
category: 'assistant',
@ -111,7 +110,7 @@ describe('useConversation helpers', () => {
test('should return the default (starred) isNewConversationDefault system prompt if conversation system prompt does not exist', () => {
const conversationWithoutSystemPrompt: Conversation = {
apiConfig: { connectorId: '123', connectorTypeTitle: 'OpenAI' },
apiConfig: { connectorId: '123' },
replacements: [],
category: 'assistant',
id: '1',
@ -128,7 +127,7 @@ describe('useConversation helpers', () => {
test('should return the default (starred) isNewConversationDefault system prompt if conversation system prompt does not exist within all system prompts', () => {
const conversationWithoutSystemPrompt: Conversation = {
apiConfig: { connectorId: '123', connectorTypeTitle: 'OpenAI' },
apiConfig: { connectorId: '123' },
replacements: [],
category: 'assistant',
id: '4', // this id does not exist within allSystemPrompts
@ -145,7 +144,7 @@ describe('useConversation helpers', () => {
test('should return the first prompt if both conversation system prompt and default new system prompt do not exist', () => {
const conversationWithoutSystemPrompt: Conversation = {
apiConfig: { connectorId: '123', connectorTypeTitle: 'OpenAI' },
apiConfig: { connectorId: '123' },
replacements: [],
category: 'assistant',
id: '1',
@ -162,7 +161,7 @@ describe('useConversation helpers', () => {
test('should return undefined if conversation system prompt does not exist and there are no system prompts', () => {
const conversationWithoutSystemPrompt: Conversation = {
apiConfig: { connectorId: '123', connectorTypeTitle: 'OpenAI' },
apiConfig: { connectorId: '123' },
replacements: [],
category: 'assistant',
id: '1',
@ -179,7 +178,7 @@ describe('useConversation helpers', () => {
test('should return undefined if conversation system prompt does not exist within all system prompts', () => {
const conversationWithoutSystemPrompt: Conversation = {
apiConfig: { connectorId: '123', connectorTypeTitle: 'OpenAI' },
apiConfig: { connectorId: '123' },
replacements: [],
category: 'assistant',
id: '4', // this id does not exist within allSystemPrompts

View file

@ -36,7 +36,6 @@ const mockConvo = {
messages: [message, anotherMessage],
apiConfig: {
connectorId: '123',
connectorTypeTitle: 'OpenAI',
defaultSystemPromptId: 'default-system-prompt',
},
};
@ -69,7 +68,6 @@ describe('useConversation', () => {
replacements: [],
apiConfig: {
connectorId: '123',
connectorTypeTitle: 'OpenAI',
defaultSystemPromptId: 'default-system-prompt',
},
title: mockConvo.title,

View file

@ -90,10 +90,7 @@ describe('Connector selector', () => {
expect(getByTestId('connector-selector')).toBeInTheDocument();
fireEvent.click(getByTestId('connector-selector'));
fireEvent.click(getByTestId(connectorTwo.id));
expect(onConnectorSelectionChange).toHaveBeenCalledWith({
...connectorTwo,
connectorTypeTitle: 'OpenAI',
});
expect(onConnectorSelectionChange).toHaveBeenCalledWith(connectorTwo);
});
it('Calls onConnectorSelectionChange once new connector is saved', () => {
const { getByTestId } = render(
@ -106,10 +103,7 @@ describe('Connector selector', () => {
fireEvent.click(getByTestId('modal-mock'));
expect(onConnectorSelectionChange).toHaveBeenCalledWith({
...newConnector,
connectorTypeTitle: 'OpenAI',
});
expect(onConnectorSelectionChange).toHaveBeenCalledWith(newConnector);
expect(mockRefetchConnectors).toHaveBeenCalled();
expect(setIsOpen).toHaveBeenCalledWith(false);
});

View file

@ -30,8 +30,6 @@ interface Props {
}
export type AIConnector = ActionConnector & {
// ex: Bedrock, OpenAI
connectorTypeTitle: string;
// related to OpenAI connectors, ex: Azure OpenAI, OpenAI
apiProvider?: OpenAiProviderType;
};
@ -57,7 +55,7 @@ export const ConnectorSelector: React.FC<Props> = React.memo(
isLoading: isLoadingConnectors,
isFetching: isFetchingConnectors,
refetch: refetchConnectors,
} = useLoadConnectors({ actionTypeRegistry, http });
} = useLoadConnectors({ http });
const isLoading = isLoadingConnectors || isFetchingConnectors;
const localIsDisabled = isDisabled || !assistantAvailability.hasConnectorsReadPrivilege;
@ -91,7 +89,8 @@ export const ConnectorSelector: React.FC<Props> = React.memo(
() =>
(aiConnectors ?? []).map((connector) => {
const connectorTypeTitle =
getGenAiConfig(connector)?.apiProvider ?? connector.connectorTypeTitle;
getGenAiConfig(connector)?.apiProvider ??
getActionTypeTitle(actionTypeRegistry.get(connector.actionTypeId));
const connectorDetails = connector.isPreconfigured
? i18n.PRECONFIGURED_CONNECTOR
: connectorTypeTitle;
@ -111,7 +110,7 @@ export const ConnectorSelector: React.FC<Props> = React.memo(
),
};
}),
[aiConnectors, displayFancy]
[actionTypeRegistry, aiConnectors, displayFancy]
);
// Only include add new connector option if user has privilege
@ -151,12 +150,11 @@ export const ConnectorSelector: React.FC<Props> = React.memo(
(connector: ActionConnector) => {
onConnectorSelectionChange({
...connector,
connectorTypeTitle: getActionTypeTitle(actionTypeRegistry.get(connector.actionTypeId)),
});
refetchConnectors?.();
cleanupAndCloseModal();
},
[actionTypeRegistry, cleanupAndCloseModal, onConnectorSelectionChange, refetchConnectors]
[cleanupAndCloseModal, onConnectorSelectionChange, refetchConnectors]
);
return (

View file

@ -77,7 +77,7 @@ describe('ConnectorSelectorInline', () => {
id: 'conversation_id',
category: 'assistant',
messages: [],
apiConfig: { connectorId: '123', connectorTypeTitle: 'OpenAI' },
apiConfig: { connectorId: '123' },
replacements: [],
title: 'conversation_id',
};
@ -98,7 +98,7 @@ describe('ConnectorSelectorInline', () => {
id: 'conversation_id',
category: 'assistant',
messages: [],
apiConfig: { connectorId: '123', connectorTypeTitle: 'OpenAI' },
apiConfig: { connectorId: '123' },
replacements: [],
title: 'conversation_id',
};
@ -122,7 +122,7 @@ describe('ConnectorSelectorInline', () => {
id: 'conversation_id',
category: 'assistant',
messages: [],
apiConfig: { connectorId: '123', connectorTypeTitle: 'OpenAI' },
apiConfig: { connectorId: '123' },
replacements: [],
title: 'conversation_id',
};
@ -143,12 +143,11 @@ describe('ConnectorSelectorInline', () => {
expect(setApiConfig).toHaveBeenCalledWith({
apiConfig: {
connectorId: connectorTwo.id,
connectorTypeTitle: 'OpenAI',
model: undefined,
provider: 'OpenAI',
},
conversation: {
apiConfig: { connectorId: '123', connectorTypeTitle: 'OpenAI' },
apiConfig: { connectorId: '123' },
replacements: [],
category: 'assistant',
id: 'conversation_id',
@ -162,7 +161,7 @@ describe('ConnectorSelectorInline', () => {
id: 'conversation_id',
category: 'assistant',
messages: [],
apiConfig: { connectorId: '123', connectorTypeTitle: 'OpenAI' },
apiConfig: { connectorId: '123' },
replacements: [],
title: 'conversation_id',
};

View file

@ -68,11 +68,10 @@ const placeholderButtonClassName = css`
export const ConnectorSelectorInline: React.FC<Props> = React.memo(
({ isDisabled = false, selectedConnectorId, selectedConversation, onConnectorSelected }) => {
const [isOpen, setIsOpen] = useState<boolean>(false);
const { actionTypeRegistry, assistantAvailability, http } = useAssistantContext();
const { assistantAvailability, http } = useAssistantContext();
const { setApiConfig } = useConversation();
const { data: aiConnectors } = useLoadConnectors({
actionTypeRegistry,
http,
});
@ -103,7 +102,6 @@ export const ConnectorSelectorInline: React.FC<Props> = React.memo(
apiConfig: {
...selectedConversation.apiConfig,
connectorId,
connectorTypeTitle: connector.connectorTypeTitle,
// With the inline component, prefer config args to handle 'new connector' case
provider: apiProvider ?? config?.apiProvider,
model: model ?? config?.defaultModel,

View file

@ -25,7 +25,7 @@ import * as i18n from '../translations';
import { useAssistantContext } from '../../assistant_context';
import { useLoadConnectors } from '../use_load_connectors';
import { AssistantAvatar } from '../../assistant/assistant_avatar/assistant_avatar';
import { getActionTypeTitle, getGenAiConfig } from '../helpers';
import { getGenAiConfig } from '../helpers';
const ConnectorButtonWrapper = styled.div`
margin-bottom: 10px;
@ -58,7 +58,7 @@ export const useConnectorSetup = ({
data: connectors,
isSuccess: areConnectorsFetched,
refetch: refetchConnectors,
} = useLoadConnectors({ actionTypeRegistry, http });
} = useLoadConnectors({ http });
const isConnectorConfigured = areConnectorsFetched && !!connectors?.length;
const [isConnectorModalVisible, setIsConnectorModalVisible] = useState<boolean>(false);
@ -175,16 +175,12 @@ export const useConnectorSetup = ({
const onSaveConnector = useCallback(
async (connector: ActionConnector) => {
const config = getGenAiConfig(connector);
// add action type title to new connector
const connectorTypeTitle = getActionTypeTitle(actionTypeRegistry.get(connector.actionTypeId));
// persist only the active conversation
const updatedConversation = await setApiConfig({
conversation,
apiConfig: {
...conversation.apiConfig,
connectorId: connector.id,
connectorTypeTitle,
provider: config?.apiProvider,
model: config?.defaultModel,
},
@ -197,7 +193,7 @@ export const useConnectorSetup = ({
setIsConnectorModalVisible(false);
}
},
[actionTypeRegistry, conversation, onConversationUpdate, refetchConnectors, setApiConfig]
[conversation, onConversationUpdate, refetchConnectors, setApiConfig]
);
return {

View file

@ -10,13 +10,10 @@ import { useQuery } from '@tanstack/react-query';
import type { ServerError } from '@kbn/cases-plugin/public/types';
import { loadAllActions as loadConnectors } from '@kbn/triggers-actions-ui-plugin/public/common/constants';
import type { IHttpFetchError } from '@kbn/core-http-browser';
import { ActionTypeRegistryContract } from '@kbn/triggers-actions-ui-plugin/public';
import { HttpSetup } from '@kbn/core-http-browser';
import { IToasts } from '@kbn/core-notifications-browser';
import { useMemo } from 'react';
import { OpenAiProviderType } from '@kbn/stack-connectors-plugin/common/openai/constants';
import { AIConnector } from '../connector_selector';
import { getActionTypeTitle } from '../helpers';
import * as i18n from '../translations';
/**
@ -26,7 +23,6 @@ import * as i18n from '../translations';
const QUERY_KEY = ['elastic-assistant, load-connectors'];
export interface Props {
actionTypeRegistry: ActionTypeRegistryContract;
http: HttpSetup;
toasts?: IToasts;
}
@ -37,27 +33,9 @@ const actionTypeKey = {
};
export const useLoadConnectors = ({
actionTypeRegistry,
http,
toasts,
}: Props): UseQueryResult<AIConnector[], IHttpFetchError> => {
const connectorDetails = useMemo(
() =>
actionTypeRegistry
? {
[actionTypeKey.bedrock]: getActionTypeTitle(
actionTypeRegistry.get(actionTypeKey.bedrock)
),
[actionTypeKey.openai]: getActionTypeTitle(
actionTypeRegistry.get(actionTypeKey.openai)
),
}
: {
[actionTypeKey.bedrock]: 'Amazon Bedrock',
[actionTypeKey.openai]: 'OpenAI',
},
[actionTypeRegistry]
);
return useQuery(
QUERY_KEY,
async () => {
@ -70,7 +48,6 @@ export const useLoadConnectors = ({
? [
{
...connector,
connectorTypeTitle: connectorDetails[connector.actionTypeId],
apiProvider:
!connector.isPreconfigured &&
!connector.isSystemAction &&

View file

@ -34,7 +34,6 @@ export const mockActionTypes = [
export const mockConnectors: AIConnector[] = [
{
id: 'connectorId',
connectorTypeTitle: 'OpenAI',
name: 'Captain Connector',
isMissingSecrets: false,
actionTypeId: '.gen-ai',
@ -48,7 +47,6 @@ export const mockConnectors: AIConnector[] = [
},
{
id: 'c29c28a0-20fe-11ee-9306-a1f4d42ec542',
connectorTypeTitle: 'OpenAI',
name: 'Professor Connector',
isMissingSecrets: false,
actionTypeId: '.gen-ai',

View file

@ -23,7 +23,6 @@ export const alertConvo: Conversation = {
],
apiConfig: {
connectorId: 'c29c28a0-20fe-11ee-9306-a1f4d42ec542',
connectorTypeTitle: 'OpenAI',
provider: OpenAiProviderType.OpenAi,
},
replacements: [
@ -42,7 +41,6 @@ export const emptyWelcomeConvo: Conversation = {
replacements: [],
apiConfig: {
connectorId: 'c29c28a0-20fe-11ee-9306-a1f4d42ec542',
connectorTypeTitle: 'OpenAI',
provider: OpenAiProviderType.OpenAi,
},
};
@ -74,7 +72,6 @@ export const customConvo: Conversation = {
replacements: [],
apiConfig: {
connectorId: 'c29c28a0-20fe-11ee-9306-a1f4d42ec542',
connectorTypeTitle: 'OpenAI',
provider: OpenAiProviderType.OpenAi,
},
};

View file

@ -65,7 +65,6 @@ export const getCreateConversationSchemaMock = (): ConversationCreateProps => ({
apiConfig: {
connectorId: '1',
defaultSystemPromptId: 'Default',
connectorTypeTitle: 'Test connector',
model: 'model',
},
excludeFromLastConversationStorage: false,
@ -91,7 +90,6 @@ export const getUpdateConversationSchemaMock = (
apiConfig: {
connectorId: '2',
defaultSystemPromptId: 'Default',
connectorTypeTitle: 'Test connector',
model: 'model',
},
excludeFromLastConversationStorage: false,
@ -131,7 +129,6 @@ export const getConversationMock = (
apiConfig: {
connectorId: '1',
defaultSystemPromptId: 'Default',
connectorTypeTitle: 'OpenAI',
},
replacements: [],
title: 'test',
@ -156,7 +153,6 @@ export const getQueryConversationParams = (
apiConfig: {
connectorId: '2',
defaultSystemPromptId: 'Default',
connectorTypeTitle: 'Test connector',
model: 'model',
},
category: 'assistant',
@ -180,7 +176,6 @@ export const getQueryConversationParams = (
apiConfig: {
connectorId: '1',
defaultSystemPromptId: 'Default',
connectorTypeTitle: 'Test connector',
model: 'model',
},
excludeFromLastConversationStorage: false,

View file

@ -69,7 +69,6 @@ export const getConversationResponseMock = (
title: 'test',
apiConfig: {
connectorId: '1',
connectorTypeTitle: 'test-connector',
defaultSystemPromptId: 'default-system-prompt',
model: 'test-model',
provider: 'OpenAI',

View file

@ -30,7 +30,6 @@ export const getCreateConversationMock = (): ConversationCreateProps => ({
title: 'test',
apiConfig: {
connectorId: '1',
connectorTypeTitle: 'test-connector',
defaultSystemPromptId: 'default-system-prompt',
model: 'test-model',
provider: 'OpenAI',
@ -47,7 +46,6 @@ export const getConversationResponseMock = (): ConversationResponse => ({
title: 'test',
apiConfig: {
connectorId: '1',
connectorTypeTitle: 'test-connector',
defaultSystemPromptId: 'default-system-prompt',
model: 'test-model',
provider: 'OpenAI',
@ -96,7 +94,6 @@ export const getSearchConversationMock =
exclude_from_last_conversation_storage: false,
api_config: {
connector_id: 'c1',
connector_type_title: 'title-c-1',
default_system_prompt_id: 'prompt-1',
model: 'test',
provider: 'Azure OpenAI',

View file

@ -86,7 +86,6 @@ export const transformToCreateScheme = (
api_config: apiConfig
? {
connector_id: apiConfig.connectorId,
connector_type_title: apiConfig.connectorTypeTitle,
default_system_prompt_id: apiConfig.defaultSystemPromptId,
model: apiConfig.model,
provider: apiConfig.provider,

View file

@ -147,11 +147,6 @@ export const conversationsFieldMap: FieldMap = {
array: false,
required: false,
},
'api_config.connector_type_title': {
type: 'keyword',
array: false,
required: false,
},
'api_config.default_system_prompt_id': {
type: 'keyword',
array: false,

View file

@ -26,7 +26,6 @@ export const getConversationResponseMock = (): ConversationResponse => ({
timestamp: '2020-04-20T15:25:31.830Z',
apiConfig: {
connectorId: 'c1',
connectorTypeTitle: 'title-c-1',
defaultSystemPromptId: 'prompt-1',
model: 'test',
provider: 'Azure OpenAI',
@ -79,7 +78,6 @@ export const getSearchConversationMock =
exclude_from_last_conversation_storage: false,
api_config: {
connector_id: 'c1',
connector_type_title: 'title-c-1',
default_system_prompt_id: 'prompt-1',
model: 'test',
provider: 'Azure OpenAI',

View file

@ -21,9 +21,6 @@ export const getUpdateScript = ({
if (params.assignEmpty == true || params.api_config.containsKey('connector_id')) {
ctx._source.api_config.connector_id = params.api_config.connector_id;
}
if (params.assignEmpty == true || params.api_config.containsKey('connector_type_title')) {
ctx._source.api_config.connector_type_title = params.api_config.connector_type_title;
}
if (params.assignEmpty == true || params.api_config.containsKey('default_system_prompt_id')) {
ctx._source.api_config.default_system_prompt_id = params.api_config.default_system_prompt_id;
}

View file

@ -61,7 +61,6 @@ describe('AIAssistantConversationsDataClient', () => {
api_config: {
connector_id: 'bedbf764-b991-4115-a9fc-1cfeaef21046',
model: 'anthropic.claude-v2',
connector_type_title: 'Amazon Bedrock',
},
namespace: 'hghjghjghghjghg33',
created_at: '2024-01-25T01:32:37.649Z',
@ -113,7 +112,6 @@ describe('AIAssistantConversationsDataClient', () => {
expect(result).toEqual({
apiConfig: {
connectorId: 'bedbf764-b991-4115-a9fc-1cfeaef21046',
connectorTypeTitle: 'Amazon Bedrock',
defaultSystemPromptId: undefined,
model: 'anthropic.claude-v2',
provider: undefined,
@ -180,7 +178,6 @@ describe('AIAssistantConversationsDataClient', () => {
params: {
api_config: {
connector_id: '2',
connector_type_title: 'Test connector',
default_system_prompt_id: 'Default',
model: 'model',
provider: undefined,

View file

@ -36,7 +36,6 @@ export const transformESToConversations = (
? {
apiConfig: {
connectorId: conversationSchema.api_config.connector_id,
connectorTypeTitle: conversationSchema.api_config.connector_type_title,
defaultSystemPromptId: conversationSchema.api_config.default_system_prompt_id,
model: conversationSchema.api_config.model,
provider: conversationSchema.api_config.provider,

View file

@ -39,7 +39,6 @@ export interface SearchEsConversationSchema {
}>;
api_config?: {
connector_id: string;
connector_type_title: string;
default_system_prompt_id?: string;
provider?: Provider;
model?: string;
@ -74,7 +73,6 @@ export interface CreateMessageSchema {
}>;
api_config?: {
connector_id?: string;
connector_type_title?: string;
default_system_prompt_id?: string;
provider?: Provider;
model?: string;

View file

@ -21,7 +21,6 @@ export const getUpdateConversationOptionsMock = (): ConversationUpdateProps => (
title: 'test',
apiConfig: {
connectorId: '1',
connectorTypeTitle: 'test-connector',
defaultSystemPromptId: 'default-system-prompt',
model: 'test-model',
provider: 'OpenAI',
@ -44,7 +43,6 @@ export const getConversationResponseMock = (): ConversationResponse => ({
title: 'test',
apiConfig: {
connectorId: '1',
connectorTypeTitle: 'test-connector',
defaultSystemPromptId: 'default-system-prompt',
model: 'test-model',
provider: 'OpenAI',
@ -164,7 +162,6 @@ describe('transformToUpdateScheme', () => {
title: 'test',
api_config: {
connector_id: '1',
connector_type_title: 'test-connector',
default_system_prompt_id: 'default-system-prompt',
model: 'test-model',
provider: 'OpenAI',

View file

@ -37,7 +37,6 @@ export interface UpdateConversationSchema {
}>;
api_config?: {
connector_id?: string;
connector_type_title?: string;
default_system_prompt_id?: string;
provider?: Provider;
model?: string;
@ -118,7 +117,6 @@ export const transformToUpdateScheme = (
title,
api_config: {
connector_id: apiConfig?.connectorId,
connector_type_title: apiConfig?.connectorTypeTitle,
default_system_prompt_id: apiConfig?.defaultSystemPromptId,
model: apiConfig?.model,
provider: apiConfig?.provider,

View file

@ -25,7 +25,6 @@ export const findDocumentsResponseMock = (): ConversationResponse => ({
timestamp: '2020-04-20T15:25:31.830Z',
apiConfig: {
connectorId: 'c1',
connectorTypeTitle: 'title-c-1',
defaultSystemPromptId: 'prompt-1',
model: 'test',
provider: 'Azure OpenAI',
@ -70,7 +69,6 @@ export const getSearchConversationMock =
exclude_from_last_conversation_storage: false,
api_config: {
connector_id: 'c1',
connector_type_title: 'title-c-1',
default_system_prompt_id: 'prompt-1',
model: 'test',
provider: 'Azure OpenAI',
@ -137,7 +135,6 @@ describe('findDocuments', () => {
'@timestamp': '2020-04-20T15:25:31.830Z',
api_config: {
connector_id: 'c1',
connector_type_title: 'title-c-1',
default_system_prompt_id: 'prompt-1',
model: 'test',
provider: 'Azure OpenAI',

View file

@ -134,7 +134,6 @@ describe('AIAssistantDataClient', () => {
api_config: {
connector_id: 'bedbf764-b991-4115-a9fc-1cfeaef21046',
model: 'anthropic.claude-v2',
connector_type_title: 'Amazon Bedrock',
},
namespace: 'hghjghjghghjghg33',
created_at: '2024-01-25T01:32:37.649Z',
@ -194,7 +193,6 @@ describe('AIAssistantDataClient', () => {
'@timestamp': '2024-01-25T01:32:37.649Z',
api_config: {
connector_id: 'bedbf764-b991-4115-a9fc-1cfeaef21046',
connector_type_title: 'Amazon Bedrock',
model: 'anthropic.claude-v2',
},
created_at: '2024-01-25T01:32:37.649Z',

View file

@ -29,6 +29,7 @@ const testProps: Omit<Props, 'actions'> = {
subAction: 'invokeAI',
subActionParams: { messages: [{ content: 'hello', role: 'user' }] },
},
llmType: '.bedrock',
request,
connectorId,
onLlmResponse,

View file

@ -18,6 +18,7 @@ export interface Props {
connectorId: string;
params: InvokeAIActionsParams;
request: KibanaRequest<unknown, unknown, ExecuteConnectorRequestBody>;
llmType: string;
}
interface StaticResponse {
connector_id: string;
@ -44,6 +45,7 @@ export const executeAction = async ({
actions,
params,
connectorId,
llmType,
request,
}: Props): Promise<StaticResponse | Readable> => {
const actionsClient = await actions.getActionsClientWithRequest(request);
@ -76,7 +78,7 @@ export const executeAction = async ({
}
// do not await, blocks stream for UI
handleStreamStorage(readable, request.body.llmType, onLlmResponse);
handleStreamStorage(readable, llmType, onLlmResponse);
return readable.pipe(new PassThrough());
};

View file

@ -81,9 +81,7 @@ export class ActionsClientLlm extends LLM {
subActionParams: {
model: this.#request.body.model,
messages: [assistantMessage], // the assistant message
...(this.#request.body.llmType === 'openai'
? { n: 1, stop: null, temperature: 0.2 }
: {}),
...(this.llmType === '.gen-ai' ? { n: 1, stop: null, temperature: 0.2 } : {}),
},
},
};

View file

@ -52,11 +52,11 @@ describe('handleStreamStorage', () => {
it('saves the final string successful streaming event', async () => {
stream.complete();
await handleStreamStorage(stream.transform, 'openai', onMessageSent);
await handleStreamStorage(stream.transform, '.gen-ai', onMessageSent);
expect(onMessageSent).toHaveBeenCalledWith('Single.');
});
it('saves the error message on a failed streaming event', async () => {
const tokenPromise = handleStreamStorage(stream.transform, 'openai', onMessageSent);
const tokenPromise = handleStreamStorage(stream.transform, '.gen-ai', onMessageSent);
stream.fail();
await expect(tokenPromise).resolves.not.toThrow();
@ -73,11 +73,11 @@ describe('handleStreamStorage', () => {
it('saves the final string successful streaming event', async () => {
stream.complete();
await handleStreamStorage(stream.transform, 'bedrock', onMessageSent);
await handleStreamStorage(stream.transform, '.bedrock', onMessageSent);
expect(onMessageSent).toHaveBeenCalledWith('Simple.');
});
it('saves the error message on a failed streaming event', async () => {
const tokenPromise = handleStreamStorage(stream.transform, 'bedrock', onMessageSent);
const tokenPromise = handleStreamStorage(stream.transform, '.bedrock', onMessageSent);
stream.fail();
await expect(tokenPromise).resolves.not.toThrow();

View file

@ -18,7 +18,7 @@ export const handleStreamStorage: (
onMessageSent?: (content: string) => void
) => Promise<void> = async (responseStream, llmType, onMessageSent) => {
try {
const parser = llmType === 'bedrock' ? parseBedrockStream : parseOpenAIStream;
const parser = llmType === '.bedrock' ? parseBedrockStream : parseOpenAIStream;
// TODO @steph add abort signal
const parsedResponse = await parser(responseStream);
if (onMessageSent) {

View file

@ -154,7 +154,6 @@ export const postEvaluateRoute = (
isEnabledKnowledgeBase: true,
isEnabledRAGAlerts: true,
conversationId: '',
llmType: 'openai',
},
};

View file

@ -20,7 +20,9 @@ import {
INVOKE_ASSISTANT_SUCCESS_EVENT,
} from '../lib/telemetry/event_based_telemetry';
import { getConversationResponseMock } from '../ai_assistant_data_clients/conversations/update_conversation.test';
import { actionsClientMock } from '@kbn/actions-plugin/server/actions_client/actions_client.mock';
const actionsClient = actionsClientMock.create();
jest.mock('../lib/build_response', () => ({
buildResponse: jest.fn().mockImplementation((x) => x),
}));
@ -67,7 +69,9 @@ const existingConversation = getConversationResponseMock();
const reportEvent = jest.fn();
const mockContext = {
elasticAssistant: {
actions: jest.fn(),
actions: {
getActionsClientWithRequest: jest.fn().mockResolvedValue(actionsClient),
},
getRegisteredTools: jest.fn(() => []),
logger: loggingSystemMock.createLogger(),
telemetry: { ...coreMock.createSetup().analytics, reportEvent },
@ -136,6 +140,22 @@ describe('postActionsConnectorExecuteRoute', () => {
beforeEach(() => {
jest.clearAllMocks();
actionsClient.getBulk.mockResolvedValue([
{
id: '1',
isPreconfigured: false,
isSystemAction: false,
isDeprecated: false,
name: 'my name',
actionTypeId: '.gen-ai',
isMissingSecrets: false,
config: {
a: true,
b: true,
c: true,
},
},
]);
});
it('returns the expected response when isEnabledKnowledgeBase=false', async () => {

View file

@ -65,6 +65,9 @@ export const postActionsConnectorExecuteRoute = (
const telemetry = assistantContext.telemetry;
try {
// Get the actions plugin start contract from the request context for the agents
const actionsClient = await assistantContext.actions.getActionsClientWithRequest(request);
const authenticatedUser = assistantContext.getCurrentUser();
if (authenticatedUser == null) {
return response.unauthorized({
@ -188,6 +191,10 @@ export const postActionsConnectorExecuteRoute = (
}
const connectorId = decodeURIComponent(request.params.connectorId);
const connectors = await actionsClient.getBulk({
ids: [connectorId],
throwIfSystemAction: false,
});
// get the actions plugin start contract from the request context:
const actions = (await context.elasticAssistant).actions;
@ -201,12 +208,13 @@ export const postActionsConnectorExecuteRoute = (
actions,
request,
connectorId,
llmType: connectors[0]?.actionTypeId,
params: {
subAction: request.body.subAction,
subActionParams: {
model: request.body.model,
messages: [...(prevMessages ?? []), ...(newMessage ? [newMessage] : [])],
...(request.body.llmType === 'openai'
...(connectors[0]?.actionTypeId === '.gen-ai'
? { n: 1, stop: null, temperature: 0.2 }
: {}),
},

View file

@ -121,7 +121,6 @@ describe('Update conversation route', () => {
...getUpdateConversationSchemaMock(),
apiConfig: {
connectorId: '123',
connectorTypeTitle: 'OpenAI',
defaultSystemPromptId: 'test',
},
},

View file

@ -13,7 +13,6 @@ const user: ConversationRole = 'user';
const currentConversation = {
apiConfig: {
connectorId: 'c29c28a0-20fe-11ee-9306-a1f4d42ec542',
connectorTypeTitle: 'OpenAI',
provider: OpenAiProviderType.OpenAi,
},
replacements: [],
@ -49,7 +48,6 @@ describe('getComments', () => {
category: 'assistant',
apiConfig: {
connectorId: 'c29c28a0-20fe-11ee-9306-a1f4d42ec542',
connectorTypeTitle: 'OpenAI',
provider: OpenAiProviderType.OpenAi,
},
replacements: [],

View file

@ -58,8 +58,7 @@ export const getComments = ({
const regenerateMessageOfConversation = () => {
regenerateMessage(currentConversation.id);
};
const connectorTypeTitle = currentConversation.apiConfig?.connectorTypeTitle ?? '';
const connectorId = currentConversation.apiConfig?.connectorId ?? '';
const extraLoadingComment = isFetchingResponse
? [
@ -69,7 +68,7 @@ export const getComments = ({
timestamp: '...',
children: (
<StreamComment
connectorTypeTitle={connectorTypeTitle}
connectorId={connectorId}
content=""
refetchCurrentConversation={refetchCurrentConversation}
regenerateMessage={regenerateMessageOfConversation}
@ -120,7 +119,7 @@ export const getComments = ({
...messageProps,
children: (
<StreamComment
connectorTypeTitle={connectorTypeTitle}
connectorId={connectorId}
index={index}
isControlsEnabled={isControlsEnabled}
isError={message.isError}
@ -141,7 +140,7 @@ export const getComments = ({
actions: <CommentActions message={transformedMessage} />,
children: (
<StreamComment
connectorTypeTitle={connectorTypeTitle}
connectorId={connectorId}
content={transformedMessage.content}
index={index}
isControlsEnabled={isControlsEnabled}

View file

@ -6,10 +6,15 @@
*/
import React from 'react';
import type { UseQueryResult } from '@tanstack/react-query';
import { render, screen, fireEvent } from '@testing-library/react';
import { useFetchConnectorsQuery } from '../../../detection_engine/rule_management/api/hooks/use_fetch_connectors_query';
import { StreamComment } from '.';
import { useStream } from './use_stream';
import type { Connector } from '@kbn/actions-plugin/server/application/connector/types';
import type { AsApiContract } from '@kbn/actions-plugin/common';
const mockSetComplete = jest.fn();
jest.mock('../../../detection_engine/rule_management/api/hooks/use_fetch_connectors_query');
jest.mock('./use_stream');
@ -19,9 +24,9 @@ const testProps = {
content,
index: 1,
isControlsEnabled: true,
connectorTypeTitle: 'OpenAI',
regenerateMessage: jest.fn(),
transformMessage: jest.fn(),
connectorId: 'test',
};
const mockReader = jest.fn() as unknown as ReadableStreamDefaultReader<Uint8Array>;
@ -36,6 +41,16 @@ describe('StreamComment', () => {
pendingMessage: 'Test Message',
setComplete: mockSetComplete,
});
const connectors: unknown[] = [
{
id: 'hi',
name: 'OpenAI connector',
actionTypeId: '.gen-ai',
},
];
jest.mocked(useFetchConnectorsQuery).mockReturnValue({
data: connectors,
} as unknown as UseQueryResult<Array<AsApiContract<Connector>>, unknown>);
});
it('renders content correctly', () => {
render(<StreamComment {...testProps} />);

View file

@ -7,6 +7,7 @@
import React, { useEffect, useMemo, useRef } from 'react';
import { EuiFlexGroup, EuiFlexItem } from '@elastic/eui';
import { useFetchConnectorsQuery } from '../../../detection_engine/rule_management/api/hooks/use_fetch_connectors_query';
import type { ContentMessage } from '..';
import { useStream } from './use_stream';
import { StopGeneratingButton } from './buttons/stop_generating_button';
@ -20,7 +21,7 @@ interface Props {
isFetching?: boolean;
isControlsEnabled?: boolean;
index: number;
connectorTypeTitle: string;
connectorId: string;
reader?: ReadableStreamDefaultReader<Uint8Array>;
refetchCurrentConversation: () => void;
regenerateMessage: () => void;
@ -29,7 +30,7 @@ interface Props {
export const StreamComment = ({
content,
connectorTypeTitle,
connectorId,
index,
isControlsEnabled = false,
isError = false,
@ -39,10 +40,13 @@ export const StreamComment = ({
regenerateMessage,
transformMessage,
}: Props) => {
const { data: connectors } = useFetchConnectorsQuery();
const llmType = connectors?.find((c) => c.id === connectorId)?.connector_type_id ?? '.gen-ai';
const { error, isLoading, isStreaming, pendingMessage, setComplete } = useStream({
refetchCurrentConversation,
content,
connectorTypeTitle,
llmType,
reader,
isError,
});
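
On the client, components that used to receive `connectorTypeTitle` now take a `connectorId` and resolve the type themselves. A pure-function sketch of the lookup `StreamComment` performs above (the interface is illustrative; `connector_type_id` is the snake_cased field returned by the connectors HTTP API):

```typescript
// Illustrative shape of a connector returned by useFetchConnectorsQuery.
interface ConnectorSummary {
  id: string;
  connector_type_id: string;
}

// Find the connector by id and fall back to '.gen-ai', as StreamComment does.
const getLlmType = (
  connectors: ConnectorSummary[] | undefined,
  connectorId: string
): string => connectors?.find((c) => c.id === connectorId)?.connector_type_id ?? '.gen-ai';
```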

View file

@ -69,7 +69,7 @@ describe('getStreamObservable', () => {
});
const source = getStreamObservable({
connectorTypeTitle: 'Amazon Bedrock',
llmType: '.bedrock',
isError: false,
reader: typedReader,
setLoading,
@ -142,7 +142,7 @@ describe('getStreamObservable', () => {
});
const source = getStreamObservable({
connectorTypeTitle: 'OpenAI',
llmType: '.gen-ai',
isError: false,
reader: typedReader,
setLoading,
@ -215,7 +215,7 @@ describe('getStreamObservable', () => {
});
const source = getStreamObservable({
connectorTypeTitle: 'OpenAI',
llmType: '.gen-ai',
isError: false,
reader: typedReader,
setLoading,
@ -268,7 +268,7 @@ describe('getStreamObservable', () => {
});
const source = getStreamObservable({
connectorTypeTitle: 'OpenAI',
llmType: '.gen-ai',
isError: true,
reader: typedReader,
setLoading,
@ -299,7 +299,7 @@ describe('getStreamObservable', () => {
// Simulate an error
mockReader.read.mockRejectedValue(error);
const source = getStreamObservable({
connectorTypeTitle: 'OpenAI',
llmType: '.gen-ai',
isError: false,
reader: typedReader,
setLoading,

View file

@ -14,7 +14,7 @@ import { API_ERROR } from '../translations';
const MIN_DELAY = 35;
interface StreamObservable {
connectorTypeTitle: string;
llmType: string;
reader: ReadableStreamDefaultReader<Uint8Array>;
setLoading: Dispatch<SetStateAction<boolean>>;
isError: boolean;
@ -28,7 +28,7 @@ interface StreamObservable {
* @returns {Observable<PromptObservableState>} An Observable that emits PromptObservableState
*/
export const getStreamObservable = ({
connectorTypeTitle,
llmType,
isError,
reader,
setLoading,
@ -163,17 +163,15 @@ export const getStreamObservable = ({
// this should never actually happen
function badConnector() {
observer.next({
chunks: [
`Invalid connector type - ${connectorTypeTitle} is not a supported GenAI connector.`,
],
message: `Invalid connector type - ${connectorTypeTitle} is not a supported GenAI connector.`,
chunks: [`Invalid connector type - ${llmType} is not a supported GenAI connector.`],
message: `Invalid connector type - ${llmType} is not a supported GenAI connector.`,
loading: false,
});
observer.complete();
}
if (connectorTypeTitle === 'Amazon Bedrock') readBedrock();
else if (connectorTypeTitle === 'OpenAI') readOpenAI();
if (llmType === '.bedrock') readBedrock();
else if (llmType === '.gen-ai') readOpenAI();
else badConnector();
return () => {

View file

@ -40,7 +40,7 @@ const defaultProps = {
refetchCurrentConversation,
reader: readerComplete,
isError: false,
connectorTypeTitle: 'OpenAI',
llmType: '.gen-ai',
};
describe('useStream', () => {
beforeEach(() => {

View file

@ -13,7 +13,7 @@ interface UseStreamProps {
refetchCurrentConversation: () => void;
isError: boolean;
content?: string;
connectorTypeTitle: string;
llmType: string;
reader?: ReadableStreamDefaultReader<Uint8Array>;
}
interface UseStream {
@ -39,7 +39,7 @@ interface UseStream {
*/
export const useStream = ({
content,
connectorTypeTitle,
llmType,
reader,
refetchCurrentConversation,
isError,
@ -51,9 +51,9 @@ export const useStream = ({
const observer$ = useMemo(
() =>
content == null && reader != null
? getStreamObservable({ connectorTypeTitle, reader, setLoading, isError })
? getStreamObservable({ llmType, reader, setLoading, isError })
: getPlaceholderObservable(),
[content, isError, reader, connectorTypeTitle]
[content, isError, reader, llmType]
);
const onCompleteStream = useCallback(() => {
subscription?.unsubscribe();

View file

@ -18,7 +18,6 @@ const conversations = {
isDefault: true,
apiConfig: {
connectorId: 'my-bedrock',
connectorTypeTitle: 'Amazon Bedrock',
defaultSystemPromptId: 'default-system-prompt',
},
replacements: {
@ -49,7 +48,6 @@ const conversations = {
isDefault: true,
apiConfig: {
connectorId: 'my-gen-ai',
connectorTypeTitle: 'OpenAI',
defaultSystemPromptId: 'default-system-prompt',
},
messages: [
@ -105,7 +103,6 @@ const conversations = {
},
apiConfig: {
connectorId: 'my-gen-ai',
connectorTypeTitle: 'OpenAI',
defaultSystemPromptId: 'default-system-prompt',
},
messages: [],

View file

@ -435,7 +435,6 @@ export default function bedrockTest({ getService }: FtrProviderContext) {
message: 'Hello world',
isEnabledKnowledgeBase: false,
isEnabledRAGAlerts: false,
llmType: 'bedrock',
replacements: [],
})
.pipe(passThrough);

View file

@ -19,7 +19,6 @@ const mockRequest = {
subAction: 'invokeAI',
isEnabledKnowledgeBase: false,
isEnabledRAGAlerts: false,
llmType: 'bedrock',
replacements: [],
};
@ -85,7 +84,7 @@ export default ({ getService }: FtrProviderContext) => {
it('should execute a chat completion', async () => {
const response = await postActionsClientExecute(
openaiActionId,
{ ...mockRequest, llmType: 'openai' },
{ ...mockRequest },
supertest
);