Add support for Azure

Dario Gieselaar 2023-07-24 13:53:03 +02:00
parent e0b5911353
commit a284f45897
3 changed files with 29 additions and 21 deletions


@ -7,7 +7,7 @@
import { clone } from 'lodash';
import { useEffect, useState } from 'react';
-import { delay } from 'rxjs';
+import { concatMap, delay, of } from 'rxjs';
import { useKibana } from '@kbn/kibana-react-plugin/public';
import { i18n } from '@kbn/i18n';
import type { Message } from '../../common/types';
@ -61,22 +61,24 @@ export function useChat({ messages, connectorId }: { messages: Message[]; connec
.chat({ messages, connectorId, signal: controller.signal })
.then((response$) => {
  return new Promise<void>((resolve, reject) => {
-   const subscription = response$.pipe(delay(50)).subscribe({
-     next: (chunk) => {
-       partialResponse.content += chunk.choices[0].delta.content ?? '';
-       partialResponse.function_call.name +=
-         chunk.choices[0].delta.function_call?.name ?? '';
-       partialResponse.function_call.args +=
-         chunk.choices[0].delta.function_call?.args ?? '';
-       setResponse(clone(partialResponse));
-     },
-     error: (err) => {
-       reject(err);
-     },
-     complete: () => {
-       resolve();
-     },
-   });
+   const subscription = response$
+     .pipe(concatMap((value) => of(value).pipe(delay(50))))
+     .subscribe({
+       next: (chunk) => {
+         partialResponse.content += chunk.choices[0].delta.content ?? '';
+         partialResponse.function_call.name +=
+           chunk.choices[0].delta.function_call?.name ?? '';
+         partialResponse.function_call.args +=
+           chunk.choices[0].delta.function_call?.args ?? '';
+         setResponse(clone(partialResponse));
+       },
+       error: (err) => {
+         reject(err);
+       },
+       complete: () => {
+         resolve();
+       },
+     });
    controller.signal.addEventListener('abort', () => {
      subscription.unsubscribe();
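
Note on the rxjs change above: a bare delay(50) only shifts the whole stream by 50 ms, so chunks that are already buffered still arrive in one burst, whereas concatMap((value) => of(value).pipe(delay(50))) queues the chunks and emits one every 50 ms. A minimal standalone sketch of the difference (illustrative values, not the plugin's code):

import { concatMap, delay, from, of } from 'rxjs';

// Four pre-buffered chunks, emitted synchronously.
const chunks$ = from(['Hel', 'lo ', 'wor', 'ld']);

// delay(50): every emission is pushed back by 50 ms, but the chunks still
// arrive back to back once the delay elapses.
const shifted$ = chunks$.pipe(delay(50));

// concatMap + per-value delay: one chunk every 50 ms, which is what produces
// the incremental "typing" effect when rendering the partial response.
const paced$ = chunks$.pipe(concatMap((chunk) => of(chunk).pipe(delay(50))));

paced$.subscribe((chunk) => process.stdout.write(chunk));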


@ -55,7 +55,8 @@ export function createService(coreStart: CoreStart): ObservabilityAIAssistantSer
return readableStreamReaderIntoObservable(reader).pipe(
  map((line) => line.substring(6)),
  filter((line) => !!line && line !== '[DONE]'),
- map((line) => JSON.parse(line) as CreateChatCompletionResponseChunk)
+ map((line) => JSON.parse(line) as CreateChatCompletionResponseChunk),
+ filter((line) => line.object === 'chat.completion.chunk')
);
},
callApi: client,
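
Note on the added filter: the response is parsed line by line from the event stream, and with Azure in the mix the parsed events are presumably not all chat completion chunks, so anything whose object field is not 'chat.completion.chunk' is now dropped before it reaches subscribers. A standalone sketch of the same parsing pipeline (the line source and types below are simplified stand-ins, not the plugin's actual API):

import { filter, map, of } from 'rxjs';

interface ChatCompletionChunk {
  object: 'chat.completion.chunk';
  choices: Array<{ delta: { content?: string } }>;
}

// Example "data: ..." lines as they might come off the SSE stream.
const sseLines$ = of(
  'data: {"object":"chat.completion.chunk","choices":[{"delta":{"content":"Hi"}}]}',
  'data: {"object":"something.else","choices":[]}',
  'data: [DONE]'
);

const chunks$ = sseLines$.pipe(
  map((line) => line.substring(6)), // strip the "data: " prefix
  filter((line) => !!line && line !== '[DONE]'), // drop empties and the terminator
  map((line) => JSON.parse(line) as { object: string }),
  // Keep only chat completion chunks; any other event type is ignored.
  filter((event): event is ChatCompletionChunk => event.object === 'chat.completion.chunk')
);

chunks$.subscribe((chunk) => console.log(chunk.choices[0]?.delta.content));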


@ -14,7 +14,8 @@ import type { ActionsClient } from '@kbn/actions-plugin/server/actions_client';
import type { PublicMethodsOf } from '@kbn/utility-types';
import { internal, notFound } from '@hapi/boom';
import { compact, isEmpty, merge, omit } from 'lodash';
-import { SearchHit } from '@elastic/elasticsearch/lib/api/types';
+import type { SearchHit } from '@elastic/elasticsearch/lib/api/types';
+import { OpenAiProviderType } from '@kbn/stack-connectors-plugin/common/gen_ai/constants';
import {
type Conversation,
type ConversationCreateRequest,
@ -133,8 +134,12 @@ export class ObservabilityAIAssistantClient implements IObservabilityAIAssistant
})
);
-const request: CreateChatCompletionRequest = {
-  model: 'gpt-4',
+const connector = await this.dependencies.actionsClient.get({
+  id: connectorId,
+});
+const request: Omit<CreateChatCompletionRequest, 'model'> & { model?: string } = {
+  ...(connector.config?.apiProvider === OpenAiProviderType.OpenAi ? { model: 'gpt-4' } : {}),
  messages: messagesForOpenAI,
  stream: true,
};
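
Note on the request change above: the connector is now fetched through the actions client, and the hard-coded model: 'gpt-4' is only included when the connector's apiProvider is the plain OpenAI provider; for an Azure OpenAI connector the model is presumably fixed by the configured deployment, so the field is omitted. A simplified standalone sketch of that conditional (the connector shape and provider constant below are stand-ins, not the real OpenAiProviderType or actions-client types):

// Stand-in for the relevant part of a generative AI connector's config.
interface GenAiConnector {
  config?: { apiProvider?: string };
}

interface ChatRequest {
  model?: string;
  messages: Array<{ role: 'system' | 'user' | 'assistant'; content: string }>;
  stream: boolean;
}

// Stand-in for OpenAiProviderType.OpenAi from the stack connectors plugin.
const OPENAI_PROVIDER = 'OpenAI';

function buildChatRequest(connector: GenAiConnector, messages: ChatRequest['messages']): ChatRequest {
  return {
    // Only name a model for the OpenAI provider; an Azure deployment already
    // determines the model on the server side.
    ...(connector.config?.apiProvider === OPENAI_PROVIDER ? { model: 'gpt-4' } : {}),
    messages,
    stream: true,
  };
}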