[Security solution] ChatBedrockConverse (#200042)

Steph Milovic 2024-11-19 13:45:31 -07:00 committed by GitHub
parent 8e7799ae7a
commit 755ef312f2
19 changed files with 2436 additions and 110 deletions

View file

@@ -104,6 +104,7 @@
"@appland/sql-parser": "^1.5.1",
"@aws-crypto/sha256-js": "^5.2.0",
"@aws-crypto/util": "^5.2.0",
"@aws-sdk/client-bedrock-runtime": "^3.687.0",
"@babel/runtime": "^7.24.7",
"@dagrejs/dagre": "^1.1.4",
"@dnd-kit/core": "^6.1.0",
@@ -1019,7 +1020,8 @@
"@kbn/xstate-utils": "link:packages/kbn-xstate-utils",
"@kbn/zod": "link:packages/kbn-zod",
"@kbn/zod-helpers": "link:packages/kbn-zod-helpers",
"@langchain/community": "0.3.11",
"@langchain/aws": "^0.1.2",
"@langchain/community": "0.3.14",
"@langchain/core": "^0.3.16",
"@langchain/google-common": "^0.1.1",
"@langchain/google-genai": "^0.1.2",
@@ -1054,7 +1056,9 @@
"@slack/webhook": "^7.0.1",
"@smithy/eventstream-codec": "^3.1.1",
"@smithy/eventstream-serde-node": "^3.0.3",
"@smithy/protocol-http": "^4.0.2",
"@smithy/middleware-stack": "^3.0.10",
"@smithy/node-http-handler": "^3.3.1",
"@smithy/protocol-http": "^4.1.7",
"@smithy/signature-v4": "^3.1.1",
"@smithy/types": "^3.2.0",
"@smithy/util-utf8": "^3.0.0",

View file

@@ -11,6 +11,7 @@ import { ActionsClientLlm } from './language_models/llm';
import { ActionsClientSimpleChatModel } from './language_models/simple_chat_model';
import { ActionsClientGeminiChatModel } from './language_models/gemini_chat';
import { ActionsClientChatVertexAI } from './language_models/chat_vertex';
import { ActionsClientChatBedrockConverse } from './language_models/chat_bedrock_converse';
import { parseBedrockStream } from './utils/bedrock';
import { parseGeminiResponse } from './utils/gemini';
import { getDefaultArguments } from './language_models/constants';
@@ -25,4 +26,5 @@ export {
ActionsClientGeminiChatModel,
ActionsClientLlm,
ActionsClientSimpleChatModel,
ActionsClientChatBedrockConverse,
};

View file

@@ -9,11 +9,8 @@ import { BedrockChat as _BedrockChat } from '@langchain/community/chat_models/be
import type { ActionsClient } from '@kbn/actions-plugin/server';
import { BaseChatModelParams } from '@langchain/core/language_models/chat_models';
import { Logger } from '@kbn/logging';
import { Readable } from 'stream';
import { PublicMethodsOf } from '@kbn/utility-types';
export const DEFAULT_BEDROCK_MODEL = 'anthropic.claude-3-5-sonnet-20240620-v1:0';
export const DEFAULT_BEDROCK_REGION = 'us-east-1';
import { prepareMessages, DEFAULT_BEDROCK_MODEL, DEFAULT_BEDROCK_REGION } from '../utils/bedrock';
export interface CustomChatModelInput extends BaseChatModelParams {
actionsClient: PublicMethodsOf<ActionsClient>;
@@ -25,6 +22,11 @@ export interface CustomChatModelInput extends BaseChatModelParams {
maxTokens?: number;
}
/**
* @deprecated Use the ActionsClientChatBedrockConverse chat model instead.
* The ActionsClientBedrockChatModel chat model supports only non-streaming calls via the Bedrock Invoke API.
* The LangChain team will support only the Bedrock Converse API going forward.
*/
export class ActionsClientBedrockChatModel extends _BedrockChat {
constructor({ actionsClient, connectorId, logger, ...params }: CustomChatModelInput) {
super({
@@ -36,32 +38,10 @@ export class ActionsClientBedrockChatModel extends _BedrockChat {
fetchFn: async (url, options) => {
const inputBody = JSON.parse(options?.body as string);
if (this.streaming && !inputBody.tools?.length) {
const data = (await actionsClient.execute({
actionId: connectorId,
params: {
subAction: 'invokeStream',
subActionParams: {
messages: inputBody.messages,
temperature: params.temperature ?? inputBody.temperature,
stopSequences: inputBody.stop_sequences,
system: inputBody.system,
maxTokens: params.maxTokens ?? inputBody.max_tokens,
tools: inputBody.tools,
anthropicVersion: inputBody.anthropic_version,
},
},
})) as { data: Readable; status: string; message?: string; serviceMessage?: string };
if (data.status === 'error') {
throw new Error(
`ActionsClientBedrockChat: action result status is error: ${data?.message} - ${data?.serviceMessage}`
);
}
return {
body: Readable.toWeb(data.data),
} as unknown as Response;
if (this.streaming) {
throw new Error(
`ActionsClientBedrockChat does not support streaming, use ActionsClientChatBedrockConverse instead`
);
}
const data = (await actionsClient.execute({
@@ -84,7 +64,6 @@ export class ActionsClientBedrockChatModel extends _BedrockChat {
message?: string;
serviceMessage?: string;
};
if (data.status === 'error') {
throw new Error(
`ActionsClientBedrockChat: action result status is error: ${data?.message} - ${data?.serviceMessage}`
@@ -99,20 +78,3 @@ export class ActionsClientBedrockChatModel extends _BedrockChat {
});
}
}
const prepareMessages = (messages: Array<{ role: string; content: string[] }>) =>
messages.reduce((acc, { role, content }) => {
const lastMessage = acc[acc.length - 1];
if (!lastMessage || lastMessage.role !== role) {
acc.push({ role, content });
return acc;
}
if (lastMessage.role === role) {
acc[acc.length - 1].content = lastMessage.content.concat(content);
return acc;
}
return acc;
}, [] as Array<{ role: string; content: string[] }>);
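For orientation, a hypothetical migration sketch showing the class swap that the deprecation note above implies; actionsClient, connectorId, and logger are assumed to come from the caller's request context:

import {
  ActionsClientBedrockChatModel, // deprecated: Bedrock Invoke API, non-streaming only
  ActionsClientChatBedrockConverse, // replacement: Converse / ConverseStream, streaming supported
} from '@kbn/langchain/server';

// Before: const model = new ActionsClientBedrockChatModel({ actionsClient, connectorId, logger });
// After:
const model = new ActionsClientChatBedrockConverse({ actionsClient, connectorId, logger });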

View file

@@ -0,0 +1,37 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import {
BedrockRuntimeClient as _BedrockRuntimeClient,
BedrockRuntimeClientConfig,
} from '@aws-sdk/client-bedrock-runtime';
import { constructStack } from '@smithy/middleware-stack';
import { PublicMethodsOf } from '@kbn/utility-types';
import type { ActionsClient } from '@kbn/actions-plugin/server';
import { NodeHttpHandler } from './node_http_handler';
export interface CustomChatModelInput extends BedrockRuntimeClientConfig {
actionsClient: PublicMethodsOf<ActionsClient>;
connectorId: string;
streaming?: boolean;
}
export class BedrockRuntimeClient extends _BedrockRuntimeClient {
middlewareStack: _BedrockRuntimeClient['middlewareStack'];
constructor({ actionsClient, connectorId, ...fields }: CustomChatModelInput) {
super(fields ?? {});
this.config.requestHandler = new NodeHttpHandler({
streaming: fields.streaming ?? true,
actionsClient,
connectorId,
});
// eliminate middleware steps that handle auth, since the Kibana connector handles auth
this.middlewareStack = constructStack() as _BedrockRuntimeClient['middlewareStack'];
}
}
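Because the middleware stack is emptied and the request handler is swapped, AWS SDK commands sent through this client should still be serialized by the command-level serde middleware, but the resulting request is forwarded to the Kibana Bedrock connector rather than signed and sent to AWS directly. An illustrative sketch, assuming actionsClient and connectorId are in scope and using a hypothetical ConverseCommand payload:

import { ConverseCommand } from '@aws-sdk/client-bedrock-runtime';

// BedrockRuntimeClient is the custom class above; region mirrors DEFAULT_BEDROCK_REGION.
const client = new BedrockRuntimeClient({
  actionsClient,
  connectorId,
  streaming: false,
  region: 'us-east-1',
});

// The NodeHttpHandler override routes the serialized body to the connector's converse
// sub-action (or converseStream when streaming is true).
const response = await client.send(
  new ConverseCommand({
    modelId: 'anthropic.claude-3-5-sonnet-20240620-v1:0',
    system: [{ text: 'You are a helpful assistant.' }],
    messages: [{ role: 'user', content: [{ text: 'Hello' }] }],
    inferenceConfig: { temperature: 0 },
  })
);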

View file

@@ -0,0 +1,50 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { ChatBedrockConverse } from '@langchain/aws';
import type { ActionsClient } from '@kbn/actions-plugin/server';
import { BaseChatModelParams } from '@langchain/core/language_models/chat_models';
import { Logger } from '@kbn/logging';
import { PublicMethodsOf } from '@kbn/utility-types';
import { BedrockRuntimeClient } from './bedrock_runtime_client';
import { DEFAULT_BEDROCK_MODEL, DEFAULT_BEDROCK_REGION } from '../../utils/bedrock';
export interface CustomChatModelInput extends BaseChatModelParams {
actionsClient: PublicMethodsOf<ActionsClient>;
connectorId: string;
logger: Logger;
signal?: AbortSignal;
model?: string;
}
/**
* Custom chat model class for Bedrock Converse API.
* The ActionsClientChatBedrockConverse chat model supports streaming and
* non-streaming via the Bedrock Converse and ConverseStream APIs.
*
* @param {Object} params - The parameters for the chat model.
* @param {ActionsClient} params.actionsClient - The actions client.
* @param {string} params.connectorId - The connector ID.
* @param {Logger} params.logger - The logger instance.
* @param {AbortSignal} [params.signal] - Optional abort signal.
* @param {string} [params.model] - Optional model name.
*/
export class ActionsClientChatBedrockConverse extends ChatBedrockConverse {
constructor({ actionsClient, connectorId, logger, ...fields }: CustomChatModelInput) {
super({
...(fields ?? {}),
credentials: { accessKeyId: '', secretAccessKey: '' },
model: fields?.model ?? DEFAULT_BEDROCK_MODEL,
region: DEFAULT_BEDROCK_REGION,
});
this.client = new BedrockRuntimeClient({
actionsClient,
connectorId,
streaming: this.streaming,
region: DEFAULT_BEDROCK_REGION,
});
}
}
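A minimal usage sketch, assuming actionsClient, connectorId, and logger come from the request context; once constructed, the model behaves like any other LangChain chat model:

import { HumanMessage } from '@langchain/core/messages';
import { ActionsClientChatBedrockConverse } from '@kbn/langchain/server';

const chatModel = new ActionsClientChatBedrockConverse({ actionsClient, connectorId, logger });

// Standard LangChain invocation; the request is routed through the Kibana Bedrock connector
// via the custom BedrockRuntimeClient and its converse / converseStream sub-actions.
const aiMessage = await chatModel.invoke([new HumanMessage('Summarize the latest alerts.')]);
logger.info(String(aiMessage.content));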

View file

@@ -0,0 +1,10 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { ActionsClientChatBedrockConverse } from './chat_bedrock_converse';
export { ActionsClientChatBedrockConverse };

View file

@@ -0,0 +1,125 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { NodeHttpHandler } from './node_http_handler';
import { HttpRequest } from '@smithy/protocol-http';
import { actionsClientMock } from '@kbn/actions-plugin/server/actions_client/actions_client.mock';
import { Readable } from 'stream';
import { fromUtf8 } from '@smithy/util-utf8';
const mockActionsClient = actionsClientMock.create();
const connectorId = 'mock-connector-id';
const mockOutput = {
output: {
message: {
role: 'assistant',
content: [{ text: 'This is a response from the assistant.' }],
},
},
stopReason: 'end_turn',
usage: { inputTokens: 10, outputTokens: 20, totalTokens: 30 },
metrics: { latencyMs: 123 },
additionalModelResponseFields: {},
trace: { guardrail: { modelOutput: ['Output text'] } },
};
describe('NodeHttpHandler', () => {
let handler: NodeHttpHandler;
beforeEach(() => {
jest.clearAllMocks();
handler = new NodeHttpHandler({
streaming: false,
actionsClient: mockActionsClient,
connectorId,
});
mockActionsClient.execute.mockResolvedValue({
data: mockOutput,
actionId: 'mock-action-id',
status: 'ok',
});
});
it('handles non-streaming requests successfully', async () => {
const request = new HttpRequest({
body: JSON.stringify({ messages: [] }),
});
const result = await handler.handle(request);
expect(result.response.statusCode).toBe(200);
expect(result.response.headers['content-type']).toBe('application/json');
expect(result.response.body).toStrictEqual(fromUtf8(JSON.stringify(mockOutput)));
});
it('handles streaming requests successfully', async () => {
handler = new NodeHttpHandler({
streaming: true,
actionsClient: mockActionsClient,
connectorId,
});
const request = new HttpRequest({
body: JSON.stringify({ messages: [] }),
});
const readable = new Readable();
readable.push('streaming data');
readable.push(null);
mockActionsClient.execute.mockResolvedValue({
data: readable,
status: 'ok',
actionId: 'mock-action-id',
});
const result = await handler.handle(request);
expect(result.response.statusCode).toBe(200);
expect(result.response.body).toBe(readable);
});
it('throws an error for non-streaming requests with error status', async () => {
const request = new HttpRequest({
body: JSON.stringify({ messages: [] }),
});
mockActionsClient.execute.mockResolvedValue({
status: 'error',
message: 'error message',
serviceMessage: 'service error message',
actionId: 'mock-action-id',
});
await expect(handler.handle(request)).rejects.toThrow(
'ActionsClientBedrockChat: action result status is error: error message - service error message'
);
});
it('throws an error for streaming requests with error status', async () => {
handler = new NodeHttpHandler({
streaming: true,
actionsClient: mockActionsClient,
connectorId,
});
const request = new HttpRequest({
body: JSON.stringify({ messages: [] }),
});
mockActionsClient.execute.mockResolvedValue({
status: 'error',
message: 'error message',
serviceMessage: 'service error message',
actionId: 'mock-action-id',
});
await expect(handler.handle(request)).rejects.toThrow(
'ActionsClientBedrockChat: action result status is error: error message - service error message'
);
});
});

View file

@@ -0,0 +1,88 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { NodeHttpHandler as _NodeHttpHandler } from '@smithy/node-http-handler';
import { HttpRequest, HttpResponse } from '@smithy/protocol-http';
import { HttpHandlerOptions, NodeHttpHandlerOptions } from '@smithy/types';
import { PublicMethodsOf } from '@kbn/utility-types';
import type { ActionsClient } from '@kbn/actions-plugin/server';
import { Readable } from 'stream';
import { fromUtf8 } from '@smithy/util-utf8';
import { ConverseResponse } from '@aws-sdk/client-bedrock-runtime';
import { prepareMessages } from '../../utils/bedrock';
interface NodeHandlerOptions extends NodeHttpHandlerOptions {
streaming: boolean;
actionsClient: PublicMethodsOf<ActionsClient>;
connectorId: string;
}
export class NodeHttpHandler extends _NodeHttpHandler {
streaming: boolean;
actionsClient: PublicMethodsOf<ActionsClient>;
connectorId: string;
constructor(options: NodeHandlerOptions) {
super(options);
this.streaming = options.streaming;
this.actionsClient = options.actionsClient;
this.connectorId = options.connectorId;
}
async handle(
request: HttpRequest,
options: HttpHandlerOptions = {}
): Promise<{ response: HttpResponse }> {
const body = JSON.parse(request.body);
const messages = prepareMessages(body.messages);
if (this.streaming) {
const data = (await this.actionsClient.execute({
actionId: this.connectorId,
params: {
subAction: 'converseStream',
subActionParams: { ...body, messages, signal: options.abortSignal },
},
})) as { data: Readable; status: string; message?: string; serviceMessage?: string };
if (data.status === 'error') {
throw new Error(
`ActionsClientBedrockChat: action result status is error: ${data?.message} - ${data?.serviceMessage}`
);
}
return {
response: {
statusCode: 200,
headers: {},
body: data.data,
},
};
}
const data = (await this.actionsClient.execute({
actionId: this.connectorId,
params: {
subAction: 'converse',
subActionParams: { ...body, messages, signal: options.abortSignal },
},
})) as { data: ConverseResponse; status: string; message?: string; serviceMessage?: string };
if (data.status === 'error') {
throw new Error(
`ActionsClientBedrockChat: action result status is error: ${data?.message} - ${data?.serviceMessage}`
);
}
return {
response: {
statusCode: 200,
headers: { 'content-type': 'application/json' },
body: fromUtf8(JSON.stringify(data.data)),
},
};
}
}

View file

@@ -222,3 +222,27 @@ function parseContent(content: Array<{ text?: string; type: string }>): string {
}
return parsedContent;
}
/**
* Prepares messages for the Bedrock API by merging consecutive messages from the same role
* @param messages The conversation messages to normalize
*/
export const prepareMessages = (messages: Array<{ role: string; content: string[] }>) =>
messages.reduce((acc, { role, content }) => {
const lastMessage = acc[acc.length - 1];
if (!lastMessage || lastMessage.role !== role) {
acc.push({ role, content });
return acc;
}
if (lastMessage.role === role) {
acc[acc.length - 1].content = lastMessage.content.concat(content);
return acc;
}
return acc;
}, [] as Array<{ role: string; content: string[] }>);
export const DEFAULT_BEDROCK_MODEL = 'anthropic.claude-3-5-sonnet-20240620-v1:0';
export const DEFAULT_BEDROCK_REGION = 'us-east-1';
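A short worked example with hypothetical input: prepareMessages merges consecutive messages from the same role by concatenating their content arrays:

const merged = prepareMessages([
  { role: 'user', content: ['first question'] },
  { role: 'user', content: ['follow-up detail'] },
  { role: 'assistant', content: ['answer'] },
]);
// merged is:
// [
//   { role: 'user', content: ['first question', 'follow-up detail'] },
//   { role: 'assistant', content: ['answer'] },
// ]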

View file

@@ -18,7 +18,7 @@ const BASE_GEMINI_PROMPT =
const KB_CATCH =
'If the knowledge base tool gives empty results, do your best to answer the question from the perspective of an expert security analyst.';
export const GEMINI_SYSTEM_PROMPT = `${BASE_GEMINI_PROMPT} ${KB_CATCH}`;
export const BEDROCK_SYSTEM_PROMPT = `Use tools as often as possible, as they have access to the latest data and syntax. Always return value from ESQLKnowledgeBaseTool as is. Never return <thinking> tags in the response, but make sure to include <result> tags content in the response. Do not reflect on the quality of the returned search results in your response.`;
export const BEDROCK_SYSTEM_PROMPT = `Use tools as often as possible, as they have access to the latest data and syntax. Always return value from NaturalLanguageESQLTool as is. Never return <thinking> tags in the response, but make sure to include <result> tags content in the response. Do not reflect on the quality of the returned search results in your response.`;
export const GEMINI_USER_PROMPT = `Now, always using the tools at your disposal, step by step, come up with a response to this request:\n\n`;
export const STRUCTURED_SYSTEM_PROMPT = `Respond to the human as helpfully and accurately as possible. ${KNOWLEDGE_HISTORY} You have access to the following tools:

View file

@@ -15,7 +15,7 @@ import type {
} from '@kbn/core/server';
import {
ActionsClientChatOpenAI,
ActionsClientBedrockChatModel,
ActionsClientChatBedrockConverse,
ActionsClientChatVertexAI,
} from '@kbn/langchain/server';
import { Connector } from '@kbn/actions-plugin/server/application/connector/types';
@@ -184,7 +184,7 @@ export const getLlmType = (actionTypeId: string): string | undefined => {
export const getLlmClass = (llmType?: string) => {
switch (llmType) {
case 'bedrock':
return ActionsClientBedrockChatModel;
return ActionsClientChatBedrockConverse;
case 'gemini':
return ActionsClientChatVertexAI;
case 'openai':

View file

@@ -38,7 +38,7 @@ import {
LicensingPluginStart,
} from '@kbn/licensing-plugin/server';
import {
ActionsClientBedrockChatModel,
ActionsClientChatBedrockConverse,
ActionsClientChatOpenAI,
ActionsClientChatVertexAI,
ActionsClientGeminiChatModel,
@@ -215,7 +215,7 @@ export interface AssistantTool {
}
export type AssistantToolLlm =
| ActionsClientBedrockChatModel
| ActionsClientChatBedrockConverse
| ActionsClientChatOpenAI
| ActionsClientGeminiChatModel
| ActionsClientChatVertexAI;

View file

@@ -33,10 +33,7 @@ export type ActionsClientChatModelClass =
export type ChatModelParams = Partial<ActionsClientSimpleChatModelParams> &
Partial<ActionsClientChatOpenAIParams> &
Partial<ActionsClientBedrockChatModelParams> &
Partial<ActionsClientChatVertexAIParams> & {
/** Enables the streaming mode of the response, disabled by default */
streaming?: boolean;
};
Partial<ActionsClientChatVertexAIParams>;
const llmTypeDictionary: Record<string, string> = {
[`.gen-ai`]: `openai`,
@@ -67,7 +64,7 @@ export class ActionsClientChat {
llmType,
model: connector.config?.defaultModel,
...params,
streaming: params?.streaming ?? false, // disabling streaming by default, for some reason is enabled when omitted
streaming: false, // disabling streaming by default
});
return model;
}

View file

@@ -21,6 +21,8 @@ export enum SUB_ACTION {
INVOKE_STREAM = 'invokeStream',
DASHBOARD = 'getDashboard',
TEST = 'test',
CONVERSE = 'converse',
CONVERSE_STREAM = 'converseStream',
}
export const DEFAULT_TIMEOUT_MS = 120000;

View file

@@ -26,6 +26,11 @@ export const RunActionParamsSchema = schema.object({
signal: schema.maybe(schema.any()),
timeout: schema.maybe(schema.number()),
raw: schema.maybe(schema.boolean()),
apiType: schema.maybe(
schema.oneOf([schema.literal('converse'), schema.literal('invoke')], {
defaultValue: 'invoke',
})
),
});
export const BedrockMessageSchema = schema.object(
@@ -148,3 +153,54 @@ export const DashboardActionParamsSchema = schema.object({
export const DashboardActionResponseSchema = schema.object({
available: schema.boolean(),
});
export const ConverseActionParamsSchema = schema.object({
// Bedrock API Properties
modelId: schema.maybe(schema.string()),
messages: schema.arrayOf(
schema.object({
role: schema.string(),
content: schema.any(),
})
),
system: schema.arrayOf(
schema.object({
text: schema.string(),
})
),
inferenceConfig: schema.object({
temperature: schema.maybe(schema.number()),
maxTokens: schema.maybe(schema.number()),
stopSequences: schema.maybe(schema.arrayOf(schema.string())),
topP: schema.maybe(schema.number()),
}),
toolConfig: schema.maybe(
schema.object({
tools: schema.arrayOf(
schema.object({
toolSpec: schema.object({
name: schema.string(),
description: schema.string(),
inputSchema: schema.object({
json: schema.object({
type: schema.string(),
properties: schema.object({}, { unknowns: 'allow' }),
required: schema.maybe(schema.arrayOf(schema.string())),
additionalProperties: schema.boolean(),
$schema: schema.maybe(schema.string()),
}),
}),
}),
})
),
toolChoice: schema.maybe(schema.object({}, { unknowns: 'allow' })),
})
),
additionalModelRequestFields: schema.maybe(schema.any()),
additionalModelResponseFieldPaths: schema.maybe(schema.any()),
guardrailConfig: schema.maybe(schema.any()),
// Kibana related properties
signal: schema.maybe(schema.any()),
});
export const ConverseActionResponseSchema = schema.object({}, { unknowns: 'allow' });

View file

@@ -21,6 +21,8 @@ import {
RunApiLatestResponseSchema,
BedrockMessageSchema,
BedrockToolChoiceSchema,
ConverseActionParamsSchema,
ConverseActionResponseSchema,
} from './schema';
export type Config = TypeOf<typeof ConfigSchema>;
@@ -37,3 +39,5 @@ export type DashboardActionParams = TypeOf<typeof DashboardActionParamsSchema>;
export type DashboardActionResponse = TypeOf<typeof DashboardActionResponseSchema>;
export type BedrockMessage = TypeOf<typeof BedrockMessageSchema>;
export type BedrockToolChoice = TypeOf<typeof BedrockToolChoiceSchema>;
export type ConverseActionParams = TypeOf<typeof ConverseActionParamsSchema>;
export type ConverseActionResponse = TypeOf<typeof ConverseActionResponseSchema>;

View file

@@ -21,8 +21,9 @@ import {
StreamingResponseSchema,
RunActionResponseSchema,
RunApiLatestResponseSchema,
ConverseActionParamsSchema,
} from '../../../common/bedrock/schema';
import type {
import {
Config,
Secrets,
RunActionParams,
@@ -34,6 +35,8 @@ import type {
RunApiLatestResponse,
BedrockMessage,
BedrockToolChoice,
ConverseActionParams,
ConverseActionResponse,
} from '../../../common/bedrock/types';
import {
SUB_ACTION,
@@ -103,6 +106,18 @@ export class BedrockConnector extends SubActionConnector<Config, Secrets> {
method: 'invokeAIRaw',
schema: InvokeAIRawActionParamsSchema,
});
this.registerSubAction({
name: SUB_ACTION.CONVERSE,
method: 'converse',
schema: ConverseActionParamsSchema,
});
this.registerSubAction({
name: SUB_ACTION.CONVERSE_STREAM,
method: 'converseStream',
schema: ConverseActionParamsSchema,
});
}
protected getResponseErrorMessage(error: AxiosError<{ message?: string }>): string {
@@ -222,14 +237,18 @@ The Kibana Connector in use may need to be reconfigured with an updated Amazon B
* responsible for making a POST request to the external API endpoint and returning the response data
* @param body The stringified request body to be sent in the POST request.
* @param model Optional model to be used for the API request. If not provided, the default model from the connector will be used.
* @param signal Optional signal to cancel the request.
* @param timeout Optional timeout for the request.
* @param raw Optional flag to indicate if the response should be returned as raw data.
* @param apiType Optional type of API to call. Defaults to 'invoke'.
*/
public async runApi(
{ body, model: reqModel, signal, timeout, raw }: RunActionParams,
{ body, model: reqModel, signal, timeout, raw, apiType = 'invoke' }: RunActionParams,
connectorUsageCollector: ConnectorUsageCollector
): Promise<RunActionResponse | InvokeAIRawActionResponse> {
// set model on per request basis
const currentModel = reqModel ?? this.model;
const path = `/model/${currentModel}/invoke`;
const path = `/model/${currentModel}/${apiType}`;
const signed = this.signRequest(body, path, false);
const requestArgs = {
...signed,
@@ -262,18 +281,22 @@ The Kibana Connector in use may need to be reconfigured with an updated Amazon B
/**
* NOT INTENDED TO BE CALLED DIRECTLY
* call invokeStream instead
* call invokeStream or converseStream instead
* responsible for making a POST request to a specified URL with a given request body.
* The response is then processed based on whether it is a streaming response or a regular response.
* @param body The stringified request body to be sent in the POST request.
* @param model Optional model to be used for the API request. If not provided, the default model from the connector will be used.
*/
private async streamApi(
{ body, model: reqModel, signal, timeout }: RunActionParams,
{ body, model: reqModel, signal, timeout, apiType = 'invoke' }: RunActionParams,
connectorUsageCollector: ConnectorUsageCollector
): Promise<StreamingResponse> {
const streamingApiRoute = {
invoke: 'invoke-with-response-stream',
converse: 'converse-stream',
};
// set model on per request basis
const path = `/model/${reqModel ?? this.model}/invoke-with-response-stream`;
const path = `/model/${reqModel ?? this.model}/${streamingApiRoute[apiType]}`;
const signed = this.signRequest(body, path, true);
const response = await this.request(
@@ -312,7 +335,7 @@ The Kibana Connector in use may need to be reconfigured with an updated Amazon B
timeout,
tools,
toolChoice,
}: InvokeAIActionParams | InvokeAIRawActionParams,
}: InvokeAIRawActionParams,
connectorUsageCollector: ConnectorUsageCollector
): Promise<IncomingMessage> {
const res = (await this.streamApi(
@@ -411,6 +434,50 @@ The Kibana Connector in use may need to be reconfigured with an updated Amazon B
);
return res;
}
/**
* Sends a request to the Bedrock API to perform a conversation action.
* @param input - The parameters for the conversation action.
* @param connectorUsageCollector - The usage collector for the connector.
* @returns A promise that resolves to the response of the conversation action.
*/
public async converse(
{ signal, ...converseApiInput }: ConverseActionParams,
connectorUsageCollector: ConnectorUsageCollector
): Promise<ConverseActionResponse> {
const res = await this.runApi(
{
body: JSON.stringify(converseApiInput),
raw: true,
apiType: 'converse',
signal,
},
connectorUsageCollector
);
return res;
}
/**
* Sends a request to the Bedrock API to perform a streaming conversation action.
* @param input - The parameters for the streaming conversation action.
* @param connectorUsageCollector - The usage collector for the connector.
* @returns A promise that resolves to the streaming response of the conversation action.
*/
public async converseStream(
{ signal, ...converseApiInput }: ConverseActionParams,
connectorUsageCollector: ConnectorUsageCollector
): Promise<IncomingMessage> {
const res = await this.streamApi(
{
body: JSON.stringify(converseApiInput),
apiType: 'converse',
signal,
},
connectorUsageCollector
);
return res;
}
}
const formatBedrockBody = ({
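An illustrative sketch of exercising the new sub-actions directly through an actions client, assuming a configured Amazon Bedrock connector; the payload shape follows ConverseActionParamsSchema, and the connector maps it to the /model/{modelId}/converse or /model/{modelId}/converse-stream endpoint:

const result = await actionsClient.execute({
  actionId: connectorId, // id of a configured Amazon Bedrock connector (assumed)
  params: {
    subAction: 'converse', // use 'converseStream' for a streamed response
    subActionParams: {
      messages: [{ role: 'user', content: [{ text: 'Hello' }] }],
      system: [{ text: 'You are a helpful assistant.' }],
      inferenceConfig: { temperature: 0, maxTokens: 512 },
    },
  },
});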

yarn.lock: 968 changes (file diff suppressed because it is too large)