[Security solution] Improve AI connector error handling (#167674)

Steph Milovic 2023-10-02 15:59:50 -06:00 committed by GitHub
parent 40c3ebb5dc
commit 2174a95807
20 changed files with 312 additions and 53 deletions

View file

@@ -84,7 +84,7 @@ describe('fetchConnectorExecuteAction', () => {
const result = await fetchConnectorExecuteAction(testProps);
expect(result).toBe(API_ERROR);
expect(result).toEqual({ response: API_ERROR, isError: true });
});
it('returns API_ERROR when there are no choices', async () => {
@@ -98,15 +98,15 @@ describe('fetchConnectorExecuteAction', () => {
const result = await fetchConnectorExecuteAction(testProps);
expect(result).toBe(API_ERROR);
expect(result).toEqual({ response: API_ERROR, isError: true });
});
it('returns the value of the action_input property when assistantLangChain is true, and `content` has properly prefixed and suffixed JSON with the action_input property', async () => {
const content = '```json\n{"action_input": "value from action_input"}\n```';
const response = '```json\n{"action_input": "value from action_input"}\n```';
(mockHttp.fetch as jest.Mock).mockResolvedValue({
status: 'ok',
data: content,
data: response,
});
const testProps: FetchConnectorExecuteAction = {
@@ -118,15 +118,15 @@ describe('fetchConnectorExecuteAction', () => {
const result = await fetchConnectorExecuteAction(testProps);
expect(result).toBe('value from action_input');
expect(result).toEqual({ response: 'value from action_input', isError: false });
});
it('returns the original content when assistantLangChain is true, and `content` has properly formatted JSON WITHOUT the action_input property', async () => {
const content = '```json\n{"some_key": "some value"}\n```';
const response = '```json\n{"some_key": "some value"}\n```';
(mockHttp.fetch as jest.Mock).mockResolvedValue({
status: 'ok',
data: content,
data: response,
});
const testProps: FetchConnectorExecuteAction = {
@@ -138,15 +138,15 @@ describe('fetchConnectorExecuteAction', () => {
const result = await fetchConnectorExecuteAction(testProps);
expect(result).toBe(content);
expect(result).toEqual({ response, isError: false });
});
it('returns the original when assistantLangChain is true, and `content` is not JSON', async () => {
const content = 'plain text content';
const response = 'plain text content';
(mockHttp.fetch as jest.Mock).mockResolvedValue({
status: 'ok',
data: content,
data: response,
});
const testProps: FetchConnectorExecuteAction = {
@@ -158,6 +158,6 @@ describe('fetchConnectorExecuteAction', () => {
const result = await fetchConnectorExecuteAction(testProps);
expect(result).toBe(content);
expect(result).toEqual({ response, isError: false });
});
});

View file

@@ -23,13 +23,18 @@ export interface FetchConnectorExecuteAction {
signal?: AbortSignal | undefined;
}
export interface FetchConnectorExecuteResponse {
response: string;
isError: boolean;
}
export const fetchConnectorExecuteAction = async ({
assistantLangChain,
http,
messages,
apiConfig,
signal,
}: FetchConnectorExecuteAction): Promise<string> => {
}: FetchConnectorExecuteAction): Promise<FetchConnectorExecuteResponse> => {
const outboundMessages = messages.map((msg) => ({
role: msg.role,
content: msg.content,
@@ -61,25 +66,41 @@ export const fetchConnectorExecuteAction = async ({
? `/internal/elastic_assistant/actions/connector/${apiConfig?.connectorId}/_execute`
: `/api/actions/connector/${apiConfig?.connectorId}/_execute`;
const response = await http.fetch<{ connector_id: string; status: string; data: string }>(
path,
{
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify(requestBody),
signal,
}
);
const response = await http.fetch<{
connector_id: string;
status: string;
data: string;
service_message?: string;
}>(path, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify(requestBody),
signal,
});
if (response.status !== 'ok' || !response.data) {
return API_ERROR;
if (response.service_message) {
return {
response: `${API_ERROR}\n\n${response.service_message}`,
isError: true,
};
}
return {
response: API_ERROR,
isError: true,
};
}
return assistantLangChain ? getFormattedMessageContent(response.data) : response.data;
return {
response: assistantLangChain ? getFormattedMessageContent(response.data) : response.data,
isError: false,
};
} catch (error) {
return API_ERROR;
return {
response: API_ERROR,
isError: true,
};
}
};
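Note on the client-side change above: `fetchConnectorExecuteAction` now resolves to a `{ response, isError }` object instead of a bare string, so callers can branch on `isError` rather than comparing the result against `API_ERROR`. A minimal, self-contained sketch of that usage (the `handleAssistantReply` helper is illustrative only, not part of this commit):

```ts
// Hypothetical helper, for illustration only: shows how a caller can consume
// the FetchConnectorExecuteResponse shape introduced in this commit.
interface FetchConnectorExecuteResponse {
  response: string;
  isError: boolean;
}

const handleAssistantReply = (reply: FetchConnectorExecuteResponse): string => {
  if (reply.isError) {
    // On failure, `response` already contains API_ERROR plus any
    // service_message detail appended by fetchConnectorExecuteAction.
    return `Assistant error: ${reply.response}`;
  }
  // On success, `response` is the (optionally LangChain-formatted) model output.
  return reply.response;
};

// Example: an error reply shaped like the one produced above.
console.log(
  handleAssistantReply({ response: 'An error occurred sending your message.', isError: true })
);
```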

View file

@@ -44,7 +44,7 @@ export const testProps: UseChatSendProps = {
setSelectedPromptContexts,
setUserPrompt,
};
const robotMessage = 'Response message from the robot';
const robotMessage = { response: 'Response message from the robot', isError: false };
describe('use chat send', () => {
beforeEach(() => {
jest.clearAllMocks();
@@ -88,7 +88,7 @@ describe('use chat send', () => {
`You are a helpful, expert assistant who answers questions about Elastic Security. Do not answer questions unrelated to Elastic Security.\nIf you answer a question related to KQL or EQL, it should be immediately usable within an Elastic Security timeline; please always format the output correctly with back ticks. Any answer provided for Query DSL should also be usable in a security timeline. This means you should only ever include the "filter" portion of the query.\nUse the following context to answer questions:\n\n\n\n${promptText}`
);
expect(appendMessageSend.message.role).toEqual('user');
expect(appendMessageResponse.message.content).toEqual(robotMessage);
expect(appendMessageResponse.message.content).toEqual(robotMessage.response);
expect(appendMessageResponse.message.role).toEqual('assistant');
});
});

View file

@@ -6,23 +6,27 @@
*/
import { ActionConnector } from '@kbn/triggers-actions-ui-plugin/public';
import { FetchConnectorExecuteResponse } from './api';
import { Conversation } from '../..';
import type { Message } from '../assistant_context/types';
import { enterpriseMessaging, WELCOME_CONVERSATION } from './use_conversation/sample_conversations';
export const getMessageFromRawResponse = (rawResponse: string): Message => {
export const getMessageFromRawResponse = (rawResponse: FetchConnectorExecuteResponse): Message => {
const { response, isError } = rawResponse;
const dateTimeString = new Date().toLocaleString(); // TODO: Pull from response
if (rawResponse) {
return {
role: 'assistant',
content: rawResponse,
content: response,
timestamp: dateTimeString,
isError,
};
} else {
return {
role: 'assistant',
content: 'Error: Response from LLM API is empty or undefined.',
timestamp: dateTimeString,
isError: true,
};
}
};

View file

@@ -37,8 +37,7 @@ export const SUBMIT_MESSAGE = i18n.translate('xpack.elasticAssistant.assistant.s
});
export const API_ERROR = i18n.translate('xpack.elasticAssistant.assistant.apiErrorTitle', {
defaultMessage:
'An error occurred sending your message. If the problem persists, please test the connector configuration.',
defaultMessage: 'An error occurred sending your message.',
});
export const TOOLTIP_ARIA_LABEL = i18n.translate(

View file

@@ -11,7 +11,7 @@ import { HttpSetup } from '@kbn/core-http-browser';
import { useAssistantContext } from '../../assistant_context';
import { Conversation, Message } from '../../assistant_context/types';
import { fetchConnectorExecuteAction } from '../api';
import { fetchConnectorExecuteAction, FetchConnectorExecuteResponse } from '../api';
interface SendMessagesProps {
http: HttpSetup;
@ -21,7 +21,11 @@ interface SendMessagesProps {
interface UseSendMessages {
isLoading: boolean;
sendMessages: ({ apiConfig, http, messages }: SendMessagesProps) => Promise<string>;
sendMessages: ({
apiConfig,
http,
messages,
}: SendMessagesProps) => Promise<FetchConnectorExecuteResponse>;
}
export const useSendMessages = (): UseSendMessages => {

View file

@@ -17,6 +17,7 @@ export interface Message {
role: ConversationRole;
content: string;
timestamp: string;
isError?: boolean;
presentation?: MessagePresentation;
}

View file

@@ -0,0 +1,51 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { getComments } from '.';
import type { ConversationRole } from '@kbn/elastic-assistant/impl/assistant_context/types';
const user: ConversationRole = 'user';
describe('getComments', () => {
it('Does not add error state when message has no error', () => {
const currentConversation = {
apiConfig: {},
id: '1',
messages: [
{
role: user,
content: 'Hello {name}',
timestamp: '2022-01-01',
isError: false,
},
],
};
const lastCommentRef = { current: null };
const showAnonymizedValues = false;
const result = getComments({ currentConversation, lastCommentRef, showAnonymizedValues });
expect(result[0].eventColor).toEqual(undefined);
});
it('Adds error state when message has error', () => {
const currentConversation = {
apiConfig: {},
id: '1',
messages: [
{
role: user,
content: 'Hello {name}',
timestamp: '2022-01-01',
isError: true,
},
],
};
const lastCommentRef = { current: null };
const showAnonymizedValues = false;
const result = getComments({ currentConversation, lastCommentRef, showAnonymizedValues });
expect(result[0].eventColor).toEqual('danger');
});
});

View file

@@ -7,10 +7,12 @@
import type { EuiCommentProps } from '@elastic/eui';
import type { Conversation } from '@kbn/elastic-assistant';
import { EuiAvatar, EuiMarkdownFormat, EuiText } from '@elastic/eui';
import { EuiAvatar, EuiMarkdownFormat, EuiText, tint } from '@elastic/eui';
import React from 'react';
import { AssistantAvatar } from '@kbn/elastic-assistant';
import { css } from '@emotion/react/dist/emotion-react.cjs';
import { euiThemeVars } from '@kbn/ui-theme';
import { CommentActions } from '../comment_actions';
import * as i18n from './translations';
@@ -64,5 +66,19 @@ export const getComments = ({
message.timestamp.length === 0 ? new Date().toLocaleString() : message.timestamp
),
username: isUser ? i18n.YOU : i18n.ASSISTANT,
...(message.isError
? {
eventColor: 'danger',
css: css`
.euiCommentEvent {
border: 1px solid ${tint(euiThemeVars.euiColorDanger, 0.75)};
}
.euiCommentEvent__header {
padding: 0 !important;
border-block-end: 1px solid ${tint(euiThemeVars.euiColorDanger, 0.75)};
}
`,
}
: {}),
};
});
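Taken together with the API and helper changes earlier in this commit, the error now flows end to end roughly as sketched below. This is a simplified illustration under the shapes shown in the hunks above, not verbatim commit code; the object literals stand in for the real Message and EuiCommentProps values.

```ts
// Simplified sketch of the end-to-end error path; names and literals are
// abbreviated for illustration.
const raw = { response: 'An error occurred sending your message.', isError: true };

// getMessageFromRawResponse copies isError onto the appended assistant Message.
const message = {
  role: 'assistant' as const,
  content: raw.response,
  timestamp: new Date().toLocaleString(),
  isError: raw.isError,
};

// getComments then flags the rendered comment, which picks up the danger
// border styling added above.
const comment = {
  username: 'Assistant',
  ...(message.isError ? { eventColor: 'danger' as const } : {}),
};

console.log(comment.eventColor); // 'danger'
```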

View file

@@ -26,7 +26,7 @@ beforeAll(() => {
describe('actionTypeRegistry.get() works', () => {
test('connector type static data is as expected', () => {
expect(actionTypeModel.id).toEqual(ACTION_TYPE_ID);
expect(actionTypeModel.selectMessage).toBe('Send a request to AWS Bedrock systems.');
expect(actionTypeModel.selectMessage).toBe('Send a request to AWS Bedrock.');
expect(actionTypeModel.actionTypeTitle).toBe('AWS Bedrock');
});
});

View file

@@ -21,7 +21,7 @@ export function getConnectorType(): BedrockConnector {
id: BEDROCK_CONNECTOR_ID,
iconClass: lazy(() => import('./logo')),
selectMessage: i18n.translate('xpack.stackConnectors.components.bedrock.selectMessageText', {
defaultMessage: 'Send a request to AWS Bedrock systems.',
defaultMessage: 'Send a request to AWS Bedrock.',
}),
actionTypeTitle: BEDROCK_TITLE,
validateParams: async (

View file

@@ -26,7 +26,9 @@ beforeAll(() => {
describe('actionTypeRegistry.get() works', () => {
test('connector type static data is as expected', () => {
expect(actionTypeModel.id).toEqual(ACTION_TYPE_ID);
expect(actionTypeModel.selectMessage).toBe('Send a request to OpenAI systems.');
expect(actionTypeModel.selectMessage).toBe(
'Send a request to an OpenAI or Azure OpenAI service.'
);
expect(actionTypeModel.actionTypeTitle).toBe('OpenAI');
});
});

View file

@@ -21,7 +21,7 @@ export function getConnectorType(): OpenAIConnector {
id: OPENAI_CONNECTOR_ID,
iconClass: lazy(() => import('./logo')),
selectMessage: i18n.translate('xpack.stackConnectors.components.genAi.selectMessageText', {
defaultMessage: 'Send a request to OpenAI systems.',
defaultMessage: 'Send a request to an OpenAI or Azure OpenAI service.',
}),
actionTypeTitle: OPENAI_TITLE,
validateParams: async (

View file

@@ -16,6 +16,7 @@ import {
DEFAULT_BEDROCK_URL,
} from '../../../common/bedrock/constants';
import { DEFAULT_BODY } from '../../../public/connector_types/bedrock/constants';
import { AxiosError } from 'axios';
jest.mock('aws4', () => ({
sign: () => ({ signed: true }),
@@ -151,5 +152,55 @@ describe('BedrockConnector', () => {
await expect(connector.invokeAI(aiAssistantBody)).rejects.toThrow('API Error');
});
});
describe('getResponseErrorMessage', () => {
it('returns an unknown error message', () => {
// @ts-expect-error expects an axios error as the parameter
expect(connector.getResponseErrorMessage({})).toEqual(
`Unexpected API Error: - Unknown error`
);
});
it('returns the error.message', () => {
// @ts-expect-error expects an axios error as the parameter
expect(connector.getResponseErrorMessage({ message: 'a message' })).toEqual(
`Unexpected API Error: - a message`
);
});
it('returns the error.response.data.message', () => {
const err = {
response: {
headers: {},
status: 404,
statusText: 'Resource Not Found',
data: {
message: 'Resource not found',
},
},
} as AxiosError<{ message?: string }>;
expect(
// @ts-expect-error expects an axios error as the parameter
connector.getResponseErrorMessage(err)
).toEqual(`API Error: Resource Not Found - Resource not found`);
});
it('returns authorization error', () => {
const err = {
response: {
headers: {},
status: 401,
statusText: 'Auth error',
data: {
message: 'The api key was invalid.',
},
},
} as AxiosError<{ message?: string }>;
// @ts-expect-error expects an axios error as the parameter
expect(connector.getResponseErrorMessage(err)).toEqual(
`Unauthorized API Error - The api key was invalid.`
);
});
});
});
});

View file

@@ -63,15 +63,17 @@ export class BedrockConnector extends SubActionConnector<Config, Secrets> {
});
}
protected getResponseErrorMessage(error: AxiosError<{ error?: { message?: string } }>): string {
protected getResponseErrorMessage(error: AxiosError<{ message?: string }>): string {
if (!error.response?.status) {
return `Unexpected API Error: ${error.code} - ${error.message}`;
return `Unexpected API Error: ${error.code ?? ''} - ${error.message ?? 'Unknown error'}`;
}
if (error.response.status === 401) {
return 'Unauthorized API Error';
return `Unauthorized API Error${
error.response?.data?.message ? ` - ${error.response.data.message}` : ''
}`;
}
return `API Error: ${error.response?.status} - ${error.response?.statusText}${
error.response?.data?.error?.message ? ` - ${error.response.data.error?.message}` : ''
return `API Error: ${error.response?.statusText}${
error.response?.data?.message ? ` - ${error.response.data.message}` : ''
}`;
}

View file

@@ -5,6 +5,7 @@
* 2.0.
*/
import { AxiosError } from 'axios';
import { OpenAIConnector } from './openai';
import { actionsConfigMock } from '@kbn/actions-plugin/server/actions_config.mock';
import {
@@ -282,6 +283,60 @@ describe('OpenAIConnector', () => {
await expect(connector.invokeAI(sampleOpenAiBody)).rejects.toThrow('API Error');
});
});
describe('getResponseErrorMessage', () => {
it('returns an unknown error message', () => {
// @ts-expect-error expects an axios error as the parameter
expect(connector.getResponseErrorMessage({})).toEqual(
`Unexpected API Error: - Unknown error`
);
});
it('returns the error.message', () => {
// @ts-expect-error expects an axios error as the parameter
expect(connector.getResponseErrorMessage({ message: 'a message' })).toEqual(
`Unexpected API Error: - a message`
);
});
it('returns the error.response.data.error.message', () => {
const err = {
response: {
headers: {},
status: 404,
statusText: 'Resource Not Found',
data: {
error: {
message: 'Resource not found',
},
},
},
} as AxiosError<{ error?: { message?: string } }>;
expect(
// @ts-expect-error expects an axios error as the parameter
connector.getResponseErrorMessage(err)
).toEqual(`API Error: Resource Not Found - Resource not found`);
});
it('returns authorization error', () => {
const err = {
response: {
headers: {},
status: 401,
statusText: 'Auth error',
data: {
error: {
message: 'The api key was invalid.',
},
},
},
} as AxiosError<{ error?: { message?: string } }>;
// @ts-expect-error expects an axios error as the parameter
expect(connector.getResponseErrorMessage(err)).toEqual(
`Unauthorized API Error - The api key was invalid.`
);
});
});
});
describe('AzureAI', () => {

View file

@@ -86,12 +86,14 @@ export class OpenAIConnector extends SubActionConnector<Config, Secrets> {
protected getResponseErrorMessage(error: AxiosError<{ error?: { message?: string } }>): string {
if (!error.response?.status) {
return `Unexpected API Error: ${error.code} - ${error.message}`;
return `Unexpected API Error: ${error.code ?? ''} - ${error.message ?? 'Unknown error'}`;
}
if (error.response.status === 401) {
return 'Unauthorized API Error';
return `Unauthorized API Error${
error.response?.data?.error?.message ? ` - ${error.response.data.error?.message}` : ''
}`;
}
return `API Error: ${error.response?.status} - ${error.response?.statusText}${
return `API Error: ${error.response?.statusText}${
error.response?.data?.error?.message ? ` - ${error.response.data.error?.message}` : ''
}`;
}
@@ -193,8 +195,6 @@ export class OpenAIConnector extends SubActionConnector<Config, Secrets> {
return result;
}
// TO DO: Pass actual error
// tracked here https://github.com/elastic/security-team/issues/7373
return 'An error occurred sending your message. If the problem persists, please test the connector configuration.';
return 'An error occurred sending your message. \n\nAPI Error: The response from OpenAI was in an unrecognized format.';
}
}

View file

@@ -444,6 +444,35 @@ export default function bedrockTest({ getService }: FtrProviderContext) {
retry: false,
});
});
it('should return an error when an error happens', async () => {
const DEFAULT_BODY = {
prompt: `Hello world!`,
max_tokens_to_sample: 300,
stop_sequences: ['\n\nHuman:'],
};
const { body } = await supertest
.post(`/api/actions/connector/${bedrockActionId}/_execute`)
.set('kbn-xsrf', 'foo')
.send({
params: {
subAction: 'test',
subActionParams: {
body: JSON.stringify(DEFAULT_BODY),
},
},
})
.expect(200);
expect(body).to.eql({
status: 'error',
connector_id: bedrockActionId,
message: 'an error occurred while running the action',
retry: true,
service_message:
'Status code: 422. Message: API Error: Unprocessable Entity - Malformed input request: extraneous key [ooooo] is not permitted, please reformat your input and try again.',
});
});
});
});
});
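For context, the `service_message` asserted in this functional test is the same field the assistant client reads: when the execute route reports `status: 'error'`, the UI appends `service_message` to the generic `API_ERROR` copy. A hedged sketch of that mapping (the response interface below is assumed from the bodies asserted in these tests, not copied from the commit):

```ts
// Assumed shape of the connector _execute response, based on the bodies
// asserted in these functional tests.
interface ConnectorExecuteResult {
  connector_id: string;
  status: 'ok' | 'error';
  data?: string;
  message?: string;
  service_message?: string;
}

const API_ERROR = 'An error occurred sending your message.';

// Mirrors the client-side handling added earlier in this commit.
const toUserFacingError = (result: ConnectorExecuteResult): string =>
  result.service_message ? `${API_ERROR}\n\n${result.service_message}` : API_ERROR;

console.log(
  toUserFacingError({
    connector_id: 'abc',
    status: 'error',
    message: 'an error occurred while running the action',
    service_message: 'Status code: 422. Message: API Error: Unprocessable Entity - ...',
  })
);
```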

View file

@@ -46,7 +46,7 @@ export default function genAiTest({ getService }: FtrProviderContext) {
return body.id;
};
describe('GenAi', () => {
describe('OpenAI', () => {
after(() => {
objectRemover.removeAll();
});
@ -463,6 +463,30 @@ export default function genAiTest({ getService }: FtrProviderContext) {
retry: false,
});
});
it('should return an error when an error happens', async () => {
const { body } = await supertest
.post(`/api/actions/connector/${genAiActionId}/_execute`)
.set('kbn-xsrf', 'foo')
.send({
params: {
subAction: 'test',
subActionParams: {
body: '{"model":"gpt-3.5-turbo","messages":[{"role":"user","content":"Hello world"}]}',
},
},
})
.expect(200);
expect(body).to.eql({
status: 'error',
connector_id: genAiActionId,
message: 'an error occurred while running the action',
retry: true,
service_message:
'Status code: 422. Message: API Error: Unprocessable Entity - The model `bad model` does not exist',
});
});
});
});
});

View file

@@ -181,7 +181,7 @@ export default function ApiTest({ getService }: FtrProviderContext) {
});
expect(response.body.message).to.contain(
`400 - Bad Request - This model's maximum context length is 8192 tokens. However, your messages resulted in 11036 tokens. Please reduce the length of the messages.`
`an error occurred while running the action - Status code: 400. Message: API Error: Bad Request - This model's maximum context length is 8192 tokens. However, your messages resulted in 11036 tokens. Please reduce the length of the messages.`
);
});