mirror of
https://github.com/elastic/kibana.git
synced 2025-04-24 01:38:56 -04:00
[Attack discovery] Fix error handling in LM studio (#213855)
## Summary Errors were not properly propagated to the user, and instead of a meaningful message we were displaying just `API Error`. <img width="1813" alt="Zrzut ekranu 2025-03-11 o 03 47 59" src="https://github.com/user-attachments/assets/8d059159-f020-4944-a463-b10799e7fa46" /> Steps to reproduce — thank you @andrew-goldstein 🙇 **Desk testing** To reproduce: 1. In LM Studio, download the `MLX` variant (optimized for Mac) of `Llama-3.2-3B-Instruct-4bit`: ``` mlx-community/Llama-3.2-3B-Instruct-4bit ``` 2. Configure the model to have a context length of `131072` tokens, as illustrated by the screenshot below:  3. Serve ONLY the model above in LM Studio. (Ensure no other models are running in LM Studio), as illustrated by the screenshot below:  4. Configure a connector via the details in <https://www.elastic.co/guide/en/security/current/connect-to-byo-llm.html> but change: ``` local-model ``` to the name of the model when configuring the connector: ``` llama-3.2-3b-instruct ``` as illustrated by the screenshot below:  5. Generate Attack discoveries **Expected results** - Generation does NOT fail with the error described in the later steps below. - Progress on generating discoveries is visible in Langsmith, as illustrated by the screenshot below:  Note: `Llama-3.2-3B-Instruct-4bit` may not reliably generate Attack discoveries, so generation may still fail after `10` generation / refinement steps. 6. In LM Studio, serve a _second_ model, as illustrated by the screenshot below:  7. 
Once again, generate Attack discoveries. **Expected results** - Generation does NOT fail with the errors below - Progress on generating discoveries is visible in Langsmith, though as noted above, generation may still fail after `10` attempts if the model does not produce output that conforms to the expected schema **Actual results** - Generation fails with an error similar to: ``` generate node is unable to parse (openai) response from attempt 0; (this may be an incomplete response from the model): Status code: 400. Message: API Error: Bad Request: ActionsClientLlm: action result status is error: an error occurred while running the action - Status code: 400. Message: API Error: Bad Request, ``` or ``` generate node is unable to parse (openai) response from attempt 0; (this may be an incomplete response from the model): Status code: 404. Message: API Error: Not Found - Model "llama-3.2-3b-instruct" not found. Please specify a valid model. ``` as illustrated by the following screenshot: 
This commit is contained in:
parent
2eb9dcdd68
commit
0b9cceb574
3 changed files with 22 additions and 6 deletions
|
@ -643,6 +643,23 @@ describe('OpenAIConnector', () => {
|
|||
).toEqual(`API Error: Resource Not Found - Resource not found`);
|
||||
});
|
||||
|
||||
it('returns the error.response.data.error', () => {
|
||||
const err = {
|
||||
response: {
|
||||
headers: {},
|
||||
status: 404,
|
||||
statusText: 'Resource Not Found',
|
||||
data: {
|
||||
error: 'Resource not found',
|
||||
},
|
||||
},
|
||||
} as AxiosError<{ error?: string }>;
|
||||
expect(
|
||||
// @ts-expect-error expects an axios error as the parameter
|
||||
connector.getResponseErrorMessage(err)
|
||||
).toEqual(`API Error: Resource Not Found - Resource not found`);
|
||||
});
|
||||
|
||||
it('returns auhtorization error', () => {
|
||||
const err = {
|
||||
response: {
|
||||
|
|
|
@ -152,14 +152,12 @@ export class OpenAIConnector extends SubActionConnector<Config, Secrets> {
|
|||
if (!error.response?.status) {
|
||||
return `Unexpected API Error: ${error.code ?? ''} - ${error.message ?? 'Unknown error'}`;
|
||||
}
|
||||
// LM Studio returns error.response?.data?.error as string
|
||||
const errorMessage = error.response?.data?.error?.message ?? error.response?.data?.error;
|
||||
if (error.response.status === 401) {
|
||||
return `Unauthorized API Error${
|
||||
error.response?.data?.error?.message ? ` - ${error.response.data.error?.message}` : ''
|
||||
}`;
|
||||
return `Unauthorized API Error${errorMessage ? ` - ${errorMessage}` : ''}`;
|
||||
}
|
||||
return `API Error: ${error.response?.statusText}${
|
||||
error.response?.data?.error?.message ? ` - ${error.response.data.error?.message}` : ''
|
||||
}`;
|
||||
return `API Error: ${error.response?.statusText}${errorMessage ? ` - ${errorMessage}` : ''}`;
|
||||
}
|
||||
/**
|
||||
* responsible for making a POST request to the external API endpoint and returning the response data
|
||||
|
|
|
@ -83,6 +83,7 @@ export const invokeAttackDiscoveryGraph = async ({
|
|||
connectorId: apiConfig.connectorId,
|
||||
llmType,
|
||||
logger,
|
||||
model,
|
||||
temperature: 0, // zero temperature for attack discovery, because we want structured JSON output
|
||||
timeout: connectorTimeout,
|
||||
traceOptions,
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue