[Rule Migration] Add inference connector as supported LLM type (#208032)

## Summary

Summarize your PR. If it involves visual changes, include a screenshot or gif.

Adds `.inference` as a supported type, so it can be tested with EIS using both custom providers and the default EIS provider.
This commit is contained in:
Marius Iversen 2025-01-23 14:44:26 +01:00 committed by GitHub
parent cc38fbea29
commit 811c539fff
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
2 changed files with 6 additions and 4 deletions

View file

@ -5,4 +5,4 @@
* 2.0.
*/
export const AIActionTypeIds = ['.bedrock', '.gen-ai', '.gemini'];
export const AIActionTypeIds = ['.bedrock', '.gen-ai', '.gemini', '.inference'];

View file

@ -5,16 +5,16 @@
* 2.0.
*/
import type { ActionsClient } from '@kbn/actions-plugin/server';
import type { Logger } from '@kbn/core/server';
import type { ActionsClientSimpleChatModel } from '@kbn/langchain/server';
import {
ActionsClientBedrockChatModel,
ActionsClientChatOpenAI,
ActionsClientChatVertexAI,
} from '@kbn/langchain/server';
import type { Logger } from '@kbn/core/server';
import type { ActionsClient } from '@kbn/actions-plugin/server';
import type { ActionsClientChatOpenAIParams } from '@kbn/langchain/server/language_models/chat_openai';
import type { CustomChatModelInput as ActionsClientBedrockChatModelParams } from '@kbn/langchain/server/language_models/bedrock_chat';
import type { ActionsClientChatOpenAIParams } from '@kbn/langchain/server/language_models/chat_openai';
import type { CustomChatModelInput as ActionsClientChatVertexAIParams } from '@kbn/langchain/server/language_models/gemini_chat';
import type { CustomChatModelInput as ActionsClientSimpleChatModelParams } from '@kbn/langchain/server/language_models/simple_chat_model';
@ -39,6 +39,7 @@ const llmTypeDictionary: Record<string, string> = {
[`.gen-ai`]: `openai`,
[`.bedrock`]: `bedrock`,
[`.gemini`]: `gemini`,
[`.inference`]: `inference`,
};
export class ActionsClientChat {
@ -83,6 +84,7 @@ export class ActionsClientChat {
case 'gemini':
return ActionsClientChatVertexAI;
case 'openai':
case 'inference':
default:
return ActionsClientChatOpenAI;
}