Updating preconfigured connector name (#211927)

## Summary

Update the preconfigured connector name to `Elastic LLM`.

<img width="1504" alt="Screenshot 2025-02-20 at 11 29 02 AM"
src="https://github.com/user-attachments/assets/aa0a32f7-f1b2-4496-8c2e-7773f017c153"
/>

### ES3 testing instructions
No additional configuration is needed. Once Kibana is running on a local machine, the change is reflected automatically.

### ESS instructions
In the `kibana.dev.yml` file, add:
```yaml
xpack.actions.preconfigured:
  Elastic-LLM:
    name: Elastic LLM
    actionTypeId: .inference
    exposeConfig: true
    config:
      provider: 'elastic'
      taskType: 'chat_completion'
      inferenceId: '.rainbow-sprinkles-elastic'
      providerConfig:
        model_id: 'rainbow-sprinkles'
```
and the preconfigured connector should be visible with the updated name.
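
To confirm the connector programmatically rather than through the UI, the Kibana connectors API (`GET /api/actions/connectors`) also lists preconfigured connectors. Below is a minimal sketch, assuming Node 18+ (for the global `fetch`), a local dev Kibana at `http://localhost:5601`, and the default `elastic`/`changeme` dev credentials; adjust these to your setup.

```ts
// verify-connector.ts — minimal sketch; the host and credentials below are assumptions.
const KIBANA_URL = 'http://localhost:5601';
const AUTH = 'Basic ' + Buffer.from('elastic:changeme').toString('base64');

async function hasElasticLLM(): Promise<boolean> {
  const res = await fetch(`${KIBANA_URL}/api/actions/connectors`, {
    headers: { Authorization: AUTH, 'kbn-xsrf': 'true' },
  });
  const connectors = (await res.json()) as Array<{ name: string; is_preconfigured: boolean }>;
  // After this PR, the preconfigured connector should show up as 'Elastic LLM'.
  return connectors.some((c) => c.is_preconfigured && c.name === 'Elastic LLM');
}

hasElasticLLM().then((found) =>
  console.log('Elastic LLM preconfigured connector found:', found)
);
```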

### Checklist

Check that the PR satisfies the following conditions.

Reviewers should verify that the PR satisfies this list as well.

- [X] [Unit or functional
tests](https://www.elastic.co/guide/en/kibana/master/development-tests.html)
were updated or added to match the most common scenarios

### Changed files

4 changed files with 5 additions and 8 deletions.

```diff
@@ -271,13 +271,10 @@ xpack.dataUsage.enabled: true
 # This feature is disabled in Serverless until fully tested within a Serverless environment
 xpack.dataUsage.enableExperimental: ['dataUsageDisabled']
 # This feature is disabled in Serverless until Inference Endpoint become enabled within a Serverless environment
 xpack.stack_connectors.enableExperimental: ['inferenceConnectorOff']
 # This is the definition introducing pre-configured Kibana Connector for Elastic default LLM
 xpack.actions.preconfigured:
-  Elastic-Inference-Rainbow-Sprinkles:
-    name: Elastic-Inference-Rainbow-Sprinkles
+  Elastic-LLM:
+    name: Elastic LLM
     actionTypeId: .inference
     exposeConfig: true
     config:
```

```diff
@@ -25,7 +25,7 @@ export const request = <T = unknown>({
     ...options,
   });
 };
-export const INTERNAL_INFERENCE_CONNECTORS = ['Elastic-Inference-Rainbow-Sprinkles'];
+export const INTERNAL_INFERENCE_CONNECTORS = ['Elastic-LLM'];
 export const INTERNAL_CLOUD_CONNECTORS = ['Elastic-Cloud-SMTP'];
 export const getConnectors = () =>
```

```diff
@@ -17,7 +17,7 @@ import {
 } from '../../../../common/endpoint/constants';
 const INTERNAL_CLOUD_CONNECTORS = ['Elastic-Cloud-SMTP'];
-const INTERNAL_INFERENCE_CONNECTORS = ['Elastic-Inference-Rainbow-Sprinkles'];
+const INTERNAL_INFERENCE_CONNECTORS = ['Elastic-LLM'];
 const INTERNAL_CONNECTORS = [...INTERNAL_CLOUD_CONNECTORS, ...INTERNAL_INFERENCE_CONNECTORS];
 export const createBedrockAIConnector = (connectorName?: string) =>
```

```diff
@@ -26,7 +26,7 @@ export const API_HEADERS = Object.freeze({
 });
 export const INTERNAL_CLOUD_CONNECTORS = ['Elastic-Cloud-SMTP'];
-export const INTERNAL_INFERENCE_CONNECTORS = ['Elastic-Inference-Rainbow-Sprinkles'];
+export const INTERNAL_INFERENCE_CONNECTORS = ['Elastic-LLM'];
 export const rootRequest = <T = unknown>({
   headers: optionHeaders = {},
```
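
For context on why these constants are updated: test cleanup helpers usually delete every connector they find, and the internal lists are used to skip entries that should be left alone, such as the preconfigured `Elastic LLM` connector. The snippet below is a rough, hypothetical sketch of that filtering pattern, not the actual helper code from these files.

```ts
// Illustrative only — the filtering helper below is hypothetical, not the PR's code.
const INTERNAL_CLOUD_CONNECTORS = ['Elastic-Cloud-SMTP'];
const INTERNAL_INFERENCE_CONNECTORS = ['Elastic-LLM'];
const INTERNAL_CONNECTORS = [...INTERNAL_CLOUD_CONNECTORS, ...INTERNAL_INFERENCE_CONNECTORS];

interface Connector {
  id: string;
  name: string;
  is_preconfigured: boolean;
}

// Keep internal/preconfigured connectors out of bulk-delete cleanup between tests.
const deletableConnectors = (connectors: Connector[]): Connector[] =>
  connectors.filter((c) => !c.is_preconfigured && !INTERNAL_CONNECTORS.includes(c.name));
```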