[Inference Connector][8.18] Do not show inference connector in the GenAI connectors list if the inference endpoint does not exist (#209406)

Based on the discussion regarding the availability of the Inference
Connector in 8.18, the decision was:
- enable the `.inference` connector type by default for 8.18
(https://github.com/elastic/kibana/pull/209197)
- do not show the Inference Connector in the Kibana GenAI connector lists
if its inference endpoint does not exist. This relates to the
availability and UX of the preconfigured Elastic Rainbow Sprinkles
connector.
- the preconfigured Elastic Rainbow Sprinkles connector will always exist
in the Stack Management Connectors list, but at run time it will show a
banner message if its inference endpoint does not exist (see the sketch
after the screenshot below)

<img width="2265" alt="Screenshot 2025-02-03 at 11 38 41 AM"
src="https://github.com/user-attachments/assets/029a814a-cc66-4a31-9e92-3512587e377f"
/>
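
All of the GenAI connector lists touched by this PR apply the same gate before including an `.inference` connector. A minimal sketch of that predicate (illustrative only; the real call sites inline this logic, and the assistant additionally requires `isPreconfigured`):

```ts
import type { HttpSetup } from '@kbn/core-http-browser';
import { isInferenceEndpointExists } from '@kbn/inference-endpoint-ui-common';

// Keep a connector unless it is an .inference connector whose backing
// inference endpoint cannot be found.
const shouldShowConnector = async (
  http: HttpSetup,
  connector: { actionTypeId: string; config?: { inferenceId?: string } }
): Promise<boolean> => {
  if (connector.actionTypeId !== '.inference') {
    return true;
  }
  // isInferenceEndpointExists (added in this PR) resolves to false on any
  // failure, so an unreachable or missing endpoint hides the connector.
  return isInferenceEndpointExists(http, connector.config?.inferenceId ?? '');
};
```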

---------

Co-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com>
Yuliia Naumenko authored on 2025-02-05 08:58:56 -08:00; committed by GitHub
parent b47c8266e7
commit a3189cc681
17 changed files with 254 additions and 33 deletions

View file

@ -9,6 +9,13 @@ import { waitFor, renderHook } from '@testing-library/react';
import { useLoadConnectors, Props } from '.';
import { mockConnectors } from '../../mock/connectors';
import { TestProviders } from '../../mock/test_providers/test_providers';
import { isInferenceEndpointExists } from '@kbn/inference-endpoint-ui-common';
const mockedIsInferenceEndpointExists = isInferenceEndpointExists as jest.Mock;
jest.mock('@kbn/inference-endpoint-ui-common', () => ({
isInferenceEndpointExists: jest.fn(),
}));
const mockConnectorsAndExtras = [
...mockConnectors,
@ -57,6 +64,7 @@ const defaultProps = { http, toasts } as unknown as Props;
describe('useLoadConnectors', () => {
beforeEach(() => {
jest.clearAllMocks();
mockedIsInferenceEndpointExists.mockResolvedValue(true);
});
it('should call api to load action types', async () => {
renderHook(() => useLoadConnectors(defaultProps), {
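
Since the mock resolves to `true` by default, every existing test sees the endpoint as present. A sketch of the complementary negative case (a hypothetical extra test, assuming `mockConnectors` contains a preconfigured `.inference` connector):

```ts
it('excludes .inference connectors whose endpoint is missing', async () => {
  mockedIsInferenceEndpointExists.mockResolvedValue(false);
  const { result } = renderHook(() => useLoadConnectors(defaultProps), {
    wrapper: TestProviders,
  });
  await waitFor(() =>
    expect(result.current.data?.some((c) => c.actionTypeId === '.inference')).toBe(false)
  );
});
```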

View file

@ -11,8 +11,10 @@ import type { ServerError } from '@kbn/cases-plugin/public/types';
import { loadAllActions as loadConnectors } from '@kbn/triggers-actions-ui-plugin/public/common/constants';
import type { IHttpFetchError } from '@kbn/core-http-browser';
import { HttpSetup } from '@kbn/core-http-browser';
import { isInferenceEndpointExists } from '@kbn/inference-endpoint-ui-common';
import { IToasts } from '@kbn/core-notifications-browser';
import { OpenAiProviderType } from '@kbn/stack-connectors-plugin/common/openai/constants';
import { ActionConnector } from '@kbn/cases-plugin/public/containers/configure/types';
import { AIConnector } from '../connector_selector';
import * as i18n from '../translations';
@ -44,12 +46,18 @@ export const useLoadConnectors = ({
async () => {
const queryResult = await loadConnectors({ http });
return queryResult.reduce(
(acc: AIConnector[], connector) => [
...acc,
async (acc: Promise<AIConnector[]>, connector) => [
...(await acc),
...(!connector.isMissingSecrets &&
actionTypes.includes(connector.actionTypeId) &&
// only include preconfigured .inference connectors
(connector.actionTypeId !== '.inference' || connector.isPreconfigured)
(connector.actionTypeId !== '.inference' ||
(connector.actionTypeId === '.inference' &&
connector.isPreconfigured &&
(await isInferenceEndpointExists(
http,
(connector as ActionConnector)?.config?.inferenceId
))))
? [
{
...connector,
@ -63,7 +71,7 @@ export const useLoadConnectors = ({
]
: []),
],
[]
Promise.resolve([])
);
},
{
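
The interesting change here is the accumulator: `reduce` now starts from `Promise.resolve([])` and each step awaits the previous one, which lets the callback await `isInferenceEndpointExists` per connector (and, as a side effect, serializes the HTTP checks). The generic shape of the pattern, as a standalone sketch:

```ts
// Sequentially filter an array with an async predicate by threading a
// Promise-typed accumulator through reduce, as the hook above does.
const asyncFilter = async <T>(
  items: T[],
  predicate: (item: T) => Promise<boolean>
): Promise<T[]> =>
  items.reduce<Promise<T[]>>(
    async (acc, item) => ((await predicate(item)) ? [...(await acc), item] : acc),
    Promise.resolve([])
  );
```

For many connectors, a `Promise.all` over the predicate results would run the checks concurrently instead; the sequential form keeps the diff small.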

View file

@ -41,5 +41,6 @@
"@kbn/product-doc-base-plugin",
"@kbn/spaces-plugin",
"@kbn/shared-ux-router",
"@kbn/inference-endpoint-ui-common",
]
}

View file

@ -10,6 +10,7 @@ export { InferenceServiceFormFields } from './src/components/inference_service_f
// eslint-disable-next-line import/no-default-export
export { InferenceFlyoutWrapper as default } from './src/components/inference_flyout_wrapper';
export { useProviders } from './src/hooks/use_providers';
export { isInferenceEndpointExists } from './src/hooks/inference_endpoint_exists';
export { SERVICE_PROVIDERS } from './src/components/providers/render_service_provider/service_provider';
export * from './src/types/types';

View file

@ -0,0 +1,27 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import type { HttpSetup } from '@kbn/core-http-browser';
import { INFERENCE_ENDPOINT_INTERNAL_API_VERSION } from '../types/types';
export const isInferenceEndpointExists = async (
http: HttpSetup,
inferenceEndpointId: string
): Promise<boolean> => {
try {
return (
await http.get<{ isEndpointExists: boolean }>(
`/internal/_inference/_exists/${inferenceEndpointId}`,
{
version: INFERENCE_ENDPOINT_INTERNAL_API_VERSION,
}
)
).isEndpointExists;
} catch (err) {
return false;
}
};
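
Note the design choice: any failure, not just a 404, resolves to `false`, so callers treat "could not verify" the same as "missing" and simply hide the connector. A minimal call-site sketch (the endpoint id is hypothetical):

```ts
// Sketch: gate a connector on the endpoint check before listing it.
const exists = await isInferenceEndpointExists(http, 'my-inference-endpoint');
if (!exists) {
  // drop the connector from the list, or render a warning callout instead
}
```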

View file

@ -22,6 +22,7 @@
],
"requiredBundles": [
"kibanaUtils",
"esUiShared",
]
}
}

View file

@ -0,0 +1,24 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { ElasticsearchClient } from '@kbn/core/server';
export const inferenceEndpointExists = async (
esClient: ElasticsearchClient,
inferenceEndpointId: string
) => {
try {
await esClient.inference.get({ inference_id: inferenceEndpointId });
return true;
} catch (err) {
if (err?.statusCode === 404) {
return false;
} else {
throw err;
}
}
};
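
On the server the contract is stricter: a 404 from the inference API means "does not exist", while any other error is rethrown so the route can surface it. A unit-test sketch of both branches (hypothetical test, assuming jest):

```ts
import type { ElasticsearchClient } from '@kbn/core/server';

const clientRejectingWith = (err: unknown) =>
  ({ inference: { get: jest.fn().mockRejectedValue(err) } } as unknown as ElasticsearchClient);

it('treats 404 as missing and rethrows other errors', async () => {
  await expect(
    inferenceEndpointExists(clientRejectingWith({ statusCode: 404 }), 'missing-id')
  ).resolves.toBe(false);
  await expect(
    inferenceEndpointExists(clientRejectingWith({ statusCode: 500 }), 'broken-id')
  ).rejects.toEqual({ statusCode: 500 });
});
```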

View file

@ -13,6 +13,7 @@ import { InferenceInferenceEndpointInfo } from '@elastic/elasticsearch/lib/api/t
import { InferenceServicesGetResponse } from '../types';
import { INFERENCE_ENDPOINT_INTERNAL_API_VERSION } from '../../common';
import { addInferenceEndpoint } from '../lib/add_inference_endpoint';
import { inferenceEndpointExists } from '../lib/inference_endpoint_exists';
const inferenceEndpointSchema = schema.object({
config: schema.object({
@ -104,6 +105,45 @@ export const getInferenceServicesRoute = (
}
);
router.versioned
.get({
access: 'internal',
path: '/internal/_inference/_exists/{inferenceId}',
})
.addVersion(
{
version: INFERENCE_ENDPOINT_INTERNAL_API_VERSION,
validate: {
request: {
params: schema.object({
inferenceId: schema.string(),
}),
},
},
},
async (
context,
request,
response
): Promise<IKibanaResponse<{ isEndpointExists: boolean }>> => {
try {
const esClient = (await context.core).elasticsearch.client.asCurrentUser;
const result = await inferenceEndpointExists(esClient, request.params.inferenceId);
return response.ok({
body: { isEndpointExists: result },
});
} catch (err) {
logger.error(err);
return response.customError({
body: err.message,
statusCode: err.statusCode,
});
}
}
);
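
On failure the route forwards the original message and status code via `response.customError`, but since both browser helpers in this PR swallow errors and return `false`, a transient server error degrades to "endpoint missing" (connector hidden) rather than breaking the connector list.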
router.versioned
.put({
access: 'internal',

View file

@ -0,0 +1,26 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import type { HttpSetup } from '@kbn/core-http-browser';
export const isInferenceEndpointExists = async (
http: HttpSetup,
inferenceEndpointId: string
): Promise<boolean> => {
try {
return (
await http.get<{ isEndpointExists: boolean }>(
`/internal/_inference/_exists/${inferenceEndpointId}`,
{
version: '1',
}
)
).isEndpointExists;
} catch (err) {
return false;
}
};

View file

@ -10,6 +10,8 @@ import type { FindActionResult } from '@kbn/actions-plugin/server';
import useLocalStorage from 'react-use/lib/useLocalStorage';
import type { ObservabilityAIAssistantService } from '../types';
import { useObservabilityAIAssistant } from './use_observability_ai_assistant';
import { useKibana } from './use_kibana';
import { isInferenceEndpointExists } from './inference_endpoint_exists';
export interface UseGenAIConnectorsResult {
connectors?: FindActionResult[];
@ -30,6 +32,9 @@ export function useGenAIConnectorsWithoutContext(
assistant: ObservabilityAIAssistantService
): UseGenAIConnectorsResult {
const [connectors, setConnectors] = useState<FindActionResult[] | undefined>(undefined);
const {
services: { http },
} = useKibana();
const [selectedConnector, setSelectedConnector] = useLocalStorage(
`xpack.observabilityAiAssistant.lastUsedConnector`,
@ -49,15 +54,32 @@ export function useGenAIConnectorsWithoutContext(
signal: controller.signal,
})
.then((results) => {
setConnectors(results);
setSelectedConnector((connectorId) => {
if (connectorId && results.findIndex((result) => result.id === connectorId) === -1) {
return '';
}
return connectorId;
});
setError(undefined);
return results
.reduce<Promise<FindActionResult[]>>(async (result, connector) => {
if (
connector.actionTypeId !== '.inference' ||
(connector.actionTypeId === '.inference' &&
(await isInferenceEndpointExists(
http,
(connector as FindActionResult)?.config?.inferenceId
)))
) {
return [...(await result), connector];
}
return result;
}, Promise.resolve([]))
.then((c) => {
setConnectors(c);
setSelectedConnector((connectorId) => {
if (connectorId && c.findIndex((result) => result.id === connectorId) === -1) {
return '';
}
return connectorId;
});
setError(undefined);
});
})
.catch((err) => {
setError(err);
@ -66,7 +88,7 @@ export function useGenAIConnectorsWithoutContext(
.finally(() => {
setLoading(false);
});
}, [assistant, controller.signal, setSelectedConnector]);
}, [assistant, controller.signal, http, setSelectedConnector]);
useEffect(() => {
fetchConnectors();

View file

@ -51,7 +51,8 @@
"@kbn/server-route-repository-utils",
"@kbn/inference-plugin",
"@kbn/ai-assistant-icon",
"@kbn/security-authorization-core-common"
"@kbn/security-authorization-core-common",
"@kbn/core-http-browser"
],
"exclude": ["target/**/*"]
}

View file

@ -9,8 +9,16 @@ import React from 'react';
import { fireEvent, render } from '@testing-library/react';
import ParamsFields from './params';
import { SUB_ACTION } from '../../../common/inference/constants';
import { isInferenceEndpointExists } from '@kbn/inference-endpoint-ui-common';
const mockedIsInferenceEndpointExists = isInferenceEndpointExists as jest.Mock;
jest.mock('@kbn/inference-endpoint-ui-common', () => ({
isInferenceEndpointExists: jest.fn(),
}));
describe('Inference Params Fields renders', () => {
mockedIsInferenceEndpointExists.mockResolvedValue(true);
test('all params fields are rendered', () => {
const { getByTestId } = render(
<ParamsFields

View file

@ -5,14 +5,17 @@
* 2.0.
*/
import React, { useCallback, useEffect } from 'react';
import React, { useCallback, useEffect, useState } from 'react';
import {
JsonEditorWithMessageVariables,
useKibana,
type ActionParamsProps,
} from '@kbn/triggers-actions-ui-plugin/public';
import { EuiTextArea, EuiFormRow, EuiSpacer, EuiSelect } from '@elastic/eui';
import { isInferenceEndpointExists } from '@kbn/inference-endpoint-ui-common';
import { EuiTextArea, EuiFormRow, EuiSpacer, EuiSelect, EuiCallOut } from '@elastic/eui';
import type { RuleFormParamsErrors } from '@kbn/response-ops-rule-form';
import { ActionVariable } from '@kbn/alerting-types';
import { FormattedMessage } from '@kbn/i18n-react';
import {
ChatCompleteParams,
RerankParams,
@ -29,8 +32,22 @@ const InferenceServiceParamsFields: React.FunctionComponent<
ActionParamsProps<InferenceActionParams>
> = ({ actionParams, editAction, index, errors, actionConnector, messageVariables }) => {
const { subAction, subActionParams } = actionParams;
const [isEndpointExists, setIsInferenceEndpointExists] = useState<boolean>(true);
const { taskType, provider } = (actionConnector as unknown as InferenceActionConnector).config;
const {
services: { http },
} = useKibana();
const { taskType, provider, inferenceId } = (
actionConnector as unknown as InferenceActionConnector
).config;
useEffect(() => {
const f = async () => {
setIsInferenceEndpointExists(await isInferenceEndpointExists(http, inferenceId));
};
f();
}, [http, inferenceId]);
useEffect(() => {
if (!subAction) {
@ -63,6 +80,18 @@ const InferenceServiceParamsFields: React.FunctionComponent<
[editAction, index, subActionParams]
);
if (!isEndpointExists) {
return (
<EuiCallOut title="Missing configuration" color="warning" iconType="warning">
<FormattedMessage
id="xpack.stackConnectors.components.inference.loadingErrorText"
defaultMessage={'Inference Endpoint by ID {inferenceId} does not exist!'}
values={{ inferenceId }}
/>
</EuiCallOut>
);
}
if (subAction === SUB_ACTION.UNIFIED_COMPLETION) {
return (
<UnifiedCompletionParamsFields
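
The existence check above runs in a bare `useEffect` with an un-awaited async function, so a slow response can land after unmount. A common defensive variant with a cancellation flag (an assumption, not what this commit ships):

```ts
useEffect(() => {
  let cancelled = false;
  (async () => {
    const exists = await isInferenceEndpointExists(http, inferenceId);
    if (!cancelled) {
      setIsInferenceEndpointExists(exists);
    }
  })();
  return () => {
    // ignore results that arrive after unmount or after inferenceId changes
    cancelled = true;
  };
}, [http, inferenceId]);
```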

View file

@ -10,9 +10,11 @@ import { useLoadConnectors } from './use_load_connectors';
import { useKibana } from './use_kibana';
import { waitFor, renderHook } from '@testing-library/react';
import { OpenAiProviderType } from '@kbn/stack-connectors-plugin/common/openai/constants';
import { isInferenceEndpointExists } from '@kbn/inference-endpoint-ui-common';
const mockedLoadConnectors = loadConnectors as jest.Mock;
const mockedUseKibana = useKibana as jest.Mock;
const mockedIsInferenceEndpointExists = isInferenceEndpointExists as jest.Mock;
jest.mock('@tanstack/react-query', () => ({
useQuery: jest.fn().mockImplementation(async (queryKey, fn, opts) => {
@ -29,6 +31,10 @@ jest.mock('@kbn/triggers-actions-ui-plugin/public/common/constants', () => ({
loadAllActions: jest.fn(),
}));
jest.mock('@kbn/inference-endpoint-ui-common', () => ({
isInferenceEndpointExists: jest.fn(),
}));
jest.mock('./use_kibana', () => ({
useKibana: jest.fn().mockReturnValue({
services: {
@ -85,6 +91,7 @@ describe('useLoadConnectors', () => {
},
];
mockedLoadConnectors.mockResolvedValue(connectors);
mockedIsInferenceEndpointExists.mockResolvedValue(true);
const { result } = renderHook(() => useLoadConnectors());
await waitFor(() =>

View file

@ -17,10 +17,16 @@ import {
GEMINI_CONNECTOR_ID,
INFERENCE_CONNECTOR_ID,
} from '@kbn/stack-connectors-plugin/public/common';
import type { UserConfiguredActionConnector } from '@kbn/triggers-actions-ui-plugin/public/types';
import { isSupportedConnector } from '@kbn/inference-common';
import { isInferenceEndpointExists } from '@kbn/inference-endpoint-ui-common';
import { useKibana } from './use_kibana';
import { LLMs, type ActionConnector, type PlaygroundConnector } from '../types';
import {
LLMs,
type ActionConnector,
type UserConfiguredActionConnector,
type PlaygroundConnector,
InferenceActionConnector,
} from '../types';
const QUERY_KEY = ['search-playground, load-connectors'];
@ -123,15 +129,24 @@ export const useLoadConnectors = (): UseQueryResult<PlaygroundConnector[], IHttp
async () => {
const queryResult = await loadConnectors({ http });
return queryResult.reduce<PlaygroundConnector[]>((result, connector) => {
return queryResult.reduce<Promise<PlaygroundConnector[]>>(async (result, connector) => {
const { transform } = connectorTypeToLLM.find(({ match }) => match(connector)) || {};
if (!connector.isMissingSecrets && !!transform) {
return [...result, transform(connector)];
if (
!connector.isMissingSecrets &&
!!transform &&
(connector.actionTypeId !== '.inference' ||
(connector.actionTypeId === '.inference' &&
(await isInferenceEndpointExists(
http,
(connector as InferenceActionConnector)?.config?.inferenceId
))))
) {
return [...(await result), transform(connector)];
}
return result;
}, []);
}, Promise.resolve([]));
},
{
retry: false,

View file

@ -23,7 +23,10 @@ import type { DataPublicPluginStart } from '@kbn/data-plugin/public';
import type { SearchNavigationPluginStart } from '@kbn/search-navigation/public';
import type { SecurityPluginStart } from '@kbn/security-plugin/public';
import type { LicensingPluginStart } from '@kbn/licensing-plugin/public';
import type { ActionConnector } from '@kbn/alerts-ui-shared/src/common/types';
import type {
ActionConnector,
UserConfiguredActionConnector,
} from '@kbn/alerts-ui-shared/src/common/types';
import type { ServiceProviderKeys } from '@kbn/inference-endpoint-ui-common';
import { UiActionsStart } from '@kbn/ui-actions-plugin/public';
import type { ChatRequestData, MessageRole, LLMs } from '../common/types';
@ -226,8 +229,8 @@ export interface LLMModel {
promptTokenLimit?: number;
}
export type { ActionConnector };
export type { ActionConnector, UserConfiguredActionConnector };
export type InferenceActionConnector = ActionConnector & {
config: { provider: ServiceProviderKeys };
config: { provider: ServiceProviderKeys; inferenceId: string };
};
export type PlaygroundConnector = ActionConnector & { title: string; type: LLMs };
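
With `inferenceId` now part of the config type, call sites could narrow instead of casting; a hypothetical type guard (this commit casts inline instead):

```ts
const isInferenceActionConnector = (
  connector: ActionConnector
): connector is InferenceActionConnector =>
  connector.actionTypeId === '.inference' &&
  typeof (connector as InferenceActionConnector).config?.inferenceId === 'string';
```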

View file

@ -40,6 +40,13 @@ exports[`arrows ArrowBody renders correctly against snapshot 1`] = `
.css-1sgx2gt .euiKeyPadMenuItem__label{white-space:nowrap;overflow:hidden;}
</style>
<style
data-emotion="css"
data-s=""
>
.css-1f5ny76{position:absolute;top:-8px;right:-8px;}
</style>
<style
data-emotion="css"
data-s=""
@ -82,13 +89,6 @@ exports[`arrows ArrowBody renders correctly against snapshot 1`] = `
.css-1kcx8qm .euiAccordion__childWrapper{-webkit-transition:none;transition:none;}
</style>
<style
data-emotion="css"
data-s=""
>
.css-1f5ny76{position:absolute;top:-8px;right:-8px;}
</style>
<style
data-emotion="css-global"
data-s=""