[Search] Refactor Search Inference Endpoint (#210943)

## Summary

This PR includes:
- Delete unused code
- Remove duplicate type definitions and use the types defined in the
`elastic` node module
- Refactor the code as needed to work with the new type definitions.

Tested in Serverless QA environment:


https://github.com/user-attachments/assets/43777b09-70c6-48ec-8ba2-988a3ab7e029

### Checklist

Check that the PR satisfies the following conditions.

Reviewers should verify this PR satisfies this list as well.

- [X] [Unit or functional
tests](https://www.elastic.co/guide/en/kibana/master/development-tests.html)
were updated or added to match the most common scenarios

---------

Co-authored-by: Elastic Machine <elasticmachine@users.noreply.github.com>
This commit is contained in:
Samiul Monir 2025-02-28 16:14:35 -05:00 committed by GitHub
parent 42e094189c
commit ad52467e3d
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
26 changed files with 299 additions and 313 deletions

View file

@ -5,8 +5,6 @@
* 2.0.
*/
import type { Config, Secrets } from '@kbn/inference-endpoint-ui-common';
export enum APIRoutes {
GET_INFERENCE_ENDPOINTS = '/internal/inference_endpoints/endpoints',
INFERENCE_ENDPOINT = '/internal/inference_endpoint/endpoints/{type}/{id}',
@ -18,17 +16,3 @@ export interface SearchInferenceEndpointsConfigType {
enabled: boolean;
};
}
export enum TaskTypes {
completion = 'completion',
rerank = 'rerank',
sparse_embedding = 'sparse_embedding',
text_embedding = 'text_embedding',
}
export type { InferenceProvider } from '@kbn/inference-endpoint-ui-common';
export interface InferenceEndpoint {
config: Config;
secrets: Secrets;
}

View file

@ -19,7 +19,7 @@ export const DEFAULT_TABLE_LIMIT = 25;
export const DEFAULT_QUERY_PARAMS: QueryParams = {
page: DEFAULT_TABLE_ACTIVE_PAGE,
perPage: DEFAULT_TABLE_LIMIT,
sortField: SortFieldInferenceEndpoint.endpoint,
sortField: SortFieldInferenceEndpoint.inference_id,
sortOrder: SortOrder.asc,
};

View file

@ -9,15 +9,14 @@ import React from 'react';
import type { EuiBasicTableProps, Pagination } from '@elastic/eui';
import { EuiBasicTable } from '@elastic/eui';
import type { InferenceEndpointUI } from './types';
import { InferenceInferenceEndpointInfo } from '@elastic/elasticsearch/lib/api/types';
interface EndpointsTableProps {
columns: EuiBasicTableProps<InferenceEndpointUI>['columns'];
data: InferenceEndpointUI[];
onChange: EuiBasicTableProps<InferenceEndpointUI>['onChange'];
columns: EuiBasicTableProps<InferenceInferenceEndpointInfo>['columns'];
data: InferenceInferenceEndpointInfo[];
onChange: EuiBasicTableProps<InferenceInferenceEndpointInfo>['onChange'];
pagination: Pagination;
sorting: EuiBasicTableProps<InferenceEndpointUI>['sorting'];
sorting: EuiBasicTableProps<InferenceInferenceEndpointInfo>['sorting'];
}
export const EndpointsTable: React.FC<EndpointsTableProps> = ({

View file

@ -6,13 +6,19 @@
*/
import React from 'react';
import { TaskTypes } from '../../../../common/types';
import { InferenceTaskType } from '@elastic/elasticsearch/lib/api/types';
import { FilterOptions } from '../types';
import { MultiSelectFilter, MultiSelectFilterOption } from './multi_select_filter';
import * as i18n from './translations';
enum TaskTypes {
completion = 'completion',
rerank = 'rerank',
sparse_embedding = 'sparse_embedding',
text_embedding = 'text_embedding',
}
interface Props {
optionKeys: TaskTypes[];
optionKeys: InferenceTaskType[];
onChange: (newFilterOptions: Partial<FilterOptions>) => void;
}

View file

@ -11,6 +11,7 @@ import React from 'react';
import { ConfirmDeleteEndpointModal } from '.';
import * as i18n from './translations';
import { useScanUsage } from '../../../../../../../hooks/use_scan_usage';
import { InferenceInferenceEndpointInfo } from '@elastic/elasticsearch/lib/api/types';
jest.mock('../../../../../../../hooks/use_scan_usage');
const mockUseScanUsage = useScanUsage as jest.Mock;
@ -19,7 +20,7 @@ describe('ConfirmDeleteEndpointModal', () => {
const mockOnCancel = jest.fn();
const mockOnConfirm = jest.fn();
const mockProvider = {
const mockProvider: InferenceInferenceEndpointInfo = {
inference_id: 'my-hugging-face',
service: 'hugging_face',
service_settings: {
@ -27,12 +28,7 @@ describe('ConfirmDeleteEndpointModal', () => {
url: 'https://dummy.huggingface.com',
},
task_settings: {},
} as any;
const mockItem = {
endpoint: 'my-hugging-face',
provider: mockProvider,
type: 'text_embedding',
task_type: 'text_embedding',
};
const Wrapper = () => {
@ -42,7 +38,7 @@ describe('ConfirmDeleteEndpointModal', () => {
<ConfirmDeleteEndpointModal
onCancel={mockOnCancel}
onConfirm={mockOnConfirm}
inferenceEndpoint={mockItem}
inferenceEndpoint={mockProvider}
/>
</QueryClientProvider>
);

View file

@ -10,16 +10,17 @@ import { EuiButtonEmpty, EuiConfirmModal, EuiFlexGroup, EuiFlexItem, EuiText } f
import { css } from '@emotion/react';
import { euiThemeVars } from '@kbn/ui-theme';
import { InferenceInferenceEndpointInfo } from '@elastic/elasticsearch/lib/api/types';
import * as i18n from './translations';
import { useScanUsage } from '../../../../../../../hooks/use_scan_usage';
import { InferenceEndpointUI, InferenceUsageInfo } from '../../../../../types';
import { InferenceUsageInfo } from '../../../../../types';
import { RenderMessageWithIcon } from '../../component/render_message_with_icon';
import { ScanUsageResults } from '../../component/scan_usage_results';
interface ConfirmDeleteEndpointModalProps {
onCancel: () => void;
onConfirm: () => void;
inferenceEndpoint: InferenceEndpointUI;
inferenceEndpoint: InferenceInferenceEndpointInfo;
}
export const ConfirmDeleteEndpointModal: React.FC<ConfirmDeleteEndpointModalProps> = ({
@ -33,8 +34,8 @@ export const ConfirmDeleteEndpointModal: React.FC<ConfirmDeleteEndpointModalProp
const [ignoreWarningCheckbox, setIgnoreWarningCheckbox] = useState<boolean>(false);
const { data } = useScanUsage({
type: inferenceEndpoint.type,
id: inferenceEndpoint.endpoint,
type: inferenceEndpoint.task_type,
id: inferenceEndpoint.inference_id,
});
const onIgnoreWarningCheckboxChange = (state: boolean) => {
@ -88,7 +89,7 @@ export const ConfirmDeleteEndpointModal: React.FC<ConfirmDeleteEndpointModalProp
`}
data-test-subj="deleteModalInferenceEndpointName"
>
{inferenceEndpoint.endpoint}
{inferenceEndpoint.inference_id}
</EuiText>
</EuiFlexItem>
<EuiFlexItem>

View file

@ -10,10 +10,10 @@ import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
import React from 'react';
import { DeleteAction } from './delete_action';
import { InferenceEndpointUI } from '../../../../types';
import { InferenceInferenceEndpointInfo } from '@elastic/elasticsearch/lib/api/types';
describe('Delete Action', () => {
const mockProvider = {
const mockProvider: InferenceInferenceEndpointInfo = {
inference_id: 'my-hugging-face',
service: 'hugging_face',
service_settings: {
@ -21,15 +21,10 @@ describe('Delete Action', () => {
url: 'https://dummy.huggingface.com',
},
task_settings: {},
} as any;
const mockItem: InferenceEndpointUI = {
endpoint: 'my-hugging-face',
provider: mockProvider,
type: 'text_embedding',
task_type: 'text_embedding',
};
const Wrapper = ({ item }: { item: InferenceEndpointUI }) => {
const Wrapper = ({ item }: { item: InferenceInferenceEndpointInfo }) => {
const queryClient = new QueryClient();
return (
<QueryClientProvider client={queryClient}>
@ -39,7 +34,7 @@ describe('Delete Action', () => {
};
it('loads confirm delete modal', () => {
render(<Wrapper item={mockItem} />);
render(<Wrapper item={mockProvider} />);
expect(screen.getByTestId('deleteModalForInferenceUI')).toBeInTheDocument();
});
});

View file

@ -6,12 +6,12 @@
*/
import React from 'react';
import { InferenceInferenceEndpointInfo } from '@elastic/elasticsearch/lib/api/types';
import { useDeleteEndpoint } from '../../../../../../hooks/use_delete_endpoint';
import { InferenceEndpointUI } from '../../../../types';
import { ConfirmDeleteEndpointModal } from './confirm_delete_endpoint';
interface DeleteActionProps {
selectedEndpoint: InferenceEndpointUI;
selectedEndpoint: InferenceInferenceEndpointInfo;
onCancel: () => void;
displayModal: boolean;
}
@ -29,8 +29,8 @@ export const DeleteAction: React.FC<DeleteActionProps> = ({
}
deleteEndpoint({
type: selectedEndpoint.type,
id: selectedEndpoint.endpoint,
type: selectedEndpoint.task_type,
id: selectedEndpoint.inference_id,
});
};

View file

@ -26,7 +26,7 @@ describe('RenderEndpoint component tests', () => {
task_settings: {},
} as any;
render(<EndpointInfo inferenceId={'cohere-2'} provider={mockProvider} />);
render(<EndpointInfo inferenceId={'cohere-2'} endpointInfo={mockProvider} />);
expect(screen.getByText('cohere-2')).toBeInTheDocument();
});
@ -42,7 +42,7 @@ describe('RenderEndpoint component tests', () => {
},
} as any;
render(<EndpointInfo inferenceId={'azure-openai-1'} provider={mockProvider} />);
render(<EndpointInfo inferenceId={'azure-openai-1'} endpointInfo={mockProvider} />);
expect(screen.getByText('azure-openai-1')).toBeInTheDocument();
});
@ -62,7 +62,7 @@ describe('RenderEndpoint component tests', () => {
},
} as any;
render(<EndpointInfo inferenceId={'elastic-rerank'} provider={mockProvider} />);
render(<EndpointInfo inferenceId={'elastic-rerank'} endpointInfo={mockProvider} />);
expect(screen.getByText('elastic-rerank')).toBeInTheDocument();
expect(screen.getByText('TECH PREVIEW')).toBeInTheDocument();

View file

@ -7,17 +7,17 @@
import { EuiBetaBadge, EuiFlexGroup, EuiFlexItem } from '@elastic/eui';
import React from 'react';
import { InferenceAPIConfigResponse } from '@kbn/ml-trained-models-utils';
import { InferenceInferenceEndpointInfo } from '@elastic/elasticsearch/lib/api/types';
import { isEndpointPreconfigured } from '../../../../utils/preconfigured_endpoint_helper';
import * as i18n from './translations';
import { isProviderTechPreview } from '../../../../utils/reranker_helper';
export interface EndpointInfoProps {
inferenceId: string;
provider: InferenceAPIConfigResponse;
endpointInfo: InferenceInferenceEndpointInfo;
}
export const EndpointInfo: React.FC<EndpointInfoProps> = ({ inferenceId, provider }) => (
export const EndpointInfo: React.FC<EndpointInfoProps> = ({ inferenceId, endpointInfo }) => (
<EuiFlexGroup justifyContent="spaceBetween" alignItems="center">
<EuiFlexItem grow={false}>
<EuiFlexGroup gutterSize="s" alignItems="center" wrap>
@ -26,7 +26,7 @@ export const EndpointInfo: React.FC<EndpointInfoProps> = ({ inferenceId, provide
<strong>{inferenceId}</strong>
</span>
</EuiFlexItem>
{isProviderTechPreview(provider) ? (
{isProviderTechPreview(endpointInfo) ? (
<EuiFlexItem grow={false}>
<span>
<EuiBetaBadge

View file

@ -0,0 +1,92 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import React from 'react';
import { InferenceInferenceEndpointInfo } from '@elastic/elasticsearch/lib/api/types';
import { EuiBadge, EuiFlexGroup, EuiFlexItem, EuiText } from '@elastic/eui';
import { ServiceProviderKeys } from '@kbn/inference-endpoint-ui-common';
import { ELASTIC_MODEL_DEFINITIONS } from '@kbn/ml-trained-models-utils';
import * as i18n from './translations';
/**
 * Props for {@link EndpointModelInfo}.
 *
 * @property endpointInfo - The inference endpoint whose model details
 *   (model id, optional MIT-license badge, and service-specific attributes)
 *   are rendered.
 */
export interface EndpointModelInfoProps {
endpointInfo: InferenceInferenceEndpointInfo;
}
/**
 * Renders the model information for an inference endpoint: the model id (when
 * present in the endpoint's service settings), an external-link MIT-license
 * badge for eligible Elastic-defined models, and service-specific attributes
 * (see `endpointModelAtrributes`).
 */
export const EndpointModelInfo: React.FC<EndpointModelInfoProps> = ({ endpointInfo }) => {
const serviceSettings = endpointInfo.service_settings;
// Different services name the field differently: prefer `model_id`, fall
// back to `model`; undefined when neither key exists.
const modelId =
'model_id' in serviceSettings
? serviceSettings.model_id
: 'model' in serviceSettings
? serviceSettings.model
: undefined;
// Badge is shown only for models registered in ELASTIC_MODEL_DEFINITIONS
// with an explicit 'MIT' license.
const isEligibleForMITBadge = modelId && ELASTIC_MODEL_DEFINITIONS[modelId]?.license === 'MIT';
return (
<EuiFlexGroup gutterSize="xs" direction="column">
{modelId && (
<EuiFlexItem>
<EuiFlexGroup gutterSize="xs" direction="row">
<EuiFlexItem grow={0}>
<EuiText size="s" color="subdued">
{modelId}
</EuiText>
</EuiFlexItem>
{isEligibleForMITBadge ? (
<EuiFlexItem grow={0}>
<EuiBadge
color="hollow"
iconType="popout"
iconSide="right"
href={ELASTIC_MODEL_DEFINITIONS[modelId].licenseUrl ?? ''}
target="_blank"
data-test-subj={'mit-license-badge'}
>
{i18n.MIT_LICENSE}
</EuiBadge>
</EuiFlexItem>
) : null}{' '}
</EuiFlexGroup>
</EuiFlexItem>
)}
<EuiFlexItem>{endpointModelAtrributes(endpointInfo)}</EuiFlexItem>
</EuiFlexGroup>
);
};
/**
 * Returns the service-specific attribute rendered beneath the model id
 * (Hugging Face URL, Azure AI Studio provider, Azure OpenAI resource name),
 * or null for services with no extra attribute.
 *
 * NOTE(review): the name keeps the original spelling ("Atrributes") because
 * it is part of this module's existing interface.
 */
function endpointModelAtrributes(endpoint: InferenceInferenceEndpointInfo) {
  const { service } = endpoint;
  if (service === ServiceProviderKeys.hugging_face) {
    return huggingFaceAttributes(endpoint);
  }
  if (service === ServiceProviderKeys.azureaistudio) {
    return azureOpenAIStudioAttributes(endpoint);
  }
  if (service === ServiceProviderKeys.azureopenai) {
    return azureOpenAIAttributes(endpoint);
  }
  return null;
}
/**
 * Hugging Face endpoints surface their configured inference URL, or null
 * when no `url` key is present in the service settings.
 */
function huggingFaceAttributes(endpoint: InferenceInferenceEndpointInfo) {
  const settings = endpoint.service_settings;
  if ('url' in settings) {
    return settings.url;
  }
  return null;
}
/**
 * Azure AI Studio endpoints surface their upstream model provider, or
 * undefined when no `provider` key is present in the service settings.
 */
function azureOpenAIStudioAttributes(endpoint: InferenceInferenceEndpointInfo) {
  const settings = endpoint.service_settings;
  if ('provider' in settings) {
    return settings.provider;
  }
  return undefined;
}
/**
 * Azure OpenAI endpoints surface their resource name, or undefined when no
 * `resource_name` key is present in the service settings.
 */
function azureOpenAIAttributes(endpoint: InferenceInferenceEndpointInfo) {
  const settings = endpoint.service_settings;
  if ('resource_name' in settings) {
    return settings.resource_name;
  }
  return undefined;
}

View file

@ -8,6 +8,8 @@
import { render, screen } from '@testing-library/react';
import React from 'react';
import { ServiceProvider } from './service_provider';
import { InferenceInferenceEndpointInfo } from '@elastic/elasticsearch/lib/api/types';
import { ServiceProviderKeys } from '@kbn/inference-endpoint-ui-common';
jest.mock('@kbn/ml-trained-models-utils', () => ({
...jest.requireActual('@kbn/ml-trained-models-utils'),
@ -20,8 +22,14 @@ jest.mock('@kbn/ml-trained-models-utils', () => ({
}));
describe('ServiceProvider component', () => {
const renderComponent = (
service: ServiceProviderKeys,
endpointInfo: InferenceInferenceEndpointInfo
) => {
render(<ServiceProvider service={service} endpointInfo={endpointInfo} />);
};
describe('with HuggingFace service', () => {
const mockEndpoint = {
const mockEndpoint: InferenceInferenceEndpointInfo = {
inference_id: 'my-hugging-face',
service: 'hugging_face',
service_settings: {
@ -29,9 +37,10 @@ describe('ServiceProvider component', () => {
url: 'https://dummy.huggingface.com',
},
task_settings: {},
} as any;
task_type: 'sparse_embedding',
};
it('renders the component with service and model details', () => {
render(<ServiceProvider providerEndpoint={mockEndpoint} />);
renderComponent(ServiceProviderKeys.hugging_face, mockEndpoint);
expect(screen.getByText('Hugging Face')).toBeInTheDocument();
const icon = screen.getByTestId('table-column-service-provider-hugging_face');
@ -41,7 +50,7 @@ describe('ServiceProvider component', () => {
});
describe('with openai service', () => {
const mockEndpoint = {
const mockEndpoint: InferenceInferenceEndpointInfo = {
inference_id: 'my-openai-endpoint',
service: 'openai',
service_settings: {
@ -49,9 +58,10 @@ describe('ServiceProvider component', () => {
model_id: 'text-embedding-3-small',
},
task_settings: {},
} as any;
task_type: 'text_embedding',
};
it('renders the component with service and model details', () => {
render(<ServiceProvider providerEndpoint={mockEndpoint} />);
renderComponent(ServiceProviderKeys.openai, mockEndpoint);
expect(screen.getByText('OpenAI')).toBeInTheDocument();
const icon = screen.getByTestId('table-column-service-provider-openai');
@ -61,7 +71,7 @@ describe('ServiceProvider component', () => {
});
describe('with cohere service', () => {
const mockEndpoint = {
const mockEndpoint: InferenceInferenceEndpointInfo = {
inference_id: 'cohere-2',
service: 'cohere',
service_settings: {
@ -74,10 +84,11 @@ describe('ServiceProvider component', () => {
embedding_type: 'byte',
},
task_settings: {},
} as any;
task_type: 'sparse_embedding',
};
it('renders the component with service and model details', () => {
render(<ServiceProvider providerEndpoint={mockEndpoint} />);
renderComponent(ServiceProviderKeys.cohere, mockEndpoint);
expect(screen.getByText('Cohere')).toBeInTheDocument();
const icon = screen.getByTestId('table-column-service-provider-cohere');
@ -90,14 +101,14 @@ describe('ServiceProvider component', () => {
...mockEndpoint,
service_settings: { ...mockEndpoint.service_settings, model_id: undefined },
};
render(<ServiceProvider providerEndpoint={modifiedEndpoint} />);
renderComponent(ServiceProviderKeys.cohere, modifiedEndpoint);
expect(screen.queryByText('embed-english-light-v3.0')).not.toBeInTheDocument();
});
});
describe('with azureaistudio service', () => {
const mockEndpoint = {
const mockEndpoint: InferenceInferenceEndpointInfo = {
inference_id: 'azure-ai-1',
service: 'azureaistudio',
service_settings: {
@ -105,10 +116,11 @@ describe('ServiceProvider component', () => {
provider: 'microsoft_phi',
endpoint_type: 'realtime',
},
} as any;
task_type: 'sparse_embedding',
};
it('renders the component with endpoint details', () => {
render(<ServiceProvider providerEndpoint={mockEndpoint} />);
renderComponent(ServiceProviderKeys.azureaistudio, mockEndpoint);
expect(screen.getByText('Azure AI Studio')).toBeInTheDocument();
const icon = screen.getByTestId('table-column-service-provider-azureaistudio');
@ -121,7 +133,7 @@ describe('ServiceProvider component', () => {
...mockEndpoint,
service_settings: {},
};
render(<ServiceProvider providerEndpoint={modifiedEndpoint} />);
renderComponent(ServiceProviderKeys.azureaistudio, modifiedEndpoint);
expect(screen.getByText('Azure AI Studio')).toBeInTheDocument();
const icon = screen.getByTestId('table-column-service-provider-azureaistudio');
@ -131,7 +143,7 @@ describe('ServiceProvider component', () => {
});
describe('with azureopenai service', () => {
const mockEndpoint = {
const mockEndpoint: InferenceInferenceEndpointInfo = {
inference_id: 'azure-openai-1',
service: 'azureopenai',
service_settings: {
@ -139,10 +151,11 @@ describe('ServiceProvider component', () => {
deployment_id: 'deployment-123',
api_version: 'v1',
},
} as any;
task_type: 'sparse_embedding',
};
it('renders the component with all required endpoint details', () => {
render(<ServiceProvider providerEndpoint={mockEndpoint} />);
renderComponent(ServiceProviderKeys.azureopenai, mockEndpoint);
expect(screen.getByText('Azure OpenAI')).toBeInTheDocument();
const icon = screen.getByTestId('table-column-service-provider-azureopenai');
@ -152,7 +165,7 @@ describe('ServiceProvider component', () => {
});
describe('with mistral service', () => {
const mockEndpoint = {
const mockEndpoint: InferenceInferenceEndpointInfo = {
inference_id: 'mistral-ai-1',
service: 'mistral',
service_settings: {
@ -162,10 +175,11 @@ describe('ServiceProvider component', () => {
requests_per_minute: 1000,
},
},
} as any;
task_type: 'sparse_embedding',
};
it('renders the component with endpoint details', () => {
render(<ServiceProvider providerEndpoint={mockEndpoint} />);
renderComponent(ServiceProviderKeys.mistral, mockEndpoint);
expect(screen.getByText('Mistral')).toBeInTheDocument();
const icon = screen.getByTestId('table-column-service-provider-mistral');
@ -178,7 +192,7 @@ describe('ServiceProvider component', () => {
...mockEndpoint,
service_settings: {},
};
render(<ServiceProvider providerEndpoint={modifiedEndpoint} />);
renderComponent(ServiceProviderKeys.mistral, modifiedEndpoint);
const icon = screen.getByTestId('table-column-service-provider-mistral');
expect(icon).toBeInTheDocument();
@ -188,7 +202,7 @@ describe('ServiceProvider component', () => {
});
describe('with elasticsearch service', () => {
const mockEndpoint = {
const mockEndpoint: InferenceInferenceEndpointInfo = {
inference_id: 'model-123',
service: 'elasticsearch',
service_settings: {
@ -196,10 +210,11 @@ describe('ServiceProvider component', () => {
num_threads: 10,
model_id: 'settings-model-123',
},
} as any;
task_type: 'sparse_embedding',
};
it('renders the component with endpoint model_id', () => {
render(<ServiceProvider providerEndpoint={mockEndpoint} />);
renderComponent(ServiceProviderKeys.elasticsearch, mockEndpoint);
expect(screen.getByText('Elasticsearch')).toBeInTheDocument();
const icon = screen.getByTestId('table-column-service-provider-elasticsearch');
@ -212,7 +227,7 @@ describe('ServiceProvider component', () => {
...mockEndpoint,
service_settings: { ...mockEndpoint.service_settings, model_id: 'model-with-mit-license' },
};
render(<ServiceProvider providerEndpoint={modifiedEndpoint} />);
renderComponent(ServiceProviderKeys.elasticsearch, modifiedEndpoint);
const mitBadge = screen.getByTestId('mit-license-badge');
expect(mitBadge).toBeInTheDocument();
@ -220,14 +235,14 @@ describe('ServiceProvider component', () => {
});
it('does not render the MIT license badge if the model is not eligible', () => {
render(<ServiceProvider providerEndpoint={mockEndpoint} />);
renderComponent(ServiceProviderKeys.elasticsearch, mockEndpoint);
expect(screen.queryByTestId('mit-license-badge')).not.toBeInTheDocument();
});
});
describe('with googleaistudio service', () => {
const mockEndpoint = {
const mockEndpoint: InferenceInferenceEndpointInfo = {
inference_id: 'google-ai-1',
service: 'googleaistudio',
service_settings: {
@ -236,10 +251,11 @@ describe('ServiceProvider component', () => {
requests_per_minute: 500,
},
},
} as any;
task_type: 'sparse_embedding',
};
it('renders the component with service and model details', () => {
render(<ServiceProvider providerEndpoint={mockEndpoint} />);
renderComponent(ServiceProviderKeys.googleaistudio, mockEndpoint);
expect(screen.getByText('Google AI Studio')).toBeInTheDocument();
const icon = screen.getByTestId('table-column-service-provider-googleaistudio');
@ -249,7 +265,7 @@ describe('ServiceProvider component', () => {
});
describe('with amazonbedrock service', () => {
const mockEndpoint = {
const mockEndpoint: InferenceInferenceEndpointInfo = {
inference_id: 'amazon-bedrock-1',
service: 'amazonbedrock',
service_settings: {
@ -257,10 +273,11 @@ describe('ServiceProvider component', () => {
provider: 'AMAZONTITAN',
model: 'model-bedrock-xyz',
},
} as any;
task_type: 'sparse_embedding',
};
it('renders the component with model and service details', () => {
render(<ServiceProvider providerEndpoint={mockEndpoint} />);
renderComponent(ServiceProviderKeys.amazonbedrock, mockEndpoint);
expect(screen.getByText('Amazon Bedrock')).toBeInTheDocument();
const icon = screen.getByTestId('table-column-service-provider-amazonbedrock');
@ -270,7 +287,7 @@ describe('ServiceProvider component', () => {
});
describe('with alibabacloud-ai-search service', () => {
const mockEndpoint = {
const mockEndpoint: InferenceInferenceEndpointInfo = {
inference_id: 'alibabacloud-ai-search-1',
service: 'alibabacloud-ai-search',
service_settings: {
@ -278,10 +295,11 @@ describe('ServiceProvider component', () => {
host: 'host-123',
workspace: 'default-123',
},
} as any;
task_type: 'sparse_embedding',
};
it('renders the component with endpoint details', () => {
render(<ServiceProvider providerEndpoint={mockEndpoint} />);
renderComponent(ServiceProviderKeys['alibabacloud-ai-search'], mockEndpoint);
expect(screen.getByText('AlibabaCloud AI Search')).toBeInTheDocument();
const icon = screen.getByTestId('table-column-service-provider-alibabacloud-ai-search');

View file

@ -5,21 +5,16 @@
* 2.0.
*/
import { EuiBadge, EuiFlexGroup, EuiFlexItem, EuiIcon, EuiText } from '@elastic/eui';
import { EuiFlexGroup, EuiFlexItem, EuiIcon, EuiText } from '@elastic/eui';
import React from 'react';
import {
ELASTIC_MODEL_DEFINITIONS,
InferenceAPIConfigResponse,
} from '@kbn/ml-trained-models-utils';
import { SERVICE_PROVIDERS, ServiceProviderKeys } from '@kbn/inference-endpoint-ui-common';
import * as i18n from './translations';
import { EndpointModelInfo, EndpointModelInfoProps } from './endpoint_model_info';
interface ServiceProviderProps {
providerEndpoint: InferenceAPIConfigResponse;
interface ServiceProviderProps extends EndpointModelInfoProps {
service: ServiceProviderKeys;
}
export const ServiceProvider: React.FC<ServiceProviderProps> = ({ providerEndpoint }) => {
const { service } = providerEndpoint;
export const ServiceProvider: React.FC<ServiceProviderProps> = ({ service, endpointInfo }) => {
const provider = SERVICE_PROVIDERS[service];
return provider ? (
@ -39,7 +34,7 @@ export const ServiceProvider: React.FC<ServiceProviderProps> = ({ providerEndpoi
</EuiText>
</EuiFlexItem>
<EuiFlexItem>
<EndpointModelInfo providerEndpoint={providerEndpoint} />
<EndpointModelInfo endpointInfo={endpointInfo} />
</EuiFlexItem>
</EuiFlexGroup>
</EuiFlexItem>
@ -48,77 +43,3 @@ export const ServiceProvider: React.FC<ServiceProviderProps> = ({ providerEndpoi
<span>{service}</span>
);
};
const EndpointModelInfo: React.FC<ServiceProviderProps> = ({ providerEndpoint }) => {
const serviceSettings = providerEndpoint.service_settings;
const modelId =
'model_id' in serviceSettings
? serviceSettings.model_id
: 'model' in serviceSettings
? serviceSettings.model
: undefined;
const isEligibleForMITBadge = modelId && ELASTIC_MODEL_DEFINITIONS[modelId]?.license === 'MIT';
return (
<EuiFlexGroup gutterSize="xs" direction="column">
<EuiFlexItem>
<EuiFlexGroup gutterSize="xs" direction="row">
<EuiFlexItem grow={0}>
{modelId && (
<EuiText size="s" color="subdued">
{modelId}
</EuiText>
)}
</EuiFlexItem>
<EuiFlexItem grow={0}>
{isEligibleForMITBadge ? (
<EuiBadge
color="hollow"
iconType="popout"
iconSide="right"
href={ELASTIC_MODEL_DEFINITIONS[modelId].licenseUrl ?? ''}
target="_blank"
data-test-subj={'mit-license-badge'}
>
{i18n.MIT_LICENSE}
</EuiBadge>
) : null}{' '}
</EuiFlexItem>
</EuiFlexGroup>
</EuiFlexItem>
<EuiFlexItem>{endpointModelAtrributes(providerEndpoint)}</EuiFlexItem>
</EuiFlexGroup>
);
};
function endpointModelAtrributes(endpoint: InferenceAPIConfigResponse) {
switch (endpoint.service) {
case ServiceProviderKeys.hugging_face:
return huggingFaceAttributes(endpoint);
case ServiceProviderKeys.azureaistudio:
return azureOpenAIStudioAttributes(endpoint);
case ServiceProviderKeys.azureopenai:
return azureOpenAIAttributes(endpoint);
default:
return null;
}
}
function huggingFaceAttributes(endpoint: InferenceAPIConfigResponse) {
const serviceSettings = endpoint.service_settings;
const url = 'url' in serviceSettings ? serviceSettings.url : null;
return url;
}
function azureOpenAIStudioAttributes(endpoint: InferenceAPIConfigResponse) {
const serviceSettings = endpoint.service_settings;
return 'provider' in serviceSettings ? serviceSettings?.provider : undefined;
}
function azureOpenAIAttributes(endpoint: InferenceAPIConfigResponse) {
const serviceSettings = endpoint.service_settings;
return 'resource_name' in serviceSettings ? serviceSettings.resource_name : undefined;
}

View file

@ -5,16 +5,16 @@
* 2.0.
*/
import { TaskTypes } from '../../../../../common/types';
import { render, screen } from '@testing-library/react';
import React from 'react';
import { TaskType } from './task_type';
import { InferenceTaskType } from '@elastic/elasticsearch/lib/api/types';
describe('TaskType component', () => {
it.each([
[TaskTypes.completion, 'completion'],
[TaskTypes.sparse_embedding, 'sparse_embedding'],
[TaskTypes.text_embedding, 'text_embedding'],
['completion' as InferenceTaskType, 'completion'],
['sparse_embedding' as InferenceTaskType, 'sparse_embedding'],
['text_embedding' as InferenceTaskType, 'text_embedding'],
])('renders the task type badge for %s', (taskType, expected) => {
render(<TaskType type={taskType} />);
const badge = screen.getByTestId(`table-column-task-type-${taskType}`);

View file

@ -5,12 +5,12 @@
* 2.0.
*/
import { InferenceTaskType } from '@elastic/elasticsearch/lib/api/types';
import { EuiBadge } from '@elastic/eui';
import React from 'react';
import { TaskTypes } from '../../../../../common/types';
interface TaskTypeProps {
type?: TaskTypes;
type?: InferenceTaskType;
}
export const TaskType: React.FC<TaskTypeProps> = ({ type }) => {

View file

@ -9,11 +9,15 @@ import React, { useCallback, useMemo, useState } from 'react';
import { EuiBasicTable, EuiBasicTableColumn, EuiFlexGroup, EuiFlexItem } from '@elastic/eui';
import { InferenceAPIConfigResponse } from '@kbn/ml-trained-models-utils';
import { TaskTypes } from '../../../common/types';
import {
InferenceInferenceEndpointInfo,
InferenceTaskType,
} from '@elastic/elasticsearch/lib/api/types';
import { ServiceProviderKeys } from '@kbn/inference-endpoint-ui-common';
import * as i18n from '../../../common/translations';
import { useTableData } from '../../hooks/use_table_data';
import { FilterOptions, InferenceEndpointUI } from './types';
import { FilterOptions } from './types';
import { useAllInferenceEndpointsState } from '../../hooks/use_all_inference_endpoints_state';
import { ServiceProviderFilter } from './filter/service_provider_filter';
@ -39,7 +43,7 @@ export const TabularPage: React.FC<TabularPageProps> = ({ inferenceEndpoints })
const [showDeleteAction, setShowDeleteAction] = useState(false);
const [showInferenceFlyout, setShowInferenceFlyout] = useState(false);
const [selectedInferenceEndpoint, setSelectedInferenceEndpoint] = useState<
InferenceEndpointUI | undefined
InferenceInferenceEndpointInfo | undefined
>(undefined);
const [searchKey, setSearchKey] = React.useState('');
const { queryParams, setQueryParams, filterOptions, setFilterOptions } =
@ -61,12 +65,15 @@ export const TabularPage: React.FC<TabularPageProps> = ({ inferenceEndpoints })
setShowDeleteAction(false);
}, []);
const displayDeleteActionitem = useCallback((selectedEndpoint: InferenceEndpointUI) => {
setSelectedInferenceEndpoint(selectedEndpoint);
setShowDeleteAction(true);
}, []);
const displayDeleteActionitem = useCallback(
(selectedEndpoint: InferenceInferenceEndpointInfo) => {
setSelectedInferenceEndpoint(selectedEndpoint);
setShowDeleteAction(true);
},
[]
);
const displayInferenceFlyout = useCallback((selectedEndpoint: InferenceEndpointUI) => {
const displayInferenceFlyout = useCallback((selectedEndpoint: InferenceInferenceEndpointInfo) => {
setShowInferenceFlyout(true);
setSelectedInferenceEndpoint(selectedEndpoint);
}, []);
@ -90,15 +97,19 @@ export const TabularPage: React.FC<TabularPageProps> = ({ inferenceEndpoints })
searchKey
);
const tableColumns = useMemo<Array<EuiBasicTableColumn<InferenceEndpointUI>>>(
const tableColumns = useMemo<Array<EuiBasicTableColumn<InferenceInferenceEndpointInfo>>>(
() => [
{
field: 'endpoint',
field: 'inference_id',
name: i18n.ENDPOINT,
'data-test-subj': 'endpointCell',
render: (endpoint: string, additionalInfo: InferenceEndpointUI) => {
if (endpoint) {
return <EndpointInfo inferenceId={endpoint} provider={additionalInfo.provider} />;
render: (
inferenceId: InferenceInferenceEndpointInfo['inference_id'],
endpointInfo: InferenceInferenceEndpointInfo
) => {
if (inferenceId) {
return <EndpointInfo inferenceId={inferenceId} endpointInfo={endpointInfo} />;
}
return null;
@ -107,12 +118,12 @@ export const TabularPage: React.FC<TabularPageProps> = ({ inferenceEndpoints })
width: '300px',
},
{
field: 'provider',
field: 'service',
name: i18n.SERVICE_PROVIDER,
'data-test-subj': 'providerCell',
render: (provider: InferenceAPIConfigResponse) => {
if (provider) {
return <ServiceProvider providerEndpoint={provider} />;
render: (service: ServiceProviderKeys, endpointInfo: InferenceInferenceEndpointInfo) => {
if (service) {
return <ServiceProvider service={service} endpointInfo={endpointInfo} />;
}
return null;
@ -121,12 +132,12 @@ export const TabularPage: React.FC<TabularPageProps> = ({ inferenceEndpoints })
width: '285px',
},
{
field: 'type',
field: 'task_type',
name: i18n.TASK_TYPE,
'data-test-subj': 'typeCell',
render: (type: TaskTypes) => {
if (type) {
return <TaskType type={type} />;
render: (taskType: InferenceTaskType) => {
if (taskType) {
return <TaskType type={taskType} />;
}
return null;
@ -149,7 +160,7 @@ export const TabularPage: React.FC<TabularPageProps> = ({ inferenceEndpoints })
description: i18n.ENDPOINT_COPY_ID_ACTION_LABEL,
icon: 'copyClipboard',
type: 'icon',
onClick: (item) => copyContent(item.endpoint),
onClick: (item) => copyContent(item.inference_id),
'data-test-subj': 'inference-endpoints-action-copy-id-label',
},
{
@ -157,10 +168,10 @@ export const TabularPage: React.FC<TabularPageProps> = ({ inferenceEndpoints })
description: i18n.ENDPOINT_DELETE_ACTION_LABEL,
icon: 'trash',
type: 'icon',
enabled: (item) => !isEndpointPreconfigured(item.endpoint),
enabled: (item) => !isEndpointPreconfigured(item.inference_id),
onClick: (item) => displayDeleteActionitem(item),
'data-test-subj': (item) =>
isEndpointPreconfigured(item.endpoint)
isEndpointPreconfigured(item.inference_id)
? 'inferenceUIDeleteAction-preconfigured'
: 'inferenceUIDeleteAction-user-defined',
},
@ -230,7 +241,7 @@ export const TabularPage: React.FC<TabularPageProps> = ({ inferenceEndpoints })
{showInferenceFlyout && selectedInferenceEndpoint ? (
<EditInferenceFlyout
onFlyoutClose={onCloseInferenceFlyout}
inferenceEndpointUI={selectedInferenceEndpoint}
selectedInferenceEndpoint={selectedInferenceEndpoint}
/>
) : null}
</>

View file

@ -5,13 +5,13 @@
* 2.0.
*/
import { InferenceAPIConfigResponse } from '@kbn/ml-trained-models-utils';
import { ServiceProviderKeys } from '@kbn/inference-endpoint-ui-common';
import { TaskTypes } from '../../types';
import { InferenceTaskType } from '@elastic/elasticsearch/lib/api/types';
export const INFERENCE_ENDPOINTS_TABLE_PER_PAGE_VALUES = [25, 50, 100];
export enum SortFieldInferenceEndpoint {
endpoint = 'endpoint',
inference_id = 'inference_id',
}
export enum SortOrder {
asc = 'asc',
@ -30,7 +30,7 @@ export interface QueryParams extends SortingParams {
export interface FilterOptions {
provider: ServiceProviderKeys[];
type: TaskTypes[];
type: InferenceTaskType[];
}
export interface AllInferenceEndpointsTableState {
@ -43,12 +43,6 @@ export interface EuiBasicTableSortTypes {
field: string;
}
export interface InferenceEndpointUI {
endpoint: string;
provider: InferenceAPIConfigResponse;
type: string;
}
export interface InferenceUsageInfo {
id: string;
type: string;

View file

@ -12,7 +12,7 @@ import { render, screen } from '@testing-library/react';
import { EditInferenceFlyout } from './edit_inference_flyout';
import { useQueryInferenceEndpoints } from '../../hooks/use_inference_endpoints';
import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
import { InferenceEndpointUI } from '../all_inference_endpoints/types';
import { InferenceInferenceEndpointInfo } from '@elastic/elasticsearch/lib/api/types';
jest.mock('../../hooks/use_kibana');
jest.mock('../../hooks/use_inference_endpoints');
@ -29,19 +29,17 @@ describe('EditInferenceFlyout', () => {
const mockToasts = { addSuccess: jest.fn(), addError: jest.fn() };
const mockHttp = jest.fn();
const mockInferenceEndpointUI = {
endpoint: 'test-endpoint',
type: 'sparse_embedding',
provider: {
service: 'openai',
service_settings: {
api_key: 'valueA',
organization_id: 'valueB',
url: 'https://someurl.com/chat/completions',
model_id: 'third-party',
},
const mockInferenceEndpoint: InferenceInferenceEndpointInfo = {
inference_id: 'test-endpoint',
task_type: 'sparse_embedding',
service: 'openai',
service_settings: {
api_key: 'valueA',
organization_id: 'valueB',
url: 'https://someurl.com/chat/completions',
model_id: 'third-party',
},
} as InferenceEndpointUI;
};
const queryClient = new QueryClient();
const renderComponent = () =>
@ -49,7 +47,7 @@ describe('EditInferenceFlyout', () => {
<QueryClientProvider client={queryClient}>
<EditInferenceFlyout
onFlyoutClose={mockOnFlyoutClose}
inferenceEndpointUI={mockInferenceEndpointUI}
selectedInferenceEndpoint={mockInferenceEndpoint}
/>
</QueryClientProvider>
);
@ -81,9 +79,9 @@ describe('EditInferenceFlyout', () => {
onSubmitSuccess: expect.any(Function),
inferenceEndpoint: {
config: {
inferenceId: mockInferenceEndpointUI.endpoint,
taskType: mockInferenceEndpointUI.type,
provider: mockInferenceEndpointUI.provider.service,
inferenceId: mockInferenceEndpoint.inference_id,
taskType: mockInferenceEndpoint.task_type,
provider: mockInferenceEndpoint.service,
providerConfig: {
api_key: 'valueA',
organization_id: 'valueB',

View file

@ -9,17 +9,17 @@ import { InferenceFlyoutWrapper } from '@kbn/inference-endpoint-ui-common/src/co
import React, { useCallback } from 'react';
import { InferenceEndpoint } from '@kbn/inference-endpoint-ui-common';
import { flattenObject } from '@kbn/object-utils';
import { InferenceInferenceEndpointInfo } from '@elastic/elasticsearch/lib/api/types';
import { useKibana } from '../../hooks/use_kibana';
import { useQueryInferenceEndpoints } from '../../hooks/use_inference_endpoints';
import { InferenceEndpointUI } from '../all_inference_endpoints/types';
interface EditInterfaceFlyoutProps {
onFlyoutClose: () => void;
inferenceEndpointUI: InferenceEndpointUI;
selectedInferenceEndpoint: InferenceInferenceEndpointInfo;
}
export const EditInferenceFlyout: React.FC<EditInterfaceFlyoutProps> = ({
onFlyoutClose,
inferenceEndpointUI,
selectedInferenceEndpoint,
}) => {
const {
services: {
@ -34,10 +34,10 @@ export const EditInferenceFlyout: React.FC<EditInterfaceFlyoutProps> = ({
const inferenceEndpoint: InferenceEndpoint = {
config: {
inferenceId: inferenceEndpointUI.endpoint,
taskType: inferenceEndpointUI.type,
provider: inferenceEndpointUI.provider.service,
providerConfig: flattenObject(inferenceEndpointUI.provider.service_settings),
inferenceId: selectedInferenceEndpoint.inference_id,
taskType: selectedInferenceEndpoint.task_type,
provider: selectedInferenceEndpoint.service,
providerConfig: flattenObject(selectedInferenceEndpoint.service_settings),
},
secrets: {
providerSecrets: {},

View file

@ -7,14 +7,18 @@
import { InferenceAPIConfigResponse } from '@kbn/ml-trained-models-utils';
import { renderHook } from '@testing-library/react';
import { QueryParams } from '../components/all_inference_endpoints/types';
import {
QueryParams,
SortFieldInferenceEndpoint,
SortOrder,
} from '../components/all_inference_endpoints/types';
import { useTableData } from './use_table_data';
import { INFERENCE_ENDPOINTS_TABLE_PER_PAGE_VALUES } from '../components/all_inference_endpoints/types';
import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
import React from 'react';
import { TRAINED_MODEL_STATS_QUERY_KEY } from '../../common/constants';
const inferenceEndpoints = [
const inferenceEndpoints: InferenceAPIConfigResponse[] = [
{
inference_id: 'my-elser-model-04',
task_type: 'sparse_embedding',
@ -47,14 +51,14 @@ const inferenceEndpoints = [
},
task_settings: {},
},
] as InferenceAPIConfigResponse[];
];
const queryParams = {
const queryParams: QueryParams = {
page: 1,
perPage: 10,
sortField: 'endpoint',
sortOrder: 'desc',
} as QueryParams;
sortField: SortFieldInferenceEndpoint.inference_id,
sortOrder: SortOrder.desc,
};
const filterOptions = {
provider: ['elasticsearch', 'openai'],
@ -102,7 +106,7 @@ describe('useTableData', () => {
expect(result.current.sorting).toEqual({
sort: {
direction: 'desc',
field: 'endpoint',
field: 'inference_id',
},
});
});
@ -117,7 +121,7 @@ describe('useTableData', () => {
b.inference_id.localeCompare(a.inference_id)
);
const sortedEndpoints = result.current.sortedTableData.map((item) => item.endpoint);
const sortedEndpoints = result.current.sortedTableData.map((item) => item.inference_id);
const expectedModelIds = expectedSortedData.map((item) => item.inference_id);
expect(sortedEndpoints).toEqual(expectedModelIds);
@ -137,8 +141,8 @@ describe('useTableData', () => {
expect(
filteredData.every(
(endpoint) =>
filterOptions.provider.includes(endpoint.provider) &&
filterOptions.type.includes(endpoint.type)
filterOptions.provider.includes(endpoint.service) &&
filterOptions.type.includes(endpoint.task_type)
)
).toBeTruthy();
});
@ -150,6 +154,6 @@ describe('useTableData', () => {
{ wrapper }
);
const filteredData = result.current.sortedTableData;
expect(filteredData.every((item) => item.endpoint.includes(searchKey))).toBeTruthy();
expect(filteredData.every((item) => item.inference_id.includes(searchKey))).toBeTruthy();
});
});

View file

@ -10,22 +10,21 @@ import { Pagination } from '@elastic/eui';
import { InferenceAPIConfigResponse } from '@kbn/ml-trained-models-utils';
import { useMemo } from 'react';
import { ServiceProviderKeys } from '@kbn/inference-endpoint-ui-common';
import { TaskTypes } from '../../common/types';
import { InferenceInferenceEndpointInfo } from '@elastic/elasticsearch/lib/api/types';
import { DEFAULT_TABLE_LIMIT } from '../components/all_inference_endpoints/constants';
import {
FilterOptions,
INFERENCE_ENDPOINTS_TABLE_PER_PAGE_VALUES,
InferenceEndpointUI,
QueryParams,
SortOrder,
} from '../components/all_inference_endpoints/types';
interface UseTableDataReturn {
tableData: InferenceEndpointUI[];
sortedTableData: InferenceEndpointUI[];
paginatedSortedTableData: InferenceEndpointUI[];
tableData: InferenceInferenceEndpointInfo[];
sortedTableData: InferenceInferenceEndpointInfo[];
paginatedSortedTableData: InferenceInferenceEndpointInfo[];
pagination: Pagination;
sorting: EuiTableSortingType<InferenceEndpointUI>;
sorting: EuiTableSortingType<InferenceInferenceEndpointInfo>;
}
export const useTableData = (
@ -34,7 +33,7 @@ export const useTableData = (
filterOptions: FilterOptions,
searchKey: string
): UseTableDataReturn => {
const tableData: InferenceEndpointUI[] = useMemo(() => {
const tableData: InferenceInferenceEndpointInfo[] = useMemo(() => {
let filteredEndpoints = inferenceEndpoints;
if (filterOptions.provider.length > 0) {
@ -45,20 +44,14 @@ export const useTableData = (
if (filterOptions.type.length > 0) {
filteredEndpoints = filteredEndpoints.filter((endpoint) =>
filterOptions.type.includes(TaskTypes[endpoint.task_type])
filterOptions.type.includes(endpoint.task_type)
);
}
return filteredEndpoints
.filter((endpoint) => endpoint.inference_id.includes(searchKey))
.map((endpoint) => ({
endpoint: endpoint.inference_id,
provider: endpoint,
type: endpoint.task_type,
}));
return filteredEndpoints.filter((endpoint) => endpoint.inference_id.includes(searchKey));
}, [inferenceEndpoints, searchKey, filterOptions]);
const sortedTableData: InferenceEndpointUI[] = useMemo(() => {
const sortedTableData: InferenceInferenceEndpointInfo[] = useMemo(() => {
return [...tableData].sort((a, b) => {
const aValue = a[queryParams.sortField];
const bValue = b[queryParams.sortField];
@ -81,7 +74,7 @@ export const useTableData = (
[inferenceEndpoints, queryParams]
);
const paginatedSortedTableData: InferenceEndpointUI[] = useMemo(() => {
const paginatedSortedTableData: InferenceInferenceEndpointInfo[] = useMemo(() => {
const pageSize = pagination.pageSize || DEFAULT_TABLE_LIMIT;
const startIndex = pagination.pageIndex * pageSize;
const endIndex = startIndex + pageSize;

View file

@ -1,24 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { useQuery } from '@tanstack/react-query';
import { InferenceStatsResponse } from '@kbn/ml-plugin/public/application/services/ml_api_service/trained_models';
import { useKibana } from './use_kibana';
import { TRAINED_MODEL_STATS_QUERY_KEY } from '../../common/constants';
export const useTrainedModelStats = () => {
const { services } = useKibana();
return useQuery({
queryKey: [TRAINED_MODEL_STATS_QUERY_KEY],
queryFn: async () => {
const response = await services.ml?.mlApi?.trainedModels.getTrainedModelStats();
return response || ({ count: 0, trained_model_stats: [] } as InferenceStatsResponse);
},
});
};

View file

@ -5,8 +5,8 @@
* 2.0.
*/
import { InferenceAPIConfigResponse } from '@kbn/ml-trained-models-utils';
export const isProviderTechPreview = (provider: InferenceAPIConfigResponse) => {
import { InferenceInferenceEndpointInfo } from '@elastic/elasticsearch/lib/api/types';
export const isProviderTechPreview = (provider: InferenceInferenceEndpointInfo) => {
const { service_settings: serviceSettings, task_type: taskType } = provider;
const modelId = serviceSettings?.model_id;

View file

@ -5,8 +5,7 @@
* 2.0.
*/
import { FieldType } from '@kbn/inference-endpoint-ui-common';
import { InferenceProvider } from '../../types';
import { FieldType, InferenceProvider } from '@kbn/inference-endpoint-ui-common';
export const mockProviders: InferenceProvider[] = [
{

View file

@ -7,22 +7,15 @@
import { InferenceTaskType } from '@elastic/elasticsearch/lib/api/types';
import { ElasticsearchClient } from '@kbn/core/server';
import { TaskTypes } from '../../common/types';
function isTaskType(type?: string): type is InferenceTaskType {
return type ? Object.values(TaskTypes).includes(type as TaskTypes) : true;
}
export const deleteInferenceEndpoint = async (
client: ElasticsearchClient,
type: string,
type: InferenceTaskType,
id: string,
scanUsage?: boolean
) => {
if (isTaskType(type)) {
if (scanUsage) {
return await client.inference.delete({ inference_id: id, task_type: type, dry_run: true });
}
return await client.inference.delete({ inference_id: id, task_type: type, force: true });
if (scanUsage) {
return await client.inference.delete({ inference_id: id, task_type: type, dry_run: true });
}
return await client.inference.delete({ inference_id: id, task_type: type, force: true });
};

View file

@ -8,6 +8,7 @@
import { IRouter } from '@kbn/core/server';
import { schema } from '@kbn/config-schema';
import type { Logger } from '@kbn/logging';
import { InferenceTaskType } from '@elastic/elasticsearch/lib/api/types';
import { fetchInferenceEndpoints } from './lib/fetch_inference_endpoints';
import { APIRoutes } from './types';
import { errorHandler } from './utils/error_handler';
@ -55,7 +56,12 @@ export function defineRoutes({ logger, router }: { logger: Logger; router: IRout
const { type, id } = request.params;
const { scanUsage } = request.query;
const result = await deleteInferenceEndpoint(asCurrentUser, type, id, scanUsage ?? false);
const result = await deleteInferenceEndpoint(
asCurrentUser,
type as InferenceTaskType,
id,
scanUsage ?? false
);
return response.ok({ body: result });
})