mirror of
https://github.com/elastic/kibana.git
synced 2025-04-24 09:48:58 -04:00
[Search] Fixes a bug where third-party models could not be saved (#189098)
## Summary This enables saving third-party model mappings in the UI with semantic text.
This commit is contained in:
parent
631baa336f
commit
fb34fc70bc
4 changed files with 10 additions and 5 deletions
|
@@ -789,5 +789,5 @@ export const getFieldByPathName = (fields: NormalizedFields, name: string) => {
|
|||
export function isLocalModel(
|
||||
model: InferenceServiceSettings
|
||||
): model is LocalInferenceServiceSettings {
|
||||
return Boolean((model as LocalInferenceServiceSettings).service_settings.model_id);
|
||||
return ['elser', 'elasticsearch'].includes((model as LocalInferenceServiceSettings).service);
|
||||
}
|
||||
|
|
|
@@ -231,7 +231,7 @@ export const DetailsPageMappingsContent: FunctionComponent<{
|
|||
.map((field) => field.inference_id)
|
||||
.filter(
|
||||
(inferenceId: string) =>
|
||||
state.inferenceToModelIdMap?.[inferenceId] &&
|
||||
state.inferenceToModelIdMap?.[inferenceId].trainedModelId && // third-party inference models don't have trainedModelId
|
||||
!state.inferenceToModelIdMap?.[inferenceId].isDeployed
|
||||
);
|
||||
setHasSavedFields(true);
|
||||
|
|
|
@@ -101,9 +101,14 @@ export function TrainedModelsDeploymentModal({
|
|||
useEffect(() => {
|
||||
const models = inferenceIdsInPendingList.map(
|
||||
(inferenceId) => inferenceToModelIdMap?.[inferenceId]
|
||||
);
|
||||
); // filter out third-party models
|
||||
for (const model of models) {
|
||||
if (model && !model.isDownloading && !model.isDeployed) {
|
||||
if (
|
||||
model?.trainedModelId &&
|
||||
model.isDeployable &&
|
||||
!model.isDownloading &&
|
||||
!model.isDeployed
|
||||
) {
|
||||
// Sometimes the model gets stuck in a ready to deploy state, so we need to trigger deployment manually
|
||||
startModelAllocation(model.trainedModelId);
|
||||
}
|
||||
|
|
|
@@ -32,7 +32,7 @@ const getCustomInferenceIdMap = (
|
|||
const inferenceIdMap = models.reduce<InferenceToModelIdMap>((inferenceMap, model) => {
|
||||
const inferenceEntry = isLocalModel(model)
|
||||
? {
|
||||
trainedModelId: model.service_settings.model_id, // third-party models don't have trained model ids
|
||||
trainedModelId: model.service_settings.model_id,
|
||||
isDeployable: model.service === Service.elser || model.service === Service.elasticsearch,
|
||||
isDeployed: modelStatsById[model.service_settings.model_id]?.state === 'started',
|
||||
isDownloading: Boolean(downloadStates[model.service_settings.model_id]),
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue