chore: fix local ai setting

Nathan 2025-04-09 20:07:06 +08:00
parent ab70dccc8e
commit 210ea0c2ac
4 changed files with 15 additions and 8 deletions

@@ -140,10 +140,8 @@ class LocalAISettingPanel extends StatelessWidget {
       crossAxisAlignment: CrossAxisAlignment.start,
       children: [
         const LocalAIStatusIndicator(),
-        if (state.showSettings) ...[
-          const VSpace(10),
-          OllamaSettingPage(),
-        ],
+        const VSpace(10),
+        OllamaSettingPage(),
       ],
     );
   },

@@ -288,6 +288,10 @@ class _LackOfResource extends StatelessWidget {
           text: LocaleKeys.settings_aiPage_keys_modelsMissing.tr(),
           style: textStyle,
         ),
+        TextSpan(
+          text: modelNames.join(', '),
+          style: textStyle,
+        ),
         TextSpan(
           text: ' ',
           style: textStyle,

@@ -884,7 +884,7 @@
       "pleaseFollowThese": "Please follow these",
       "instructions": "instructions",
       "installOllamaLai": "to set up Ollama and AppFlowy Local AI.",
-      "modelsMissing": "Cannot find the required models.",
+      "modelsMissing": "Cannot find the required models: ",
       "downloadModel": "to download them."
     }
   },
@@ -3209,4 +3209,4 @@
     "rewrite": "Rewrite",
     "insertBelow": "Insert below"
   }
-}
+}

@@ -195,9 +195,14 @@ impl LocalAIResourceController {
     let tags: TagsResponse = resp.json().await.inspect_err(|e| {
       log::error!("[LLM Resource] Failed to parse /api/tags JSON response: {e:?}")
     })?;
-    // Check each required model is present in the response.
+    // Check if each of our required models exists in the list of available models
     trace!("[LLM Resource] ollama available models: {:?}", tags.models);
     for required in &required_models {
-      if !tags.models.iter().any(|m| m.name.contains(required)) {
+      if !tags
+        .models
+        .iter()
+        .any(|m| m.name == *required || m.name == format!("{}:latest", required))
+      {
         log::trace!(
           "[LLM Resource] required model '{}' not found in API response",
           required
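
Context for the stricter check above: Ollama's /api/tags endpoint returns fully qualified tag names (for example "llama3.1:latest"), so the old substring test m.name.contains(required) could accept the wrong model entirely ("codellama" contains "llama"). Below is a minimal standalone sketch of the new matching rule; the helper name is_model_available and the sample tag names are illustrative assumptions, not code from this commit.

    // Sketch: exact-name match, with ":latest" as the implicit default tag.
    fn is_model_available(available: &[&str], required: &str) -> bool {
      let latest = format!("{}:latest", required);
      available
        .iter()
        .any(|name| *name == required || *name == latest)
    }

    fn main() {
      let available = ["llama3.1:latest", "nomic-embed-text:v1.5"];
      // "llama3.1" resolves to the "llama3.1:latest" tag.
      assert!(is_model_available(&available, "llama3.1"));
      // The old contains() check would have accepted "llama3" here,
      // because "llama3.1:latest".contains("llama3") is true.
      assert!(!is_model_available(&available, "llama3"));
      println!("matching rule behaves as expected");
    }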