Merge pull request #7717 from AppFlowy-IO/fix_local_ai_setting

chore: fix local ai setting
Nathan.fooo 2025-04-09 20:43:36 +08:00 committed by GitHub
commit f7d7141a59
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
4 changed files with 15 additions and 8 deletions

@@ -140,10 +140,8 @@ class LocalAISettingPanel extends StatelessWidget {
        crossAxisAlignment: CrossAxisAlignment.start,
        children: [
          const LocalAIStatusIndicator(),
-          if (state.showSettings) ...[
-            const VSpace(10),
-            OllamaSettingPage(),
-          ],
+          const VSpace(10),
+          OllamaSettingPage(),
        ],
      );
    },

@@ -288,6 +288,10 @@ class _LackOfResource extends StatelessWidget {
          text: LocaleKeys.settings_aiPage_keys_modelsMissing.tr(),
          style: textStyle,
        ),
+        TextSpan(
+          text: modelNames.join(', '),
+          style: textStyle,
+        ),
        TextSpan(
          text: ' ',
          style: textStyle,

@@ -885,7 +885,7 @@
        "pleaseFollowThese": "Please follow these",
        "instructions": "instructions",
        "installOllamaLai": "to set up Ollama and AppFlowy Local AI.",
-        "modelsMissing": "Cannot find the required models.",
+        "modelsMissing": "Cannot find the required models: ",
        "downloadModel": "to download them."
      }
    },
@@ -3209,4 +3209,4 @@
    "rewrite": "Rewrite",
    "insertBelow": "Insert below"
  }
-}
+}
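
The second and third hunks work together: _LackOfResource now inserts a TextSpan with modelNames.join(', ') directly after the modelsMissing string, so the locale value is changed to end with ": " and the missing model names read as part of the sentence. Below is a minimal sketch of how the pieces compose into the message the user sees, written in Rust purely for illustration; the real concatenation happens in the Dart TextSpans above, and the model names used here are hypothetical, not taken from this PR.

```rust
// Sketch only: composes the new locale prefix with a joined list of
// missing model names, mirroring what the Dart TextSpans render.
fn missing_models_message(missing: &[&str]) -> String {
    // "modelsMissing" value introduced by this PR, hard-coded for the example.
    let prefix = "Cannot find the required models: ";
    format!("{prefix}{}", missing.join(", "))
}

fn main() {
    // Hypothetical model names, for illustration only.
    let msg = missing_models_message(&["llama3.1", "nomic-embed-text"]);
    assert_eq!(
        msg,
        "Cannot find the required models: llama3.1, nomic-embed-text"
    );
    println!("{msg}");
}
```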

@@ -195,9 +195,14 @@ impl LocalAIResourceController {
    let tags: TagsResponse = resp.json().await.inspect_err(|e| {
      log::error!("[LLM Resource] Failed to parse /api/tags JSON response: {e:?}")
    })?;
-    // Check each required model is present in the response.
+    // Check if each of our required models exists in the list of available models
    trace!("[LLM Resource] ollama available models: {:?}", tags.models);
    for required in &required_models {
-      if !tags.models.iter().any(|m| m.name.contains(required)) {
+      if !tags
+        .models
+        .iter()
+        .any(|m| m.name == *required || m.name == format!("{}:latest", required))
+      {
        log::trace!(
          "[LLM Resource] required model '{}' not found in API response",
          required
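
The Rust hunk tightens how a required model is matched against Ollama's /api/tags listing: the old substring check (m.name.contains(required)) could report a model as installed when only a similarly named one was present, so the new condition accepts only the exact name or the name with the implicit ":latest" tag. Below is a small self-contained sketch of that rule, not the AppFlowy function itself, using hypothetical model names.

```rust
// Sketch of the matching rule this PR introduces: a required model counts as
// installed only when Ollama reports exactly that name, or that name with the
// implicit ":latest" tag appended.
fn is_installed(required: &str, available: &[&str]) -> bool {
    available
        .iter()
        .any(|name| *name == required || *name == format!("{required}:latest"))
}

fn main() {
    // Hypothetical /api/tags names, for illustration only.
    let available = ["llama3.1:latest", "nomic-embed-text:latest"];

    // Exact name or name + ":latest" is accepted.
    assert!(is_installed("llama3.1", &available));
    assert!(!is_installed("mistral", &available));

    // The previous substring check would also have accepted a partial name
    // such as "llama", which is the looseness this change removes.
    assert!(available.iter().any(|name| name.contains("llama")));
    assert!(!is_installed("llama", &available));

    println!("model availability checks behave as expected");
}
```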