feat: Run Local AI model in AppFlowy (#5655)

* chore: load plugin

* chore: sidecar

* chore: fix test

* chore: clippy

* chore: save chat config

* chore: arc plugin

* chore: add plugins

* chore: clippy

* chore: test streaming

* chore: config chat

* chore: stream message

* chore: response with local ai

* chore: fix compile

* chore: config ui

* chore: fix load plugin

* chore: add docs

* chore: update docs

* chore: disable local ai

* chore: fix compile

* chore: clippy
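
The squashed commits above describe the mechanism: the local model runs in a separate "sidecar" process that AppFlowy spawns and streams responses from. Below is a minimal, hypothetical sketch of that pattern, not AppFlowy's actual plugin API; the binary name and the --model flag are assumptions standing in for the configured chat_bin_path and chat_model_path.

// Hypothetical sidecar sketch: spawn a local chat binary and stream its
// stdout line by line, forwarding each line as one response chunk.
use std::io::{BufRead, BufReader};
use std::process::{Command, Stdio};

fn main() -> std::io::Result<()> {
  let mut child = Command::new("./chat_bin") // assumed binary path
    .arg("--model")
    .arg("./model.gguf") // assumed flag and model path
    .stdout(Stdio::piped())
    .spawn()?;

  // Each line the sidecar prints becomes one streamed chunk for the UI.
  let stdout = child.stdout.take().expect("stdout was piped above");
  for line in BufReader::new(stdout).lines() {
    println!("chunk: {}", line?);
  }
  child.wait()?;
  Ok(())
}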
Author: Nathan.fooo · 2024-06-30 17:38:39 +08:00 · committed by GitHub
parent 3bcadff152
commit e1c68c1b72
75 changed files with 3494 additions and 396 deletions


@@ -1,3 +1,4 @@
use crate::local_ai::llm_chat::LocalLLMSetting;
use flowy_chat_pub::cloud::{
  ChatMessage, RelatedQuestion, RepeatedChatMessage, RepeatedRelatedQuestion,
};

@@ -206,6 +207,38 @@ impl From<RepeatedRelatedQuestion> for RepeatedRelatedQuestionPB {
  }
}

#[derive(Debug, Clone, Default, ProtoBuf)]
pub struct LocalLLMSettingPB {
  #[pb(index = 1)]
  pub chat_bin_path: String,

  #[pb(index = 2)]
  pub chat_model_path: String,

  #[pb(index = 3)]
  pub enabled: bool,
}

impl From<LocalLLMSetting> for LocalLLMSettingPB {
  fn from(value: LocalLLMSetting) -> Self {
    LocalLLMSettingPB {
      chat_bin_path: value.chat_bin_path,
      chat_model_path: value.chat_model_path,
      enabled: value.enabled,
    }
  }
}

impl From<LocalLLMSettingPB> for LocalLLMSetting {
  fn from(value: LocalLLMSettingPB) -> Self {
    LocalLLMSetting {
      chat_bin_path: value.chat_bin_path,
      chat_model_path: value.chat_model_path,
      enabled: value.enabled,
    }
  }
}
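
Taken together, the two From impls give a lossless round trip between the persisted LocalLLMSetting and its protobuf counterpart, so call sites can convert with a plain .into() in either direction. Here is a minimal, self-contained sketch of that pattern; the mirror structs omit the ProtoBuf derive so the snippet compiles on its own, and the paths are placeholders, not AppFlowy defaults.

// Standalone mirror of the conversion pattern above; the real types live in
// flowy-chat and carry the ProtoBuf derive, omitted here so this compiles alone.
#[derive(Debug, Clone, Default, PartialEq)]
pub struct LocalLLMSetting {
  pub chat_bin_path: String,
  pub chat_model_path: String,
  pub enabled: bool,
}

#[derive(Debug, Clone, Default)]
pub struct LocalLLMSettingPB {
  pub chat_bin_path: String,
  pub chat_model_path: String,
  pub enabled: bool,
}

impl From<LocalLLMSetting> for LocalLLMSettingPB {
  fn from(value: LocalLLMSetting) -> Self {
    LocalLLMSettingPB {
      chat_bin_path: value.chat_bin_path,
      chat_model_path: value.chat_model_path,
      enabled: value.enabled,
    }
  }
}

impl From<LocalLLMSettingPB> for LocalLLMSetting {
  fn from(value: LocalLLMSettingPB) -> Self {
    LocalLLMSetting {
      chat_bin_path: value.chat_bin_path,
      chat_model_path: value.chat_model_path,
      enabled: value.enabled,
    }
  }
}

fn main() {
  // Placeholder paths for illustration only.
  let setting = LocalLLMSetting {
    chat_bin_path: "/path/to/chat_bin".to_string(),
    chat_model_path: "/path/to/model.gguf".to_string(),
    enabled: true,
  };

  // Rust-side setting -> protobuf message for the UI, and back again.
  let pb: LocalLLMSettingPB = setting.clone().into();
  let round_trip: LocalLLMSetting = pb.into();
  assert_eq!(round_trip, setting); // nothing is lost in either direction
}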

#[derive(Default, ProtoBuf, Clone, Debug)]
pub struct CompleteTextPB {
  #[pb(index = 1)]