chore: return desc of ai model
This commit is contained in:
parent 07a78b4ad7
commit d348361889
@@ -144,34 +144,34 @@ EXTERNAL SOURCES:
     :path: Flutter/ephemeral/.symlinks/plugins/window_manager/macos
 
 SPEC CHECKSUMS:
-  app_links: 9028728e32c83a0831d9db8cf91c526d16cc5468
-  appflowy_backend: 464aeb3e5c6966a41641a2111e5ead72ce2695f7
-  auto_updater_macos: 3a42f1a06be6981f1a18be37e6e7bf86aa732118
-  bitsdojo_window_macos: 7959fb0ca65a3ccda30095c181ecb856fae48ea9
-  connectivity_plus: e74b9f74717d2d99d45751750e266e55912baeb5
-  desktop_drop: e0b672a7d84c0a6cbc378595e82cdb15f2970a43
-  device_info_plus: a56e6e74dbbd2bb92f2da12c64ddd4f67a749041
-  file_selector_macos: 6280b52b459ae6c590af5d78fc35c7267a3c4b31
-  flowy_infra_ui: 8760ff42a789de40bf5007a5f176b454722a341e
+  app_links: 10e0a0ab602ffaf34d142cd4862f29d34b303b2a
+  appflowy_backend: 865496343de667fc8c600e04b9fd05234e130cf9
+  auto_updater_macos: 3e3462c418fe4e731917eacd8d28eef7af84086d
+  bitsdojo_window_macos: 44e3b8fe3dd463820e0321f6256c5b1c16bb6a00
+  connectivity_plus: 18d3c32514c886e046de60e9c13895109866c747
+  desktop_drop: 69eeff437544aa619c8db7f4481b3a65f7696898
+  device_info_plus: ce1b7762849d3ec103d0e0517299f2db7ad60720
+  file_selector_macos: cc3858c981fe6889f364731200d6232dac1d812d
+  flowy_infra_ui: 03301a39ad118771adbf051a664265c61c507f38
   FlutterMacOS: 8f6f14fa908a6fb3fba0cd85dbd81ec4b251fb24
   HotKey: 400beb7caa29054ea8d864c96f5ba7e5b4852277
-  hotkey_manager: b443f35f4d772162937aa73fd8995e579f8ac4e2
-  irondash_engine_context: 893c7d96d20ce361d7e996f39d360c4c2f9869ba
-  local_notifier: ebf072651e35ae5e47280ad52e2707375cb2ae4e
-  package_info_plus: f0052d280d17aa382b932f399edf32507174e870
-  path_provider_foundation: 080d55be775b7414fd5a5ef3ac137b97b097e564
+  hotkey_manager: c32bf0bfe8f934b7bc17ab4ad5c4c142960b023c
+  irondash_engine_context: da62996ee25616d2f01bbeb85dc115d813359478
+  local_notifier: e9506bc66fc70311e8bc7291fb70f743c081e4ff
+  package_info_plus: 12f1c5c2cfe8727ca46cbd0b26677728972d9a5b
+  path_provider_foundation: 2b6b4c569c0fb62ec74538f866245ac84301af46
   ReachabilitySwift: 32793e867593cfc1177f5d16491e3a197d2fccda
-  screen_retriever_macos: 452e51764a9e1cdb74b3c541238795849f21557f
+  screen_retriever_macos: 776e0fa5d42c6163d2bf772d22478df4b302b161
   Sentry: 1fe34e9c2cbba1e347623610d26db121dcb569f1
-  sentry_flutter: e24b397f9a61fa5bbefd8279c3b2242ca86faa90
-  share_plus: 510bf0af1a42cd602274b4629920c9649c52f4cc
-  shared_preferences_foundation: 9e1978ff2562383bd5676f64ec4e9aa8fa06a6f7
+  sentry_flutter: a39c2a2d67d5e5b9cb0b94a4985c76dd5b3fc737
+  share_plus: 1fa619de8392a4398bfaf176d441853922614e89
+  shared_preferences_foundation: fcdcbc04712aee1108ac7fda236f363274528f78
   Sparkle: 5f8960a7a119aa7d45dacc0d5837017170bc5675
-  sqflite_darwin: 20b2a3a3b70e43edae938624ce550a3cbf66a3d0
-  super_native_extensions: c2795d6d9aedf4a79fae25cb6160b71b50549189
-  url_launcher_macos: 0fba8ddabfc33ce0a9afe7c5fef5aab3d8d2d673
-  webview_flutter_wkwebview: 44d4dee7d7056d5ad185d25b38404436d56c547c
-  window_manager: 1d01fa7ac65a6e6f83b965471b1a7fdd3f06166c
+  sqflite_darwin: 5a7236e3b501866c1c9befc6771dfd73ffb8702d
+  super_native_extensions: 85efee3a7495b46b04befcfc86ed12069264ebf3
+  url_launcher_macos: c82c93949963e55b228a30115bd219499a6fe404
+  webview_flutter_wkwebview: 0982481e3d9c78fd5c6f62a002fcd24fc791f1e4
+  window_manager: 3a1844359a6295ab1e47659b1a777e36773cd6e8
 
 PODFILE CHECKSUM: 0532f3f001ca3110b8be345d6491fff690e95823
@@ -1,3 +1,4 @@
+use crate::cloud::ai_dto::AvailableModel;
 pub use client_api::entity::ai_dto::{
   AppFlowyOfflineAI, CompleteTextParams, CompletionMessage, CompletionMetadata, CompletionType,
   CreateChatContext, CustomPrompt, LLMModel, LocalAIConfig, ModelInfo, ModelList, OutputContent,
@@ -27,29 +28,50 @@ pub type StreamComplete = BoxStream<'static, Result<CompletionStreamValue, Flowy
 pub struct AIModel {
   pub name: String,
   pub is_local: bool,
+  #[serde(default)]
+  pub desc: String,
 }
 
+impl From<AvailableModel> for AIModel {
+  fn from(value: AvailableModel) -> Self {
+    let desc = value
+      .metadata
+      .as_ref()
+      .and_then(|v| v.get("desc").map(|v| v.as_str().unwrap_or("")))
+      .unwrap_or("");
+    Self {
+      name: value.name,
+      is_local: false,
+      desc: desc.to_string(),
+    }
+  }
+}
+
 impl AIModel {
-  pub fn server(name: String) -> Self {
+  pub fn server(name: String, desc: String) -> Self {
     Self {
       name,
       is_local: false,
+      desc,
     }
   }
 
-  pub fn local(name: String) -> Self {
+  pub fn local(name: String, desc: String) -> Self {
     Self {
       name,
       is_local: true,
+      desc,
     }
   }
 }
 
+const DEFAULT_MODEL_NAME: &str = "Auto";
 impl Default for AIModel {
   fn default() -> Self {
     Self {
-      name: "Auto".to_string(),
+      name: DEFAULT_MODEL_NAME.to_string(),
      is_local: false,
+      desc: "".to_string(),
    }
  }
 }
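Note on the conversion above: From<AvailableModel> reads desc out of the model's optional metadata JSON and falls back to an empty string when the key is missing or not a string, and #[serde(default)] lets previously serialized AIModel values (which carry no desc) still deserialize with an empty description. Below is a minimal, self-contained sketch of that lookup; the Model struct and the sample names and descriptions are simplified placeholders, not the real client_api::entity::ai_dto::AvailableModel.

use serde_json::{json, Value};

// Simplified stand-in for the model entry returned by the cloud service;
// only the fields the conversion touches are modeled here.
struct Model {
  name: String,
  metadata: Option<Value>,
}

// Mirrors the `desc` lookup in the From<AvailableModel> impl: missing
// metadata, a missing "desc" key, or a non-string value all yield "".
fn desc_of(model: &Model) -> String {
  model
    .metadata
    .as_ref()
    .and_then(|v| v.get("desc").map(|v| v.as_str().unwrap_or("")))
    .unwrap_or("")
    .to_string()
}

fn main() {
  // Hypothetical example values for illustration only.
  let with_desc = Model {
    name: "example-server-model".to_string(),
    metadata: Some(json!({ "desc": "Fast, low-cost model", "is_default": true })),
  };
  let without_metadata = Model {
    name: "example-local-model".to_string(),
    metadata: None,
  };
  assert_eq!(desc_of(&with_desc), "Fast, low-cost model");
  assert_eq!(desc_of(&without_metadata), "");
  println!("{}: {}", with_desc.name, desc_of(&with_desc));
}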
@@ -361,20 +361,18 @@ impl AIManager {
     if enabled {
       if let Some(name) = self.local_ai.get_plugin_chat_model() {
         info!("Set global active model to local ai: {}", name);
-        let model = AIModel::local(name);
+        let model = AIModel::local(name, "".to_string());
         self.update_selected_model(source_key, model).await?;
       }
     } else {
       info!("Set global active model to default");
-      let global_active_model = self
-        .get_workspace_select_model()
-        .await
-        .map(AIModel::server)
-        .unwrap_or_else(|_| AIModel::default());
-
-      self
-        .update_selected_model(source_key, global_active_model)
-        .await?;
+      let global_active_model = self.get_workspace_select_model().await?;
+      let models = self.get_server_available_models().await?;
+      if let Some(model) = models.into_iter().find(|m| m.name == global_active_model) {
+        self
+          .update_selected_model(source_key, AIModel::from(model))
+          .await?;
+      }
     }
 
     Ok(())
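For context on the else branch above: the stored workspace selection is only a model name, so the full entry, including the server-provided desc, is recovered by matching that name against the freshly fetched list, and an unmatched name simply leaves the current selection untouched. Below is a free-standing sketch of that lookup under simplified, hypothetical types and sample values, not the real AIManager state.

// Simplified stand-in for AIModel; just enough to show the name lookup.
#[derive(Clone, Debug, PartialEq)]
struct AIModel {
  name: String,
  is_local: bool,
  desc: String,
}

// Mirrors `models.into_iter().find(|m| m.name == global_active_model)`.
fn resolve_by_name(selected: &str, available: Vec<AIModel>) -> Option<AIModel> {
  available.into_iter().find(|m| m.name == selected)
}

fn main() {
  // Hypothetical entries for illustration only.
  let available = vec![
    AIModel { name: "Auto".into(), is_local: false, desc: "Let the server choose".into() },
    AIModel { name: "example-model".into(), is_local: false, desc: "Hypothetical entry".into() },
  ];

  // A stored name that still exists resolves to the entry carrying its desc.
  let hit = resolve_by_name("example-model", available.clone());
  assert_eq!(hit.as_ref().map(|m| m.desc.as_str()), Some("Hypothetical entry"));

  // An unknown name yields None, and the caller keeps the current selection.
  assert!(resolve_by_name("gone-model", available).is_none());
}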
@@ -382,21 +380,21 @@ impl AIManager {
 
   pub async fn get_available_models(&self, source: String) -> FlowyResult<AvailableModelsPB> {
     // Build the models list from server models and mark them as non-local.
-    let mut models: Vec<AIModelPB> = self
+    let mut models: Vec<AIModel> = self
       .get_server_available_models()
       .await?
       .into_iter()
-      .map(|m| AIModelPB::server(m.name))
+      .map(|m| AIModel::from(m))
       .collect();
 
     // If user enable local ai, then add local ai model to the list.
     if let Some(local_model) = self.local_ai.get_plugin_chat_model() {
-      models.push(AIModelPB::local(local_model));
+      models.push(AIModel::local(local_model, "".to_string()));
     }
 
     if models.is_empty() {
       return Ok(AvailableModelsPB {
-        models,
+        models: models.into_iter().map(|m| m.into()).collect(),
         selected_model: AIModelPB::default(),
       });
     }
@@ -405,7 +403,7 @@ impl AIManager {
     let global_active_model = self
       .get_workspace_select_model()
       .await
-      .map(AIModel::server)
+      .map(|m| AIModel::server(m, "".to_string()))
       .unwrap_or_else(|_| AIModel::default());
 
     let mut user_selected_model = global_active_model.clone();
@@ -430,7 +428,7 @@ impl AIManager {
       .iter()
       .find(|m| m.name == user_selected_model.name)
       .cloned()
-      .or_else(|| Some(AIModelPB::from(global_active_model)));
+      .or_else(|| Some(AIModel::from(global_active_model)));
 
     // Update the stored preference if a different model is used.
     if let Some(ref active_model) = active_model {
@@ -441,9 +439,10 @@
       }
     }
 
+    let selected_model = AIModelPB::from(active_model.unwrap_or_default());
     Ok(AvailableModelsPB {
-      models,
-      selected_model: active_model.unwrap_or_default(),
+      models: models.into_iter().map(|m| m.into()).collect(),
+      selected_model,
     })
   }
 
@@ -200,19 +200,9 @@ pub struct AvailableModelPB {
 
   #[pb(index = 2)]
   pub is_default: bool,
-}
 
-impl From<AvailableModel> for AvailableModelPB {
-  fn from(value: AvailableModel) -> Self {
-    let is_default = value
-      .metadata
-      .and_then(|v| v.get("is_default").map(|v| v.as_bool().unwrap_or(false)))
-      .unwrap_or(false);
-    Self {
-      name: value.name,
-      is_default,
-    }
-  }
+  #[pb(index = 3)]
+  pub desc: String,
 }
 
 #[derive(Default, ProtoBuf, Validate, Clone, Debug)]
@@ -248,22 +238,9 @@ pub struct AIModelPB {
 
   #[pb(index = 2)]
   pub is_local: bool,
-}
 
-impl AIModelPB {
-  pub fn server(name: String) -> Self {
-    Self {
-      name,
-      is_local: false,
-    }
-  }
-
-  pub fn local(name: String) -> Self {
-    Self {
-      name,
-      is_local: true,
-    }
-  }
+  #[pb(index = 3)]
+  pub desc: String,
 }
 
 impl From<AIModel> for AIModelPB {
@@ -271,6 +248,7 @@ impl From<AIModel> for AIModelPB {
     Self {
       name: model.name,
       is_local: model.is_local,
+      desc: model.desc,
     }
   }
 }
@@ -280,6 +258,7 @@ impl From<AIModelPB> for AIModel {
     AIModel {
       name: value.name,
       is_local: value.is_local,
+      desc: value.desc,
     }
   }
 }