Revert "chore: try to fix the mobile build" (#7844)
This reverts commit 3f06f6415aee635e1e39cc772ece081484f0ab14.
This commit is contained in:
parent
2463107c6c
commit
168b29abe3
@@ -23,6 +23,8 @@ use flowy_ai_pub::persistence::{
 use flowy_ai_pub::user_service::AIUserService;
 use futures_util::SinkExt;
 use lib_infra::util::get_operating_system;
+use ollama_rs::generation::embeddings::request::{EmbeddingsInput, GenerateEmbeddingsRequest};
+use ollama_rs::Ollama;
 use serde::{Deserialize, Serialize};
 use std::ops::Deref;
 use std::path::PathBuf;
@@ -32,11 +34,6 @@ use tokio_stream::StreamExt;
 use tracing::{debug, error, info, instrument, warn};
 use uuid::Uuid;
 
-#[cfg(any(target_os = "macos", target_os = "linux", target_os = "windows"))]
-use ollama_rs::generation::embeddings::request::{EmbeddingsInput, GenerateEmbeddingsRequest};
-#[cfg(any(target_os = "macos", target_os = "linux", target_os = "windows"))]
-use ollama_rs::Ollama;
-
 #[derive(Clone, Debug, Serialize, Deserialize)]
 pub struct LocalAISetting {
   pub ollama_server_url: String,
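Note on the pattern being reverted here: gating an import behind desktop-only cfg attributes removes the item entirely on other targets, so every use site has to repeat the same gate or the mobile build breaks in a different place. A minimal, self-contained sketch of that ripple effect (the desktop_only module and Client type are illustrative stand-ins, not AppFlowy code):

// Sketch: the gate makes the module vanish on non-desktop targets.
#[cfg(any(target_os = "macos", target_os = "linux", target_os = "windows"))]
mod desktop_only {
  pub struct Client {
    pub url: String,
  }

  impl Client {
    // Hypothetical stand-in for Ollama::try_new(url).
    pub fn try_new(url: &str) -> Option<Client> {
      Some(Client { url: url.to_string() })
    }
  }
}

fn main() {
  // Every caller has to repeat the gate, or the build fails on the
  // targets where `desktop_only` was compiled out.
  #[cfg(any(target_os = "macos", target_os = "linux", target_os = "windows"))]
  {
    if let Some(client) = desktop_only::Client::try_new("http://localhost:11434") {
      println!("client for {}", client.url);
    }
  }

  #[cfg(not(any(target_os = "macos", target_os = "linux", target_os = "windows")))]
  println!("no local client on this target");
}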
@@ -62,7 +59,6 @@ pub struct LocalAIController {
   current_chat_id: ArcSwapOption<Uuid>,
   store_preferences: Weak<KVStorePreferences>,
   user_service: Arc<dyn AIUserService>,
-  #[cfg(any(target_os = "macos", target_os = "linux", target_os = "windows"))]
   ollama: ArcSwapOption<Ollama>,
 }
 
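The gated `ollama` field is what forced `new` (next hunk) to be written twice: a struct field that exists only on some targets makes every struct literal target-specific. A compilable sketch of that coupling, with Controller and handle as hypothetical stand-ins for LocalAIController and its ollama field:

use std::sync::Arc;

struct Controller {
  name: String,
  // Field only exists on desktop targets, mirroring `ollama: ArcSwapOption<Ollama>`.
  #[cfg(any(target_os = "macos", target_os = "linux", target_os = "windows"))]
  handle: Option<Arc<str>>,
}

fn make() -> Controller {
  // Exactly one of these blocks survives cfg expansion and becomes the
  // tail expression, so each target gets a valid struct literal.
  #[cfg(any(target_os = "macos", target_os = "linux", target_os = "windows"))]
  {
    Controller {
      name: "local-ai".to_string(),
      handle: None,
    }
  }

  #[cfg(not(any(target_os = "macos", target_os = "linux", target_os = "windows")))]
  {
    Controller {
      name: "local-ai".to_string(),
    }
  }
}

fn main() {
  println!("{}", make().name);
}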
@@ -95,105 +91,87 @@ impl LocalAIController {
       res_impl,
     ));
 
-    #[cfg(any(target_os = "macos", target_os = "linux", target_os = "windows"))]
-    let ollama = {
-      let mut ollama = ArcSwapOption::default();
-      let sys = get_operating_system();
-      if sys.is_desktop() {
-        let setting = local_ai_resource.get_llm_setting();
-        ollama.store(
-          Ollama::try_new(&setting.ollama_server_url)
-            .map(Arc::new)
-            .ok(),
-        );
-      }
-      ollama
-    };
+    let ollama = ArcSwapOption::default();
+    let sys = get_operating_system();
+    if sys.is_desktop() {
+      let setting = local_ai_resource.get_llm_setting();
+      ollama.store(
+        Ollama::try_new(&setting.ollama_server_url)
+          .map(Arc::new)
+          .ok(),
+      );
+    }
 
     // Subscribe to state changes
     let mut running_state_rx = local_ai.subscribe_running_state();
     let cloned_llm_res = Arc::clone(&local_ai_resource);
     let cloned_store_preferences = store_preferences.clone();
     let cloned_local_ai = Arc::clone(&local_ai);
     let cloned_user_service = Arc::clone(&user_service);
 
     // Spawn a background task to listen for plugin state changes
     tokio::spawn(async move {
       while let Some(state) = running_state_rx.next().await {
         // Skip if we can't get workspace_id
         let Ok(workspace_id) = cloned_user_service.workspace_id() else {
           continue;
         };
 
         let key = crate::local_ai::controller::local_ai_enabled_key(&workspace_id.to_string());
         info!("[AI Plugin] state: {:?}", state);
 
         // Read whether plugin is enabled from store; default to true
         if let Some(store_preferences) = cloned_store_preferences.upgrade() {
           let enabled = store_preferences.get_bool(&key).unwrap_or(true);
           // Only check resource status if the plugin isn't in "UnexpectedStop" and is enabled
           let (plugin_downloaded, lack_of_resource) =
             if !matches!(state, RunningState::UnexpectedStop { .. }) && enabled {
               // Possibly check plugin readiness and resource concurrency in parallel,
               // but here we do it sequentially for clarity.
               let downloaded = is_plugin_ready();
               let resource_lack = cloned_llm_res.get_lack_of_resource().await;
               (downloaded, resource_lack)
             } else {
               (false, None)
             };
 
           // If plugin is running, retrieve version
           let plugin_version = if matches!(state, RunningState::Running { .. }) {
             match cloned_local_ai.plugin_info().await {
               Ok(info) => Some(info.version),
               Err(_) => None,
             }
           } else {
             None
           };
 
           // Broadcast the new local AI state
           let new_state = RunningStatePB::from(state);
           chat_notification_builder(
             APPFLOWY_AI_NOTIFICATION_KEY,
             ChatNotification::UpdateLocalAIState,
           )
           .payload(LocalAIPB {
             enabled,
             plugin_downloaded,
             lack_of_resource,
             state: new_state,
             plugin_version,
           })
           .send();
         } else {
           warn!("[AI Plugin] store preferences is dropped");
         }
       }
     });
 
-    #[cfg(any(target_os = "macos", target_os = "linux", target_os = "windows"))]
-    {
-      Self {
-        ai_plugin: local_ai,
-        resource: local_ai_resource,
-        current_chat_id: ArcSwapOption::default(),
-        store_preferences,
-        user_service,
-        ollama,
-      }
-    }
-
-    #[cfg(not(any(target_os = "macos", target_os = "linux", target_os = "windows")))]
-    {
-      Self {
-        ai_plugin: local_ai,
-        resource: local_ai_resource,
-        current_chat_id: ArcSwapOption::default(),
-        store_preferences,
-        user_service,
-      }
-    }
+    Self {
+      ai_plugin: local_ai,
+      resource: local_ai_resource,
+      current_chat_id: ArcSwapOption::default(),
+      store_preferences,
+      user_service,
+      ollama,
+    }
   }
 
   #[instrument(level = "debug", skip_all)]
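Note: both sides of this hunk keep the Ollama client in an ArcSwapOption, so the spawned listener and later API calls can read the handle without a mutex. A self-contained sketch of that pattern, assuming the arc-swap crate as in the diff; Connection is a placeholder for ollama_rs::Ollama:

use arc_swap::ArcSwapOption;
use std::sync::Arc;

struct Connection {
  url: String,
}

fn main() {
  // Starts as None, like `ArcSwapOption::default()` in the diff.
  let slot: ArcSwapOption<Connection> = ArcSwapOption::default();

  // Publish a value atomically; mirrors
  // `ollama.store(Ollama::try_new(..).map(Arc::new).ok())`.
  slot.store(Some(Arc::new(Connection {
    url: "http://localhost:11434".to_string(),
  })));

  // Readers take a cheap Arc snapshot without blocking writers,
  // which is what `self.ollama.load_full()` does later in the file.
  match slot.load_full() {
    Some(conn) => println!("using {}", conn.url),
    None => println!("not initialized"),
  }
}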
@@ -329,35 +307,18 @@ impl LocalAIController {
   }
 
   pub async fn get_all_chat_local_models(&self) -> Vec<AIModel> {
-    #[cfg(any(target_os = "macos", target_os = "linux", target_os = "windows"))]
-    {
-      self
-        .get_filtered_local_models(|name| !name.contains("embed"))
-        .await
-    }
-
-    #[cfg(not(any(target_os = "macos", target_os = "linux", target_os = "windows")))]
-    {
-      vec![]
-    }
+    self
+      .get_filtered_local_models(|name| !name.contains("embed"))
+      .await
   }
 
   pub async fn get_all_embedded_local_models(&self) -> Vec<AIModel> {
-    #[cfg(any(target_os = "macos", target_os = "linux", target_os = "windows"))]
-    {
-      self
-        .get_filtered_local_models(|name| name.contains("embed"))
-        .await
-    }
-
-    #[cfg(not(any(target_os = "macos", target_os = "linux", target_os = "windows")))]
-    {
-      vec![]
-    }
+    self
+      .get_filtered_local_models(|name| name.contains("embed"))
+      .await
   }
 
   // Helper function to avoid code duplication in model retrieval
-  #[cfg(any(target_os = "macos", target_os = "linux", target_os = "windows"))]
   async fn get_filtered_local_models<F>(&self, filter_fn: F) -> Vec<AIModel>
   where
     F: Fn(&str) -> bool,
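With the gates gone, one generic helper serves both public methods: each passes a complementary name filter, and chat vs. embedding models are split purely on whether the name contains "embed". A standalone sketch of that shape (filtered_models and the model names are illustrative, not the real helper, which queries the Ollama server):

fn filtered_models<F>(all: &[&str], filter_fn: F) -> Vec<String>
where
  F: Fn(&str) -> bool,
{
  all
    .iter()
    .filter(|name| filter_fn(name))
    .map(|name| name.to_string())
    .collect()
}

fn main() {
  let all = ["llama3.1", "nomic-embed-text", "qwen2.5"];

  // Chat models: everything that does not look like an embedding model.
  let chat = filtered_models(&all, |name| !name.contains("embed"));
  // Embedding models: the complementary closure, exactly as in the diff.
  let embedded = filtered_models(&all, |name| name.contains("embed"));

  assert_eq!(chat, vec!["llama3.1", "qwen2.5"]);
  assert_eq!(embedded, vec!["nomic-embed-text"]);
}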
@@ -383,43 +344,35 @@ impl LocalAIController {
     let mut conn = self.user_service.sqlite_connection(uid)?;
     match select_local_ai_model(&mut conn, model_name) {
       None => {
-        #[cfg(any(target_os = "macos", target_os = "linux", target_os = "windows"))]
-        {
-          let ollama = self
-            .ollama
-            .load_full()
-            .ok_or_else(|| FlowyError::local_ai().with_context("ollama is not initialized"))?;
+        let ollama = self
+          .ollama
+          .load_full()
+          .ok_or_else(|| FlowyError::local_ai().with_context("ollama is not initialized"))?;
 
         let request = GenerateEmbeddingsRequest::new(
           model_name.to_string(),
           EmbeddingsInput::Single("Hello".to_string()),
         );
 
         let model_type = match ollama.generate_embeddings(request).await {
           Ok(value) => {
             if value.embeddings.is_empty() {
               ModelType::Chat
             } else {
               ModelType::Embedding
             }
           },
           Err(_) => ModelType::Chat,
         };
 
         upsert_local_ai_model(
           &mut conn,
           &LocalAIModelTable {
             name: model_name.to_string(),
             model_type: model_type as i16,
           },
         )?;
         Ok(model_type)
-        }
-
-        #[cfg(not(any(target_os = "macos", target_os = "linux", target_os = "windows")))]
-        {
-          Ok(ModelType::Chat)
-        }
       },
       Some(r) => Ok(ModelType::from(r.model_type)),
     }
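The None arm classifies an unknown model by probing it: it sends a one-word embeddings request, and a non-empty embeddings vector marks the model as an embedding model, while anything else (including an error) defaults to chat; the answer is then cached in SQLite via upsert_local_ai_model. A standalone sketch of that probe against the ollama-rs API shown in the diff, assuming tokio for the runtime and a reachable local server; the caching step is omitted:

use ollama_rs::generation::embeddings::request::{EmbeddingsInput, GenerateEmbeddingsRequest};
use ollama_rs::Ollama;

#[derive(Debug, Clone, Copy)]
enum ModelType {
  Chat,
  Embedding,
}

async fn probe_model_type(ollama: &Ollama, model_name: &str) -> ModelType {
  let request = GenerateEmbeddingsRequest::new(
    model_name.to_string(),
    EmbeddingsInput::Single("Hello".to_string()),
  );
  match ollama.generate_embeddings(request).await {
    // Embedding models answer with at least one vector; chat models do not.
    Ok(value) if !value.embeddings.is_empty() => ModelType::Embedding,
    // Any error or an empty result is treated as a chat model, as in the diff.
    _ => ModelType::Chat,
  }
}

#[tokio::main]
async fn main() {
  // Default client points at http://localhost:11434.
  let ollama = Ollama::default();
  let kind = probe_model_type(&ollama, "nomic-embed-text").await;
  println!("model type: {:?}", kind);
}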