diff --git a/frontend/appflowy_flutter/lib/plugins/ai_chat/chat_page.dart b/frontend/appflowy_flutter/lib/plugins/ai_chat/chat_page.dart index 5400297870..ce84f923d2 100644 --- a/frontend/appflowy_flutter/lib/plugins/ai_chat/chat_page.dart +++ b/frontend/appflowy_flutter/lib/plugins/ai_chat/chat_page.dart @@ -354,25 +354,32 @@ class _ChatContentPage extends StatelessWidget { BuildContext context, ChatMessageRefSource metadata, ) async { - if (isURL(metadata.name)) { - late Uri uri; - try { - uri = Uri.parse(metadata.name); - // `Uri` identifies `localhost` as a scheme - if (!uri.hasScheme || uri.scheme == 'localhost') { - uri = Uri.parse("http://${metadata.name}"); - await InternetAddress.lookup(uri.host); - } - await launchUrl(uri); - } catch (err) { - Log.error("failed to open url $err"); - } - } else { + // When the source of metadata is appflowy, which means it is an appflowy page + if (metadata.source == "appflowy") { final sidebarView = await ViewBackendService.getView(metadata.id).toNullable(); if (context.mounted) { openPageFromMessage(context, sidebarView); } + return; + } + + if (metadata.source == "web") { + if (isURL(metadata.name)) { + late Uri uri; + try { + uri = Uri.parse(metadata.name); + // `Uri` identifies `localhost` as a scheme + if (!uri.hasScheme || uri.scheme == 'localhost') { + uri = Uri.parse("http://${metadata.name}"); + await InternetAddress.lookup(uri.host); + } + await launchUrl(uri); + } catch (err) { + Log.error("failed to open url $err"); + } + } + return; } } } diff --git a/frontend/rust-lib/Cargo.lock b/frontend/rust-lib/Cargo.lock index e687957458..91a5eccdce 100644 --- a/frontend/rust-lib/Cargo.lock +++ b/frontend/rust-lib/Cargo.lock @@ -198,7 +198,7 @@ dependencies = [ [[package]] name = "appflowy-local-ai" version = "0.1.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=48271d4a2d225ac0af141b87780bfd07d41ec4f2#48271d4a2d225ac0af141b87780bfd07d41ec4f2" +source = 
"git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=19f9ea7f9cc7c811eef3349ac3f1c5e6fce5c900#19f9ea7f9cc7c811eef3349ac3f1c5e6fce5c900" dependencies = [ "anyhow", "appflowy-plugin", @@ -218,7 +218,7 @@ dependencies = [ [[package]] name = "appflowy-plugin" version = "0.1.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=48271d4a2d225ac0af141b87780bfd07d41ec4f2#48271d4a2d225ac0af141b87780bfd07d41ec4f2" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=19f9ea7f9cc7c811eef3349ac3f1c5e6fce5c900#19f9ea7f9cc7c811eef3349ac3f1c5e6fce5c900" dependencies = [ "anyhow", "cfg-if", diff --git a/frontend/rust-lib/Cargo.toml b/frontend/rust-lib/Cargo.toml index c9c365d7f6..5322f51f9a 100644 --- a/frontend/rust-lib/Cargo.toml +++ b/frontend/rust-lib/Cargo.toml @@ -152,5 +152,5 @@ collab-importer = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFl # To update the commit ID, run: # scripts/tool/update_local_ai_rev.sh new_rev_id # ⚠️⚠️⚠️️ -appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "48271d4a2d225ac0af141b87780bfd07d41ec4f2" } -appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "48271d4a2d225ac0af141b87780bfd07d41ec4f2" } +appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "19f9ea7f9cc7c811eef3349ac3f1c5e6fce5c900" } +appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "19f9ea7f9cc7c811eef3349ac3f1c5e6fce5c900" } diff --git a/frontend/rust-lib/flowy-ai/src/ai_manager.rs b/frontend/rust-lib/flowy-ai/src/ai_manager.rs index be2b2c8bee..96301a07b0 100644 --- a/frontend/rust-lib/flowy-ai/src/ai_manager.rs +++ b/frontend/rust-lib/flowy-ai/src/ai_manager.rs @@ -376,7 +376,6 @@ impl AIManager { .await?; let chat_setting_store_key = setting_store_key(chat_id); - if let Some(settings) = self .store_preferences 
.get_object::(&chat_setting_store_key) diff --git a/frontend/rust-lib/flowy-ai/src/local_ai/controller.rs b/frontend/rust-lib/flowy-ai/src/local_ai/controller.rs index d5e86da6d4..da24b70145 100644 --- a/frontend/rust-lib/flowy-ai/src/local_ai/controller.rs +++ b/frontend/rust-lib/flowy-ai/src/local_ai/controller.rs @@ -27,7 +27,7 @@ use std::path::{Path, PathBuf}; use std::sync::Arc; use tokio::select; use tokio_stream::StreamExt; -use tracing::{debug, error, info, instrument, trace}; +use tracing::{debug, error, info, instrument}; #[derive(Clone, Debug, Serialize, Deserialize)] pub struct LocalAISetting { @@ -315,6 +315,7 @@ impl LocalAIController { Ok(enabled) } + #[instrument(level = "debug", skip_all)] pub async fn index_message_metadata( &self, chat_id: &str, @@ -322,22 +323,27 @@ impl LocalAIController { index_process_sink: &mut (impl Sink + Unpin), ) -> FlowyResult<()> { if !self.is_enabled() { + info!("[AI Plugin] local ai is disabled, skip indexing"); return Ok(()); } for metadata in metadata_list { - if let Err(err) = metadata.data.validate() { - error!( - "[AI Plugin] invalid metadata: {:?}, error: {:?}", - metadata, err - ); - continue; - } + let mut file_metadata = HashMap::new(); + file_metadata.insert("id".to_string(), json!(&metadata.id)); + file_metadata.insert("name".to_string(), json!(&metadata.name)); + file_metadata.insert("source".to_string(), json!(&metadata.source)); + + let file_path = Path::new(&metadata.data.content); + if !file_path.exists() { + return Err( + FlowyError::record_not_found().with_context(format!("File not found: {:?}", file_path)), + ); + } + info!( + "[AI Plugin] embed file: {:?}, with metadata: {:?}", + file_path, file_metadata + ); - let mut index_metadata = HashMap::new(); - index_metadata.insert("id".to_string(), json!(&metadata.id)); - index_metadata.insert("name".to_string(), json!(&metadata.name)); - index_metadata.insert("source".to_string(), json!(&metadata.source)); match &metadata.data.content_type { 
ContextLoader::Unknown => { error!( @@ -345,35 +351,16 @@ impl LocalAIController { metadata.data.content_type ); }, - ContextLoader::Text | ContextLoader::Markdown => { - trace!("[AI Plugin]: index text: {}", metadata.data.content); + ContextLoader::Text | ContextLoader::Markdown | ContextLoader::PDF => { self .process_index_file( chat_id, - None, - Some(metadata.data.content.clone()), - metadata, - &index_metadata, + file_path.to_path_buf(), + &file_metadata, index_process_sink, ) .await?; }, - ContextLoader::PDF => { - trace!("[AI Plugin]: index pdf file: {}", metadata.data.content); - let file_path = Path::new(&metadata.data.content); - if file_path.exists() { - self - .process_index_file( - chat_id, - Some(file_path.to_path_buf()), - None, - metadata, - &index_metadata, - index_process_sink, - ) - .await?; - } - }, } } @@ -383,43 +370,38 @@ impl LocalAIController { async fn process_index_file( &self, chat_id: &str, - file_path: Option, - content: Option, - metadata: &ChatMessageMetadata, + file_path: PathBuf, index_metadata: &HashMap, index_process_sink: &mut (impl Sink + Unpin), ) -> Result<(), FlowyError> { + let file_name = file_path + .file_name() + .unwrap_or_default() + .to_string_lossy() + .to_string(); + let _ = index_process_sink .send( StreamMessage::StartIndexFile { - file_name: metadata.name.clone(), + file_name: file_name.clone(), } .to_string(), ) .await; let result = self - .embed_file(chat_id, file_path, content, Some(index_metadata.clone())) + .ai_plugin + .embed_file(chat_id, file_path, Some(index_metadata.clone())) .await; match result { Ok(_) => { let _ = index_process_sink - .send( - StreamMessage::EndIndexFile { - file_name: metadata.name.clone(), - } - .to_string(), - ) + .send(StreamMessage::EndIndexFile { file_name }.to_string()) .await; }, Err(err) => { let _ = index_process_sink - .send( - StreamMessage::IndexFileError { - file_name: metadata.name.clone(), - } - .to_string(), - ) + .send(StreamMessage::IndexFileError { file_name 
}.to_string()) .await; error!("[AI Plugin] failed to index file: {:?}", err); }, diff --git a/frontend/rust-lib/flowy-ai/src/local_ai/resource.rs b/frontend/rust-lib/flowy-ai/src/local_ai/resource.rs index a8ee83a256..489a67f69e 100644 --- a/frontend/rust-lib/flowy-ai/src/local_ai/resource.rs +++ b/frontend/rust-lib/flowy-ai/src/local_ai/resource.rs @@ -264,6 +264,8 @@ impl LocalAIResourceController { Some(llm_setting.ollama_server_url.clone()), )?; + //config = config.with_log_level("debug".to_string()); + if rag_enabled { let resource_dir = self.resource_dir()?; let persist_directory = resource_dir.join("vectorstore"); diff --git a/frontend/rust-lib/flowy-ai/src/middleware/chat_service_mw.rs b/frontend/rust-lib/flowy-ai/src/middleware/chat_service_mw.rs index 7ebe5889a7..d03b1d88c2 100644 --- a/frontend/rust-lib/flowy-ai/src/middleware/chat_service_mw.rs +++ b/frontend/rust-lib/flowy-ai/src/middleware/chat_service_mw.rs @@ -66,14 +66,15 @@ impl AICloudServiceMiddleware { let _ = index_process_sink .send(StreamMessage::IndexStart.to_string()) .await; - - self + let result = self .local_ai .index_message_metadata(chat_id, metadata_list, index_process_sink) - .await?; + .await; let _ = index_process_sink .send(StreamMessage::IndexEnd.to_string()) .await; + + result? } else if let Some(_storage_service) = self.storage_service.upgrade() { // } @@ -312,7 +313,7 @@ impl ChatCloudService for AICloudServiceMiddleware { if self.local_ai.is_running() { self .local_ai - .embed_file(chat_id, Some(file_path.to_path_buf()), None, metadata) + .embed_file(chat_id, file_path.to_path_buf(), metadata) .await .map_err(|err| FlowyError::local_ai().with_context(err))?; Ok(())