chore: local ai embed file

Nathan 2025-03-16 09:37:05 +08:00
parent 1f9fe89f87
commit af0c802486
7 changed files with 65 additions and 74 deletions

View File

@@ -354,25 +354,32 @@ class _ChatContentPage extends StatelessWidget {
    BuildContext context,
    ChatMessageRefSource metadata,
  ) async {
    if (isURL(metadata.name)) {
      late Uri uri;
      try {
        uri = Uri.parse(metadata.name);
        // `Uri` identifies `localhost` as a scheme
        if (!uri.hasScheme || uri.scheme == 'localhost') {
          uri = Uri.parse("http://${metadata.name}");
          await InternetAddress.lookup(uri.host);
        }
        await launchUrl(uri);
      } catch (err) {
        Log.error("failed to open url $err");
      }
    } else {
    // When the source of the metadata is appflowy, it refers to an AppFlowy page
    if (metadata.source == "appflowy") {
      final sidebarView =
          await ViewBackendService.getView(metadata.id).toNullable();
      if (context.mounted) {
        openPageFromMessage(context, sidebarView);
      }
      return;
    }
    if (metadata.source == "web") {
      if (isURL(metadata.name)) {
        late Uri uri;
        try {
          uri = Uri.parse(metadata.name);
          // `Uri` identifies `localhost` as a scheme
          if (!uri.hasScheme || uri.scheme == 'localhost') {
            uri = Uri.parse("http://${metadata.name}");
            await InternetAddress.lookup(uri.host);
          }
          await launchUrl(uri);
        } catch (err) {
          Log.error("failed to open url $err");
        }
      }
      return;
    }
  }
}
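The `Uri` comment above flags a real parser quirk: WHATWG-style URL parsers accept an input like "localhost:8080" as a valid URL whose scheme is "localhost", which is why the code retries with an explicit http:// prefix. A minimal sketch of the same normalization, written in Rust with the url crate purely for illustration (the crate choice and helper name are assumptions, not part of this commit):

use url::Url;

// Hypothetical helper; mirrors the Dart fallback above.
fn normalize(input: &str) -> Result<Url, url::ParseError> {
  match Url::parse(input) {
    // "localhost:8080" parses successfully with scheme == "localhost",
    // so treat it like a missing scheme and retry with http://.
    Ok(u) if u.scheme() == "localhost" => Url::parse(&format!("http://{input}")),
    // Bare hosts such as "example.com" fail outright; retry the same way.
    Err(url::ParseError::RelativeUrlWithoutBase) => Url::parse(&format!("http://{input}")),
    other => other,
  }
}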

View File

@@ -198,7 +198,7 @@ dependencies = [
[[package]]
name = "appflowy-local-ai"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=48271d4a2d225ac0af141b87780bfd07d41ec4f2#48271d4a2d225ac0af141b87780bfd07d41ec4f2"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=19f9ea7f9cc7c811eef3349ac3f1c5e6fce5c900#19f9ea7f9cc7c811eef3349ac3f1c5e6fce5c900"
dependencies = [
"anyhow",
"appflowy-plugin",
@@ -218,7 +218,7 @@ dependencies = [
[[package]]
name = "appflowy-plugin"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=48271d4a2d225ac0af141b87780bfd07d41ec4f2#48271d4a2d225ac0af141b87780bfd07d41ec4f2"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=19f9ea7f9cc7c811eef3349ac3f1c5e6fce5c900#19f9ea7f9cc7c811eef3349ac3f1c5e6fce5c900"
dependencies = [
"anyhow",
"cfg-if",

View File

@@ -152,5 +152,5 @@ collab-importer = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFl
# To update the commit ID, run:
# scripts/tool/update_local_ai_rev.sh new_rev_id
# ⚠️⚠️⚠️️
appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "48271d4a2d225ac0af141b87780bfd07d41ec4f2" }
appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "48271d4a2d225ac0af141b87780bfd07d41ec4f2" }
appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "19f9ea7f9cc7c811eef3349ac3f1c5e6fce5c900" }
appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "19f9ea7f9cc7c811eef3349ac3f1c5e6fce5c900" }
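Both pins above have to move in lockstep with the Cargo.lock entries in the previous file; the referenced scripts/tool/update_local_ai_rev.sh is not shown in this commit. As a rough illustration of what such a rev updater does, a hypothetical Rust sketch (the file path and helper name are assumptions, and the real tool is a shell script):

use std::{env, fs};

fn main() -> std::io::Result<()> {
  let new_rev = env::args().nth(1).expect("usage: update_local_ai_rev <new_rev_id>");
  let path = "Cargo.toml"; // assumed to run from the workspace root
  let text = fs::read_to_string(path)?;
  let updated = text
    .lines()
    .map(|line| {
      // Rewrite only the pinned AppFlowy-LocalAI dependency lines.
      if line.contains("AppFlowy-LocalAI") && line.contains("rev = \"") {
        rewrite_rev(line, &new_rev)
      } else {
        line.to_string()
      }
    })
    .collect::<Vec<_>>()
    .join("\n");
  fs::write(path, updated + "\n")
}

// Swap the quoted commit id after `rev = "` for the new one.
fn rewrite_rev(line: &str, new_rev: &str) -> String {
  let start = line.find("rev = \"").unwrap() + "rev = \"".len();
  let end = start + line[start..].find('"').unwrap();
  format!("{}{}{}", &line[..start], new_rev, &line[end..])
}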

View File

@@ -376,7 +376,6 @@ impl AIManager {
      .await?;
    let chat_setting_store_key = setting_store_key(chat_id);
    if let Some(settings) = self
      .store_preferences
      .get_object::<ChatSettings>(&chat_setting_store_key)
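For context on the `get_object::<ChatSettings>` call: it implies a serde-backed key-value preference store where a missing or undecodable entry simply yields None, which is why the caller can use a plain `if let Some(settings)`. A hypothetical sketch of that pattern (the real store API in AppFlowy may differ):

use serde::de::DeserializeOwned;
use std::collections::HashMap;

struct StorePreferences {
  inner: HashMap<String, String>, // key -> JSON payload
}

impl StorePreferences {
  // None when the key is absent or the payload fails to decode.
  fn get_object<T: DeserializeOwned>(&self, key: &str) -> Option<T> {
    serde_json::from_str(self.inner.get(key)?).ok()
  }
}

// Assumed shape of the key helper used above.
fn setting_store_key(chat_id: &str) -> String {
  format!("chat_settings_{chat_id}")
}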

View File

@@ -27,7 +27,7 @@ use std::path::{Path, PathBuf};
use std::sync::Arc;
use tokio::select;
use tokio_stream::StreamExt;
use tracing::{debug, error, info, instrument, trace};
use tracing::{debug, error, info, instrument};

#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct LocalAISetting {
@@ -315,6 +315,7 @@ impl LocalAIController {
    Ok(enabled)
  }

  #[instrument(level = "debug", skip_all)]
  pub async fn index_message_metadata(
    &self,
    chat_id: &str,
@@ -322,22 +323,27 @@ impl LocalAIController {
    index_process_sink: &mut (impl Sink<String> + Unpin),
  ) -> FlowyResult<()> {
    if !self.is_enabled() {
      info!("[AI Plugin] local ai is disabled, skip indexing");
      return Ok(());
    }

    for metadata in metadata_list {
      if let Err(err) = metadata.data.validate() {
        error!(
          "[AI Plugin] invalid metadata: {:?}, error: {:?}",
          metadata, err
        );
        continue;
      }

      let mut file_metadata = HashMap::new();
      file_metadata.insert("id".to_string(), json!(&metadata.id));
      file_metadata.insert("name".to_string(), json!(&metadata.name));
      file_metadata.insert("source".to_string(), json!(&metadata.source));

      let file_path = Path::new(&metadata.data.content);
      if !file_path.exists() {
        return Err(
          FlowyError::record_not_found().with_context(format!("File not found: {:?}", file_path)),
        );
      }
      info!(
        "[AI Plugin] embed file: {:?}, with metadata: {:?}",
        file_path, file_metadata
      );

      let mut index_metadata = HashMap::new();
      index_metadata.insert("id".to_string(), json!(&metadata.id));
      index_metadata.insert("name".to_string(), json!(&metadata.name));
      index_metadata.insert("source".to_string(), json!(&metadata.source));
@@ -345,35 +351,16 @@ impl LocalAIController {
            metadata.data.content_type
          );
        },
        ContextLoader::Text | ContextLoader::Markdown => {
          trace!("[AI Plugin]: index text: {}", metadata.data.content);
        ContextLoader::Text | ContextLoader::Markdown | ContextLoader::PDF => {
          self
            .process_index_file(
              chat_id,
              None,
              Some(metadata.data.content.clone()),
              metadata,
              &index_metadata,
              file_path.to_path_buf(),
              &file_metadata,
              index_process_sink,
            )
            .await?;
        },
        ContextLoader::PDF => {
          trace!("[AI Plugin]: index pdf file: {}", metadata.data.content);
          let file_path = Path::new(&metadata.data.content);
          if file_path.exists() {
            self
              .process_index_file(
                chat_id,
                Some(file_path.to_path_buf()),
                None,
                metadata,
                &index_metadata,
                index_process_sink,
              )
              .await?;
          }
        },
      }
    }
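Because index_process_sink accepts any Sink<String>, progress strings can be fanned out however the caller likes. A sketch of one way to drive it from the same crate, assuming a Tokio runtime and that metadata_list is a &[ChatMessageMetadata]; futures::channel::mpsc::UnboundedSender implements Sink<String> and is Unpin, so it satisfies the bound above:

use futures::{channel::mpsc, StreamExt};

async fn index_with_progress(
  controller: &LocalAIController,
  chat_id: &str,
  metadata_list: &[ChatMessageMetadata],
) -> FlowyResult<()> {
  let (mut tx, mut rx) = mpsc::unbounded::<String>();

  // Drain progress strings (StartIndexFile / EndIndexFile / ...) as they
  // arrive, e.g. to forward them to a UI stream.
  let progress = tokio::spawn(async move {
    while let Some(message) = rx.next().await {
      println!("index progress: {message}");
    }
  });

  let result = controller
    .index_message_metadata(chat_id, metadata_list, &mut tx)
    .await;

  drop(tx); // close the channel so the drain task can finish
  let _ = progress.await;
  result
}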
@@ -383,43 +370,38 @@ impl LocalAIController {
  async fn process_index_file(
    &self,
    chat_id: &str,
    file_path: Option<PathBuf>,
    content: Option<String>,
    metadata: &ChatMessageMetadata,
    file_path: PathBuf,
    index_metadata: &HashMap<String, serde_json::Value>,
    index_process_sink: &mut (impl Sink<String> + Unpin),
  ) -> Result<(), FlowyError> {
    let file_name = file_path
      .file_name()
      .unwrap_or_default()
      .to_string_lossy()
      .to_string();

    let _ = index_process_sink
      .send(
        StreamMessage::StartIndexFile {
          file_name: metadata.name.clone(),
          file_name: file_name.clone(),
        }
        .to_string(),
      )
      .await;

    let result = self
      .embed_file(chat_id, file_path, content, Some(index_metadata.clone()))
      .ai_plugin
      .embed_file(chat_id, file_path, Some(index_metadata.clone()))
      .await;
    match result {
      Ok(_) => {
        let _ = index_process_sink
          .send(
            StreamMessage::EndIndexFile {
              file_name: metadata.name.clone(),
            }
            .to_string(),
          )
          .send(StreamMessage::EndIndexFile { file_name }.to_string())
          .await;
      },
      Err(err) => {
        let _ = index_process_sink
          .send(
            StreamMessage::IndexFileError {
              file_name: metadata.name.clone(),
            }
            .to_string(),
          )
          .send(StreamMessage::IndexFileError { file_name }.to_string())
          .await;
        error!("[AI Plugin] failed to index file: {:?}", err);
      },
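The refactor above derives the reported file_name from the path instead of metadata.name and flattens the send calls. For reference, a hypothetical reconstruction of the StreamMessage variants these calls imply; the actual Display format lives elsewhere in the crate and is not shown in this diff, so the wire strings here are illustrative only:

use std::fmt;

enum StreamMessage {
  IndexStart,
  IndexEnd,
  StartIndexFile { file_name: String },
  EndIndexFile { file_name: String },
  IndexFileError { file_name: String },
}

impl fmt::Display for StreamMessage {
  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    match self {
      // Illustrative wire format; the real one is defined elsewhere.
      StreamMessage::IndexStart => write!(f, "index_start"),
      StreamMessage::IndexEnd => write!(f, "index_end"),
      StreamMessage::StartIndexFile { file_name } => write!(f, "start_index_file:{file_name}"),
      StreamMessage::EndIndexFile { file_name } => write!(f, "end_index_file:{file_name}"),
      StreamMessage::IndexFileError { file_name } => write!(f, "index_file_error:{file_name}"),
    }
  }
}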

View File

@@ -264,6 +264,8 @@ impl LocalAIResourceController {
      Some(llm_setting.ollama_server_url.clone()),
    )?;
    //config = config.with_log_level("debug".to_string());

    if rag_enabled {
      let resource_dir = self.resource_dir()?;
      let persist_directory = resource_dir.join("vectorstore");
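Only the start of the added branch is visible here: when RAG is enabled, embeddings are persisted under a vectorstore directory inside the resource dir. In isolation, a minimal sketch of that setup (the create_dir_all step is an assumption; the rest of the config builder is elided in the diff):

use std::path::{Path, PathBuf};

fn vectorstore_dir(resource_dir: &Path, rag_enabled: bool) -> std::io::Result<Option<PathBuf>> {
  if !rag_enabled {
    return Ok(None); // no embedding persistence when RAG is off
  }
  let persist_directory = resource_dir.join("vectorstore");
  std::fs::create_dir_all(&persist_directory)?; // assumed; not shown above
  Ok(Some(persist_directory))
}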

View File

@@ -66,14 +66,15 @@ impl AICloudServiceMiddleware {
      let _ = index_process_sink
        .send(StreamMessage::IndexStart.to_string())
        .await;
      self
      let result = self
        .local_ai
        .index_message_metadata(chat_id, metadata_list, index_process_sink)
        .await?;
        .await;
      let _ = index_process_sink
        .send(StreamMessage::IndexEnd.to_string())
        .await;
      result?
    } else if let Some(_storage_service) = self.storage_service.upgrade() {
      //
    }
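The middleware change is a classic cleanup fix: the old `.await?` returned early on error, so StreamMessage::IndexEnd was never sent when indexing failed. Capturing the result first guarantees the closing marker is emitted, and `result?` then propagates the failure. The control-flow shape in isolation (names generic; only the structure mirrors the diff):

async fn run_with_end_marker<T, E>(
  work: impl std::future::Future<Output = Result<T, E>>,
  mut emit_end: impl FnMut(),
) -> Result<T, E> {
  let result = work.await; // no `?` here...
  emit_end(); // ...so the end marker is always emitted
  result // propagate success or failure afterwards
}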
@@ -312,7 +313,7 @@ impl ChatCloudService for AICloudServiceMiddleware {
    if self.local_ai.is_running() {
      self
        .local_ai
        .embed_file(chat_id, Some(file_path.to_path_buf()), None, metadata)
        .embed_file(chat_id, file_path.to_path_buf(), metadata)
        .await
        .map_err(|err| FlowyError::local_ai().with_context(err))?;
      Ok(())