use crate::ai_manager::AIUserService;
use crate::entities::{ChatStatePB, ModelTypePB};
use crate::local_ai::local_llm_chat::LocalAIController;
use crate::local_ai::stream_util::LocalAIStreamAdaptor;
use crate::notification::{make_notification, ChatNotification, APPFLOWY_AI_NOTIFICATION_KEY};
use crate::persistence::select_single_message;
use appflowy_plugin::error::PluginError;
use flowy_ai_pub::cloud::{
  ChatCloudService, ChatMessage, ChatMessageMetadata, ChatMessageType, CompletionType,
  CreateTextChatContext, LocalAIConfig, MessageCursor, RelatedQuestion, RepeatedChatMessage,
  RepeatedRelatedQuestion, StreamAnswer, StreamComplete,
};
use flowy_error::{FlowyError, FlowyResult};
use futures::{stream, StreamExt, TryStreamExt};
use lib_infra::async_trait::async_trait;
use lib_infra::future::FutureResult;
use std::path::Path;
use std::sync::Arc;

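/// Sits between the chat layer and the AI backends. Requests that the local
/// LLM can handle are routed to it when it is running; everything else falls
/// back to the cloud `ChatCloudService`.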
pub struct AICloudServiceMiddleware {
  cloud_service: Arc<dyn ChatCloudService>,
  user_service: Arc<dyn AIUserService>,
  local_llm_controller: Arc<LocalAIController>,
}

impl AICloudServiceMiddleware {
  pub fn new(
    user_service: Arc<dyn AIUserService>,
    cloud_service: Arc<dyn ChatCloudService>,
    local_llm_controller: Arc<LocalAIController>,
  ) -> Self {
    Self {
      user_service,
      cloud_service,
      local_llm_controller,
    }
  }

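  /// Loads the content of a previously persisted chat message from the
  /// current user's local SQLite database.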
  fn get_message_content(&self, message_id: i64) -> FlowyResult<String> {
    let uid = self.user_service.user_id()?;
    let conn = self.user_service.sqlite_connection(uid)?;
    let content = select_single_message(conn, message_id)?
      .map(|data| data.content)
      .ok_or_else(|| {
        FlowyError::record_not_found().with_context(format!("Message not found: {}", message_id))
      })?;

    Ok(content)
  }

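  /// When the local AI plugin has disconnected, notify the UI that the local
  /// model is no longer available.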
  fn handle_plugin_error(&self, err: PluginError) {
    if matches!(
      err,
      PluginError::PluginNotConnected | PluginError::PeerDisconnect
    ) {
      make_notification(
        APPFLOWY_AI_NOTIFICATION_KEY,
        ChatNotification::UpdateChatPluginState,
      )
      .payload(ChatStatePB {
        model_type: ModelTypePB::LocalAI,
        available: false,
      })
      .send();
    }
  }
}

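// Where the local model can serve a request, each method below checks
// `local_llm_controller.is_running()` and only falls back to the cloud service
// when it is not; the remaining methods always delegate to the cloud service.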
#[async_trait]
impl ChatCloudService for AICloudServiceMiddleware {
  fn create_chat(
    &self,
    uid: &i64,
    workspace_id: &str,
    chat_id: &str,
  ) -> FutureResult<(), FlowyError> {
    self.cloud_service.create_chat(uid, workspace_id, chat_id)
  }

  fn create_question(
    &self,
    workspace_id: &str,
    chat_id: &str,
    message: &str,
    message_type: ChatMessageType,
    metadata: Vec<ChatMessageMetadata>,
  ) -> FutureResult<ChatMessage, FlowyError> {
    self
      .cloud_service
      .create_question(workspace_id, chat_id, message, message_type, metadata)
  }

  fn create_answer(
    &self,
    workspace_id: &str,
    chat_id: &str,
    message: &str,
    question_id: i64,
    metadata: Option<serde_json::Value>,
  ) -> FutureResult<ChatMessage, FlowyError> {
    self
      .cloud_service
      .create_answer(workspace_id, chat_id, message, question_id, metadata)
  }

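  // Stream the answer for `message_id`. When the local model is running, the
  // question content is read from the local database and streamed through the
  // plugin; a plugin failure is surfaced as a single-error stream.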
  async fn stream_answer(
    &self,
    workspace_id: &str,
    chat_id: &str,
    message_id: i64,
  ) -> Result<StreamAnswer, FlowyError> {
    if self.local_llm_controller.is_running() {
      let content = self.get_message_content(message_id)?;
      match self
        .local_llm_controller
        .stream_question(chat_id, &content)
        .await
      {
        Ok(stream) => Ok(LocalAIStreamAdaptor::new(stream).boxed()),
        Err(err) => {
          self.handle_plugin_error(err);
          Ok(stream::once(async { Err(FlowyError::local_ai_unavailable()) }).boxed())
        },
      }
    } else {
      self
        .cloud_service
        .stream_answer(workspace_id, chat_id, message_id)
        .await
    }
  }

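  // Return a complete (non-streaming) answer. A locally generated answer is
  // still persisted through the cloud service via `create_answer`.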
  async fn get_answer(
    &self,
    workspace_id: &str,
    chat_id: &str,
    question_message_id: i64,
  ) -> Result<ChatMessage, FlowyError> {
    if self.local_llm_controller.is_running() {
      let content = self.get_message_content(question_message_id)?;
      match self
        .local_llm_controller
        .ask_question(chat_id, &content)
        .await
      {
        Ok(answer) => {
          // TODO(nathan): metadata
          let message = self
            .cloud_service
            .create_answer(workspace_id, chat_id, &answer, question_message_id, None)
            .await?;
          Ok(message)
        },
        Err(err) => {
          self.handle_plugin_error(err);
          Err(FlowyError::local_ai_unavailable())
        },
      }
    } else {
      self
        .cloud_service
        .get_answer(workspace_id, chat_id, question_message_id)
        .await
    }
  }

  fn get_chat_messages(
    &self,
    workspace_id: &str,
    chat_id: &str,
    offset: MessageCursor,
    limit: u64,
  ) -> FutureResult<RepeatedChatMessage, FlowyError> {
    self
      .cloud_service
      .get_chat_messages(workspace_id, chat_id, offset, limit)
  }

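  // Fetch follow-up question suggestions, mapping the plain strings returned
  // by the local model into `RelatedQuestion` values.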
  async fn get_related_message(
    &self,
    workspace_id: &str,
    chat_id: &str,
    message_id: i64,
  ) -> Result<RepeatedRelatedQuestion, FlowyError> {
    if self.local_llm_controller.is_running() {
      let questions = self
        .local_llm_controller
        .get_related_question(chat_id)
        .await
        .map_err(|err| FlowyError::local_ai().with_context(err))?
        .into_iter()
        .map(|content| RelatedQuestion {
          content,
          metadata: None,
        })
        .collect::<Vec<_>>();

      Ok(RepeatedRelatedQuestion {
        message_id,
        items: questions,
      })
    } else {
      self
        .cloud_service
        .get_related_message(workspace_id, chat_id, message_id)
        .await
    }
  }

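  // Stream a text completion for the given `CompletionType`, converting local
  // plugin errors into `FlowyError` items on the stream.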
  async fn stream_complete(
    &self,
    workspace_id: &str,
    text: &str,
    complete_type: CompletionType,
  ) -> Result<StreamComplete, FlowyError> {
    if self.local_llm_controller.is_running() {
      match self
        .local_llm_controller
        .complete_text(text, complete_type as u8)
        .await
      {
        Ok(stream) => Ok(
          stream
            .map_err(|err| FlowyError::local_ai().with_context(err))
            .boxed(),
        ),
        Err(err) => {
          self.handle_plugin_error(err);
          Ok(stream::once(async { Err(FlowyError::local_ai_unavailable()) }).boxed())
        },
      }
    } else {
      self
        .cloud_service
        .stream_complete(workspace_id, text, complete_type)
        .await
    }
  }

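  // Index a file for retrieval in the given chat. The local controller takes
  // an owned path, hence `to_path_buf()`.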
  async fn index_file(
    &self,
    workspace_id: &str,
    file_path: &Path,
    chat_id: &str,
  ) -> Result<(), FlowyError> {
    if self.local_llm_controller.is_running() {
      self
        .local_llm_controller
        .index_file(chat_id, file_path.to_path_buf())
        .await
        .map_err(|err| FlowyError::local_ai().with_context(err))?;
      Ok(())
    } else {
      self
        .cloud_service
        .index_file(workspace_id, file_path, chat_id)
        .await
    }
  }

  async fn get_local_ai_config(&self, workspace_id: &str) -> Result<LocalAIConfig, FlowyError> {
    self.cloud_service.get_local_ai_config(workspace_id).await
  }

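  // Attach extra text context to a chat. Not yet supported by the local AI,
  // so this is a no-op while the local model is running.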
  async fn create_chat_context(
    &self,
    workspace_id: &str,
    chat_context: CreateTextChatContext,
  ) -> Result<(), FlowyError> {
    if self.local_llm_controller.is_running() {
      // TODO(nathan): support offline ai context
      Ok(())
    } else {
      self
        .cloud_service
        .create_chat_context(workspace_id, chat_context)
        .await
    }
  }
}