Merge pull request #7522 from AppFlowy-IO/response_format_local_ai

chore: support response format
Nathan.fooo 2025-03-13 13:08:52 +08:00 committed by GitHub
commit 2b8aaf1d46
9 changed files with 20 additions and 16 deletions
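
In short: the local-AI code paths now accept an optional response format, handed to the plugin as a serialized JSON value, and the file-indexing entry point is renamed from index_file to embed_file across every ChatCloudService implementation. A minimal sketch of the convention, assuming only that the format type implements serde::Serialize (its real definition is not part of this diff):

    use serde_json::{json, Value};

    // Hypothetical stand-in for the response-format type; the real one is
    // not shown in this commit. Anything serializable fits the pattern.
    #[derive(serde::Serialize)]
    struct ResponseFormat {
        output_layout: String,
    }

    // The convention this PR adopts: serialize the format once and pass it
    // as an Option<Value>, so callers without a preference pass None and
    // keep the old behavior.
    fn format_arg(format: Option<&ResponseFormat>) -> Option<Value> {
        format.map(|f| json!(f))
    }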

View File

@@ -198,7 +198,7 @@ dependencies = [
 [[package]]
 name = "appflowy-local-ai"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=f1b5167e9569e8a61ef50a1afb140306a5287e57#f1b5167e9569e8a61ef50a1afb140306a5287e57"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=cf0b5e77d3bbcecbcd9cbed86476658b477399e6#cf0b5e77d3bbcecbcd9cbed86476658b477399e6"
 dependencies = [
  "anyhow",
  "appflowy-plugin",
@@ -218,7 +218,7 @@ dependencies = [
 [[package]]
 name = "appflowy-plugin"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=f1b5167e9569e8a61ef50a1afb140306a5287e57#f1b5167e9569e8a61ef50a1afb140306a5287e57"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=cf0b5e77d3bbcecbcd9cbed86476658b477399e6#cf0b5e77d3bbcecbcd9cbed86476658b477399e6"
 dependencies = [
  "anyhow",
  "cfg-if",

View File

@@ -152,5 +152,5 @@ collab-importer = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFl
 # To update the commit ID, run:
 # scripts/tool/update_local_ai_rev.sh new_rev_id
 # ⚠️⚠️⚠️️
-appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "f1b5167e9569e8a61ef50a1afb140306a5287e57" }
-appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "f1b5167e9569e8a61ef50a1afb140306a5287e57" }
+appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "cf0b5e77d3bbcecbcd9cbed86476658b477399e6" }
+appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "cf0b5e77d3bbcecbcd9cbed86476658b477399e6" }
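
Both pins move in lockstep, as the warning comment demands; per the instructions quoted above, a run such as scripts/tool/update_local_ai_rev.sh cf0b5e77d3bbcecbcd9cbed86476658b477399e6 is presumably how this hunk was produced.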

View File

@@ -92,7 +92,7 @@ pub trait ChatCloudService: Send + Sync + 'static {
     params: CompleteTextParams,
   ) -> Result<StreamComplete, FlowyError>;
-  async fn index_file(
+  async fn embed_file(
     &self,
     workspace_id: &str,
     file_path: &Path,
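
The parameter list is cut off by the page view above. For orientation, the renamed method plausibly reads as below once the rename lands; everything past file_path (the chat_id and metadata parameters, the unit result) is inferred from the call sites later in this commit, not quoted from the source:

    use std::path::Path;
    use serde_json::Value;

    // Inside the #[async_trait]-style ChatCloudService trait. The chat_id
    // and metadata parameters and the Ok(()) result are inferred from call
    // sites such as embed_file(workspace_id, file_path, chat_id, metadata)
    // further down; FlowyError is AppFlowy's error type.
    async fn embed_file(
      &self,
      workspace_id: &str,
      file_path: &Path,
      chat_id: &str,
      metadata: Option<Value>,
    ) -> Result<(), FlowyError>;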

View File

@@ -587,7 +587,7 @@ impl Chat {
     );
     self
       .chat_service
-      .index_file(
+      .embed_file(
         &self.user_service.workspace_id()?,
         &file_path,
         &self.chat_id,

View File

@@ -392,7 +392,7 @@ impl LocalAIController {
       .await;
     let result = self
-      .index_file(chat_id, file_path, content, Some(index_metadata.clone()))
+      .embed_file(chat_id, file_path, content, Some(index_metadata.clone()))
       .await;
     match result {
       Ok(_) => {

View File

@@ -163,7 +163,7 @@ impl ChatCloudService for AICloudServiceMiddleware {
     let row = self.get_message_record(question_id)?;
     match self
       .local_ai
-      .stream_question(chat_id, &row.content, json!({}))
+      .stream_question(chat_id, &row.content, Some(json!(format)), json!({}))
       .await
     {
       Ok(stream) => Ok(QuestionStream::new(stream).boxed()),
@@ -277,7 +277,11 @@ impl ChatCloudService for AICloudServiceMiddleware {
     if self.local_ai.is_running() {
       match self
         .local_ai
-        .complete_text(&params.text, params.completion_type.unwrap() as u8)
+        .complete_text(
+          &params.text,
+          params.completion_type.unwrap() as u8,
+          Some(json!(params.format)),
+        )
         .await
       {
         Ok(stream) => Ok(
@@ -298,7 +302,7 @@ impl ChatCloudService for AICloudServiceMiddleware {
     }
   }
-  async fn index_file(
+  async fn embed_file(
     &self,
     workspace_id: &str,
     file_path: &Path,
@@ -308,14 +312,14 @@ impl ChatCloudService for AICloudServiceMiddleware {
     if self.local_ai.is_running() {
       self
         .local_ai
-        .index_file(chat_id, Some(file_path.to_path_buf()), None, metadata)
+        .embed_file(chat_id, Some(file_path.to_path_buf()), None, metadata)
         .await
         .map_err(|err| FlowyError::local_ai().with_context(err))?;
       Ok(())
     } else {
       self
         .cloud_service
-        .index_file(workspace_id, file_path, chat_id, metadata)
+        .embed_file(workspace_id, file_path, chat_id, metadata)
         .await
     }
   }
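
Two things are worth reading out of this file. First, the response format travels as Some(json!(...)) into both stream_question and complete_text. Second, the middleware keeps its routing rule: when the local plugin is running the call stays local, otherwise it falls through to the cloud service under the same method name, so callers never learn which backend handled the request. A self-contained sketch of the calling convention, with stand-in types (the real plugin API lives in the appflowy-local-ai crate and may differ in detail):

    use serde_json::{json, Value};

    // Stand-in for the plugin surface touched above; signatures are
    // reconstructed from the visible arguments, not copied from
    // appflowy-local-ai.
    trait LocalAiPlugin {
      fn stream_question(&self, chat_id: &str, content: &str, format: Option<Value>, metadata: Value);
      fn complete_text(&self, text: &str, completion_type: u8, format: Option<Value>);
    }

    // After this PR both entry points receive the format as an optional
    // JSON value; None preserves the plugin's default output shape.
    fn ask(plugin: &impl LocalAiPlugin, chat_id: &str, question: &str, format: Option<Value>) {
      plugin.stream_question(chat_id, question, format, json!({}));
    }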

View File

@@ -781,7 +781,7 @@ impl ChatCloudService for ServerProvider {
       .await
   }
-  async fn index_file(
+  async fn embed_file(
     &self,
     workspace_id: &str,
     file_path: &Path,
@@ -791,7 +791,7 @@ impl ChatCloudService for ServerProvider {
     self
       .get_server()?
       .chat_service()
-      .index_file(workspace_id, file_path, chat_id, metadata)
+      .embed_file(workspace_id, file_path, chat_id, metadata)
       .await
   }

View File

@@ -200,7 +200,7 @@ where
     Ok(stream.boxed())
   }
-  async fn index_file(
+  async fn embed_file(
     &self,
     _workspace_id: &str,
     _file_path: &Path,

View File

@@ -101,7 +101,7 @@ impl ChatCloudService for DefaultChatCloudServiceImpl {
     Err(FlowyError::not_support().with_context("complete text is not supported in local server."))
   }
-  async fn index_file(
+  async fn embed_file(
     &self,
     _workspace_id: &str,
     _file_path: &Path,