Merge pull request #7573 from AppFlowy-IO/rename_plugin

chore: rename the local ai plugin
Nathan.fooo 2025-03-19 11:29:23 +08:00 committed by GitHub
commit 8a9cc278ec
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
7 changed files with 25 additions and 90 deletions
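In short, the local AI plugin executable is renamed from ollama_ai_plugin to af_ollama_plugin in the platform-specific lookup code, the pinned AppFlowy-LocalAI revision is bumped to match, the Flutter settings panel drops its unused ExpandablePanel wrapper, and an outdated model-download test is removed. A rough sketch of what the rename means for plugin discovery (illustrative only; the executable names come from the Rust hunks below, the helper function itself is made up):

// Illustrative sketch, not part of this commit: the executable name the
// desktop client looks for after the rename.
fn plugin_executable_name() -> &'static str {
    if cfg!(target_os = "windows") {
        "af_ollama_plugin.exe" // was "ollama_ai_plugin.exe"
    } else {
        "af_ollama_plugin" // was "ollama_ai_plugin"; expected under /usr/local/bin
    }
}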

@@ -45,7 +45,7 @@ class LocalAISetting extends StatelessWidget {
collapsed: const SizedBox.shrink(),
expanded: Column(
children: [
const VSpace(6),
const VSpace(12),
DecoratedBox(
decoration: BoxDecoration(
color:

@@ -1,6 +1,5 @@
import 'package:appflowy/workspace/application/settings/ai/local_ai_setting_panel_bloc.dart';
import 'package:appflowy/workspace/presentation/settings/pages/setting_ai_view/ollma_setting.dart';
import 'package:expandable/expandable.dart';
import 'package:flowy_infra_ui/widget/spacing.dart';
import 'package:flutter/material.dart';
import 'package:flutter_bloc/flutter_bloc.dart';
@@ -13,40 +12,19 @@ class LocalAISettingPanel extends StatelessWidget {
Widget build(BuildContext context) {
return BlocProvider(
create: (context) => LocalAISettingPanelBloc(),
child: ExpandableNotifier(
initialExpanded: true,
child: ExpandablePanel(
theme: const ExpandableThemeData(
headerAlignment: ExpandablePanelHeaderAlignment.center,
tapBodyToCollapse: false,
hasIcon: false,
tapBodyToExpand: false,
tapHeaderToExpand: false,
),
header: const SizedBox.shrink(),
collapsed: const SizedBox.shrink(),
expanded: Padding(
padding: const EdgeInsets.symmetric(vertical: 6),
child:
BlocBuilder<LocalAISettingPanelBloc, LocalAISettingPanelState>(
builder: (context, state) {
return Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
// If the progress indicator is startLocalAIApp, then don't show the LLM model.
if (state.progressIndicator ==
const LocalAIProgress.downloadLocalAIApp())
const SizedBox.shrink()
else ...[
OllamaSettingPage(),
VSpace(6),
PluginStateIndicator(),
],
],
);
},
),
),
child: Padding(
padding: const EdgeInsets.symmetric(vertical: 6),
child: BlocBuilder<LocalAISettingPanelBloc, LocalAISettingPanelState>(
builder: (context, state) {
return Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
OllamaSettingPage(),
VSpace(6),
PluginStateIndicator(),
],
);
},
),
),
);

@@ -198,7 +198,7 @@ dependencies = [
[[package]]
name = "appflowy-local-ai"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=6a12c1bad70fb9486c7aabf379d72d94cb73a2d5#6a12c1bad70fb9486c7aabf379d72d94cb73a2d5"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=4dad7f8744f6703f094b4c594aa4d65a487cc540#4dad7f8744f6703f094b4c594aa4d65a487cc540"
dependencies = [
"anyhow",
"appflowy-plugin",
@@ -218,7 +218,7 @@ dependencies = [
[[package]]
name = "appflowy-plugin"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=6a12c1bad70fb9486c7aabf379d72d94cb73a2d5#6a12c1bad70fb9486c7aabf379d72d94cb73a2d5"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=4dad7f8744f6703f094b4c594aa4d65a487cc540#4dad7f8744f6703f094b4c594aa4d65a487cc540"
dependencies = [
"anyhow",
"cfg-if",

@@ -152,5 +152,5 @@ collab-importer = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFl
# To update the commit ID, run:
# scripts/tool/update_local_ai_rev.sh new_rev_id
# ⚠️⚠️⚠️️
appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "6a12c1bad70fb9486c7aabf379d72d94cb73a2d5" }
appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "6a12c1bad70fb9486c7aabf379d72d94cb73a2d5" }
appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "4dad7f8744f6703f094b4c594aa4d65a487cc540" }
appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "4dad7f8744f6703f094b4c594aa4d65a487cc540" }

@@ -116,46 +116,3 @@ async fn make_request(
}
Ok(response)
}
#[cfg(test)]
mod test {
use super::*;
use std::env::temp_dir;
#[tokio::test]
async fn retrieve_gpt4all_model_test() {
for url in [
// "https://gpt4all.io/models/gguf/all-MiniLM-L6-v2-f16.gguf",
"https://huggingface.co/second-state/All-MiniLM-L6-v2-Embedding-GGUF/resolve/main/all-MiniLM-L6-v2-Q3_K_L.gguf?download=true",
// "https://huggingface.co/MaziyarPanahi/Mistral-7B-Instruct-v0.3-GGUF/resolve/main/Mistral-7B-Instruct-v0.3.Q4_K_M.gguf?download=true",
] {
let temp_dir = temp_dir().join("download_llm");
if !temp_dir.exists() {
fs::create_dir(&temp_dir).await.unwrap();
}
let file_name = "llm_model.gguf";
let cancel_token = CancellationToken::new();
let token = cancel_token.clone();
tokio::spawn(async move {
tokio::time::sleep(tokio::time::Duration::from_secs(120)).await;
token.cancel();
});
let download_file = download_model(
url,
&temp_dir,
file_name,
Some(Arc::new(|a, b| {
println!("{}/{}", a, b);
})),
Some(cancel_token),
).await.unwrap();
let file_path = temp_dir.join(file_name);
assert_eq!(download_file, file_path);
println!("File path: {:?}", file_path);
assert!(file_path.exists());
std::fs::remove_file(file_path).unwrap();
}
}
}

@@ -258,7 +258,7 @@ impl LocalAIResourceController {
let mut config = OllamaPluginConfig::new(
bin_path,
"ollama_ai_plugin".to_string(),
"af_ollama_plugin".to_string(),
llm_setting.chat_model_name.clone(),
llm_setting.embedding_model_name.clone(),
Some(llm_setting.ollama_server_url.clone()),

@@ -101,18 +101,18 @@ pub(crate) fn ollama_plugin_path() -> std::path::PathBuf {
// Use LOCALAPPDATA for a user-specific installation path on Windows.
let local_appdata =
std::env::var("LOCALAPPDATA").unwrap_or_else(|_| "C:\\Program Files".to_string());
std::path::PathBuf::from(local_appdata).join("Programs\\appflowy_plugin\\ollama_ai_plugin.exe")
std::path::PathBuf::from(local_appdata).join("Programs\\appflowy_plugin\\af_ollama_plugin.exe")
}
#[cfg(target_os = "macos")]
{
let offline_app = "ollama_ai_plugin";
let offline_app = "af_ollama_plugin";
std::path::PathBuf::from(format!("/usr/local/bin/{}", offline_app))
}
#[cfg(target_os = "linux")]
{
let offline_app = "ollama_ai_plugin";
let offline_app = "af_ollama_plugin";
std::path::PathBuf::from(format!("/usr/local/bin/{}", offline_app))
}
}
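As a minimal usage sketch (not from this commit; the wrapper name is hypothetical, ollama_plugin_path is the function changed in the hunk above):

// Sketch only: report whether the renamed binary exists at the resolved
// install location (/usr/local/bin/af_ollama_plugin on macOS and Linux,
// %LOCALAPPDATA%\Programs\appflowy_plugin\af_ollama_plugin.exe on Windows).
pub(crate) fn ollama_plugin_installed() -> bool {
    ollama_plugin_path().exists()
}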
@@ -124,7 +124,7 @@ pub(crate) fn ollama_plugin_command_available() -> bool {
use std::os::windows::process::CommandExt;
const CREATE_NO_WINDOW: u32 = 0x08000000;
let output = Command::new("cmd")
.args(&["/C", "where", "ollama_ai_plugin"])
.args(&["/C", "where", "af_ollama_plugin"])
.creation_flags(CREATE_NO_WINDOW)
.output();
if let Ok(output) = output {
@@ -135,7 +135,7 @@ pub(crate) fn ollama_plugin_command_available() -> bool {
// 2. Fallback: Check registry PATH for the executable
let path_dirs = get_windows_path_dirs();
let plugin_exe = "ollama_ai_plugin.exe"; // Adjust name if needed
let plugin_exe = "af_ollama_plugin.exe"; // Adjust name if needed
path_dirs.iter().any(|dir| {
let full_path = std::path::Path::new(dir).join(plugin_exe);
@@ -147,7 +147,7 @@ pub(crate) fn ollama_plugin_command_available() -> bool {
false
} else {
let output = Command::new("command")
.args(["-v", "ollama_ai_plugin"])
.args(["-v", "af_ollama_plugin"])
.output();
match output {
Ok(o) => !o.stdout.is_empty(),
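Taken together, a caller could combine the PATH probe above with the fixed install location; a hedged sketch (the wrapper name is invented, the two functions it calls are the ones touched in this file):

// Sketch only: consider the plugin usable if the renamed binary is either
// resolvable on PATH or present at the well-known install path.
pub(crate) fn local_ai_plugin_ready() -> bool {
    ollama_plugin_command_available() || ollama_plugin_path().exists()
}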