Mirror of https://github.com/AppFlowy-IO/AppFlowy.git (synced 2026-01-06 12:21:48 +00:00)
Merge pull request #7527 from AppFlowy-IO/fix_windows_terminal
Fix windows terminal
Commit e4e75acdac
@@ -39,8 +39,8 @@ class AIPromptInputBloc extends Bloc<AIPromptInputEvent, AIPromptInputState> {
       (event, emit) {
         event.when(
           updateAIState: (localAIState) {
-            AiType aiType = localAIState.enabled ? AiType.local : AiType.cloud;
-            bool supportChatWithFile =
+            final aiType = localAIState.enabled ? AiType.local : AiType.cloud;
+            final supportChatWithFile =
                 aiType.isLocal && localAIState.state == RunningStatePB.Running;

             // If local ai is enabled, user can only send messages when the AI is running
@@ -48,11 +48,6 @@ class AIPromptInputBloc extends Bloc<AIPromptInputEvent, AIPromptInputState> {
                 ? localAIState.state == RunningStatePB.Running
                 : true;

-            if (localAIState.hasLackOfResource()) {
-              aiType = AiType.cloud;
-              supportChatWithFile = false;
-            }
-
             var hintText = aiType.isLocal
                 ? LocaleKeys.chat_inputLocalAIMessageHint.tr()
                 : LocaleKeys.chat_inputMessageHint.tr();
@@ -25,7 +25,7 @@ class PluginStateIndicator extends StatelessWidget {
       builder: (context, state) {
         return state.action.when(
           unknown: () => const SizedBox.shrink(),
-          readToRun: () => const SizedBox.shrink(),
+          readToRun: () => const _PrepareRunning(),
           initializingPlugin: () => const InitLocalAIIndicator(),
           running: () => const _LocalAIRunning(),
           restartPlugin: () => const _RestartPluginButton(),
@@ -37,6 +37,19 @@ class PluginStateIndicator extends StatelessWidget {
   }
 }

+class _PrepareRunning extends StatelessWidget {
+  const _PrepareRunning();
+
+  @override
+  Widget build(BuildContext context) {
+    return Row(
+      children: [
+        FlowyText(LocaleKeys.settings_aiPage_keys_localAIStart.tr()),
+      ],
+    );
+  }
+}
+
 class _RestartPluginButton extends StatelessWidget {
   const _RestartPluginButton();

@@ -846,7 +846,7 @@
       "downloadAIModelButton": "Download",
       "downloadingModel": "Downloading",
       "localAILoaded": "Local AI Model successfully added and ready to use",
-      "localAIStart": "Local AI Chat is starting...",
+      "localAIStart": "Local AI is starting...",
       "localAILoading": "Local AI Chat Model is loading...",
       "localAIStopped": "Local AI stopped",
       "localAIRunning": "Local AI is running",
frontend/rust-lib/Cargo.lock (generated, 4 changed lines)
@@ -198,7 +198,7 @@ dependencies = [
 [[package]]
 name = "appflowy-local-ai"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=cf0b5e77d3bbcecbcd9cbed86476658b477399e6#cf0b5e77d3bbcecbcd9cbed86476658b477399e6"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=f4000af60ac18ad36a5dfb3fdc72c6d3ae967127#f4000af60ac18ad36a5dfb3fdc72c6d3ae967127"
 dependencies = [
  "anyhow",
  "appflowy-plugin",
@@ -218,7 +218,7 @@ dependencies = [
 [[package]]
 name = "appflowy-plugin"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=cf0b5e77d3bbcecbcd9cbed86476658b477399e6#cf0b5e77d3bbcecbcd9cbed86476658b477399e6"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=f4000af60ac18ad36a5dfb3fdc72c6d3ae967127#f4000af60ac18ad36a5dfb3fdc72c6d3ae967127"
 dependencies = [
  "anyhow",
  "cfg-if",
@@ -152,5 +152,5 @@ collab-importer = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFl
 # To update the commit ID, run:
 # scripts/tool/update_local_ai_rev.sh new_rev_id
 # ⚠️⚠️⚠️️
-appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "cf0b5e77d3bbcecbcd9cbed86476658b477399e6" }
-appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "cf0b5e77d3bbcecbcd9cbed86476658b477399e6" }
+appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "f4000af60ac18ad36a5dfb3fdc72c6d3ae967127" }
+appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "f4000af60ac18ad36a5dfb3fdc72c6d3ae967127" }
@@ -36,7 +36,7 @@ tokio-stream = "0.1.15"
 tokio-util = { workspace = true, features = ["full"] }
 appflowy-local-ai = { version = "0.1.0", features = ["verbose"] }
 appflowy-plugin = { version = "0.1.0" }
-reqwest = "0.11.27"
+reqwest = { version = "0.11.27", features = ["json"] }
 sha2 = "0.10.7"
 base64 = "0.21.5"
 futures-util = "0.3.30"
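
Note on the reqwest change above: the added "json" feature is what exposes Response::json::<T>() for deserializing HTTP responses, which the resource checks later in this diff use against Ollama's /api/tags endpoint. A minimal sketch of the call it unlocks (illustrative only; it assumes serde_json is also available as a dependency):

use serde_json::Value;

// Without reqwest's "json" feature, the .json() call below does not exist.
async fn get_json(url: &str) -> Result<Value, reqwest::Error> {
  reqwest::Client::new().get(url).send().await?.json::<Value>().await
}
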
@@ -16,6 +16,7 @@ use std::collections::HashMap;
 use crate::local_ai::watch::is_plugin_ready;
 use crate::stream_message::StreamMessage;
 use appflowy_local_ai::ollama_plugin::OllamaAIPlugin;
+use appflowy_plugin::core::plugin::RunningState;
 use arc_swap::ArcSwapOption;
 use futures_util::SinkExt;
 use lib_infra::util::get_operating_system;
@@ -97,11 +98,16 @@ impl LocalAIController {
       if let Ok(workspace_id) = cloned_user_service.workspace_id() {
         let key = local_ai_enabled_key(&workspace_id);
         info!("[AI Plugin] state: {:?}", state);
-        let ready = is_plugin_ready();
-        let lack_of_resource = cloned_llm_res.get_lack_of_resource().await;

         let new_state = RunningStatePB::from(state);
         let enabled = cloned_store_preferences.get_bool(&key).unwrap_or(true);
+        let mut ready = false;
+        let mut lack_of_resource = None;
+        if enabled {
+          ready = is_plugin_ready();
+          lack_of_resource = cloned_llm_res.get_lack_of_resource().await;
+        }

         chat_notification_builder(
           APPFLOWY_AI_NOTIFICATION_KEY,
           ChatNotification::UpdateLocalAIState,
@@ -192,7 +198,7 @@ impl LocalAIController {
       .workspace_id()
       .map(|workspace_id| local_ai_enabled_key(&workspace_id))
     {
-      self.store_preferences.get_bool(&key).unwrap_or(true)
+      self.store_preferences.get_bool(&key).unwrap_or(false)
     } else {
       false
     }
@@ -259,9 +265,14 @@ impl LocalAIController {
   pub async fn get_local_ai_state(&self) -> LocalAIPB {
     let start = std::time::Instant::now();
     let enabled = self.is_enabled();
-    let is_app_downloaded = is_plugin_ready();
-    let state = self.ai_plugin.get_plugin_running_state();
-    let lack_of_resource = self.resource.get_lack_of_resource().await;
+    let mut is_plugin_executable_ready = false;
+    let mut state = RunningState::ReadyToConnect;
+    let mut lack_of_resource = None;
+    if enabled {
+      is_plugin_executable_ready = is_plugin_ready();
+      state = self.ai_plugin.get_plugin_running_state();
+      lack_of_resource = self.resource.get_lack_of_resource().await;
+    }
     let elapsed = start.elapsed();
     debug!(
       "[AI Plugin] get local ai state, elapsed: {:?}, thread: {:?}",
@@ -270,7 +281,7 @@
     );
     LocalAIPB {
       enabled,
-      is_plugin_executable_ready: is_app_downloaded,
+      is_plugin_executable_ready,
       state: RunningStatePB::from(state),
       lack_of_resource,
     }
@@ -462,11 +473,8 @@ async fn initialize_ai_plugin(
   ret: Option<tokio::sync::oneshot::Sender<()>>,
 ) -> FlowyResult<()> {
   let plugin = plugin.clone();
-  if plugin.get_plugin_running_state().is_loading() {
-    return Ok(());
-  }

   let lack_of_resource = llm_resource.get_lack_of_resource().await;

   chat_notification_builder(
     APPFLOWY_AI_NOTIFICATION_KEY,
     ChatNotification::UpdateLocalAIState,
@@ -494,6 +502,13 @@ async fn initialize_ai_plugin(
       })
       .send();

+    if let Err(err) = plugin.destroy_plugin().await {
+      error!(
+        "[AI Plugin] failed to destroy plugin when lack of resource: {:?}",
+        err
+      );
+    }
+
     return Ok(());
   }

@@ -14,11 +14,22 @@ use crate::notification::{
 use appflowy_local_ai::ollama_plugin::OllamaPluginConfig;
 use lib_infra::util::{get_operating_system, OperatingSystem};
 use reqwest::Client;
+use serde::Deserialize;
 use std::path::PathBuf;
 use std::sync::Arc;
 use std::time::Duration;
 use tracing::{error, info, instrument, trace};

+#[derive(Debug, Deserialize)]
+struct TagsResponse {
+  models: Vec<ModelEntry>,
+}
+
+#[derive(Debug, Deserialize)]
+struct ModelEntry {
+  name: String,
+}
+
 #[async_trait]
 pub trait LLMResourceService: Send + Sync + 'static {
   /// Get local ai configuration from remote server
@@ -155,7 +166,6 @@ impl LocalAIResourceController {
     resources.pop().map(|r| r.desc())
   }

-  /// Returns true when all resources are downloaded and ready to use.
   pub async fn calculate_pending_resources(&self) -> FlowyResult<Vec<PendingResource>> {
     let mut resources = vec![];
     let app_path = ollama_plugin_path();
@@ -184,39 +194,40 @@ impl LocalAIResourceController {
       },
     }

-    let required_models = vec![
-      setting.chat_model_name,
-      setting.embedding_model_name,
-      // Add any additional required models here.
-    ];
-    match tokio::process::Command::new("ollama")
-      .arg("list")
-      .output()
-      .await
-    {
-      Ok(output) if output.status.success() => {
-        let stdout = String::from_utf8_lossy(&output.stdout);
-        for model in &required_models {
-          if !stdout.contains(model.as_str()) {
+    let required_models = vec![setting.chat_model_name, setting.embedding_model_name];
+
+    // Query the /api/tags endpoint to get a structured list of locally available models.
+    let tags_url = format!("{}/api/tags", setting.ollama_server_url);
+
+    match client.get(&tags_url).send().await {
+      Ok(resp) if resp.status().is_success() => {
+        let tags: TagsResponse = resp.json().await.map_err(|e| {
+          log::error!(
+            "[LLM Resource] Failed to parse /api/tags JSON response: {:?}",
+            e
+          );
+          e
+        })?;
+        // Check each required model is present in the response.
+        for required in &required_models {
+          if !tags.models.iter().any(|m| m.name.contains(required)) {
             log::trace!(
-              "[LLM Resource] required model '{}' not found in ollama list",
-              model
+              "[LLM Resource] required model '{}' not found in API response",
+              required
             );
-            resources.push(PendingResource::MissingModel(model.clone()));
+            resources.push(PendingResource::MissingModel(required.clone()));
+            // Optionally, you could continue checking all models rather than returning early.
             return Ok(resources);
           }
         }
       },
-      Ok(output) => {
+      _ => {
         error!(
-          "[LLM Resource] 'ollama list' command failed with status: {:?}",
-          output.status
+          "[LLM Resource] Failed to fetch models from {} (GET /api/tags)",
+          setting.ollama_server_url
         );
         resources.push(PendingResource::OllamaServerNotReady);
       },
-      Err(e) => {
-        error!("[LLM Resource] failed to execute 'ollama list': {:?}", e);
-        resources.push(PendingResource::OllamaServerNotReady);
-        return Ok(resources);
-      },
     }
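
The hunk above swaps a spawned "ollama list" subprocess for an HTTP GET against Ollama's /api/tags endpoint. Besides returning structured JSON instead of text to grep, this avoids launching an external command from the GUI process, which is what could flash a console window on Windows. A rough, self-contained sketch of the same availability check, with the client, server URL, and required model names passed in explicitly for illustration (not the exact signature used in the codebase):

use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct TagsResponse {
  models: Vec<ModelEntry>,
}

#[derive(Debug, Deserialize)]
struct ModelEntry {
  name: String,
}

/// Returns the required models that the Ollama server does not report as locally
/// available; an Err means the server could not be reached or its reply not parsed.
async fn missing_models(
  client: &reqwest::Client,
  server_url: &str,
  required: &[String],
) -> Result<Vec<String>, reqwest::Error> {
  let tags: TagsResponse = client
    .get(format!("{}/api/tags", server_url))
    .send()
    .await?
    .error_for_status()?
    .json()
    .await?;

  // Ollama reports names like "llama3.1:latest", so a substring match keeps the
  // check tolerant of tag suffixes, mirroring the contains() check in the diff.
  Ok(
    required
      .iter()
      .filter(|r| !tags.models.iter().any(|m| m.name.contains(r.as_str())))
      .cloned()
      .collect(),
  )
}
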
@@ -226,19 +237,15 @@ impl LocalAIResourceController {
   #[instrument(level = "info", skip_all)]
   pub async fn get_plugin_config(&self, rag_enabled: bool) -> FlowyResult<OllamaPluginConfig> {
     if !self.is_resource_ready().await {
-      return Err(FlowyError::local_ai().with_context("Local AI resources are not ready"));
+      return Err(FlowyError::new(
+        ErrorCode::AppFlowyLAINotReady,
+        "AppFlowyLAI not found",
+      ));
     }

     let llm_setting = self.get_llm_setting();
     let bin_path = match get_operating_system() {
-      OperatingSystem::MacOS | OperatingSystem::Windows => {
-        if !is_plugin_ready() {
-          return Err(FlowyError::new(
-            ErrorCode::AppFlowyLAINotReady,
-            "AppFlowyLAI not found",
-          ));
-        }
-
+      OperatingSystem::MacOS | OperatingSystem::Windows | OperatingSystem::Linux => {
         ollama_plugin_path()
       },
       _ => {
@@ -121,9 +121,11 @@ pub(crate) fn ollama_plugin_command_available() -> bool {
-  if cfg!(windows) {
+  #[cfg(windows)]
+  {
     // 1. Try "where" command first
+    use std::os::windows::process::CommandExt;
+    const CREATE_NO_WINDOW: u32 = 0x08000000;
     let output = Command::new("cmd")
-      .args(["/C", "where", "ollama_ai_plugin"])
+      .args(&["/C", "where", "ollama_ai_plugin"])
+      .creation_flags(CREATE_NO_WINDOW)
       .output();
     if let Ok(output) = output {
       if !output.stdout.is_empty() {
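
The last hunk is the Windows terminal fix itself: when a GUI process on Windows spawns a console program through std::process::Command, the child is given a new console window by default, so the "where" probe used to flash a terminal. Passing the CREATE_NO_WINDOW creation flag suppresses that window. A small standalone sketch of the pattern (the probed command name is just an example):

#[cfg(windows)]
fn command_available(name: &str) -> bool {
  use std::os::windows::process::CommandExt;
  use std::process::Command;

  // Win32 process-creation flag: do not allocate a console window for the child.
  const CREATE_NO_WINDOW: u32 = 0x08000000;

  Command::new("cmd")
    .args(["/C", "where", name])
    .creation_flags(CREATE_NO_WINDOW)
    .output()
    .map(|output| output.status.success() && !output.stdout.is_empty())
    .unwrap_or(false)
}

The diff applies the same flag inside ollama_plugin_command_available; any other Command the desktop app spawns on Windows would need the same treatment to stay hidden.
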