mirror of https://github.com/AppFlowy-IO/AppFlowy.git
synced 2025-08-15 04:11:16 +00:00

chore: clippy

parent 6ba7f93f69
commit 83c53188e3
frontend/rust-lib/Cargo.lock (generated; 4 lines changed)
@@ -198,7 +198,7 @@ dependencies = [
 [[package]]
 name = "appflowy-local-ai"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=8657a3ce64948c672f548ca0dd9f0257db9c7156#8657a3ce64948c672f548ca0dd9f0257db9c7156"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=a76364694d696767488c8b12e210eefa58453c89#a76364694d696767488c8b12e210eefa58453c89"
 dependencies = [
  "anyhow",
  "appflowy-plugin",
@@ -218,7 +218,7 @@ dependencies = [
 [[package]]
 name = "appflowy-plugin"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=8657a3ce64948c672f548ca0dd9f0257db9c7156#8657a3ce64948c672f548ca0dd9f0257db9c7156"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=a76364694d696767488c8b12e210eefa58453c89#a76364694d696767488c8b12e210eefa58453c89"
 dependencies = [
  "anyhow",
  "cfg-if",
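Both lockfile entries move in lockstep: appflowy-local-ai and appflowy-plugin resolve from the same AppFlowy-LocalAI repository, so one pinned rev covers both. These lines are generated; presumably cargo refreshed them after the rev was bumped in the manifest below.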
frontend/rust-lib/Cargo.toml

@@ -152,5 +152,5 @@ collab-importer = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFl
 # To update the commit ID, run:
 # scripts/tool/update_local_ai_rev.sh new_rev_id
 # ⚠️⚠️⚠️️
-appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "8657a3ce64948c672f548ca0dd9f0257db9c7156" }
-appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "8657a3ce64948c672f548ca0dd9f0257db9c7156" }
+appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "a76364694d696767488c8b12e210eefa58453c89" }
+appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "a76364694d696767488c8b12e210eefa58453c89" }
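Going by the comment preserved above, this bump is meant to come from the helper script rather than a hand edit; assuming the script takes the new commit ID as its only argument, the invocation that would yield exactly this change is:

  scripts/tool/update_local_ai_rev.sh a76364694d696767488c8b12e210eefa58453c89

The remaining hunks leave the manifests and touch the Rust sources of the flowy-ai crate.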
@@ -13,6 +13,7 @@ use futures::Sink;
 use lib_infra::async_trait::async_trait;
 use std::collections::HashMap;

+use crate::local_ai::watch::is_plugin_ready;
 use crate::stream_message::StreamMessage;
 use appflowy_local_ai::ollama_plugin::OllamaAIPlugin;
 use arc_swap::ArcSwapOption;
@@ -92,7 +93,7 @@ impl LocalAIController {
       if let Ok(workspace_id) = cloned_user_service.workspace_id() {
         let key = local_ai_enabled_key(&workspace_id);
         info!("[AI Plugin] state: {:?}", state);
-        let ready = cloned_llm_res.is_plugin_ready();
+        let ready = is_plugin_ready();
         let lack_of_resource = cloned_llm_res.get_lack_of_resource().await;

         let new_state = RunningStatePB::from(state);
@@ -254,7 +255,7 @@ impl LocalAIController {
   pub async fn get_local_ai_state(&self) -> LocalAIPB {
     let start = std::time::Instant::now();
     let enabled = self.is_enabled();
-    let is_app_downloaded = self.resource.is_plugin_ready();
+    let is_app_downloaded = is_plugin_ready();
     let state = self.ai_plugin.get_plugin_running_state();
     let lack_of_resource = self.resource.get_lack_of_resource().await;
     let elapsed = start.elapsed();
@@ -5,9 +5,7 @@ use flowy_error::{ErrorCode, FlowyError, FlowyResult};
 use lib_infra::async_trait::async_trait;

 use crate::entities::LackOfAIResourcePB;
-use crate::local_ai::watch::{
-  is_plugin_ready, ollama_plugin_command_available, ollama_plugin_path,
-};
+use crate::local_ai::watch::{is_plugin_ready, ollama_plugin_path};
 #[cfg(target_os = "macos")]
 use crate::local_ai::watch::{watch_offline_app, WatchContext};
 use crate::notification::{
@@ -125,10 +123,6 @@ impl LocalAIResourceController {
     }
   }

-  pub fn is_plugin_ready(&self) -> bool {
-    ollama_plugin_path().exists() || ollama_plugin_command_available()
-  }
-
   pub async fn get_plugin_download_link(&self) -> FlowyResult<String> {
     let ai_config = self.get_local_ai_configuration().await?;
     Ok(ai_config.plugin.url)
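The deleted method is not lost behavior: per the import hunks above, an is_plugin_ready free function already lives in crate::local_ai::watch, and every call site in this commit switches to it. A minimal sketch of that function, inferred from the deleted method body rather than copied from upstream (both helpers it calls are defined in watch):

  // Sketch inferred from this diff, not the verbatim upstream source:
  // watch.rs already defines ollama_plugin_path() and
  // ollama_plugin_command_available(), so the free function only needs to
  // combine them exactly as the removed method did.
  pub fn is_plugin_ready() -> bool {
    // Ready when the plugin binary exists on disk, or when the
    // ollama_ai_plugin command resolves on the user's PATH.
    ollama_plugin_path().exists() || ollama_plugin_command_available()
  }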
@@ -165,7 +159,7 @@ impl LocalAIResourceController {
   pub async fn calculate_pending_resources(&self) -> FlowyResult<Vec<PendingResource>> {
     let mut resources = vec![];
     let app_path = ollama_plugin_path();
-    if !app_path.exists() && !ollama_plugin_command_available() {
+    if !is_plugin_ready() {
       trace!("[LLM Resource] offline app not found: {:?}", app_path);
       resources.push(PendingResource::PluginExecutableNotReady);
       return Ok(resources);
@@ -139,7 +139,7 @@ pub(crate) fn ollama_plugin_command_available() -> bool {
     false
   } else {
     let output = Command::new("command")
-      .args(&["-v", "ollama_ai_plugin"])
+      .args(["-v", "ollama_ai_plugin"])
      .output();
     match output {
       Ok(o) => !o.stdout.is_empty(),
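This last hunk is the literal clippy fix the commit is named for: Command::args is generic over IntoIterator<Item: AsRef<OsStr>>, and arrays iterate by value since the 2021 edition, so borrowing the array is needless and clippy flags it (most likely via needless_borrows_for_generic_args). A self-contained illustration, reusing the plugin name from the diff:

  use std::process::Command;

  fn main() {
    // Flagged: &[&str; 2] compiles only via the AsRef blanket impl for
    // references, and the borrow buys nothing.
    let _ = Command::new("command").args(&["-v", "ollama_ai_plugin"]).output();

    // Lint-clean: pass the array itself.
    let _ = Command::new("command").args(["-v", "ollama_ai_plugin"]).output();
  }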