From bd06e1d5598f07c53e2a2c09a161180444ccce16 Mon Sep 17 00:00:00 2001
From: Nathan
Date: Tue, 11 Mar 2025 09:32:20 +0800
Subject: [PATCH] chore: clippy

---
 frontend/rust-lib/flowy-ai/src/local_ai/resource.rs | 10 ++++------
 1 file changed, 4 insertions(+), 6 deletions(-)

diff --git a/frontend/rust-lib/flowy-ai/src/local_ai/resource.rs b/frontend/rust-lib/flowy-ai/src/local_ai/resource.rs
index 2bcdd8c65f..c0dd3c2d38 100644
--- a/frontend/rust-lib/flowy-ai/src/local_ai/resource.rs
+++ b/frontend/rust-lib/flowy-ai/src/local_ai/resource.rs
@@ -163,12 +163,10 @@ impl LocalAIResourceController {
   pub async fn calculate_pending_resources(&self) -> FlowyResult<Vec<PendingResource>> {
     let mut resources = vec![];
     let app_path = ollama_plugin_path();
-    if !app_path.exists() {
-      if !ollama_plugin_command_available() {
-        trace!("[LLM Resource] offline app not found: {:?}", app_path);
-        resources.push(PendingResource::PluginExecutableNotReady);
-        return Ok(resources);
-      }
+    if !app_path.exists() && !ollama_plugin_command_available() {
+      trace!("[LLM Resource] offline app not found: {:?}", app_path);
+      resources.push(PendingResource::PluginExecutableNotReady);
+      return Ok(resources);
     }
     let setting = self.get_llm_setting();