mirror of https://github.com/AppFlowy-IO/AppFlowy.git, synced 2025-11-19 20:07:49 +00:00

commit 7c3dd5375d (parent 81532d014e)

chore: switch to appflowy ai if local ai is not enable (#5839)

* chore: switch to appflowy ai if local ai is not enable
* chore: clippy
* chore: fix enable bug
* chore: fix compile
* chore: clippy
@@ -13,9 +13,9 @@ class ChatInputBloc extends Bloc<ChatInputEvent, ChatInputState> {
       : listener = LocalLLMListener(),
         super(const ChatInputState(aiType: _AppFlowyAI())) {
     listener.start(
-      chatStateCallback: (aiState) {
+      stateCallback: (pluginState) {
         if (!isClosed) {
-          add(ChatInputEvent.updateState(aiState));
+          add(ChatInputEvent.updatePluginState(pluginState));
         }
       },
     );
@@ -37,12 +37,12 @@ class ChatInputBloc extends Bloc<ChatInputEvent, ChatInputState> {
   ) async {
     await event.when(
       started: () async {
-        final result = await ChatEventGetLocalAIChatState().send();
+        final result = await ChatEventGetLocalAIPluginState().send();
         result.fold(
-          (aiState) {
+          (pluginState) {
             if (!isClosed) {
               add(
-                ChatInputEvent.updateState(aiState),
+                ChatInputEvent.updatePluginState(pluginState),
               );
             }
           },
@@ -51,8 +51,8 @@ class ChatInputBloc extends Bloc<ChatInputEvent, ChatInputState> {
           },
         );
       },
-      updateState: (aiState) {
-        if (aiState.pluginState.state == RunningStatePB.Running) {
+      updatePluginState: (pluginState) {
+        if (pluginState.state == RunningStatePB.Running) {
           emit(const ChatInputState(aiType: _LocalAI()));
         } else {
           emit(const ChatInputState(aiType: _AppFlowyAI()));
@@ -65,8 +65,9 @@ class ChatInputBloc extends Bloc<ChatInputEvent, ChatInputState> {
 @freezed
 class ChatInputEvent with _$ChatInputEvent {
   const factory ChatInputEvent.started() = _Started;
-  const factory ChatInputEvent.updateState(LocalAIChatPB aiState) =
-      _UpdateAIState;
+  const factory ChatInputEvent.updatePluginState(
+    LocalAIPluginStatePB pluginState,
+  ) = _UpdatePluginState;
 }
 
 @freezed
@@ -223,20 +223,20 @@ class OpenOrDownloadOfflineAIApp extends StatelessWidget {
             ],
           ),
         ),
-        const SizedBox(
-          height: 6,
-        ), // Replaced VSpace with SizedBox for simplicity
-        SizedBox(
-          height: 30,
-          child: FlowyButton(
-            useIntrinsicWidth: true,
-            margin: const EdgeInsets.symmetric(horizontal: 12),
-            text: FlowyText(
-              LocaleKeys.settings_aiPage_keys_activeOfflineAI.tr(),
-            ),
-            onTap: onRetry,
-          ),
-        ),
+        // const SizedBox(
+        //   height: 6,
+        // ), // Replaced VSpace with SizedBox for simplicity
+        // SizedBox(
+        //   height: 30,
+        //   child: FlowyButton(
+        //     useIntrinsicWidth: true,
+        //     margin: const EdgeInsets.symmetric(horizontal: 12),
+        //     text: FlowyText(
+        //       LocaleKeys.settings_aiPage_keys_activeOfflineAI.tr(),
+        //     ),
+        //     onTap: onRetry,
+        //   ),
+        // ),
       ],
     );
   },
@@ -7,6 +7,7 @@ import 'package:appflowy/workspace/presentation/settings/pages/setting_ai_view/m
 import 'package:appflowy/workspace/presentation/settings/widgets/setting_appflowy_cloud.dart';
 import 'package:flowy_infra/theme_extension.dart';
 import 'package:flowy_infra_ui/widget/spacing.dart';
+import 'package:flutter/foundation.dart';
 import 'package:flutter/material.dart';
 
 import 'package:appflowy/generated/locale_keys.g.dart';
@@ -52,12 +53,11 @@ class SettingsAIView extends StatelessWidget {
     ];
 
     children.add(const _AISearchToggle(value: false));
-    // TODO(nathan): enable local ai
-    // children.add(
-    //   _LocalAIOnBoarding(
-    //     workspaceId: userProfile.workspaceId,
-    //   ),
-    // );
+    children.add(
+      _LocalAIOnBoarding(
+        workspaceId: userProfile.workspaceId,
+      ),
+    );
 
     return SettingsBody(
       title: LocaleKeys.settings_aiPage_title.tr(),
@@ -130,7 +130,7 @@ class _LocalAIOnBoarding extends StatelessWidget {
       child: BlocBuilder<LocalAIOnBoardingBloc, LocalAIOnBoardingState>(
         builder: (context, state) {
           // Show the local AI settings if the user has purchased the AI Local plan
-          if (state.isPurchaseAILocal) {
+          if (kDebugMode || state.isPurchaseAILocal) {
             return const LocalAISetting();
           } else {
             // Show the upgrade to AI Local plan button if the user has not purchased the AI Local plan
@@ -1,3 +1,5 @@
+import 'dart:io';
+
 import 'package:flutter/material.dart';
 
 import 'package:appflowy/util/int64_extension.dart';
@@ -212,24 +214,23 @@ class _SettingsBillingViewState extends State<SettingsBillingView> {
 
               // Currently, the AI Local tile is only available on macOS
               // TODO(nathan): enable windows and linux
-              // TODO(nathan): enable local ai
-              // if (Platform.isMacOS)
-              //   _AITile(
-              //     plan: SubscriptionPlanPB.AiLocal,
-              //     label: LocaleKeys
-              //         .settings_billingPage_addons_aiOnDevice_label
-              //         .tr(),
-              //     description: LocaleKeys
-              //         .settings_billingPage_addons_aiOnDevice_description,
-              //     activeDescription: LocaleKeys
-              //         .settings_billingPage_addons_aiOnDevice_activeDescription,
-              //     canceledDescription: LocaleKeys
-              //         .settings_billingPage_addons_aiOnDevice_canceledDescription,
-              //     subscriptionInfo:
-              //         state.subscriptionInfo.addOns.firstWhereOrNull(
-              //       (a) => a.type == WorkspaceAddOnPBType.AddOnAiLocal,
-              //     ),
-              //   ),
+              if (Platform.isMacOS)
+                _AITile(
+                  plan: SubscriptionPlanPB.AiLocal,
+                  label: LocaleKeys
+                      .settings_billingPage_addons_aiOnDevice_label
+                      .tr(),
+                  description: LocaleKeys
+                      .settings_billingPage_addons_aiOnDevice_description,
+                  activeDescription: LocaleKeys
+                      .settings_billingPage_addons_aiOnDevice_activeDescription,
+                  canceledDescription: LocaleKeys
+                      .settings_billingPage_addons_aiOnDevice_canceledDescription,
+                  subscriptionInfo:
+                      state.subscriptionInfo.addOns.firstWhereOrNull(
+                    (a) => a.type == WorkspaceAddOnPBType.AddOnAiLocal,
+                  ),
+                ),
             ],
           ),
         ],
@@ -1,3 +1,5 @@
+import 'dart:io';
+
 import 'package:flutter/material.dart';
 
 import 'package:appflowy/generated/flowy_svgs.g.dart';
@@ -139,44 +141,43 @@ class _SettingsPlanViewState extends State<SettingsPlanView> {
 
             // Currently, the AI Local tile is only available on macOS
             // TODO(nathan): enable windows and linux
-            // TODO(nathan): enable local ai
-            // if (Platform.isMacOS)
-            //   Flexible(
-            //     child: _AddOnBox(
-            //       title: LocaleKeys
-            //           .settings_planPage_planUsage_addons_aiOnDevice_title
-            //           .tr(),
-            //       description: LocaleKeys
-            //           .settings_planPage_planUsage_addons_aiOnDevice_description
-            //           .tr(),
-            //       price: LocaleKeys
-            //           .settings_planPage_planUsage_addons_aiOnDevice_price
-            //           .tr(
-            //         args: [
-            //           SubscriptionPlanPB.AiLocal.priceAnnualBilling,
-            //         ],
-            //       ),
-            //       priceInfo: LocaleKeys
-            //           .settings_planPage_planUsage_addons_aiOnDevice_priceInfo
-            //           .tr(),
-            //       billingInfo: LocaleKeys
-            //           .settings_planPage_planUsage_addons_aiOnDevice_billingInfo
-            //           .tr(
-            //         args: [
-            //           SubscriptionPlanPB.AiLocal.priceMonthBilling,
-            //         ],
-            //       ),
-            //       buttonText: state.subscriptionInfo.hasAIOnDevice
-            //           ? LocaleKeys
-            //               .settings_planPage_planUsage_addons_activeLabel
-            //               .tr()
-            //           : LocaleKeys
-            //               .settings_planPage_planUsage_addons_addLabel
-            //               .tr(),
-            //       isActive: state.subscriptionInfo.hasAIOnDevice,
-            //       plan: SubscriptionPlanPB.AiLocal,
-            //     ),
-            //   ),
+            if (Platform.isMacOS)
+              Flexible(
+                child: _AddOnBox(
+                  title: LocaleKeys
+                      .settings_planPage_planUsage_addons_aiOnDevice_title
+                      .tr(),
+                  description: LocaleKeys
+                      .settings_planPage_planUsage_addons_aiOnDevice_description
+                      .tr(),
+                  price: LocaleKeys
+                      .settings_planPage_planUsage_addons_aiOnDevice_price
+                      .tr(
+                    args: [
+                      SubscriptionPlanPB.AiLocal.priceAnnualBilling,
+                    ],
+                  ),
+                  priceInfo: LocaleKeys
+                      .settings_planPage_planUsage_addons_aiOnDevice_priceInfo
+                      .tr(),
+                  billingInfo: LocaleKeys
+                      .settings_planPage_planUsage_addons_aiOnDevice_billingInfo
+                      .tr(
+                    args: [
+                      SubscriptionPlanPB.AiLocal.priceMonthBilling,
+                    ],
+                  ),
+                  buttonText: state.subscriptionInfo.hasAIOnDevice
+                      ? LocaleKeys
+                          .settings_planPage_planUsage_addons_activeLabel
+                          .tr()
+                      : LocaleKeys
+                          .settings_planPage_planUsage_addons_addLabel
+                          .tr(),
+                  isActive: state.subscriptionInfo.hasAIOnDevice,
+                  plan: SubscriptionPlanPB.AiLocal,
+                ),
+              ),
           ],
         ),
       ],
@@ -14,7 +14,7 @@ use flowy_sqlite::DBConnection;
 use lib_infra::util::timestamp;
 use std::path::PathBuf;
 use std::sync::Arc;
-use tracing::{error, info, trace};
+use tracing::{info, trace};
 
 pub trait ChatUserService: Send + Sync + 'static {
   fn user_id(&self) -> Result<i64, FlowyError>;
@@ -46,12 +46,6 @@ impl ChatManager {
       cloud_service.clone(),
     ));
 
-    if local_ai_controller.can_init_plugin() {
-      if let Err(err) = local_ai_controller.initialize_ai_plugin(None) {
-        error!("[AI Plugin] failed to initialize local ai: {:?}", err);
-      }
-    }
-
     // setup local chat service
     let cloud_service_wm = Arc::new(CloudServiceMiddleware::new(
       user_service.clone(),
@@ -16,8 +16,9 @@ use parking_lot::Mutex;
 use serde::{Deserialize, Serialize};
 use std::ops::Deref;
 use std::sync::Arc;
+use tokio::select;
 use tokio_stream::StreamExt;
-use tracing::{debug, error, info, trace};
+use tracing::{debug, error, info, instrument, trace};
 
 #[derive(Clone, Debug, Serialize, Deserialize)]
 pub struct LLMSetting {
@@ -69,10 +70,11 @@ impl LocalAIController {
     let current_chat_id = Mutex::new(None);
 
     let mut running_state_rx = llm_chat.subscribe_running_state();
-    let offline_ai_ready = llm_res.is_offline_ai_ready();
+    let cloned_llm_res = llm_res.clone();
     tokio::spawn(async move {
       while let Some(state) = running_state_rx.next().await {
         info!("[AI Plugin] state: {:?}", state);
+        let offline_ai_ready = cloned_llm_res.is_offline_app_ready();
         let new_state = RunningStatePB::from(state);
         make_notification(
           APPFLOWY_AI_NOTIFICATION_KEY,
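Note: the spawned task above drains the plugin's running-state stream with `StreamExt::next` and recomputes the offline-ready flag on each state change. A small self-contained sketch of consuming a channel as a stream in the same style; the state enum and the channel wiring here are assumptions for illustration, not the AppFlowy API.

```rust
use tokio_stream::{wrappers::ReceiverStream, StreamExt};

#[derive(Debug, Clone)]
enum RunningState {
    Connecting,
    Running,
    Stopped,
}

#[tokio::main]
async fn main() {
    let (tx, rx) = tokio::sync::mpsc::channel(8);
    // The subscriber only sees a Stream, not the underlying channel.
    let mut running_state_rx = ReceiverStream::new(rx);

    tokio::spawn(async move {
        for state in [RunningState::Connecting, RunningState::Running, RunningState::Stopped] {
            tx.send(state).await.unwrap();
        }
    });

    // next() yields None once the sender is dropped, which ends the loop.
    while let Some(state) = running_state_rx.next().await {
        println!("[AI Plugin] state: {:?}", state);
    }
}
```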
@@ -96,15 +98,37 @@ impl LocalAIController {
     let rag_enabled = this.is_rag_enabled();
     let cloned_llm_chat = this.llm_chat.clone();
     let cloned_llm_res = this.llm_res.clone();
+    let mut offline_ai_watch = this.llm_res.subscribe_offline_app_state();
     tokio::spawn(async move {
-      while rx.recv().await.is_some() {
+      let init_fn = || {
         if let Ok(chat_config) = cloned_llm_res.get_chat_config(rag_enabled) {
-          if let Err(err) = initialize_chat_plugin(&cloned_llm_chat, chat_config, None) {
+          if let Err(err) = initialize_ai_plugin(&cloned_llm_chat, chat_config, None) {
+            error!("[AI Plugin] failed to setup plugin: {:?}", err);
+          }
+        }
+      };
+
+      loop {
+        select! {
+          _ = offline_ai_watch.recv() => {
+            init_fn();
+          },
+          _ = rx.recv() => {
+            init_fn();
+          },
+          else => { break; }
+        }
+      }
+    });
+
+    if this.can_init_plugin() {
+      let result = this.llm_res.get_chat_config(this.is_rag_enabled());
+      if let Ok(chat_config) = result {
+        if let Err(err) = initialize_ai_plugin(&this.llm_chat, chat_config, None) {
           error!("[AI Plugin] failed to setup plugin: {:?}", err);
         }
       }
     }
-    });
 
     this
   }
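Note: the loop above re-runs one initialization closure whenever either the resource-download notification channel or the offline-app disk watcher fires. A minimal, self-contained sketch of that `tokio::select!` pattern; the channel names and payloads are illustrative, not the AppFlowy types.

```rust
use tokio::select;
use tokio::sync::{broadcast, mpsc};

#[tokio::main]
async fn main() {
    let (disk_tx, mut disk_rx) = broadcast::channel::<()>(1); // e.g. "binary appeared on disk"
    let (res_tx, mut res_rx) = mpsc::channel::<()>(1); // e.g. "resource download finished"

    let handle = tokio::spawn(async move {
        let init_fn = || println!("(re)initialize plugin");
        loop {
            select! {
                // broadcast::Receiver::recv returns Err once the sender is gone,
                // which disables this branch.
                Ok(_) = disk_rx.recv() => init_fn(),
                // mpsc::Receiver::recv returns None once the sender is gone.
                Some(_) = res_rx.recv() => init_fn(),
                // `else` runs when every branch is disabled, i.e. both channels closed.
                else => break,
            }
        }
    });

    disk_tx.send(()).unwrap();
    res_tx.send(()).await.unwrap();
    drop(disk_tx);
    drop(res_tx);
    handle.await.unwrap();
}
```

The `else` branch is what lets the background task exit cleanly instead of spinning once both senders have been dropped.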
@@ -112,15 +136,6 @@ impl LocalAIController {
     self.llm_res.refresh_llm_resource().await
   }
 
-  pub fn initialize_ai_plugin(
-    &self,
-    ret: Option<tokio::sync::oneshot::Sender<()>>,
-  ) -> FlowyResult<()> {
-    let chat_config = self.llm_res.get_chat_config(self.is_rag_enabled())?;
-    initialize_chat_plugin(&self.llm_chat, chat_config, ret)?;
-    Ok(())
-  }
-
   /// Returns true if the local AI is enabled and ready to use.
   pub fn can_init_plugin(&self) -> bool {
     self.is_enabled() && self.llm_res.is_resource_ready()
@@ -199,7 +214,10 @@ impl LocalAIController {
     let state = self.llm_res.use_local_llm(llm_id)?;
     // Re-initialize the plugin if the setting is updated and ready to use
     if self.llm_res.is_resource_ready() {
-      self.initialize_ai_plugin(None)?;
+      let chat_config = self.llm_res.get_chat_config(self.is_rag_enabled())?;
+      if let Err(err) = initialize_ai_plugin(&self.llm_chat, chat_config, None) {
+        error!("failed to setup plugin: {:?}", err);
+      }
     }
     Ok(state)
   }
@@ -226,7 +244,7 @@ impl LocalAIController {
   }
 
   pub fn get_chat_plugin_state(&self) -> LocalAIPluginStatePB {
-    let offline_ai_ready = self.llm_res.is_offline_ai_ready();
+    let offline_ai_ready = self.llm_res.is_offline_app_ready();
     let state = self.llm_chat.get_plugin_running_state();
     LocalAIPluginStatePB {
       state: RunningStatePB::from(state),
@@ -237,7 +255,7 @@ impl LocalAIController {
   pub fn restart_chat_plugin(&self) {
     let rag_enabled = self.is_rag_enabled();
     if let Ok(chat_config) = self.llm_res.get_chat_config(rag_enabled) {
-      if let Err(err) = initialize_chat_plugin(&self.llm_chat, chat_config, None) {
+      if let Err(err) = initialize_ai_plugin(&self.llm_chat, chat_config, None) {
         error!("[AI Plugin] failed to setup plugin: {:?}", err);
       }
     }
@@ -268,7 +286,8 @@ impl LocalAIController {
     if enabled {
       let chat_enabled = self
         .store_preferences
-        .get_bool_or_default(APPFLOWY_LOCAL_AI_CHAT_ENABLED);
+        .get_bool(APPFLOWY_LOCAL_AI_CHAT_ENABLED)
+        .unwrap_or(true);
       self.enable_chat_plugin(chat_enabled).await?;
     } else {
       self.enable_chat_plugin(false).await?;
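Note: the lookup above now treats a missing APPFLOWY_LOCAL_AI_CHAT_ENABLED preference as true instead of falling back to the boolean default. A tiny sketch of the difference, using a plain map in place of the real preference store (key name below is illustrative):

```rust
use std::collections::HashMap;

fn main() {
    let prefs: HashMap<&str, bool> = HashMap::new(); // flag never written yet

    // Previous behaviour: an unset flag falls back to bool::default() == false,
    // so local AI chat stays off until the user toggles it once.
    let old = prefs.get("local_ai_chat_enabled").copied().unwrap_or_default();

    // New behaviour: an unset flag is treated as true, so chat is on by default
    // whenever local AI itself is enabled.
    let new = prefs.get("local_ai_chat_enabled").copied().unwrap_or(true);

    assert!(!old);
    assert!(new);
    println!("old default = {old}, new default = {new}");
}
```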
@@ -300,9 +319,11 @@ impl LocalAIController {
   }
 
   async fn enable_chat_plugin(&self, enabled: bool) -> FlowyResult<()> {
+    info!("[AI Plugin] enable chat plugin: {}", enabled);
     if enabled {
       let (tx, rx) = tokio::sync::oneshot::channel();
-      if let Err(err) = self.initialize_ai_plugin(Some(tx)) {
+      let chat_config = self.llm_res.get_chat_config(self.is_rag_enabled())?;
+      if let Err(err) = initialize_ai_plugin(&self.llm_chat, chat_config, Some(tx)) {
         error!("[AI Plugin] failed to initialize local ai: {:?}", err);
       }
       let _ = rx.await;
@@ -313,7 +334,8 @@ impl LocalAIController {
   }
 }
 
-fn initialize_chat_plugin(
+#[instrument(level = "debug", skip_all, err)]
+fn initialize_ai_plugin(
   llm_chat: &Arc<LocalChatLLMChat>,
   mut chat_config: AIPluginConfig,
   ret: Option<tokio::sync::oneshot::Sender<()>>,
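Note: the renamed helper gains a tracing `#[instrument]` attribute: `level = "debug"` sets the span level, `skip_all` keeps the arguments out of the span fields, and `err` records the error whenever the function returns `Err`. A minimal sketch with illustrative names (not the AppFlowy config type):

```rust
use tracing::instrument;

#[derive(Debug)]
struct Config {
    vector_store: Option<String>,
}

#[instrument(level = "debug", skip_all, err)]
fn initialize_plugin(config: &Config) -> Result<(), String> {
    // skip_all keeps the (potentially large) config out of the span fields.
    config
        .vector_store
        .as_ref()
        .map(|_| ())
        .ok_or_else(|| "missing vector store".to_string())
}

fn main() {
    tracing_subscriber::fmt()
        .with_max_level(tracing::Level::DEBUG)
        .init();
    // Emits a debug-level span for the call and records the returned error.
    let _ = initialize_plugin(&Config { vector_store: None });
}
```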
@@ -15,8 +15,9 @@ use lib_infra::util::{get_operating_system, OperatingSystem};
 use std::path::PathBuf;
 use std::sync::Arc;
 
+use crate::local_ai::path::offline_app_path;
 #[cfg(any(target_os = "windows", target_os = "macos", target_os = "linux"))]
-use crate::local_ai::watch::{watch_path, WatchContext};
+use crate::local_ai::watch::{watch_offline_app, WatchContext};
 use tokio::fs::{self};
 use tokio_util::sync::CancellationToken;
 use tracing::{debug, error, info, instrument, trace, warn};
@@ -69,7 +70,8 @@ pub struct LLMResourceController {
   download_task: Arc<RwLock<Option<DownloadTask>>>,
   resource_notify: tokio::sync::mpsc::Sender<()>,
   #[cfg(any(target_os = "windows", target_os = "macos", target_os = "linux"))]
-  offline_app_disk_watch: RwLock<Option<WatchContext>>,
+  #[allow(dead_code)]
+  offline_app_disk_watch: Option<WatchContext>,
   offline_app_state_sender: tokio::sync::broadcast::Sender<WatchDiskEvent>,
 }
 
@@ -79,8 +81,31 @@ impl LLMResourceController {
     resource_service: impl LLMResourceService,
     resource_notify: tokio::sync::mpsc::Sender<()>,
   ) -> Self {
-    let (offline_app_ready_sender, _) = tokio::sync::broadcast::channel(1);
+    let (offline_app_state_sender, _) = tokio::sync::broadcast::channel(1);
     let llm_setting = RwLock::new(resource_service.retrieve_setting());
+    #[cfg(any(target_os = "windows", target_os = "macos", target_os = "linux"))]
+    let mut offline_app_disk_watch: Option<WatchContext> = None;
+
+    #[cfg(any(target_os = "windows", target_os = "macos", target_os = "linux"))]
+    {
+      match watch_offline_app() {
+        Ok((new_watcher, mut rx)) => {
+          let sender = offline_app_state_sender.clone();
+          tokio::spawn(async move {
+            while let Some(event) = rx.recv().await {
+              if let Err(err) = sender.send(event) {
+                error!("[LLM Resource] Failed to send offline app state: {:?}", err);
+              }
+            }
+          });
+          offline_app_disk_watch = Some(new_watcher);
+        },
+        Err(err) => {
+          error!("[LLM Resource] Failed to watch offline app path: {:?}", err);
+        },
+      }
+    }
+
     Self {
       user_service,
       resource_service: Arc::new(resource_service),
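Note: the constructor above bridges the watcher's unbounded mpsc receiver into a broadcast channel so that any number of later subscribers (for example via `subscribe_offline_app_state`) can observe disk changes. A self-contained sketch of that fan-out; the type and channel names are illustrative.

```rust
use tokio::sync::{broadcast, mpsc};

#[derive(Debug, Clone)]
enum WatchDiskEvent {
    Create,
    Remove,
}

#[tokio::main]
async fn main() {
    let (watch_tx, mut watch_rx) = mpsc::unbounded_channel::<WatchDiskEvent>();
    let (state_tx, _) = broadcast::channel::<WatchDiskEvent>(16);

    // Forwarder task: the only owner of the mpsc receiver.
    let sender = state_tx.clone();
    tokio::spawn(async move {
        while let Some(event) = watch_rx.recv().await {
            // broadcast send() only fails when there is currently no subscriber.
            let _ = sender.send(event);
        }
    });

    // Later consumers just call subscribe() on the broadcast sender.
    let mut subscriber = state_tx.subscribe();
    watch_tx.send(WatchDiskEvent::Create).unwrap();
    println!("got {:?}", subscriber.recv().await.unwrap());
}
```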
@@ -89,8 +114,8 @@ impl LLMResourceController {
       download_task: Default::default(),
       resource_notify,
       #[cfg(any(target_os = "windows", target_os = "macos", target_os = "linux"))]
-      offline_app_disk_watch: Default::default(),
-      offline_app_state_sender: offline_app_ready_sender,
+      offline_app_disk_watch,
+      offline_app_state_sender,
     }
   }
 
@@ -100,32 +125,7 @@ impl LLMResourceController {
   }
 
   fn set_llm_setting(&self, llm_setting: LLMSetting) {
-    let offline_app_path = self.offline_app_path(&llm_setting.app.ai_plugin_name);
     *self.llm_setting.write() = Some(llm_setting);
-
-    #[cfg(any(target_os = "windows", target_os = "macos", target_os = "linux"))]
-    {
-      let is_diff = self
-        .offline_app_disk_watch
-        .read()
-        .as_ref()
-        .map(|watch_context| watch_context.path == offline_app_path)
-        .unwrap_or(true);
-
-      // If the offline app path is different from the current watch path, update the watch path.
-      if is_diff {
-        if let Ok((watcher, mut rx)) = watch_path(offline_app_path) {
-          let offline_app_ready_sender = self.offline_app_state_sender.clone();
-          tokio::spawn(async move {
-            while let Some(event) = rx.recv().await {
-              info!("Offline app file changed: {:?}", event);
-              let _ = offline_app_ready_sender.send(event);
-            }
-          });
-          self.offline_app_disk_watch.write().replace(watcher);
-        }
-      }
-    }
   }
 
   /// Returns true when all resources are downloaded and ready to use.
@@ -136,17 +136,8 @@ impl LLMResourceController {
     }
   }
 
-  pub fn is_offline_ai_ready(&self) -> bool {
-    match self.llm_setting.read().as_ref() {
-      None => {
-        trace!("[LLM Resource] No local ai setting found");
-        false
-      },
-      Some(setting) => {
-        let path = self.offline_app_path(&setting.app.ai_plugin_name);
-        path.exists()
-      },
-    }
+  pub fn is_offline_app_ready(&self) -> bool {
+    offline_app_path().exists()
   }
 
   pub async fn get_offline_ai_app_download_link(&self) -> FlowyResult<String> {
@@ -256,9 +247,9 @@ impl LLMResourceController {
       None => Err(FlowyError::local_ai().with_context("Can't find any llm config")),
       Some(llm_setting) => {
         let mut resources = vec![];
-        let plugin_path = self.offline_app_path(&llm_setting.app.ai_plugin_name);
-        if !plugin_path.exists() {
-          trace!("[LLM Resource] offline plugin not found: {:?}", plugin_path);
+        let app_path = offline_app_path();
+        if !app_path.exists() {
+          trace!("[LLM Resource] offline app not found: {:?}", app_path);
           resources.push(PendingResource::OfflineApp);
         }
 
@@ -337,13 +328,6 @@ impl LLMResourceController {
     *self.download_task.write() = Some(download_task.clone());
     progress_notify(download_task.tx.subscribe());
 
-    // let plugin_dir = self.user_plugin_folder()?;
-    // if !plugin_dir.exists() {
-    //   fs::create_dir_all(&plugin_dir).await.map_err(|err| {
-    //     FlowyError::local_ai().with_context(format!("Failed to create plugin dir: {:?}", err))
-    //   })?;
-    // }
-
     let model_dir = self.user_model_folder()?;
     if !model_dir.exists() {
       fs::create_dir_all(&model_dir).await.map_err(|err| {
@@ -352,43 +336,6 @@ impl LLMResourceController {
     }
 
     tokio::spawn(async move {
-      // let plugin_file_etag_dir = plugin_dir.join(&llm_setting.app.etag);
-      // We use the ETag as the identifier for the plugin file. If a file with the given ETag
-      // already exists, skip downloading it.
-      // if !plugin_file_etag_dir.exists() {
-      //   let plugin_progress_tx = download_task.tx.clone();
-      //   info!(
-      //     "[LLM Resource] Downloading plugin: {:?}",
-      //     llm_setting.app.etag
-      //   );
-      //   let file_name = format!("{}.zip", llm_setting.app.etag);
-      //   let zip_plugin_file = download_plugin(
-      //     &llm_setting.app.url,
-      //     &plugin_dir,
-      //     &file_name,
-      //     Some(download_task.cancel_token.clone()),
-      //     Some(Arc::new(move |downloaded, total_size| {
-      //       let progress = (downloaded as f64 / total_size as f64).clamp(0.0, 1.0);
-      //       let _ = plugin_progress_tx.send(format!("plugin:progress:{}", progress));
-      //     })),
-      //     Some(Duration::from_millis(100)),
-      //   )
-      //   .await?;
-      //
-      //   // unzip file
-      //   info!(
-      //     "[LLM Resource] unzip {:?} to {:?}",
-      //     zip_plugin_file, plugin_file_etag_dir
-      //   );
-      //   zip_extract(&zip_plugin_file, &plugin_file_etag_dir)?;
-      //
-      //   // delete zip file
-      //   info!("[LLM Resource] Delete zip file: {:?}", file_name);
-      //   if let Err(err) = fs::remove_file(&zip_plugin_file).await {
-      //     error!("Failed to delete zip file: {:?}", err);
-      //   }
-      // }
-
       // After download the plugin, start downloading models
       let chat_model_file = (
         model_dir.join(&llm_setting.llm_model.chat_model.file_name),
@@ -473,7 +420,7 @@ impl LLMResourceController {
     let model_dir = self.user_model_folder()?;
     let bin_path = match get_operating_system() {
       OperatingSystem::MacOS => {
-        let path = self.offline_app_path(&llm_setting.app.ai_plugin_name);
+        let path = offline_app_path();
         if !path.exists() {
           return Err(FlowyError::new(
             ErrorCode::AIOfflineNotInstalled,
@@ -560,10 +507,6 @@ impl LLMResourceController {
     self.resource_dir().map(|dir| dir.join(LLM_MODEL_DIR))
   }
 
-  pub(crate) fn offline_app_path(&self, plugin_name: &str) -> PathBuf {
-    PathBuf::from(format!("/usr/local/bin/{}", plugin_name))
-  }
-
   fn model_path(&self, model_file_name: &str) -> FlowyResult<PathBuf> {
     self
       .user_model_folder()
@@ -2,5 +2,6 @@ pub mod local_llm_chat;
 pub mod local_llm_resource;
 mod model_request;
 
+mod path;
 #[cfg(any(target_os = "windows", target_os = "macos", target_os = "linux"))]
 pub mod watch;
frontend/rust-lib/flowy-chat/src/local_ai/path.rs (new file, 33 lines)
@@ -0,0 +1,33 @@
+use std::path::PathBuf;
+
+pub(crate) fn install_path() -> Option<PathBuf> {
+  #[cfg(not(any(target_os = "windows", target_os = "macos", target_os = "linux")))]
+  return None;
+
+  #[cfg(target_os = "windows")]
+  return None;
+
+  #[cfg(target_os = "macos")]
+  return Some(PathBuf::from("/usr/local/bin"));
+
+  #[cfg(target_os = "linux")]
+  return None;
+}
+
+pub(crate) fn offline_app_path() -> PathBuf {
+  #[cfg(not(any(target_os = "windows", target_os = "macos", target_os = "linux")))]
+  return PathBuf::new();
+
+  #[cfg(any(target_os = "windows", target_os = "macos", target_os = "linux"))]
+  {
+    let offline_app = "appflowy_ai_plugin";
+    #[cfg(target_os = "windows")]
+    return PathBuf::from(format!("/usr/local/bin/{}", offline_app));
+
+    #[cfg(target_os = "macos")]
+    return PathBuf::from(format!("/usr/local/bin/{}", offline_app));
+
+    #[cfg(target_os = "linux")]
+    return PathBuf::from(format!("/usr/local/bin/{}", offline_app));
+  }
+}
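Note: with path.rs in place, offline-app readiness reduces to a file-existence check against a well-known install path instead of consulting the stored LLMSetting. A hedged usage sketch; the standalone helpers below mirror the macOS branch of path.rs rather than calling the pub(crate) functions directly.

```rust
use std::path::PathBuf;

// Mirrors the macOS branch of offline_app_path() in path.rs.
fn offline_app_path() -> PathBuf {
    PathBuf::from("/usr/local/bin/appflowy_ai_plugin")
}

fn is_offline_app_ready() -> bool {
    // Readiness is simply "does the offline AI binary exist at the
    // well-known install path?".
    offline_app_path().exists()
}

fn main() {
    println!("offline AI app ready: {}", is_offline_app_ready());
}
```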
@@ -1,9 +1,10 @@
 use crate::local_ai::local_llm_resource::WatchDiskEvent;
+use crate::local_ai::path::{install_path, offline_app_path};
 use flowy_error::{FlowyError, FlowyResult};
 use notify::{Event, RecursiveMode, Watcher};
 use std::path::PathBuf;
 use tokio::sync::mpsc::{unbounded_channel, UnboundedReceiver};
-use tracing::error;
+use tracing::{error, trace};
 
 pub struct WatchContext {
   #[allow(dead_code)]
@@ -11,10 +12,21 @@ pub struct WatchContext {
   pub path: PathBuf,
 }
 
-pub fn watch_path(path: PathBuf) -> FlowyResult<(WatchContext, UnboundedReceiver<WatchDiskEvent>)> {
+pub fn watch_offline_app() -> FlowyResult<(WatchContext, UnboundedReceiver<WatchDiskEvent>)> {
+  let install_path = install_path().ok_or_else(|| {
+    FlowyError::internal().with_context("Unsupported platform for offline app watching")
+  })?;
+  trace!(
+    "[LLM Resource] Start watching offline app path: {:?}",
+    install_path,
+  );
   let (tx, rx) = unbounded_channel();
+  let app_path = offline_app_path();
   let mut watcher = notify::recommended_watcher(move |res: Result<Event, _>| match res {
-    Ok(event) => match event.kind {
+    Ok(event) => {
+      if event.paths.iter().any(|path| path == &app_path) {
+        trace!("watch event: {:?}", event);
+        match event.kind {
           notify::EventKind::Create(_) => {
             if let Err(err) = tx.send(WatchDiskEvent::Create) {
               error!("watch send error: {:?}", err)
@@ -25,14 +37,24 @@ pub fn watch_path(path: PathBuf) -> FlowyResult<(WatchContext, UnboundedReceiver
               error!("watch send error: {:?}", err)
             }
           },
-          _ => {},
+          _ => {
+            trace!("unhandle watch event: {:?}", event);
+          },
+        }
+      }
     },
     Err(e) => error!("watch error: {:?}", e),
   })
   .map_err(|err| FlowyError::internal().with_context(err))?;
   watcher
-    .watch(&path, RecursiveMode::Recursive)
+    .watch(&install_path, RecursiveMode::NonRecursive)
     .map_err(|err| FlowyError::internal().with_context(err))?;
 
-  Ok((WatchContext { watcher, path }, rx))
+  Ok((
+    WatchContext {
+      watcher,
+      path: install_path,
+    },
+    rx,
+  ))
 }
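Note: watch_offline_app watches the install directory non-recursively and only forwards notify events whose paths include the offline app binary. A standalone sketch of the same pattern; the directory, file name, and event enum below are illustrative, not the AppFlowy types.

```rust
use notify::{Event, EventKind, RecursiveMode, Watcher};
use std::path::{Path, PathBuf};
use tokio::sync::mpsc::unbounded_channel;

#[derive(Debug)]
enum DiskEvent {
    Create,
    Remove,
}

fn watch_file(dir: &Path, file: PathBuf) -> notify::Result<notify::RecommendedWatcher> {
    let (tx, mut rx) = unbounded_channel::<DiskEvent>();

    let mut watcher = notify::recommended_watcher(move |res: notify::Result<Event>| {
        if let Ok(event) = res {
            // Only events that touch the file we care about are forwarded.
            if event.paths.iter().any(|p| p == &file) {
                match event.kind {
                    EventKind::Create(_) => {
                        let _ = tx.send(DiskEvent::Create);
                    },
                    EventKind::Remove(_) => {
                        let _ = tx.send(DiskEvent::Remove);
                    },
                    _ => {},
                }
            }
        }
    })?;
    // NonRecursive: only direct children of `dir` generate events.
    watcher.watch(dir, RecursiveMode::NonRecursive)?;

    tokio::spawn(async move {
        while let Some(event) = rx.recv().await {
            println!("offline app changed on disk: {:?}", event);
        }
    });

    Ok(watcher)
}

#[tokio::main]
async fn main() -> notify::Result<()> {
    // Keep the returned watcher alive; dropping it stops the notifications.
    let _watcher = watch_file(
        Path::new("/usr/local/bin"),
        PathBuf::from("/usr/local/bin/appflowy_ai_plugin"),
    )?;
    tokio::time::sleep(std::time::Duration::from_secs(30)).await;
    Ok(())
}
```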