mirror of
				https://github.com/AppFlowy-IO/AppFlowy.git
				synced 2025-11-04 03:54:44 +00:00 
			
		
		
		
	chore: merge branch 'upstream/main' into feat/upload-file-in-document
This commit is contained in:
		
						commit
						61d30fad6c
					
				@ -1,4 +1,12 @@
 | 
			
		||||
# Release Notes
 | 
			
		||||
## Version 0.6.6 - 30/07/2024
 | 
			
		||||
### New Features
 | 
			
		||||
- Upgrade your workspace to a premium plan to unlock more features and storage.
 | 
			
		||||
- Image galleries and drag-and-drop image support in documents.
 | 
			
		||||
 | 
			
		||||
### Bug Fixes
 | 
			
		||||
- Fix minor UI issues on Desktop and Mobile.
 | 
			
		||||
 | 
			
		||||
## Version 0.6.5 - 24/07/2024
 | 
			
		||||
### New Features
 | 
			
		||||
- Publish a Database to the Web
 | 
			
		||||
 | 
			
		||||
@ -36,7 +36,7 @@ class _AnimatedGestureDetectorState extends State<AnimatedGestureDetector> {
 | 
			
		||||
      onTapUp: (details) {
 | 
			
		||||
        setState(() => scale = 1.0);
 | 
			
		||||
 | 
			
		||||
        HapticFeedbackType.vibrate.call();
 | 
			
		||||
        HapticFeedbackType.light.call();
 | 
			
		||||
 | 
			
		||||
        widget.onTapUp();
 | 
			
		||||
      },
 | 
			
		||||
 | 
			
		||||
@ -167,15 +167,16 @@ class _Pages extends StatelessWidget {
 | 
			
		||||
                    onSelected: context.pushView,
 | 
			
		||||
                    endActionPane: (context) {
 | 
			
		||||
                      final view = context.read<ViewBloc>().state.view;
 | 
			
		||||
                      final actions = [
 | 
			
		||||
                        MobilePaneActionType.more,
 | 
			
		||||
                        if (view.layout == ViewLayoutPB.Document)
 | 
			
		||||
                          MobilePaneActionType.add,
 | 
			
		||||
                      ];
 | 
			
		||||
                      return buildEndActionPane(
 | 
			
		||||
                        context,
 | 
			
		||||
                        [
 | 
			
		||||
                          MobilePaneActionType.more,
 | 
			
		||||
                          if (view.layout == ViewLayoutPB.Document)
 | 
			
		||||
                            MobilePaneActionType.add,
 | 
			
		||||
                        ],
 | 
			
		||||
                        actions,
 | 
			
		||||
                        spaceType: spaceType,
 | 
			
		||||
                        spaceRatio: 4,
 | 
			
		||||
                        spaceRatio: actions.length == 1 ? 3 : 4,
 | 
			
		||||
                      );
 | 
			
		||||
                    },
 | 
			
		||||
                  ),
 | 
			
		||||
 | 
			
		||||
@ -23,10 +23,12 @@ class MobileSpaceTabBar extends StatelessWidget {
 | 
			
		||||
    final labelStyle = baseStyle?.copyWith(
 | 
			
		||||
      fontWeight: FontWeight.w600,
 | 
			
		||||
      fontSize: 16.0,
 | 
			
		||||
      height: 20 / 16,
 | 
			
		||||
    );
 | 
			
		||||
    final unselectedLabelStyle = baseStyle?.copyWith(
 | 
			
		||||
      fontWeight: FontWeight.w400,
 | 
			
		||||
      fontSize: 15.0,
 | 
			
		||||
      height: 20 / 15,
 | 
			
		||||
    );
 | 
			
		||||
 | 
			
		||||
    return Container(
 | 
			
		||||
 | 
			
		||||
@ -52,7 +52,7 @@ class ChatInputBloc extends Bloc<ChatInputEvent, ChatInputState> {
 | 
			
		||||
        );
 | 
			
		||||
      },
 | 
			
		||||
      updateState: (aiState) {
 | 
			
		||||
        if (aiState.enabled) {
 | 
			
		||||
        if (aiState.pluginState.state == RunningStatePB.Running) {
 | 
			
		||||
          emit(const ChatInputState(aiType: _LocalAI()));
 | 
			
		||||
        } else {
 | 
			
		||||
          emit(const ChatInputState(aiType: _AppFlowyAI()));
 | 
			
		||||
 | 
			
		||||
@ -0,0 +1,41 @@
 | 
			
		||||
import 'dart:async';
 | 
			
		||||
 | 
			
		||||
import 'package:appflowy_backend/dispatch/dispatch.dart';
 | 
			
		||||
import 'package:bloc/bloc.dart';
 | 
			
		||||
import 'package:freezed_annotation/freezed_annotation.dart';
 | 
			
		||||
import 'package:url_launcher/url_launcher.dart' show launchUrl;
 | 
			
		||||
part 'download_offline_ai_app_bloc.freezed.dart';
 | 
			
		||||
 | 
			
		||||
class DownloadOfflineAIBloc
 | 
			
		||||
    extends Bloc<DownloadOfflineAIEvent, DownloadOfflineAIState> {
 | 
			
		||||
  DownloadOfflineAIBloc() : super(const DownloadOfflineAIState()) {
 | 
			
		||||
    on<DownloadOfflineAIEvent>(_handleEvent);
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  Future<void> _handleEvent(
 | 
			
		||||
    DownloadOfflineAIEvent event,
 | 
			
		||||
    Emitter<DownloadOfflineAIState> emit,
 | 
			
		||||
  ) async {
 | 
			
		||||
    await event.when(
 | 
			
		||||
      started: () async {
 | 
			
		||||
        final result = await ChatEventGetOfflineAIAppLink().send();
 | 
			
		||||
        await result.fold(
 | 
			
		||||
          (app) async {
 | 
			
		||||
            await launchUrl(Uri.parse(app.link));
 | 
			
		||||
          },
 | 
			
		||||
          (err) {},
 | 
			
		||||
        );
 | 
			
		||||
      },
 | 
			
		||||
    );
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
@freezed
 | 
			
		||||
class DownloadOfflineAIEvent with _$DownloadOfflineAIEvent {
 | 
			
		||||
  const factory DownloadOfflineAIEvent.started() = _Started;
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
@freezed
 | 
			
		||||
class DownloadOfflineAIState with _$DownloadOfflineAIState {
 | 
			
		||||
  const factory DownloadOfflineAIState() = _DownloadOfflineAIState;
 | 
			
		||||
}
 | 
			
		||||
@ -20,7 +20,7 @@ class LocalAIChatSettingBloc
 | 
			
		||||
    listener.start(
 | 
			
		||||
      stateCallback: (newState) {
 | 
			
		||||
        if (!isClosed) {
 | 
			
		||||
          add(LocalAIChatSettingEvent.updateLLMRunningState(newState.state));
 | 
			
		||||
          add(LocalAIChatSettingEvent.updatePluginState(newState));
 | 
			
		||||
        }
 | 
			
		||||
      },
 | 
			
		||||
    );
 | 
			
		||||
@ -46,14 +46,14 @@ class LocalAIChatSettingBloc
 | 
			
		||||
                modelInfo: modelInfo,
 | 
			
		||||
                models: modelInfo.models,
 | 
			
		||||
                selectedLLMModel: modelInfo.selectedModel,
 | 
			
		||||
                fetchModelInfoState: const LoadingState.finish(),
 | 
			
		||||
                aiModelProgress: const AIModelProgress.finish(),
 | 
			
		||||
              ),
 | 
			
		||||
            );
 | 
			
		||||
          },
 | 
			
		||||
          (err) {
 | 
			
		||||
            emit(
 | 
			
		||||
              state.copyWith(
 | 
			
		||||
                fetchModelInfoState: LoadingState.finish(error: err),
 | 
			
		||||
                aiModelProgress: AIModelProgress.finish(error: err),
 | 
			
		||||
              ),
 | 
			
		||||
            );
 | 
			
		||||
          },
 | 
			
		||||
@ -68,7 +68,7 @@ class LocalAIChatSettingBloc
 | 
			
		||||
              emit(
 | 
			
		||||
                state.copyWith(
 | 
			
		||||
                  selectedLLMModel: llmModel,
 | 
			
		||||
                  localAIInfo: LocalAIProgress.showDownload(
 | 
			
		||||
                  progressIndicator: LocalAIProgress.showDownload(
 | 
			
		||||
                    llmResource,
 | 
			
		||||
                    llmModel,
 | 
			
		||||
                  ),
 | 
			
		||||
@ -80,7 +80,7 @@ class LocalAIChatSettingBloc
 | 
			
		||||
                state.copyWith(
 | 
			
		||||
                  selectedLLMModel: llmModel,
 | 
			
		||||
                  selectLLMState: const LoadingState.finish(),
 | 
			
		||||
                  localAIInfo: const LocalAIProgress.checkPluginState(),
 | 
			
		||||
                  progressIndicator: const LocalAIProgress.checkPluginState(),
 | 
			
		||||
                ),
 | 
			
		||||
              );
 | 
			
		||||
            }
 | 
			
		||||
@ -106,7 +106,7 @@ class LocalAIChatSettingBloc
 | 
			
		||||
        if (llmResource.pendingResources.isEmpty) {
 | 
			
		||||
          emit(
 | 
			
		||||
            state.copyWith(
 | 
			
		||||
              localAIInfo: const LocalAIProgress.checkPluginState(),
 | 
			
		||||
              progressIndicator: const LocalAIProgress.checkPluginState(),
 | 
			
		||||
            ),
 | 
			
		||||
          );
 | 
			
		||||
        } else {
 | 
			
		||||
@ -115,7 +115,7 @@ class LocalAIChatSettingBloc
 | 
			
		||||
            if (llmResource.isDownloading) {
 | 
			
		||||
              emit(
 | 
			
		||||
                state.copyWith(
 | 
			
		||||
                  localAIInfo:
 | 
			
		||||
                  progressIndicator:
 | 
			
		||||
                      LocalAIProgress.startDownloading(state.selectedLLMModel!),
 | 
			
		||||
                  selectLLMState: const LoadingState.finish(),
 | 
			
		||||
                ),
 | 
			
		||||
@ -124,7 +124,7 @@ class LocalAIChatSettingBloc
 | 
			
		||||
            } else {
 | 
			
		||||
              emit(
 | 
			
		||||
                state.copyWith(
 | 
			
		||||
                  localAIInfo: LocalAIProgress.showDownload(
 | 
			
		||||
                  progressIndicator: LocalAIProgress.showDownload(
 | 
			
		||||
                    llmResource,
 | 
			
		||||
                    state.selectedLLMModel!,
 | 
			
		||||
                  ),
 | 
			
		||||
@ -138,7 +138,7 @@ class LocalAIChatSettingBloc
 | 
			
		||||
      startDownloadModel: (LLMModelPB llmModel) {
 | 
			
		||||
        emit(
 | 
			
		||||
          state.copyWith(
 | 
			
		||||
            localAIInfo: LocalAIProgress.startDownloading(llmModel),
 | 
			
		||||
            progressIndicator: LocalAIProgress.startDownloading(llmModel),
 | 
			
		||||
            selectLLMState: const LoadingState.finish(),
 | 
			
		||||
          ),
 | 
			
		||||
        );
 | 
			
		||||
@ -149,19 +149,39 @@ class LocalAIChatSettingBloc
 | 
			
		||||
      },
 | 
			
		||||
      finishDownload: () async {
 | 
			
		||||
        emit(
 | 
			
		||||
          state.copyWith(localAIInfo: const LocalAIProgress.finishDownload()),
 | 
			
		||||
          state.copyWith(
 | 
			
		||||
            progressIndicator: const LocalAIProgress.finishDownload(),
 | 
			
		||||
          ),
 | 
			
		||||
        );
 | 
			
		||||
      },
 | 
			
		||||
      updateLLMRunningState: (RunningStatePB newRunningState) {
 | 
			
		||||
        if (newRunningState == RunningStatePB.Stopped) {
 | 
			
		||||
      updatePluginState: (LocalAIPluginStatePB pluginState) {
 | 
			
		||||
        if (pluginState.offlineAiReady) {
 | 
			
		||||
          ChatEventRefreshLocalAIModelInfo().send().then((result) {
 | 
			
		||||
            if (!isClosed) {
 | 
			
		||||
              add(LocalAIChatSettingEvent.didLoadModelInfo(result));
 | 
			
		||||
            }
 | 
			
		||||
          });
 | 
			
		||||
 | 
			
		||||
          if (pluginState.state == RunningStatePB.Stopped) {
 | 
			
		||||
            emit(
 | 
			
		||||
              state.copyWith(
 | 
			
		||||
                runningState: pluginState.state,
 | 
			
		||||
                progressIndicator: const LocalAIProgress.checkPluginState(),
 | 
			
		||||
              ),
 | 
			
		||||
            );
 | 
			
		||||
          } else {
 | 
			
		||||
            emit(
 | 
			
		||||
              state.copyWith(
 | 
			
		||||
                runningState: pluginState.state,
 | 
			
		||||
              ),
 | 
			
		||||
            );
 | 
			
		||||
          }
 | 
			
		||||
        } else {
 | 
			
		||||
          emit(
 | 
			
		||||
            state.copyWith(
 | 
			
		||||
              runningState: newRunningState,
 | 
			
		||||
              localAIInfo: const LocalAIProgress.checkPluginState(),
 | 
			
		||||
              progressIndicator: const LocalAIProgress.startOfflineAIApp(),
 | 
			
		||||
            ),
 | 
			
		||||
          );
 | 
			
		||||
        } else {
 | 
			
		||||
          emit(state.copyWith(runningState: newRunningState));
 | 
			
		||||
        }
 | 
			
		||||
      },
 | 
			
		||||
    );
 | 
			
		||||
@ -183,10 +203,21 @@ class LocalAIChatSettingBloc
 | 
			
		||||
 | 
			
		||||
  /// Handles the event to fetch local AI settings when the application starts.
 | 
			
		||||
  Future<void> _handleStarted() async {
 | 
			
		||||
    final result = await ChatEventRefreshLocalAIModelInfo().send();
 | 
			
		||||
    if (!isClosed) {
 | 
			
		||||
      add(LocalAIChatSettingEvent.didLoadModelInfo(result));
 | 
			
		||||
    }
 | 
			
		||||
    final result = await ChatEventGetLocalAIPluginState().send();
 | 
			
		||||
    result.fold(
 | 
			
		||||
      (pluginState) async {
 | 
			
		||||
        if (!isClosed) {
 | 
			
		||||
          add(LocalAIChatSettingEvent.updatePluginState(pluginState));
 | 
			
		||||
          if (pluginState.offlineAiReady) {
 | 
			
		||||
            final result = await ChatEventRefreshLocalAIModelInfo().send();
 | 
			
		||||
            if (!isClosed) {
 | 
			
		||||
              add(LocalAIChatSettingEvent.didLoadModelInfo(result));
 | 
			
		||||
            }
 | 
			
		||||
          }
 | 
			
		||||
        }
 | 
			
		||||
      },
 | 
			
		||||
      (err) => Log.error(err.toString()),
 | 
			
		||||
    );
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  @override
 | 
			
		||||
@ -214,9 +245,9 @@ class LocalAIChatSettingEvent with _$LocalAIChatSettingEvent {
 | 
			
		||||
 | 
			
		||||
  const factory LocalAIChatSettingEvent.cancelDownload() = _CancelDownload;
 | 
			
		||||
  const factory LocalAIChatSettingEvent.finishDownload() = _FinishDownload;
 | 
			
		||||
  const factory LocalAIChatSettingEvent.updateLLMRunningState(
 | 
			
		||||
    RunningStatePB newRunningState,
 | 
			
		||||
  ) = _RunningState;
 | 
			
		||||
  const factory LocalAIChatSettingEvent.updatePluginState(
 | 
			
		||||
    LocalAIPluginStatePB pluginState,
 | 
			
		||||
  ) = _PluginState;
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
@freezed
 | 
			
		||||
@ -224,29 +255,16 @@ class LocalAIChatSettingState with _$LocalAIChatSettingState {
 | 
			
		||||
  const factory LocalAIChatSettingState({
 | 
			
		||||
    LLMModelInfoPB? modelInfo,
 | 
			
		||||
    LLMModelPB? selectedLLMModel,
 | 
			
		||||
    LocalAIProgress? localAIInfo,
 | 
			
		||||
    @Default(LoadingState.loading()) LoadingState fetchModelInfoState,
 | 
			
		||||
    LocalAIProgress? progressIndicator,
 | 
			
		||||
    @Default(AIModelProgress.init()) AIModelProgress aiModelProgress,
 | 
			
		||||
    @Default(LoadingState.loading()) LoadingState selectLLMState,
 | 
			
		||||
    @Default([]) List<LLMModelPB> models,
 | 
			
		||||
    @Default(RunningStatePB.Connecting) RunningStatePB runningState,
 | 
			
		||||
  }) = _LocalAIChatSettingState;
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// @freezed
 | 
			
		||||
// class LocalChatAIStateIndicator with _$LocalChatAIStateIndicator {
 | 
			
		||||
//   // when start downloading the model
 | 
			
		||||
//   const factory LocalChatAIStateIndicator.error(FlowyError error) = _OnError;
 | 
			
		||||
//   const factory LocalChatAIStateIndicator.ready(bool isEnabled) = _Ready;
 | 
			
		||||
// }
 | 
			
		||||
 | 
			
		||||
@freezed
 | 
			
		||||
class LocalAIProgress with _$LocalAIProgress {
 | 
			
		||||
  // when user select a new model, it will call requestDownload
 | 
			
		||||
  const factory LocalAIProgress.requestDownloadInfo(
 | 
			
		||||
    LocalModelResourcePB llmResource,
 | 
			
		||||
    LLMModelPB llmModel,
 | 
			
		||||
  ) = _RequestDownload;
 | 
			
		||||
 | 
			
		||||
  // when user comes back to the setting page, it will auto detect current llm state
 | 
			
		||||
  const factory LocalAIProgress.showDownload(
 | 
			
		||||
    LocalModelResourcePB llmResource,
 | 
			
		||||
@ -257,5 +275,13 @@ class LocalAIProgress with _$LocalAIProgress {
 | 
			
		||||
  const factory LocalAIProgress.startDownloading(LLMModelPB llmModel) =
 | 
			
		||||
      _Downloading;
 | 
			
		||||
  const factory LocalAIProgress.finishDownload() = _Finish;
 | 
			
		||||
  const factory LocalAIProgress.checkPluginState() = _PluginState;
 | 
			
		||||
  const factory LocalAIProgress.checkPluginState() = _CheckPluginState;
 | 
			
		||||
  const factory LocalAIProgress.startOfflineAIApp() = _StartOfflineAIApp;
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
@freezed
 | 
			
		||||
class AIModelProgress with _$AIModelProgress {
 | 
			
		||||
  const factory AIModelProgress.init() = _AIModelProgressInit;
 | 
			
		||||
  const factory AIModelProgress.loading() = _AIModelDownloading;
 | 
			
		||||
  const factory AIModelProgress.finish({FlowyError? error}) = _AIModelFinish;
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
@ -7,6 +7,7 @@ import 'package:appflowy_backend/log.dart';
 | 
			
		||||
import 'package:appflowy_backend/protobuf/flowy-chat/entities.pb.dart';
 | 
			
		||||
import 'package:bloc/bloc.dart';
 | 
			
		||||
import 'package:freezed_annotation/freezed_annotation.dart';
 | 
			
		||||
import 'package:url_launcher/url_launcher.dart' show launchUrl;
 | 
			
		||||
part 'plugin_state_bloc.freezed.dart';
 | 
			
		||||
 | 
			
		||||
class PluginStateBloc extends Bloc<PluginStateEvent, PluginStateState> {
 | 
			
		||||
@ -53,19 +54,31 @@ class PluginStateBloc extends Bloc<PluginStateEvent, PluginStateState> {
 | 
			
		||||
        );
 | 
			
		||||
      },
 | 
			
		||||
      updateState: (LocalAIPluginStatePB pluginState) {
 | 
			
		||||
        switch (pluginState.state) {
 | 
			
		||||
          case RunningStatePB.Connecting:
 | 
			
		||||
            emit(
 | 
			
		||||
              const PluginStateState(action: PluginStateAction.loadingPlugin()),
 | 
			
		||||
            );
 | 
			
		||||
          case RunningStatePB.Running:
 | 
			
		||||
            emit(const PluginStateState(action: PluginStateAction.ready()));
 | 
			
		||||
            break;
 | 
			
		||||
          default:
 | 
			
		||||
            emit(
 | 
			
		||||
              state.copyWith(action: const PluginStateAction.restart()),
 | 
			
		||||
            );
 | 
			
		||||
            break;
 | 
			
		||||
        // if the offline ai is not started, ask user to start it
 | 
			
		||||
        if (pluginState.offlineAiReady) {
 | 
			
		||||
          // Chech state of the plugin
 | 
			
		||||
          switch (pluginState.state) {
 | 
			
		||||
            case RunningStatePB.Connecting:
 | 
			
		||||
              emit(
 | 
			
		||||
                const PluginStateState(
 | 
			
		||||
                  action: PluginStateAction.loadingPlugin(),
 | 
			
		||||
                ),
 | 
			
		||||
              );
 | 
			
		||||
            case RunningStatePB.Running:
 | 
			
		||||
              emit(const PluginStateState(action: PluginStateAction.ready()));
 | 
			
		||||
              break;
 | 
			
		||||
            default:
 | 
			
		||||
              emit(
 | 
			
		||||
                state.copyWith(action: const PluginStateAction.restartPlugin()),
 | 
			
		||||
              );
 | 
			
		||||
              break;
 | 
			
		||||
          }
 | 
			
		||||
        } else {
 | 
			
		||||
          emit(
 | 
			
		||||
            const PluginStateState(
 | 
			
		||||
              action: PluginStateAction.startAIOfflineApp(),
 | 
			
		||||
            ),
 | 
			
		||||
          );
 | 
			
		||||
        }
 | 
			
		||||
      },
 | 
			
		||||
      restartLocalAI: () async {
 | 
			
		||||
@ -83,6 +96,15 @@ class PluginStateBloc extends Bloc<PluginStateEvent, PluginStateState> {
 | 
			
		||||
          (err) => Log.error(err.toString()),
 | 
			
		||||
        );
 | 
			
		||||
      },
 | 
			
		||||
      downloadOfflineAIApp: () async {
 | 
			
		||||
        final result = await ChatEventGetOfflineAIAppLink().send();
 | 
			
		||||
        await result.fold(
 | 
			
		||||
          (app) async {
 | 
			
		||||
            await launchUrl(Uri.parse(app.link));
 | 
			
		||||
          },
 | 
			
		||||
          (err) {},
 | 
			
		||||
        );
 | 
			
		||||
      },
 | 
			
		||||
    );
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
@ -95,6 +117,7 @@ class PluginStateEvent with _$PluginStateEvent {
 | 
			
		||||
  const factory PluginStateEvent.restartLocalAI() = _RestartLocalAI;
 | 
			
		||||
  const factory PluginStateEvent.openModelDirectory() =
 | 
			
		||||
      _OpenModelStorageDirectory;
 | 
			
		||||
  const factory PluginStateEvent.downloadOfflineAIApp() = _DownloadOfflineAIApp;
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
@freezed
 | 
			
		||||
@ -109,5 +132,6 @@ class PluginStateAction with _$PluginStateAction {
 | 
			
		||||
  const factory PluginStateAction.init() = _Init;
 | 
			
		||||
  const factory PluginStateAction.loadingPlugin() = _LoadingPlugin;
 | 
			
		||||
  const factory PluginStateAction.ready() = _Ready;
 | 
			
		||||
  const factory PluginStateAction.restart() = _Restart;
 | 
			
		||||
  const factory PluginStateAction.restartPlugin() = _RestartPlugin;
 | 
			
		||||
  const factory PluginStateAction.startAIOfflineApp() = _StartAIOfflineApp;
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
@ -293,6 +293,7 @@ class ConfirmPopup extends StatefulWidget {
 | 
			
		||||
    required this.title,
 | 
			
		||||
    required this.description,
 | 
			
		||||
    required this.onConfirm,
 | 
			
		||||
    this.onCancel,
 | 
			
		||||
    this.confirmLabel,
 | 
			
		||||
    this.confirmButtonColor,
 | 
			
		||||
    this.child,
 | 
			
		||||
@ -302,6 +303,7 @@ class ConfirmPopup extends StatefulWidget {
 | 
			
		||||
  final String title;
 | 
			
		||||
  final String description;
 | 
			
		||||
  final VoidCallback onConfirm;
 | 
			
		||||
  final VoidCallback? onCancel;
 | 
			
		||||
  final Color? confirmButtonColor;
 | 
			
		||||
  final ConfirmPopupStyle style;
 | 
			
		||||
 | 
			
		||||
@ -413,7 +415,10 @@ class _ConfirmPopupState extends State<ConfirmPopup> {
 | 
			
		||||
        );
 | 
			
		||||
      case ConfirmPopupStyle.cancelAndOk:
 | 
			
		||||
        return SpaceCancelOrConfirmButton(
 | 
			
		||||
          onCancel: () => Navigator.of(context).pop(),
 | 
			
		||||
          onCancel: () {
 | 
			
		||||
            widget.onCancel?.call();
 | 
			
		||||
            Navigator.of(context).pop();
 | 
			
		||||
          },
 | 
			
		||||
          onConfirm: () {
 | 
			
		||||
            widget.onConfirm();
 | 
			
		||||
            if (widget.closeOnAction) {
 | 
			
		||||
 | 
			
		||||
@ -38,20 +38,23 @@ class InitLocalAIIndicator extends StatelessWidget {
 | 
			
		||||
                  ],
 | 
			
		||||
                );
 | 
			
		||||
              case RunningStatePB.Running:
 | 
			
		||||
                return Row(
 | 
			
		||||
                  children: [
 | 
			
		||||
                    const HSpace(8),
 | 
			
		||||
                    const FlowySvg(
 | 
			
		||||
                      FlowySvgs.download_success_s,
 | 
			
		||||
                      color: Color(0xFF2E7D32),
 | 
			
		||||
                    ),
 | 
			
		||||
                    const HSpace(6),
 | 
			
		||||
                    FlowyText(
 | 
			
		||||
                      LocaleKeys.settings_aiPage_keys_localAILoaded.tr(),
 | 
			
		||||
                      fontSize: 11,
 | 
			
		||||
                      color: const Color(0xFF1E4620),
 | 
			
		||||
                    ),
 | 
			
		||||
                  ],
 | 
			
		||||
                return SizedBox(
 | 
			
		||||
                  height: 30,
 | 
			
		||||
                  child: Row(
 | 
			
		||||
                    children: [
 | 
			
		||||
                      const HSpace(8),
 | 
			
		||||
                      const FlowySvg(
 | 
			
		||||
                        FlowySvgs.download_success_s,
 | 
			
		||||
                        color: Color(0xFF2E7D32),
 | 
			
		||||
                      ),
 | 
			
		||||
                      const HSpace(6),
 | 
			
		||||
                      FlowyText(
 | 
			
		||||
                        LocaleKeys.settings_aiPage_keys_localAILoaded.tr(),
 | 
			
		||||
                        fontSize: 11,
 | 
			
		||||
                        color: const Color(0xFF1E4620),
 | 
			
		||||
                      ),
 | 
			
		||||
                    ],
 | 
			
		||||
                  ),
 | 
			
		||||
                );
 | 
			
		||||
              case RunningStatePB.Stopped:
 | 
			
		||||
                return Row(
 | 
			
		||||
 | 
			
		||||
@ -1,6 +1,7 @@
 | 
			
		||||
import 'package:appflowy/generated/flowy_svgs.g.dart';
 | 
			
		||||
import 'package:appflowy/workspace/application/settings/ai/local_ai_chat_bloc.dart';
 | 
			
		||||
import 'package:appflowy/workspace/application/settings/ai/local_ai_chat_toggle_bloc.dart';
 | 
			
		||||
import 'package:appflowy/workspace/presentation/home/menu/sidebar/space/shared_widget.dart';
 | 
			
		||||
import 'package:appflowy/workspace/presentation/settings/pages/setting_ai_view/downloading_model.dart';
 | 
			
		||||
import 'package:appflowy/workspace/presentation/settings/pages/setting_ai_view/init_local_ai.dart';
 | 
			
		||||
import 'package:appflowy/workspace/presentation/settings/pages/setting_ai_view/plugin_state.dart';
 | 
			
		||||
@ -67,53 +68,53 @@ class LocalAIChatSetting extends StatelessWidget {
 | 
			
		||||
              tapBodyToExpand: false,
 | 
			
		||||
              tapHeaderToExpand: false,
 | 
			
		||||
            ),
 | 
			
		||||
            header: const LocalAIChatSettingHeader(),
 | 
			
		||||
            header: const SizedBox.shrink(),
 | 
			
		||||
            collapsed: const SizedBox.shrink(),
 | 
			
		||||
            expanded: Padding(
 | 
			
		||||
              padding: const EdgeInsets.symmetric(vertical: 6),
 | 
			
		||||
              // child: _LocalLLMInfoWidget(),
 | 
			
		||||
              child: Column(
 | 
			
		||||
                crossAxisAlignment: CrossAxisAlignment.start,
 | 
			
		||||
                children: [
 | 
			
		||||
                  Row(
 | 
			
		||||
                    mainAxisAlignment: MainAxisAlignment.spaceBetween,
 | 
			
		||||
                    children: [
 | 
			
		||||
                      Flexible(
 | 
			
		||||
                        child: FlowyText.medium(
 | 
			
		||||
                          LocaleKeys.settings_aiPage_keys_llmModel.tr(),
 | 
			
		||||
                          fontSize: 14,
 | 
			
		||||
                        ),
 | 
			
		||||
                      ),
 | 
			
		||||
                      const Spacer(),
 | 
			
		||||
                      BlocBuilder<LocalAIChatSettingBloc,
 | 
			
		||||
                          LocalAIChatSettingState>(
 | 
			
		||||
                        builder: (context, state) {
 | 
			
		||||
                          return state.fetchModelInfoState.when(
 | 
			
		||||
                            loading: () => Expanded(
 | 
			
		||||
                              child: Row(
 | 
			
		||||
                                children: [
 | 
			
		||||
                                  Flexible(
 | 
			
		||||
                                    child: FlowyText(
 | 
			
		||||
                                      LocaleKeys
 | 
			
		||||
                                          .settings_aiPage_keys_fetchLocalModel
 | 
			
		||||
                                          .tr(),
 | 
			
		||||
                                    ),
 | 
			
		||||
                                  ),
 | 
			
		||||
                                  const Spacer(),
 | 
			
		||||
                                  const CircularProgressIndicator.adaptive(),
 | 
			
		||||
                                ],
 | 
			
		||||
                  BlocBuilder<LocalAIChatSettingBloc, LocalAIChatSettingState>(
 | 
			
		||||
                    builder: (context, state) {
 | 
			
		||||
                      // If the progress indicator is startOfflineAIApp, then don't show the LLM model.
 | 
			
		||||
                      if (state.progressIndicator ==
 | 
			
		||||
                          const LocalAIProgress.startOfflineAIApp()) {
 | 
			
		||||
                        return const SizedBox.shrink();
 | 
			
		||||
                      } else {
 | 
			
		||||
                        return Row(
 | 
			
		||||
                          mainAxisAlignment: MainAxisAlignment.spaceBetween,
 | 
			
		||||
                          children: [
 | 
			
		||||
                            Flexible(
 | 
			
		||||
                              child: FlowyText.medium(
 | 
			
		||||
                                LocaleKeys.settings_aiPage_keys_llmModel.tr(),
 | 
			
		||||
                                fontSize: 14,
 | 
			
		||||
                              ),
 | 
			
		||||
                            ),
 | 
			
		||||
                            finish: (err) {
 | 
			
		||||
                              return (err == null)
 | 
			
		||||
                            const Spacer(),
 | 
			
		||||
                            state.aiModelProgress.when(
 | 
			
		||||
                              init: () => const SizedBox.shrink(),
 | 
			
		||||
                              loading: () {
 | 
			
		||||
                                return const Expanded(
 | 
			
		||||
                                  child: Row(
 | 
			
		||||
                                    children: [
 | 
			
		||||
                                      Spacer(),
 | 
			
		||||
                                      CircularProgressIndicator.adaptive(),
 | 
			
		||||
                                    ],
 | 
			
		||||
                                  ),
 | 
			
		||||
                                );
 | 
			
		||||
                              },
 | 
			
		||||
                              finish: (err) => (err == null)
 | 
			
		||||
                                  ? const _SelectLocalModelDropdownMenu()
 | 
			
		||||
                                  : const SizedBox.shrink();
 | 
			
		||||
                            },
 | 
			
		||||
                          );
 | 
			
		||||
                        },
 | 
			
		||||
                      ),
 | 
			
		||||
                    ],
 | 
			
		||||
                                  : const SizedBox.shrink(),
 | 
			
		||||
                            ),
 | 
			
		||||
                          ],
 | 
			
		||||
                        );
 | 
			
		||||
                      }
 | 
			
		||||
                    },
 | 
			
		||||
                  ),
 | 
			
		||||
                  const IntrinsicHeight(child: _LocalLLMInfoWidget()),
 | 
			
		||||
                  const IntrinsicHeight(child: _LocalAIStateWidget()),
 | 
			
		||||
                ],
 | 
			
		||||
              ),
 | 
			
		||||
            ),
 | 
			
		||||
@ -200,8 +201,8 @@ class _SelectLocalModelDropdownMenu extends StatelessWidget {
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
class _LocalLLMInfoWidget extends StatelessWidget {
 | 
			
		||||
  const _LocalLLMInfoWidget();
 | 
			
		||||
class _LocalAIStateWidget extends StatelessWidget {
 | 
			
		||||
  const _LocalAIStateWidget();
 | 
			
		||||
 | 
			
		||||
  @override
 | 
			
		||||
  Widget build(BuildContext context) {
 | 
			
		||||
@ -210,15 +211,8 @@ class _LocalLLMInfoWidget extends StatelessWidget {
 | 
			
		||||
        final error = errorFromState(state);
 | 
			
		||||
        if (error == null) {
 | 
			
		||||
          // If the error is null, handle selected llm model.
 | 
			
		||||
          if (state.localAIInfo != null) {
 | 
			
		||||
            final child = state.localAIInfo!.when(
 | 
			
		||||
              requestDownloadInfo: (
 | 
			
		||||
                LocalModelResourcePB llmResource,
 | 
			
		||||
                LLMModelPB llmModel,
 | 
			
		||||
              ) {
 | 
			
		||||
                _showDownloadDialog(context, llmResource, llmModel);
 | 
			
		||||
                return const SizedBox.shrink();
 | 
			
		||||
              },
 | 
			
		||||
          if (state.progressIndicator != null) {
 | 
			
		||||
            final child = state.progressIndicator!.when(
 | 
			
		||||
              showDownload: (
 | 
			
		||||
                LocalModelResourcePB llmResource,
 | 
			
		||||
                LLMModelPB llmModel,
 | 
			
		||||
@ -241,6 +235,13 @@ class _LocalLLMInfoWidget extends StatelessWidget {
 | 
			
		||||
              },
 | 
			
		||||
              finishDownload: () => const InitLocalAIIndicator(),
 | 
			
		||||
              checkPluginState: () => const PluginStateIndicator(),
 | 
			
		||||
              startOfflineAIApp: () => OpenOrDownloadOfflineAIApp(
 | 
			
		||||
                onRetry: () {
 | 
			
		||||
                  context
 | 
			
		||||
                      .read<LocalAIChatSettingBloc>()
 | 
			
		||||
                      .add(const LocalAIChatSettingEvent.refreshAISetting());
 | 
			
		||||
                },
 | 
			
		||||
              ),
 | 
			
		||||
            );
 | 
			
		||||
 | 
			
		||||
            return Padding(
 | 
			
		||||
@ -266,44 +267,11 @@ class _LocalLLMInfoWidget extends StatelessWidget {
 | 
			
		||||
    );
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  void _showDownloadDialog(
 | 
			
		||||
    BuildContext context,
 | 
			
		||||
    LocalModelResourcePB llmResource,
 | 
			
		||||
    LLMModelPB llmModel,
 | 
			
		||||
  ) {
 | 
			
		||||
    WidgetsBinding.instance.addPostFrameCallback(
 | 
			
		||||
      (_) {
 | 
			
		||||
        showDialog(
 | 
			
		||||
          context: context,
 | 
			
		||||
          barrierDismissible: false,
 | 
			
		||||
          useRootNavigator: false,
 | 
			
		||||
          builder: (dialogContext) {
 | 
			
		||||
            return _LLMModelDownloadDialog(
 | 
			
		||||
              llmResource: llmResource,
 | 
			
		||||
              onOkPressed: () {
 | 
			
		||||
                context.read<LocalAIChatSettingBloc>().add(
 | 
			
		||||
                      LocalAIChatSettingEvent.startDownloadModel(
 | 
			
		||||
                        llmModel,
 | 
			
		||||
                      ),
 | 
			
		||||
                    );
 | 
			
		||||
              },
 | 
			
		||||
              onCancelPressed: () {
 | 
			
		||||
                context.read<LocalAIChatSettingBloc>().add(
 | 
			
		||||
                      const LocalAIChatSettingEvent.cancelDownload(),
 | 
			
		||||
                    );
 | 
			
		||||
              },
 | 
			
		||||
            );
 | 
			
		||||
          },
 | 
			
		||||
        );
 | 
			
		||||
      },
 | 
			
		||||
      debugLabel: 'localModel.download',
 | 
			
		||||
    );
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  FlowyError? errorFromState(LocalAIChatSettingState state) {
 | 
			
		||||
    final err = state.fetchModelInfoState.when(
 | 
			
		||||
    final err = state.aiModelProgress.when(
 | 
			
		||||
      loading: () => null,
 | 
			
		||||
      finish: (err) => err,
 | 
			
		||||
      init: () {},
 | 
			
		||||
    );
 | 
			
		||||
 | 
			
		||||
    if (err == null) {
 | 
			
		||||
@ -317,39 +285,48 @@ class _LocalLLMInfoWidget extends StatelessWidget {
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
class _LLMModelDownloadDialog extends StatelessWidget {
 | 
			
		||||
  const _LLMModelDownloadDialog({
 | 
			
		||||
    required this.llmResource,
 | 
			
		||||
    required this.onOkPressed,
 | 
			
		||||
    required this.onCancelPressed,
 | 
			
		||||
  });
 | 
			
		||||
  final LocalModelResourcePB llmResource;
 | 
			
		||||
  final VoidCallback onOkPressed;
 | 
			
		||||
  final VoidCallback onCancelPressed;
 | 
			
		||||
void _showDownloadDialog(
 | 
			
		||||
  BuildContext context,
 | 
			
		||||
  LocalModelResourcePB llmResource,
 | 
			
		||||
  LLMModelPB llmModel,
 | 
			
		||||
) {
 | 
			
		||||
  if (llmResource.pendingResources.isEmpty) {
 | 
			
		||||
    return;
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  @override
 | 
			
		||||
  Widget build(BuildContext context) {
 | 
			
		||||
    return NavigatorOkCancelDialog(
 | 
			
		||||
      title: LocaleKeys.settings_aiPage_keys_downloadLLMPrompt.tr(
 | 
			
		||||
  final res = llmResource.pendingResources.first;
 | 
			
		||||
  String desc = "";
 | 
			
		||||
  switch (res.resType) {
 | 
			
		||||
    case PendingResourceTypePB.AIModel:
 | 
			
		||||
      desc = LocaleKeys.settings_aiPage_keys_downloadLLMPromptDetail.tr(
 | 
			
		||||
        args: [
 | 
			
		||||
          llmResource.pendingResources[0].name,
 | 
			
		||||
          llmResource.pendingResources[0].fileSize,
 | 
			
		||||
        ],
 | 
			
		||||
      ),
 | 
			
		||||
      message: llmResource.pendingResources[0].fileSize == 0
 | 
			
		||||
          ? ""
 | 
			
		||||
          : LocaleKeys.settings_aiPage_keys_downloadLLMPromptDetail.tr(
 | 
			
		||||
              args: [
 | 
			
		||||
                llmResource.pendingResources[0].name,
 | 
			
		||||
                llmResource.pendingResources[0].fileSize.toString(),
 | 
			
		||||
              ],
 | 
			
		||||
            ),
 | 
			
		||||
      okTitle: LocaleKeys.button_confirm.tr(),
 | 
			
		||||
      cancelTitle: LocaleKeys.button_cancel.tr(),
 | 
			
		||||
      onOkPressed: onOkPressed,
 | 
			
		||||
      onCancelPressed: onCancelPressed,
 | 
			
		||||
      titleUpperCase: false,
 | 
			
		||||
    );
 | 
			
		||||
      );
 | 
			
		||||
      break;
 | 
			
		||||
    case PendingResourceTypePB.OfflineApp:
 | 
			
		||||
      desc = LocaleKeys.settings_aiPage_keys_downloadAppFlowyOfflineAI.tr();
 | 
			
		||||
      break;
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  showConfirmDialog(
 | 
			
		||||
    context: context,
 | 
			
		||||
    style: ConfirmPopupStyle.cancelAndOk,
 | 
			
		||||
    title: LocaleKeys.settings_aiPage_keys_downloadLLMPrompt.tr(
 | 
			
		||||
      args: [res.name],
 | 
			
		||||
    ),
 | 
			
		||||
    description: desc,
 | 
			
		||||
    confirmLabel: LocaleKeys.button_confirm.tr(),
 | 
			
		||||
    onConfirm: () => context.read<LocalAIChatSettingBloc>().add(
 | 
			
		||||
          LocalAIChatSettingEvent.startDownloadModel(
 | 
			
		||||
            llmModel,
 | 
			
		||||
          ),
 | 
			
		||||
        ),
 | 
			
		||||
    onCancel: () => context.read<LocalAIChatSettingBloc>().add(
 | 
			
		||||
          const LocalAIChatSettingEvent.cancelDownload(),
 | 
			
		||||
        ),
 | 
			
		||||
  );
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
class _ShowDownloadIndicator extends StatelessWidget {
 | 
			
		||||
@ -381,29 +358,7 @@ class _ShowDownloadIndicator extends StatelessWidget {
 | 
			
		||||
                    color: Color(0xFF005483),
 | 
			
		||||
                  ),
 | 
			
		||||
                  onTap: () {
 | 
			
		||||
                    showDialog(
 | 
			
		||||
                      context: context,
 | 
			
		||||
                      barrierDismissible: false,
 | 
			
		||||
                      useRootNavigator: false,
 | 
			
		||||
                      builder: (dialogContext) {
 | 
			
		||||
                        return _LLMModelDownloadDialog(
 | 
			
		||||
                          llmResource: llmResource,
 | 
			
		||||
                          onOkPressed: () {
 | 
			
		||||
                            context.read<LocalAIChatSettingBloc>().add(
 | 
			
		||||
                                  LocalAIChatSettingEvent.startDownloadModel(
 | 
			
		||||
                                    llmModel,
 | 
			
		||||
                                  ),
 | 
			
		||||
                                );
 | 
			
		||||
                          },
 | 
			
		||||
                          onCancelPressed: () {
 | 
			
		||||
                            context.read<LocalAIChatSettingBloc>().add(
 | 
			
		||||
                                  const LocalAIChatSettingEvent
 | 
			
		||||
                                      .cancelDownload(),
 | 
			
		||||
                                );
 | 
			
		||||
                          },
 | 
			
		||||
                        );
 | 
			
		||||
                      },
 | 
			
		||||
                    );
 | 
			
		||||
                    _showDownloadDialog(context, llmResource, llmModel);
 | 
			
		||||
                  },
 | 
			
		||||
                ),
 | 
			
		||||
              ),
 | 
			
		||||
 | 
			
		||||
@ -1,10 +1,14 @@
 | 
			
		||||
import 'package:appflowy/core/helpers/url_launcher.dart';
 | 
			
		||||
import 'package:appflowy/generated/flowy_svgs.g.dart';
 | 
			
		||||
import 'package:appflowy/generated/locale_keys.g.dart';
 | 
			
		||||
import 'package:appflowy/workspace/application/settings/ai/download_offline_ai_app_bloc.dart';
 | 
			
		||||
import 'package:appflowy/workspace/application/settings/ai/plugin_state_bloc.dart';
 | 
			
		||||
import 'package:easy_localization/easy_localization.dart';
 | 
			
		||||
import 'package:flowy_infra/size.dart';
 | 
			
		||||
import 'package:flowy_infra_ui/style_widget/button.dart';
 | 
			
		||||
import 'package:flowy_infra_ui/style_widget/text.dart';
 | 
			
		||||
import 'package:flowy_infra_ui/widget/spacing.dart';
 | 
			
		||||
import 'package:flutter/gestures.dart';
 | 
			
		||||
import 'package:flutter/material.dart';
 | 
			
		||||
import 'package:flutter_bloc/flutter_bloc.dart';
 | 
			
		||||
 | 
			
		||||
@ -21,8 +25,15 @@ class PluginStateIndicator extends StatelessWidget {
 | 
			
		||||
          return state.action.when(
 | 
			
		||||
            init: () => const _InitPlugin(),
 | 
			
		||||
            ready: () => const _LocalAIReadyToUse(),
 | 
			
		||||
            restart: () => const _ReloadButton(),
 | 
			
		||||
            restartPlugin: () => const _ReloadButton(),
 | 
			
		||||
            loadingPlugin: () => const _InitPlugin(),
 | 
			
		||||
            startAIOfflineApp: () => OpenOrDownloadOfflineAIApp(
 | 
			
		||||
              onRetry: () {
 | 
			
		||||
                context
 | 
			
		||||
                    .read<PluginStateBloc>()
 | 
			
		||||
                    .add(const PluginStateEvent.started());
 | 
			
		||||
              },
 | 
			
		||||
            ),
 | 
			
		||||
          );
 | 
			
		||||
        },
 | 
			
		||||
      ),
 | 
			
		||||
@ -35,9 +46,15 @@ class _InitPlugin extends StatelessWidget {
 | 
			
		||||
 | 
			
		||||
  @override
 | 
			
		||||
  Widget build(BuildContext context) {
 | 
			
		||||
    return const SizedBox(
 | 
			
		||||
      height: 20,
 | 
			
		||||
      child: CircularProgressIndicator.adaptive(),
 | 
			
		||||
    return Row(
 | 
			
		||||
      children: [
 | 
			
		||||
        FlowyText(LocaleKeys.settings_aiPage_keys_localAIStart.tr()),
 | 
			
		||||
        const Spacer(),
 | 
			
		||||
        const SizedBox(
 | 
			
		||||
          height: 20,
 | 
			
		||||
          child: CircularProgressIndicator.adaptive(),
 | 
			
		||||
        ),
 | 
			
		||||
      ],
 | 
			
		||||
    );
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
@ -124,3 +141,106 @@ class _LocalAIReadyToUse extends StatelessWidget {
 | 
			
		||||
    );
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
class OpenOrDownloadOfflineAIApp extends StatelessWidget {
 | 
			
		||||
  const OpenOrDownloadOfflineAIApp({required this.onRetry, super.key});
 | 
			
		||||
 | 
			
		||||
  final VoidCallback onRetry;
 | 
			
		||||
 | 
			
		||||
  @override
 | 
			
		||||
  Widget build(BuildContext context) {
 | 
			
		||||
    return BlocProvider(
 | 
			
		||||
      create: (context) => DownloadOfflineAIBloc(),
 | 
			
		||||
      child: BlocBuilder<DownloadOfflineAIBloc, DownloadOfflineAIState>(
 | 
			
		||||
        builder: (context, state) {
 | 
			
		||||
          return Column(
 | 
			
		||||
            crossAxisAlignment: CrossAxisAlignment.start,
 | 
			
		||||
            children: [
 | 
			
		||||
              RichText(
 | 
			
		||||
                maxLines: 3,
 | 
			
		||||
                textAlign: TextAlign.left,
 | 
			
		||||
                text: TextSpan(
 | 
			
		||||
                  children: <TextSpan>[
 | 
			
		||||
                    TextSpan(
 | 
			
		||||
                      text:
 | 
			
		||||
                          "${LocaleKeys.settings_aiPage_keys_offlineAIInstruction1.tr()} ",
 | 
			
		||||
                      style: Theme.of(context)
 | 
			
		||||
                          .textTheme
 | 
			
		||||
                          .bodySmall!
 | 
			
		||||
                          .copyWith(height: 1.5),
 | 
			
		||||
                    ),
 | 
			
		||||
                    TextSpan(
 | 
			
		||||
                      text:
 | 
			
		||||
                          " ${LocaleKeys.settings_aiPage_keys_offlineAIInstruction2.tr()} ",
 | 
			
		||||
                      style: Theme.of(context).textTheme.bodyMedium!.copyWith(
 | 
			
		||||
                            fontSize: FontSizes.s14,
 | 
			
		||||
                            color: Theme.of(context).colorScheme.primary,
 | 
			
		||||
                            height: 1.5,
 | 
			
		||||
                          ),
 | 
			
		||||
                      recognizer: TapGestureRecognizer()
 | 
			
		||||
                        ..onTap = () => afLaunchUrlString(
 | 
			
		||||
                              "https://docs.appflowy.io/docs/appflowy/product/appflowy-ai-offline",
 | 
			
		||||
                            ),
 | 
			
		||||
                    ),
 | 
			
		||||
                    TextSpan(
 | 
			
		||||
                      text:
 | 
			
		||||
                          " ${LocaleKeys.settings_aiPage_keys_offlineAIInstruction3.tr()} ",
 | 
			
		||||
                      style: Theme.of(context)
 | 
			
		||||
                          .textTheme
 | 
			
		||||
                          .bodySmall!
 | 
			
		||||
                          .copyWith(height: 1.5),
 | 
			
		||||
                    ),
 | 
			
		||||
                    TextSpan(
 | 
			
		||||
                      text:
 | 
			
		||||
                          "${LocaleKeys.settings_aiPage_keys_offlineAIDownload1.tr()} ",
 | 
			
		||||
                      style: Theme.of(context)
 | 
			
		||||
                          .textTheme
 | 
			
		||||
                          .bodySmall!
 | 
			
		||||
                          .copyWith(height: 1.5),
 | 
			
		||||
                    ),
 | 
			
		||||
                    TextSpan(
 | 
			
		||||
                      text:
 | 
			
		||||
                          " ${LocaleKeys.settings_aiPage_keys_offlineAIDownload2.tr()} ",
 | 
			
		||||
                      style: Theme.of(context).textTheme.bodyMedium!.copyWith(
 | 
			
		||||
                            fontSize: FontSizes.s14,
 | 
			
		||||
                            color: Theme.of(context).colorScheme.primary,
 | 
			
		||||
                            height: 1.5,
 | 
			
		||||
                          ),
 | 
			
		||||
                      recognizer: TapGestureRecognizer()
 | 
			
		||||
                        ..onTap =
 | 
			
		||||
                            () => context.read<DownloadOfflineAIBloc>().add(
 | 
			
		||||
                                  const DownloadOfflineAIEvent.started(),
 | 
			
		||||
                                ),
 | 
			
		||||
                    ),
 | 
			
		||||
                    TextSpan(
 | 
			
		||||
                      text:
 | 
			
		||||
                          " ${LocaleKeys.settings_aiPage_keys_offlineAIDownload3.tr()} ",
 | 
			
		||||
                      style: Theme.of(context)
 | 
			
		||||
                          .textTheme
 | 
			
		||||
                          .bodySmall!
 | 
			
		||||
                          .copyWith(height: 1.5),
 | 
			
		||||
                    ),
 | 
			
		||||
                  ],
 | 
			
		||||
                ),
 | 
			
		||||
              ),
 | 
			
		||||
              const SizedBox(
 | 
			
		||||
                height: 6,
 | 
			
		||||
              ), // Replaced VSpace with SizedBox for simplicity
 | 
			
		||||
              SizedBox(
 | 
			
		||||
                height: 30,
 | 
			
		||||
                child: FlowyButton(
 | 
			
		||||
                  useIntrinsicWidth: true,
 | 
			
		||||
                  margin: const EdgeInsets.symmetric(horizontal: 12),
 | 
			
		||||
                  text: FlowyText(
 | 
			
		||||
                    LocaleKeys.settings_aiPage_keys_activeOfflineAI.tr(),
 | 
			
		||||
                  ),
 | 
			
		||||
                  onTap: onRetry,
 | 
			
		||||
                ),
 | 
			
		||||
              ),
 | 
			
		||||
            ],
 | 
			
		||||
          );
 | 
			
		||||
        },
 | 
			
		||||
      ),
 | 
			
		||||
    );
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
@ -7,7 +7,6 @@ import 'package:appflowy/workspace/presentation/settings/pages/setting_ai_view/m
 | 
			
		||||
import 'package:appflowy/workspace/presentation/settings/widgets/setting_appflowy_cloud.dart';
 | 
			
		||||
import 'package:flowy_infra/theme_extension.dart';
 | 
			
		||||
import 'package:flowy_infra_ui/widget/spacing.dart';
 | 
			
		||||
import 'package:flutter/foundation.dart';
 | 
			
		||||
import 'package:flutter/material.dart';
 | 
			
		||||
 | 
			
		||||
import 'package:appflowy/generated/locale_keys.g.dart';
 | 
			
		||||
@ -53,11 +52,12 @@ class SettingsAIView extends StatelessWidget {
 | 
			
		||||
          ];
 | 
			
		||||
 | 
			
		||||
          children.add(const _AISearchToggle(value: false));
 | 
			
		||||
          children.add(
 | 
			
		||||
            _LocalAIOnBoarding(
 | 
			
		||||
              workspaceId: userProfile.workspaceId,
 | 
			
		||||
            ),
 | 
			
		||||
          );
 | 
			
		||||
          // TODO(nathan): enable local ai
 | 
			
		||||
          // children.add(
 | 
			
		||||
          //   _LocalAIOnBoarding(
 | 
			
		||||
          //     workspaceId: userProfile.workspaceId,
 | 
			
		||||
          //   ),
 | 
			
		||||
          // );
 | 
			
		||||
 | 
			
		||||
          return SettingsBody(
 | 
			
		||||
            title: LocaleKeys.settings_aiPage_title.tr(),
 | 
			
		||||
@ -114,6 +114,7 @@ class _AISearchToggle extends StatelessWidget {
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// ignore: unused_element
 | 
			
		||||
class _LocalAIOnBoarding extends StatelessWidget {
 | 
			
		||||
  const _LocalAIOnBoarding({required this.workspaceId});
 | 
			
		||||
  final String workspaceId;
 | 
			
		||||
@ -129,7 +130,7 @@ class _LocalAIOnBoarding extends StatelessWidget {
 | 
			
		||||
            child: BlocBuilder<LocalAIOnBoardingBloc, LocalAIOnBoardingState>(
 | 
			
		||||
              builder: (context, state) {
 | 
			
		||||
                // Show the local AI settings if the user has purchased the AI Local plan
 | 
			
		||||
                if (kDebugMode || state.isPurchaseAILocal) {
 | 
			
		||||
                if (state.isPurchaseAILocal) {
 | 
			
		||||
                  return const LocalAISetting();
 | 
			
		||||
                } else {
 | 
			
		||||
                  // Show the upgrade to AI Local plan button if the user has not purchased the AI Local plan
 | 
			
		||||
 | 
			
		||||
@ -1,5 +1,3 @@
 | 
			
		||||
import 'dart:io';
 | 
			
		||||
 | 
			
		||||
import 'package:flutter/material.dart';
 | 
			
		||||
 | 
			
		||||
import 'package:appflowy/util/int64_extension.dart';
 | 
			
		||||
@ -214,23 +212,24 @@ class _SettingsBillingViewState extends State<SettingsBillingView> {
 | 
			
		||||
 | 
			
		||||
                      // Currently, the AI Local tile is only available on macOS
 | 
			
		||||
                      // TODO(nathan): enable windows and linux
 | 
			
		||||
                      if (Platform.isMacOS)
 | 
			
		||||
                        _AITile(
 | 
			
		||||
                          plan: SubscriptionPlanPB.AiLocal,
 | 
			
		||||
                          label: LocaleKeys
 | 
			
		||||
                              .settings_billingPage_addons_aiOnDevice_label
 | 
			
		||||
                              .tr(),
 | 
			
		||||
                          description: LocaleKeys
 | 
			
		||||
                              .settings_billingPage_addons_aiOnDevice_description,
 | 
			
		||||
                          activeDescription: LocaleKeys
 | 
			
		||||
                              .settings_billingPage_addons_aiOnDevice_activeDescription,
 | 
			
		||||
                          canceledDescription: LocaleKeys
 | 
			
		||||
                              .settings_billingPage_addons_aiOnDevice_canceledDescription,
 | 
			
		||||
                          subscriptionInfo:
 | 
			
		||||
                              state.subscriptionInfo.addOns.firstWhereOrNull(
 | 
			
		||||
                            (a) => a.type == WorkspaceAddOnPBType.AddOnAiLocal,
 | 
			
		||||
                          ),
 | 
			
		||||
                        ),
 | 
			
		||||
                      // TODO(nathan): enable local ai
 | 
			
		||||
                      // if (Platform.isMacOS)
 | 
			
		||||
                      //   _AITile(
 | 
			
		||||
                      //     plan: SubscriptionPlanPB.AiLocal,
 | 
			
		||||
                      //     label: LocaleKeys
 | 
			
		||||
                      //         .settings_billingPage_addons_aiOnDevice_label
 | 
			
		||||
                      //         .tr(),
 | 
			
		||||
                      //     description: LocaleKeys
 | 
			
		||||
                      //         .settings_billingPage_addons_aiOnDevice_description,
 | 
			
		||||
                      //     activeDescription: LocaleKeys
 | 
			
		||||
                      //         .settings_billingPage_addons_aiOnDevice_activeDescription,
 | 
			
		||||
                      //     canceledDescription: LocaleKeys
 | 
			
		||||
                      //         .settings_billingPage_addons_aiOnDevice_canceledDescription,
 | 
			
		||||
                      //     subscriptionInfo:
 | 
			
		||||
                      //         state.subscriptionInfo.addOns.firstWhereOrNull(
 | 
			
		||||
                      //       (a) => a.type == WorkspaceAddOnPBType.AddOnAiLocal,
 | 
			
		||||
                      //     ),
 | 
			
		||||
                      //   ),
 | 
			
		||||
                    ],
 | 
			
		||||
                  ),
 | 
			
		||||
                ],
 | 
			
		||||
 | 
			
		||||
@ -1,5 +1,3 @@
 | 
			
		||||
import 'dart:io';
 | 
			
		||||
 | 
			
		||||
import 'package:flutter/material.dart';
 | 
			
		||||
 | 
			
		||||
import 'package:appflowy/generated/flowy_svgs.g.dart';
 | 
			
		||||
@ -141,43 +139,44 @@ class _SettingsPlanViewState extends State<SettingsPlanView> {
 | 
			
		||||
 | 
			
		||||
                    // Currently, the AI Local tile is only available on macOS
 | 
			
		||||
                    // TODO(nathan): enable windows and linux
 | 
			
		||||
                    if (Platform.isMacOS)
 | 
			
		||||
                      Flexible(
 | 
			
		||||
                        child: _AddOnBox(
 | 
			
		||||
                          title: LocaleKeys
 | 
			
		||||
                              .settings_planPage_planUsage_addons_aiOnDevice_title
 | 
			
		||||
                              .tr(),
 | 
			
		||||
                          description: LocaleKeys
 | 
			
		||||
                              .settings_planPage_planUsage_addons_aiOnDevice_description
 | 
			
		||||
                              .tr(),
 | 
			
		||||
                          price: LocaleKeys
 | 
			
		||||
                              .settings_planPage_planUsage_addons_aiOnDevice_price
 | 
			
		||||
                              .tr(
 | 
			
		||||
                            args: [
 | 
			
		||||
                              SubscriptionPlanPB.AiLocal.priceAnnualBilling,
 | 
			
		||||
                            ],
 | 
			
		||||
                          ),
 | 
			
		||||
                          priceInfo: LocaleKeys
 | 
			
		||||
                              .settings_planPage_planUsage_addons_aiOnDevice_priceInfo
 | 
			
		||||
                              .tr(),
 | 
			
		||||
                          billingInfo: LocaleKeys
 | 
			
		||||
                              .settings_planPage_planUsage_addons_aiOnDevice_billingInfo
 | 
			
		||||
                              .tr(
 | 
			
		||||
                            args: [
 | 
			
		||||
                              SubscriptionPlanPB.AiLocal.priceMonthBilling,
 | 
			
		||||
                            ],
 | 
			
		||||
                          ),
 | 
			
		||||
                          buttonText: state.subscriptionInfo.hasAIOnDevice
 | 
			
		||||
                              ? LocaleKeys
 | 
			
		||||
                                  .settings_planPage_planUsage_addons_activeLabel
 | 
			
		||||
                                  .tr()
 | 
			
		||||
                              : LocaleKeys
 | 
			
		||||
                                  .settings_planPage_planUsage_addons_addLabel
 | 
			
		||||
                                  .tr(),
 | 
			
		||||
                          isActive: state.subscriptionInfo.hasAIOnDevice,
 | 
			
		||||
                          plan: SubscriptionPlanPB.AiLocal,
 | 
			
		||||
                        ),
 | 
			
		||||
                      ),
 | 
			
		||||
                    // TODO(nathan): enable local ai
 | 
			
		||||
                    // if (Platform.isMacOS)
 | 
			
		||||
                    //   Flexible(
 | 
			
		||||
                    //     child: _AddOnBox(
 | 
			
		||||
                    //       title: LocaleKeys
 | 
			
		||||
                    //           .settings_planPage_planUsage_addons_aiOnDevice_title
 | 
			
		||||
                    //           .tr(),
 | 
			
		||||
                    //       description: LocaleKeys
 | 
			
		||||
                    //           .settings_planPage_planUsage_addons_aiOnDevice_description
 | 
			
		||||
                    //           .tr(),
 | 
			
		||||
                    //       price: LocaleKeys
 | 
			
		||||
                    //           .settings_planPage_planUsage_addons_aiOnDevice_price
 | 
			
		||||
                    //           .tr(
 | 
			
		||||
                    //         args: [
 | 
			
		||||
                    //           SubscriptionPlanPB.AiLocal.priceAnnualBilling,
 | 
			
		||||
                    //         ],
 | 
			
		||||
                    //       ),
 | 
			
		||||
                    //       priceInfo: LocaleKeys
 | 
			
		||||
                    //           .settings_planPage_planUsage_addons_aiOnDevice_priceInfo
 | 
			
		||||
                    //           .tr(),
 | 
			
		||||
                    //       billingInfo: LocaleKeys
 | 
			
		||||
                    //           .settings_planPage_planUsage_addons_aiOnDevice_billingInfo
 | 
			
		||||
                    //           .tr(
 | 
			
		||||
                    //         args: [
 | 
			
		||||
                    //           SubscriptionPlanPB.AiLocal.priceMonthBilling,
 | 
			
		||||
                    //         ],
 | 
			
		||||
                    //       ),
 | 
			
		||||
                    //       buttonText: state.subscriptionInfo.hasAIOnDevice
 | 
			
		||||
                    //           ? LocaleKeys
 | 
			
		||||
                    //               .settings_planPage_planUsage_addons_activeLabel
 | 
			
		||||
                    //               .tr()
 | 
			
		||||
                    //           : LocaleKeys
 | 
			
		||||
                    //               .settings_planPage_planUsage_addons_addLabel
 | 
			
		||||
                    //               .tr(),
 | 
			
		||||
                    //       isActive: state.subscriptionInfo.hasAIOnDevice,
 | 
			
		||||
                    //       plan: SubscriptionPlanPB.AiLocal,
 | 
			
		||||
                    //     ),
 | 
			
		||||
                    //   ),
 | 
			
		||||
                  ],
 | 
			
		||||
                ),
 | 
			
		||||
              ],
 | 
			
		||||
 | 
			
		||||
@ -419,6 +419,7 @@ Future<void> showConfirmDialog({
 | 
			
		||||
  required String title,
 | 
			
		||||
  required String description,
 | 
			
		||||
  VoidCallback? onConfirm,
 | 
			
		||||
  VoidCallback? onCancel,
 | 
			
		||||
  String? confirmLabel,
 | 
			
		||||
  ConfirmPopupStyle style = ConfirmPopupStyle.onlyOk,
 | 
			
		||||
}) {
 | 
			
		||||
@ -435,6 +436,7 @@ Future<void> showConfirmDialog({
 | 
			
		||||
            title: title,
 | 
			
		||||
            description: description,
 | 
			
		||||
            onConfirm: () => onConfirm?.call(),
 | 
			
		||||
            onCancel: () => onCancel?.call(),
 | 
			
		||||
            confirmLabel: confirmLabel,
 | 
			
		||||
            style: style,
 | 
			
		||||
          ),
 | 
			
		||||
 | 
			
		||||
							
								
								
									
										30
									
								
								frontend/appflowy_tauri/src-tauri/Cargo.lock
									
									
									
										generated
									
									
									
								
							
							
						
						
									
										30
									
								
								frontend/appflowy_tauri/src-tauri/Cargo.lock
									
									
									
										generated
									
									
									
								
							@ -172,7 +172,7 @@ checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da"
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "app-error"
 | 
			
		||||
version = "0.1.0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "anyhow",
 | 
			
		||||
 "bincode",
 | 
			
		||||
@ -192,7 +192,7 @@ dependencies = [
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "appflowy-ai-client"
 | 
			
		||||
version = "0.1.0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "anyhow",
 | 
			
		||||
 "bytes",
 | 
			
		||||
@ -206,7 +206,7 @@ dependencies = [
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "appflowy-local-ai"
 | 
			
		||||
version = "0.1.0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=f3b678e36f22012b241f8e2f3cb811be2da245c0#f3b678e36f22012b241f8e2f3cb811be2da245c0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=8ef7d3e4c38fbf92ff9b3630fe79017e95a496ec#8ef7d3e4c38fbf92ff9b3630fe79017e95a496ec"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "anyhow",
 | 
			
		||||
 "appflowy-plugin",
 | 
			
		||||
@ -225,7 +225,7 @@ dependencies = [
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "appflowy-plugin"
 | 
			
		||||
version = "0.1.0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=f3b678e36f22012b241f8e2f3cb811be2da245c0#f3b678e36f22012b241f8e2f3cb811be2da245c0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=8ef7d3e4c38fbf92ff9b3630fe79017e95a496ec#8ef7d3e4c38fbf92ff9b3630fe79017e95a496ec"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "anyhow",
 | 
			
		||||
 "cfg-if",
 | 
			
		||||
@ -826,7 +826,7 @@ dependencies = [
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "client-api"
 | 
			
		||||
version = "0.2.0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "again",
 | 
			
		||||
 "anyhow",
 | 
			
		||||
@ -876,7 +876,7 @@ dependencies = [
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "client-api-entity"
 | 
			
		||||
version = "0.1.0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "collab-entity",
 | 
			
		||||
 "collab-rt-entity",
 | 
			
		||||
@ -888,7 +888,7 @@ dependencies = [
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "client-websocket"
 | 
			
		||||
version = "0.1.0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "futures-channel",
 | 
			
		||||
 "futures-util",
 | 
			
		||||
@ -1132,7 +1132,7 @@ dependencies = [
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "collab-rt-entity"
 | 
			
		||||
version = "0.1.0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "anyhow",
 | 
			
		||||
 "bincode",
 | 
			
		||||
@ -1157,7 +1157,7 @@ dependencies = [
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "collab-rt-protocol"
 | 
			
		||||
version = "0.1.0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "anyhow",
 | 
			
		||||
 "async-trait",
 | 
			
		||||
@ -1421,7 +1421,7 @@ dependencies = [
 | 
			
		||||
 "cssparser-macros",
 | 
			
		||||
 "dtoa-short",
 | 
			
		||||
 "itoa 1.0.6",
 | 
			
		||||
 "phf 0.8.0",
 | 
			
		||||
 "phf 0.11.2",
 | 
			
		||||
 "smallvec",
 | 
			
		||||
]
 | 
			
		||||
 | 
			
		||||
@ -1532,7 +1532,7 @@ checksum = "c2e66c9d817f1720209181c316d28635c050fa304f9c79e47a520882661b7308"
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "database-entity"
 | 
			
		||||
version = "0.1.0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "anyhow",
 | 
			
		||||
 "app-error",
 | 
			
		||||
@ -3038,7 +3038,7 @@ dependencies = [
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "gotrue"
 | 
			
		||||
version = "0.1.0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "anyhow",
 | 
			
		||||
 "futures-util",
 | 
			
		||||
@ -3055,7 +3055,7 @@ dependencies = [
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "gotrue-entity"
 | 
			
		||||
version = "0.1.0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "anyhow",
 | 
			
		||||
 "app-error",
 | 
			
		||||
@ -3487,7 +3487,7 @@ dependencies = [
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "infra"
 | 
			
		||||
version = "0.1.0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "anyhow",
 | 
			
		||||
 "bytes",
 | 
			
		||||
@ -6031,7 +6031,7 @@ dependencies = [
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "shared-entity"
 | 
			
		||||
version = "0.1.0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "anyhow",
 | 
			
		||||
 "app-error",
 | 
			
		||||
 | 
			
		||||
@ -53,7 +53,7 @@ collab-user = { version = "0.2" }
 | 
			
		||||
# Run the script:
 | 
			
		||||
# scripts/tool/update_client_api_rev.sh  new_rev_id
 | 
			
		||||
# ⚠️⚠️⚠️️
 | 
			
		||||
client-api = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "c2a839ba8bf9ead44679eb08f3a9680467b767ca" }
 | 
			
		||||
client-api = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "0062c950677f7f633f5b7edabc827a35d3bc92c3" }
 | 
			
		||||
 | 
			
		||||
[dependencies]
 | 
			
		||||
serde_json.workspace = true
 | 
			
		||||
@ -128,5 +128,5 @@ collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-
 | 
			
		||||
# To update the commit ID, run:
 | 
			
		||||
# scripts/tool/update_local_ai_rev.sh new_rev_id
 | 
			
		||||
# ⚠️⚠️⚠️️
 | 
			
		||||
appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "f3b678e36f22012b241f8e2f3cb811be2da245c0" }
 | 
			
		||||
appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "f3b678e36f22012b241f8e2f3cb811be2da245c0" }
 | 
			
		||||
appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "8ef7d3e4c38fbf92ff9b3630fe79017e95a496ec" }
 | 
			
		||||
appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "8ef7d3e4c38fbf92ff9b3630fe79017e95a496ec" }
 | 
			
		||||
 | 
			
		||||
							
								
								
									
										30
									
								
								frontend/appflowy_web_app/src-tauri/Cargo.lock
									
									
									
										generated
									
									
									
								
							
							
						
						
									
										30
									
								
								frontend/appflowy_web_app/src-tauri/Cargo.lock
									
									
									
										generated
									
									
									
								
							@ -163,7 +163,7 @@ checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da"
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "app-error"
 | 
			
		||||
version = "0.1.0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "anyhow",
 | 
			
		||||
 "bincode",
 | 
			
		||||
@ -183,7 +183,7 @@ dependencies = [
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "appflowy-ai-client"
 | 
			
		||||
version = "0.1.0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "anyhow",
 | 
			
		||||
 "bytes",
 | 
			
		||||
@ -197,7 +197,7 @@ dependencies = [
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "appflowy-local-ai"
 | 
			
		||||
version = "0.1.0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=f3b678e36f22012b241f8e2f3cb811be2da245c0#f3b678e36f22012b241f8e2f3cb811be2da245c0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=8ef7d3e4c38fbf92ff9b3630fe79017e95a496ec#8ef7d3e4c38fbf92ff9b3630fe79017e95a496ec"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "anyhow",
 | 
			
		||||
 "appflowy-plugin",
 | 
			
		||||
@ -216,7 +216,7 @@ dependencies = [
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "appflowy-plugin"
 | 
			
		||||
version = "0.1.0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=f3b678e36f22012b241f8e2f3cb811be2da245c0#f3b678e36f22012b241f8e2f3cb811be2da245c0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=8ef7d3e4c38fbf92ff9b3630fe79017e95a496ec#8ef7d3e4c38fbf92ff9b3630fe79017e95a496ec"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "anyhow",
 | 
			
		||||
 "cfg-if",
 | 
			
		||||
@ -800,7 +800,7 @@ dependencies = [
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "client-api"
 | 
			
		||||
version = "0.2.0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "again",
 | 
			
		||||
 "anyhow",
 | 
			
		||||
@ -850,7 +850,7 @@ dependencies = [
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "client-api-entity"
 | 
			
		||||
version = "0.1.0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "collab-entity",
 | 
			
		||||
 "collab-rt-entity",
 | 
			
		||||
@ -862,7 +862,7 @@ dependencies = [
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "client-websocket"
 | 
			
		||||
version = "0.1.0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "futures-channel",
 | 
			
		||||
 "futures-util",
 | 
			
		||||
@ -1115,7 +1115,7 @@ dependencies = [
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "collab-rt-entity"
 | 
			
		||||
version = "0.1.0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "anyhow",
 | 
			
		||||
 "bincode",
 | 
			
		||||
@ -1140,7 +1140,7 @@ dependencies = [
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "collab-rt-protocol"
 | 
			
		||||
version = "0.1.0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "anyhow",
 | 
			
		||||
 "async-trait",
 | 
			
		||||
@ -1411,7 +1411,7 @@ dependencies = [
 | 
			
		||||
 "cssparser-macros",
 | 
			
		||||
 "dtoa-short",
 | 
			
		||||
 "itoa 1.0.10",
 | 
			
		||||
 "phf 0.8.0",
 | 
			
		||||
 "phf 0.11.2",
 | 
			
		||||
 "smallvec",
 | 
			
		||||
]
 | 
			
		||||
 | 
			
		||||
@ -1522,7 +1522,7 @@ checksum = "7e962a19be5cfc3f3bf6dd8f61eb50107f356ad6270fbb3ed41476571db78be5"
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "database-entity"
 | 
			
		||||
version = "0.1.0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "anyhow",
 | 
			
		||||
 "app-error",
 | 
			
		||||
@ -3105,7 +3105,7 @@ dependencies = [
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "gotrue"
 | 
			
		||||
version = "0.1.0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "anyhow",
 | 
			
		||||
 "futures-util",
 | 
			
		||||
@ -3122,7 +3122,7 @@ dependencies = [
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "gotrue-entity"
 | 
			
		||||
version = "0.1.0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "anyhow",
 | 
			
		||||
 "app-error",
 | 
			
		||||
@ -3559,7 +3559,7 @@ dependencies = [
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "infra"
 | 
			
		||||
version = "0.1.0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "anyhow",
 | 
			
		||||
 "bytes",
 | 
			
		||||
@ -6095,7 +6095,7 @@ dependencies = [
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "shared-entity"
 | 
			
		||||
version = "0.1.0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "anyhow",
 | 
			
		||||
 "app-error",
 | 
			
		||||
 | 
			
		||||
@ -52,7 +52,7 @@ collab-user = { version = "0.2" }
 | 
			
		||||
# Run the script:
 | 
			
		||||
# scripts/tool/update_client_api_rev.sh  new_rev_id
 | 
			
		||||
# ⚠️⚠️⚠️️
 | 
			
		||||
client-api = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "c2a839ba8bf9ead44679eb08f3a9680467b767ca" }
 | 
			
		||||
client-api = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "0062c950677f7f633f5b7edabc827a35d3bc92c3" }
 | 
			
		||||
 | 
			
		||||
[dependencies]
 | 
			
		||||
serde_json.workspace = true
 | 
			
		||||
@ -128,6 +128,6 @@ collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-
 | 
			
		||||
# To update the commit ID, run:
 | 
			
		||||
# scripts/tool/update_local_ai_rev.sh new_rev_id
 | 
			
		||||
# ⚠️⚠️⚠️️
 | 
			
		||||
appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "f3b678e36f22012b241f8e2f3cb811be2da245c0" }
 | 
			
		||||
appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "f3b678e36f22012b241f8e2f3cb811be2da245c0" }
 | 
			
		||||
appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "8ef7d3e4c38fbf92ff9b3630fe79017e95a496ec" }
 | 
			
		||||
appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "8ef7d3e4c38fbf92ff9b3630fe79017e95a496ec" }
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@ -655,6 +655,7 @@
 | 
			
		||||
        "llmModel": "Language Model",
 | 
			
		||||
        "llmModelType": "Language Model Type",
 | 
			
		||||
        "downloadLLMPrompt": "Download {}",
 | 
			
		||||
        "downloadAppFlowyOfflineAI": "Downloading AI offline package will enable AI to run on your device. Do you want to continue?",
 | 
			
		||||
        "downloadLLMPromptDetail": "Downloading {} local model will take up to {} of storage. Do you want to continue?",
 | 
			
		||||
        "downloadAIModelButton": "Download AI model",
 | 
			
		||||
        "downloadingModel": "Downloading",
 | 
			
		||||
@ -667,7 +668,14 @@
 | 
			
		||||
        "disableLocalAITitle": "Disable local AI",
 | 
			
		||||
        "disableLocalAIDescription": "Do you want to disable local AI?",
 | 
			
		||||
        "localAIToggleTitle": "Toggle to enable or disable local AI",
 | 
			
		||||
        "fetchLocalModel": "Fetch local model configuration",
 | 
			
		||||
        "offlineAIInstruction1": "Follow the",
 | 
			
		||||
        "offlineAIInstruction2": "instruction",
 | 
			
		||||
        "offlineAIInstruction3": "to enable offline AI.",
 | 
			
		||||
        "offlineAIDownload1": "If you have not downloaded the AppFlowy AI, please",
 | 
			
		||||
        "offlineAIDownload2": "download",
 | 
			
		||||
        "offlineAIDownload3": "it first",
 | 
			
		||||
        "activeOfflineAI": "Active",
 | 
			
		||||
        "downloadOfflineAI": "Download",
 | 
			
		||||
        "openModelDirectory": "Open folder"
 | 
			
		||||
      }
 | 
			
		||||
    },
 | 
			
		||||
 | 
			
		||||
							
								
								
									
										119
									
								
								frontend/rust-lib/Cargo.lock
									
									
									
										generated
									
									
									
								
							
							
						
						
									
										119
									
								
								frontend/rust-lib/Cargo.lock
									
									
									
										generated
									
									
									
								
							@ -163,7 +163,7 @@ checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da"
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "app-error"
 | 
			
		||||
version = "0.1.0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "anyhow",
 | 
			
		||||
 "bincode",
 | 
			
		||||
@ -183,7 +183,7 @@ dependencies = [
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "appflowy-ai-client"
 | 
			
		||||
version = "0.1.0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "anyhow",
 | 
			
		||||
 "bytes",
 | 
			
		||||
@ -197,7 +197,7 @@ dependencies = [
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "appflowy-local-ai"
 | 
			
		||||
version = "0.1.0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=f3b678e36f22012b241f8e2f3cb811be2da245c0#f3b678e36f22012b241f8e2f3cb811be2da245c0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=8ef7d3e4c38fbf92ff9b3630fe79017e95a496ec#8ef7d3e4c38fbf92ff9b3630fe79017e95a496ec"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "anyhow",
 | 
			
		||||
 "appflowy-plugin",
 | 
			
		||||
@ -216,7 +216,7 @@ dependencies = [
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "appflowy-plugin"
 | 
			
		||||
version = "0.1.0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=f3b678e36f22012b241f8e2f3cb811be2da245c0#f3b678e36f22012b241f8e2f3cb811be2da245c0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=8ef7d3e4c38fbf92ff9b3630fe79017e95a496ec#8ef7d3e4c38fbf92ff9b3630fe79017e95a496ec"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "anyhow",
 | 
			
		||||
 "cfg-if",
 | 
			
		||||
@ -718,7 +718,7 @@ dependencies = [
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "client-api"
 | 
			
		||||
version = "0.2.0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "again",
 | 
			
		||||
 "anyhow",
 | 
			
		||||
@ -768,7 +768,7 @@ dependencies = [
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "client-api-entity"
 | 
			
		||||
version = "0.1.0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "collab-entity",
 | 
			
		||||
 "collab-rt-entity",
 | 
			
		||||
@ -780,7 +780,7 @@ dependencies = [
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "client-websocket"
 | 
			
		||||
version = "0.1.0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "futures-channel",
 | 
			
		||||
 "futures-util",
 | 
			
		||||
@ -993,7 +993,7 @@ dependencies = [
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "collab-rt-entity"
 | 
			
		||||
version = "0.1.0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "anyhow",
 | 
			
		||||
 "bincode",
 | 
			
		||||
@ -1018,7 +1018,7 @@ dependencies = [
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "collab-rt-protocol"
 | 
			
		||||
version = "0.1.0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "anyhow",
 | 
			
		||||
 "async-trait",
 | 
			
		||||
@ -1356,7 +1356,7 @@ checksum = "c2e66c9d817f1720209181c316d28635c050fa304f9c79e47a520882661b7308"
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "database-entity"
 | 
			
		||||
version = "0.1.0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "anyhow",
 | 
			
		||||
 "app-error",
 | 
			
		||||
@ -1739,6 +1739,18 @@ dependencies = [
 | 
			
		||||
 "getrandom 0.2.10",
 | 
			
		||||
]
 | 
			
		||||
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "filetime"
 | 
			
		||||
version = "0.2.23"
 | 
			
		||||
source = "registry+https://github.com/rust-lang/crates.io-index"
 | 
			
		||||
checksum = "1ee447700ac8aa0b2f2bd7bc4462ad686ba06baa6727ac149a2d6277f0d240fd"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "cfg-if",
 | 
			
		||||
 "libc",
 | 
			
		||||
 "redox_syscall 0.4.1",
 | 
			
		||||
 "windows-sys 0.52.0",
 | 
			
		||||
]
 | 
			
		||||
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "finl_unicode"
 | 
			
		||||
version = "1.2.0"
 | 
			
		||||
@ -1794,6 +1806,7 @@ dependencies = [
 | 
			
		||||
 "lib-infra",
 | 
			
		||||
 "log",
 | 
			
		||||
 "md5",
 | 
			
		||||
 "notify",
 | 
			
		||||
 "parking_lot 0.12.1",
 | 
			
		||||
 "protobuf",
 | 
			
		||||
 "reqwest",
 | 
			
		||||
@ -2457,6 +2470,15 @@ dependencies = [
 | 
			
		||||
 "windows-sys 0.52.0",
 | 
			
		||||
]
 | 
			
		||||
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "fsevent-sys"
 | 
			
		||||
version = "4.1.0"
 | 
			
		||||
source = "registry+https://github.com/rust-lang/crates.io-index"
 | 
			
		||||
checksum = "76ee7a02da4d231650c7cea31349b889be2f45ddb3ef3032d2ec8185f6313fd2"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "libc",
 | 
			
		||||
]
 | 
			
		||||
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "fuchsia-cprng"
 | 
			
		||||
version = "0.1.1"
 | 
			
		||||
@ -2705,7 +2727,7 @@ dependencies = [
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "gotrue"
 | 
			
		||||
version = "0.1.0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "anyhow",
 | 
			
		||||
 "futures-util",
 | 
			
		||||
@ -2722,7 +2744,7 @@ dependencies = [
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "gotrue-entity"
 | 
			
		||||
version = "0.1.0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "anyhow",
 | 
			
		||||
 "app-error",
 | 
			
		||||
@ -3087,7 +3109,7 @@ dependencies = [
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "infra"
 | 
			
		||||
version = "0.1.0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "anyhow",
 | 
			
		||||
 "bytes",
 | 
			
		||||
@ -3098,6 +3120,26 @@ dependencies = [
 | 
			
		||||
 "tracing",
 | 
			
		||||
]
 | 
			
		||||
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "inotify"
 | 
			
		||||
version = "0.9.6"
 | 
			
		||||
source = "registry+https://github.com/rust-lang/crates.io-index"
 | 
			
		||||
checksum = "f8069d3ec154eb856955c1c0fbffefbf5f3c40a104ec912d4797314c1801abff"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "bitflags 1.3.2",
 | 
			
		||||
 "inotify-sys",
 | 
			
		||||
 "libc",
 | 
			
		||||
]
 | 
			
		||||
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "inotify-sys"
 | 
			
		||||
version = "0.1.5"
 | 
			
		||||
source = "registry+https://github.com/rust-lang/crates.io-index"
 | 
			
		||||
checksum = "e05c02b5e89bff3b946cedeca278abc628fe811e604f027c45a8aa3cf793d0eb"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "libc",
 | 
			
		||||
]
 | 
			
		||||
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "inout"
 | 
			
		||||
version = "0.1.3"
 | 
			
		||||
@ -3181,6 +3223,26 @@ dependencies = [
 | 
			
		||||
 "simple_asn1",
 | 
			
		||||
]
 | 
			
		||||
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "kqueue"
 | 
			
		||||
version = "1.0.8"
 | 
			
		||||
source = "registry+https://github.com/rust-lang/crates.io-index"
 | 
			
		||||
checksum = "7447f1ca1b7b563588a205fe93dea8df60fd981423a768bc1c0ded35ed147d0c"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "kqueue-sys",
 | 
			
		||||
 "libc",
 | 
			
		||||
]
 | 
			
		||||
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "kqueue-sys"
 | 
			
		||||
version = "1.0.4"
 | 
			
		||||
source = "registry+https://github.com/rust-lang/crates.io-index"
 | 
			
		||||
checksum = "ed9625ffda8729b85e45cf04090035ac368927b8cebc34898e7c120f52e4838b"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "bitflags 1.3.2",
 | 
			
		||||
 "libc",
 | 
			
		||||
]
 | 
			
		||||
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "lazy_static"
 | 
			
		||||
version = "1.4.0"
 | 
			
		||||
@ -3583,6 +3645,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 | 
			
		||||
checksum = "3dce281c5e46beae905d4de1870d8b1509a9142b62eedf18b443b011ca8343d0"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "libc",
 | 
			
		||||
 "log",
 | 
			
		||||
 "wasi 0.11.0+wasi-snapshot-preview1",
 | 
			
		||||
 "windows-sys 0.48.0",
 | 
			
		||||
]
 | 
			
		||||
@ -3642,6 +3705,25 @@ dependencies = [
 | 
			
		||||
 "minimal-lexical",
 | 
			
		||||
]
 | 
			
		||||
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "notify"
 | 
			
		||||
version = "6.1.1"
 | 
			
		||||
source = "registry+https://github.com/rust-lang/crates.io-index"
 | 
			
		||||
checksum = "6205bd8bb1e454ad2e27422015fb5e4f2bcc7e08fa8f27058670d208324a4d2d"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "bitflags 2.4.0",
 | 
			
		||||
 "crossbeam-channel",
 | 
			
		||||
 "filetime",
 | 
			
		||||
 "fsevent-sys",
 | 
			
		||||
 "inotify",
 | 
			
		||||
 "kqueue",
 | 
			
		||||
 "libc",
 | 
			
		||||
 "log",
 | 
			
		||||
 "mio",
 | 
			
		||||
 "walkdir",
 | 
			
		||||
 "windows-sys 0.48.0",
 | 
			
		||||
]
 | 
			
		||||
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "ntapi"
 | 
			
		||||
version = "0.4.1"
 | 
			
		||||
@ -4678,6 +4760,15 @@ dependencies = [
 | 
			
		||||
 "bitflags 1.3.2",
 | 
			
		||||
]
 | 
			
		||||
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "redox_syscall"
 | 
			
		||||
version = "0.4.1"
 | 
			
		||||
source = "registry+https://github.com/rust-lang/crates.io-index"
 | 
			
		||||
checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "bitflags 1.3.2",
 | 
			
		||||
]
 | 
			
		||||
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "regex"
 | 
			
		||||
version = "1.9.5"
 | 
			
		||||
@ -5233,7 +5324,7 @@ dependencies = [
 | 
			
		||||
[[package]]
 | 
			
		||||
name = "shared-entity"
 | 
			
		||||
version = "0.1.0"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca"
 | 
			
		||||
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "anyhow",
 | 
			
		||||
 "app-error",
 | 
			
		||||
 | 
			
		||||
@ -99,8 +99,8 @@ zip = "2.1.3"
 | 
			
		||||
# Run the script.add_workspace_members:
 | 
			
		||||
# scripts/tool/update_client_api_rev.sh  new_rev_id
 | 
			
		||||
# ⚠️⚠️⚠️️
 | 
			
		||||
client-api = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "c2a839ba8bf9ead44679eb08f3a9680467b767ca" }
 | 
			
		||||
client-api-entity = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "c2a839ba8bf9ead44679eb08f3a9680467b767ca" }
 | 
			
		||||
client-api = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "0062c950677f7f633f5b7edabc827a35d3bc92c3" }
 | 
			
		||||
client-api-entity = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "0062c950677f7f633f5b7edabc827a35d3bc92c3" }
 | 
			
		||||
 | 
			
		||||
[profile.dev]
 | 
			
		||||
opt-level = 0
 | 
			
		||||
@ -147,5 +147,5 @@ collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-
 | 
			
		||||
# To update the commit ID, run:
 | 
			
		||||
# scripts/tool/update_local_ai_rev.sh new_rev_id
 | 
			
		||||
# ⚠️⚠️⚠️️
 | 
			
		||||
appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "f3b678e36f22012b241f8e2f3cb811be2da245c0" }
 | 
			
		||||
appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "f3b678e36f22012b241f8e2f3cb811be2da245c0" }
 | 
			
		||||
appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "8ef7d3e4c38fbf92ff9b3630fe79017e95a496ec" }
 | 
			
		||||
appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "8ef7d3e4c38fbf92ff9b3630fe79017e95a496ec" }
 | 
			
		||||
 | 
			
		||||
@ -1,6 +1,6 @@
 | 
			
		||||
use bytes::Bytes;
 | 
			
		||||
pub use client_api::entity::ai_dto::{
 | 
			
		||||
  AppFlowyAIPlugin, CompletionType, LLMModel, LocalAIConfig, ModelInfo, RelatedQuestion,
 | 
			
		||||
  AppFlowyOfflineAI, CompletionType, LLMModel, LocalAIConfig, ModelInfo, RelatedQuestion,
 | 
			
		||||
  RepeatedRelatedQuestion, StringOrMessage,
 | 
			
		||||
};
 | 
			
		||||
pub use client_api::entity::{
 | 
			
		||||
 | 
			
		||||
@ -44,6 +44,9 @@ md5 = "0.7.0"
 | 
			
		||||
zip = { workspace = true, features = ["deflate"] }
 | 
			
		||||
zip-extensions = "0.8.0"
 | 
			
		||||
 | 
			
		||||
[target.'cfg(any(target_os = "macos", target_os = "linux", target_os = "windows"))'.dependencies]
 | 
			
		||||
notify = "6.1.1"
 | 
			
		||||
 | 
			
		||||
[dev-dependencies]
 | 
			
		||||
dotenv = "0.15.0"
 | 
			
		||||
uuid.workspace = true
 | 
			
		||||
 | 
			
		||||
@ -21,7 +21,7 @@ pub trait ChatUserService: Send + Sync + 'static {
 | 
			
		||||
  fn device_id(&self) -> Result<String, FlowyError>;
 | 
			
		||||
  fn workspace_id(&self) -> Result<String, FlowyError>;
 | 
			
		||||
  fn sqlite_connection(&self, uid: i64) -> Result<DBConnection, FlowyError>;
 | 
			
		||||
  fn user_data_dir(&self) -> Result<PathBuf, FlowyError>;
 | 
			
		||||
  fn data_root_dir(&self) -> Result<PathBuf, FlowyError>;
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
pub struct ChatManager {
 | 
			
		||||
 | 
			
		||||
@ -1,6 +1,7 @@
 | 
			
		||||
use crate::local_ai::local_llm_chat::LLMModelInfo;
 | 
			
		||||
use appflowy_plugin::core::plugin::RunningState;
 | 
			
		||||
 | 
			
		||||
use crate::local_ai::local_llm_resource::PendingResource;
 | 
			
		||||
use flowy_chat_pub::cloud::{
 | 
			
		||||
  ChatMessage, LLMModel, RelatedQuestion, RepeatedChatMessage, RepeatedRelatedQuestion,
 | 
			
		||||
};
 | 
			
		||||
@ -360,16 +361,38 @@ pub struct PendingResourcePB {
 | 
			
		||||
  pub name: String,
 | 
			
		||||
 | 
			
		||||
  #[pb(index = 2)]
 | 
			
		||||
  pub file_size: i64,
 | 
			
		||||
  pub file_size: String,
 | 
			
		||||
 | 
			
		||||
  #[pb(index = 3)]
 | 
			
		||||
  pub requirements: String,
 | 
			
		||||
 | 
			
		||||
  #[pb(index = 4)]
 | 
			
		||||
  pub res_type: PendingResourceTypePB,
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
#[derive(Debug, Default, Clone, ProtoBuf_Enum, PartialEq, Eq, Copy)]
 | 
			
		||||
pub enum PendingResourceTypePB {
 | 
			
		||||
  #[default]
 | 
			
		||||
  OfflineApp = 0,
 | 
			
		||||
  AIModel = 1,
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
impl From<PendingResource> for PendingResourceTypePB {
 | 
			
		||||
  fn from(value: PendingResource) -> Self {
 | 
			
		||||
    match value {
 | 
			
		||||
      PendingResource::OfflineApp { .. } => PendingResourceTypePB::OfflineApp,
 | 
			
		||||
      PendingResource::ModelInfoRes { .. } => PendingResourceTypePB::AIModel,
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
#[derive(Default, ProtoBuf, Clone, Debug)]
 | 
			
		||||
pub struct LocalAIPluginStatePB {
 | 
			
		||||
  #[pb(index = 1)]
 | 
			
		||||
  pub state: RunningStatePB,
 | 
			
		||||
 | 
			
		||||
  #[pb(index = 2)]
 | 
			
		||||
  pub offline_ai_ready: bool,
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
#[derive(Debug, Default, Clone, ProtoBuf_Enum, PartialEq, Eq, Copy)]
 | 
			
		||||
@ -416,3 +439,9 @@ pub struct LocalModelStoragePB {
 | 
			
		||||
  #[pb(index = 1)]
 | 
			
		||||
  pub file_path: String,
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
#[derive(Default, ProtoBuf, Clone, Debug)]
 | 
			
		||||
pub struct OfflineAIPB {
 | 
			
		||||
  #[pb(index = 1)]
 | 
			
		||||
  pub link: String,
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
@ -374,3 +374,22 @@ pub(crate) async fn get_model_storage_directory_handler(
 | 
			
		||||
    .get_model_storage_directory()?;
 | 
			
		||||
  data_result_ok(LocalModelStoragePB { file_path })
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
#[tracing::instrument(level = "debug", skip_all, err)]
 | 
			
		||||
pub(crate) async fn get_offline_app_handler(
 | 
			
		||||
  chat_manager: AFPluginState<Weak<ChatManager>>,
 | 
			
		||||
) -> DataResult<OfflineAIPB, FlowyError> {
 | 
			
		||||
  let chat_manager = upgrade_chat_manager(chat_manager)?;
 | 
			
		||||
  let (tx, rx) = oneshot::channel::<Result<String, FlowyError>>();
 | 
			
		||||
  tokio::spawn(async move {
 | 
			
		||||
    let link = chat_manager
 | 
			
		||||
      .local_ai_controller
 | 
			
		||||
      .get_offline_ai_app_download_link()
 | 
			
		||||
      .await?;
 | 
			
		||||
    let _ = tx.send(Ok(link));
 | 
			
		||||
    Ok::<_, FlowyError>(())
 | 
			
		||||
  });
 | 
			
		||||
 | 
			
		||||
  let link = rx.await??;
 | 
			
		||||
  data_result_ok(OfflineAIPB { link })
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
@ -57,6 +57,7 @@ pub fn init(chat_manager: Weak<ChatManager>) -> AFPlugin {
 | 
			
		||||
      ChatEvent::GetModelStorageDirectory,
 | 
			
		||||
      get_model_storage_directory_handler,
 | 
			
		||||
    )
 | 
			
		||||
    .event(ChatEvent::GetOfflineAIAppLink, get_offline_app_handler)
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
#[derive(Clone, Copy, PartialEq, Eq, Debug, Display, Hash, ProtoBuf_Enum, Flowy_Event)]
 | 
			
		||||
@ -133,4 +134,7 @@ pub enum ChatEvent {
 | 
			
		||||
 | 
			
		||||
  #[event(output = "LocalModelStoragePB")]
 | 
			
		||||
  GetModelStorageDirectory = 21,
 | 
			
		||||
 | 
			
		||||
  #[event(output = "OfflineAIPB")]
 | 
			
		||||
  GetOfflineAIAppLink = 22,
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
@ -6,23 +6,22 @@ use anyhow::Error;
 | 
			
		||||
use appflowy_local_ai::chat_plugin::{AIPluginConfig, LocalChatLLMChat};
 | 
			
		||||
use appflowy_plugin::manager::PluginManager;
 | 
			
		||||
use appflowy_plugin::util::is_apple_silicon;
 | 
			
		||||
use flowy_chat_pub::cloud::{AppFlowyAIPlugin, ChatCloudService, LLMModel, LocalAIConfig};
 | 
			
		||||
use flowy_chat_pub::cloud::{AppFlowyOfflineAI, ChatCloudService, LLMModel, LocalAIConfig};
 | 
			
		||||
use flowy_error::{FlowyError, FlowyResult};
 | 
			
		||||
use flowy_sqlite::kv::KVStorePreferences;
 | 
			
		||||
use futures::Sink;
 | 
			
		||||
use lib_infra::async_trait::async_trait;
 | 
			
		||||
 | 
			
		||||
use parking_lot::Mutex;
 | 
			
		||||
use serde::{Deserialize, Serialize};
 | 
			
		||||
use std::ops::Deref;
 | 
			
		||||
 | 
			
		||||
use parking_lot::Mutex;
 | 
			
		||||
use std::sync::Arc;
 | 
			
		||||
use tokio_stream::StreamExt;
 | 
			
		||||
use tracing::{debug, error, info, trace};
 | 
			
		||||
 | 
			
		||||
#[derive(Clone, Debug, Serialize, Deserialize)]
 | 
			
		||||
pub struct LLMSetting {
 | 
			
		||||
  pub plugin: AppFlowyAIPlugin,
 | 
			
		||||
  pub app: AppFlowyOfflineAI,
 | 
			
		||||
  pub llm_model: LLMModel,
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
@ -59,22 +58,6 @@ impl LocalAIController {
 | 
			
		||||
    cloud_service: Arc<dyn ChatCloudService>,
 | 
			
		||||
  ) -> Self {
 | 
			
		||||
    let llm_chat = Arc::new(LocalChatLLMChat::new(plugin_manager));
 | 
			
		||||
    let mut rx = llm_chat.subscribe_running_state();
 | 
			
		||||
 | 
			
		||||
    let _weak_store_preferences = Arc::downgrade(&store_preferences);
 | 
			
		||||
    tokio::spawn(async move {
 | 
			
		||||
      while let Some(state) = rx.next().await {
 | 
			
		||||
        info!("[AI Plugin] state: {:?}", state);
 | 
			
		||||
        let new_state = RunningStatePB::from(state);
 | 
			
		||||
        make_notification(
 | 
			
		||||
          APPFLOWY_AI_NOTIFICATION_KEY,
 | 
			
		||||
          ChatNotification::UpdateChatPluginState,
 | 
			
		||||
        )
 | 
			
		||||
        .payload(LocalAIPluginStatePB { state: new_state })
 | 
			
		||||
        .send();
 | 
			
		||||
      }
 | 
			
		||||
    });
 | 
			
		||||
 | 
			
		||||
    let res_impl = LLMResourceServiceImpl {
 | 
			
		||||
      user_service: user_service.clone(),
 | 
			
		||||
      cloud_service,
 | 
			
		||||
@ -85,6 +68,24 @@ impl LocalAIController {
 | 
			
		||||
    let llm_res = Arc::new(LLMResourceController::new(user_service, res_impl, tx));
 | 
			
		||||
    let current_chat_id = Mutex::new(None);
 | 
			
		||||
 | 
			
		||||
    let mut running_state_rx = llm_chat.subscribe_running_state();
 | 
			
		||||
    let offline_ai_ready = llm_res.is_offline_ai_ready();
 | 
			
		||||
    tokio::spawn(async move {
 | 
			
		||||
      while let Some(state) = running_state_rx.next().await {
 | 
			
		||||
        info!("[AI Plugin] state: {:?}", state);
 | 
			
		||||
        let new_state = RunningStatePB::from(state);
 | 
			
		||||
        make_notification(
 | 
			
		||||
          APPFLOWY_AI_NOTIFICATION_KEY,
 | 
			
		||||
          ChatNotification::UpdateChatPluginState,
 | 
			
		||||
        )
 | 
			
		||||
        .payload(LocalAIPluginStatePB {
 | 
			
		||||
          state: new_state,
 | 
			
		||||
          offline_ai_ready,
 | 
			
		||||
        })
 | 
			
		||||
        .send();
 | 
			
		||||
      }
 | 
			
		||||
    });
 | 
			
		||||
 | 
			
		||||
    let this = Self {
 | 
			
		||||
      llm_chat,
 | 
			
		||||
      llm_res,
 | 
			
		||||
@ -195,11 +196,6 @@ impl LocalAIController {
 | 
			
		||||
      return Err(FlowyError::local_ai_unavailable());
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    let llm_chat = self.llm_chat.clone();
 | 
			
		||||
    match llm_chat.destroy_chat_plugin().await {
 | 
			
		||||
      Ok(_) => info!("[AI Plugin] destroy plugin successfully"),
 | 
			
		||||
      Err(err) => error!("[AI Plugin] failed to destroy plugin: {:?}", err),
 | 
			
		||||
    }
 | 
			
		||||
    let state = self.llm_res.use_local_llm(llm_id)?;
 | 
			
		||||
    // Re-initialize the plugin if the setting is updated and ready to use
 | 
			
		||||
    if self.llm_res.is_resource_ready() {
 | 
			
		||||
@ -230,9 +226,11 @@ impl LocalAIController {
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  pub fn get_chat_plugin_state(&self) -> LocalAIPluginStatePB {
 | 
			
		||||
    let offline_ai_ready = self.llm_res.is_offline_ai_ready();
 | 
			
		||||
    let state = self.llm_chat.get_plugin_running_state();
 | 
			
		||||
    LocalAIPluginStatePB {
 | 
			
		||||
      state: RunningStatePB::from(state),
 | 
			
		||||
      offline_ai_ready,
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
@ -252,6 +250,10 @@ impl LocalAIController {
 | 
			
		||||
      .map(|path| path.to_string_lossy().to_string())
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  pub async fn get_offline_ai_app_download_link(&self) -> FlowyResult<String> {
 | 
			
		||||
    self.llm_res.get_offline_ai_app_download_link().await
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  pub async fn toggle_local_ai(&self) -> FlowyResult<bool> {
 | 
			
		||||
    let enabled = !self
 | 
			
		||||
      .store_preferences
 | 
			
		||||
@ -317,6 +319,7 @@ fn initialize_chat_plugin(
 | 
			
		||||
  ret: Option<tokio::sync::oneshot::Sender<()>>,
 | 
			
		||||
) -> FlowyResult<()> {
 | 
			
		||||
  let llm_chat = llm_chat.clone();
 | 
			
		||||
 | 
			
		||||
  tokio::spawn(async move {
 | 
			
		||||
    trace!("[AI Plugin] config: {:?}", chat_config);
 | 
			
		||||
    if is_apple_silicon().await.unwrap_or(false) {
 | 
			
		||||
 | 
			
		||||
@ -1,24 +1,25 @@
 | 
			
		||||
use crate::chat_manager::ChatUserService;
 | 
			
		||||
use crate::entities::{LocalModelResourcePB, PendingResourcePB};
 | 
			
		||||
use crate::entities::{LocalModelResourcePB, PendingResourcePB, PendingResourceTypePB};
 | 
			
		||||
use crate::local_ai::local_llm_chat::{LLMModelInfo, LLMSetting};
 | 
			
		||||
use crate::local_ai::model_request::download_model;
 | 
			
		||||
 | 
			
		||||
use appflowy_local_ai::chat_plugin::AIPluginConfig;
 | 
			
		||||
use flowy_chat_pub::cloud::{LLMModel, LocalAIConfig, ModelInfo};
 | 
			
		||||
use flowy_error::{FlowyError, FlowyResult};
 | 
			
		||||
use flowy_error::{ErrorCode, FlowyError, FlowyResult};
 | 
			
		||||
use futures::Sink;
 | 
			
		||||
use futures_util::SinkExt;
 | 
			
		||||
use lib_infra::async_trait::async_trait;
 | 
			
		||||
use parking_lot::RwLock;
 | 
			
		||||
 | 
			
		||||
use appflowy_local_ai::plugin_request::download_plugin;
 | 
			
		||||
use lib_infra::util::{get_operating_system, OperatingSystem};
 | 
			
		||||
use std::path::PathBuf;
 | 
			
		||||
use std::sync::Arc;
 | 
			
		||||
use std::time::Duration;
 | 
			
		||||
 | 
			
		||||
#[cfg(any(target_os = "windows", target_os = "macos", target_os = "linux"))]
 | 
			
		||||
use crate::local_ai::watch::{watch_path, WatchContext};
 | 
			
		||||
use tokio::fs::{self};
 | 
			
		||||
use tokio_util::sync::CancellationToken;
 | 
			
		||||
use tracing::{debug, error, info, instrument, trace, warn};
 | 
			
		||||
use zip_extensions::zip_extract;
 | 
			
		||||
 | 
			
		||||
#[async_trait]
 | 
			
		||||
pub trait LLMResourceService: Send + Sync + 'static {
 | 
			
		||||
@ -29,12 +30,17 @@ pub trait LLMResourceService: Send + Sync + 'static {
 | 
			
		||||
  fn is_rag_enabled(&self) -> bool;
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
const PLUGIN_DIR: &str = "plugin";
 | 
			
		||||
const LLM_MODEL_DIR: &str = "models";
 | 
			
		||||
const DOWNLOAD_FINISH: &str = "finish";
 | 
			
		||||
 | 
			
		||||
#[derive(Debug, Clone)]
 | 
			
		||||
pub enum WatchDiskEvent {
 | 
			
		||||
  Create,
 | 
			
		||||
  Remove,
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
pub enum PendingResource {
 | 
			
		||||
  PluginRes,
 | 
			
		||||
  OfflineApp,
 | 
			
		||||
  ModelInfoRes(Vec<ModelInfo>),
 | 
			
		||||
}
 | 
			
		||||
#[derive(Clone)]
 | 
			
		||||
@ -62,6 +68,9 @@ pub struct LLMResourceController {
 | 
			
		||||
  ai_config: RwLock<Option<LocalAIConfig>>,
 | 
			
		||||
  download_task: Arc<RwLock<Option<DownloadTask>>>,
 | 
			
		||||
  resource_notify: tokio::sync::mpsc::Sender<()>,
 | 
			
		||||
  #[cfg(any(target_os = "windows", target_os = "macos", target_os = "linux"))]
 | 
			
		||||
  offline_app_disk_watch: RwLock<Option<WatchContext>>,
 | 
			
		||||
  offline_app_state_sender: tokio::sync::broadcast::Sender<WatchDiskEvent>,
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
impl LLMResourceController {
 | 
			
		||||
@ -70,6 +79,7 @@ impl LLMResourceController {
 | 
			
		||||
    resource_service: impl LLMResourceService,
 | 
			
		||||
    resource_notify: tokio::sync::mpsc::Sender<()>,
 | 
			
		||||
  ) -> Self {
 | 
			
		||||
    let (offline_app_ready_sender, _) = tokio::sync::broadcast::channel(1);
 | 
			
		||||
    let llm_setting = RwLock::new(resource_service.retrieve_setting());
 | 
			
		||||
    Self {
 | 
			
		||||
      user_service,
 | 
			
		||||
@ -78,6 +88,43 @@ impl LLMResourceController {
 | 
			
		||||
      ai_config: Default::default(),
 | 
			
		||||
      download_task: Default::default(),
 | 
			
		||||
      resource_notify,
 | 
			
		||||
      #[cfg(any(target_os = "windows", target_os = "macos", target_os = "linux"))]
 | 
			
		||||
      offline_app_disk_watch: Default::default(),
 | 
			
		||||
      offline_app_state_sender: offline_app_ready_sender,
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  #[allow(dead_code)]
 | 
			
		||||
  pub fn subscribe_offline_app_state(&self) -> tokio::sync::broadcast::Receiver<WatchDiskEvent> {
 | 
			
		||||
    self.offline_app_state_sender.subscribe()
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  fn set_llm_setting(&self, llm_setting: LLMSetting) {
 | 
			
		||||
    let offline_app_path = self.offline_app_path(&llm_setting.app.ai_plugin_name);
 | 
			
		||||
    *self.llm_setting.write() = Some(llm_setting);
 | 
			
		||||
 | 
			
		||||
    #[cfg(any(target_os = "windows", target_os = "macos", target_os = "linux"))]
 | 
			
		||||
    {
 | 
			
		||||
      let is_diff = self
 | 
			
		||||
        .offline_app_disk_watch
 | 
			
		||||
        .read()
 | 
			
		||||
        .as_ref()
 | 
			
		||||
        .map(|watch_context| watch_context.path == offline_app_path)
 | 
			
		||||
        .unwrap_or(true);
 | 
			
		||||
 | 
			
		||||
      // If the offline app path is different from the current watch path, update the watch path.
 | 
			
		||||
      if is_diff {
 | 
			
		||||
        if let Ok((watcher, mut rx)) = watch_path(offline_app_path) {
 | 
			
		||||
          let offline_app_ready_sender = self.offline_app_state_sender.clone();
 | 
			
		||||
          tokio::spawn(async move {
 | 
			
		||||
            while let Some(event) = rx.recv().await {
 | 
			
		||||
              info!("Offline app file changed: {:?}", event);
 | 
			
		||||
              let _ = offline_app_ready_sender.send(event);
 | 
			
		||||
            }
 | 
			
		||||
          });
 | 
			
		||||
          self.offline_app_disk_watch.write().replace(watcher);
 | 
			
		||||
        }
 | 
			
		||||
      }
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
@ -89,6 +136,24 @@ impl LLMResourceController {
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  pub fn is_offline_ai_ready(&self) -> bool {
 | 
			
		||||
    match self.llm_setting.read().as_ref() {
 | 
			
		||||
      None => {
 | 
			
		||||
        trace!("[LLM Resource] No local ai setting found");
 | 
			
		||||
        false
 | 
			
		||||
      },
 | 
			
		||||
      Some(setting) => {
 | 
			
		||||
        let path = self.offline_app_path(&setting.app.ai_plugin_name);
 | 
			
		||||
        path.exists()
 | 
			
		||||
      },
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  pub async fn get_offline_ai_app_download_link(&self) -> FlowyResult<String> {
 | 
			
		||||
    let ai_config = self.fetch_ai_config().await?;
 | 
			
		||||
    Ok(ai_config.plugin.url)
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  /// Retrieves model information and updates the current model settings.
 | 
			
		||||
  #[instrument(level = "debug", skip_all, err)]
 | 
			
		||||
  pub async fn refresh_llm_resource(&self) -> FlowyResult<LLMModelInfo> {
 | 
			
		||||
@ -101,10 +166,10 @@ impl LLMResourceController {
 | 
			
		||||
    let selected_model = self.select_model(&ai_config)?;
 | 
			
		||||
 | 
			
		||||
    let llm_setting = LLMSetting {
 | 
			
		||||
      plugin: ai_config.plugin.clone(),
 | 
			
		||||
      app: ai_config.plugin.clone(),
 | 
			
		||||
      llm_model: selected_model.clone(),
 | 
			
		||||
    };
 | 
			
		||||
    self.llm_setting.write().replace(llm_setting.clone());
 | 
			
		||||
    self.set_llm_setting(llm_setting.clone());
 | 
			
		||||
    self.resource_service.store_setting(llm_setting)?;
 | 
			
		||||
 | 
			
		||||
    Ok(LLMModelInfo {
 | 
			
		||||
@ -130,12 +195,12 @@ impl LLMResourceController {
 | 
			
		||||
      .ok_or_else(|| FlowyError::local_ai().with_context("No local ai config found"))?;
 | 
			
		||||
 | 
			
		||||
    let llm_setting = LLMSetting {
 | 
			
		||||
      plugin: package,
 | 
			
		||||
      app: package,
 | 
			
		||||
      llm_model: llm_config.clone(),
 | 
			
		||||
    };
 | 
			
		||||
 | 
			
		||||
    trace!("[LLM Resource] Selected AI setting: {:?}", llm_setting);
 | 
			
		||||
    *self.llm_setting.write() = Some(llm_setting.clone());
 | 
			
		||||
    self.set_llm_setting(llm_setting.clone());
 | 
			
		||||
    self.resource_service.store_setting(llm_setting)?;
 | 
			
		||||
    self.get_local_llm_state()
 | 
			
		||||
  }
 | 
			
		||||
@ -157,17 +222,19 @@ impl LLMResourceController {
 | 
			
		||||
    let pending_resources: Vec<_> = pending_resources
 | 
			
		||||
      .into_iter()
 | 
			
		||||
      .flat_map(|res| match res {
 | 
			
		||||
        PendingResource::PluginRes => vec![PendingResourcePB {
 | 
			
		||||
        PendingResource::OfflineApp => vec![PendingResourcePB {
 | 
			
		||||
          name: "AppFlowy Plugin".to_string(),
 | 
			
		||||
          file_size: 0,
 | 
			
		||||
          file_size: "0 GB".to_string(),
 | 
			
		||||
          requirements: "".to_string(),
 | 
			
		||||
          res_type: PendingResourceTypePB::OfflineApp,
 | 
			
		||||
        }],
 | 
			
		||||
        PendingResource::ModelInfoRes(model_infos) => model_infos
 | 
			
		||||
          .into_iter()
 | 
			
		||||
          .map(|model_info| PendingResourcePB {
 | 
			
		||||
            name: model_info.name,
 | 
			
		||||
            file_size: model_info.file_size,
 | 
			
		||||
            file_size: bytes_to_readable_format(model_info.file_size as u64),
 | 
			
		||||
            requirements: model_info.requirements,
 | 
			
		||||
            res_type: PendingResourceTypePB::AIModel,
 | 
			
		||||
          })
 | 
			
		||||
          .collect::<Vec<_>>(),
 | 
			
		||||
      })
 | 
			
		||||
@ -189,11 +256,10 @@ impl LLMResourceController {
 | 
			
		||||
      None => Err(FlowyError::local_ai().with_context("Can't find any llm config")),
 | 
			
		||||
      Some(llm_setting) => {
 | 
			
		||||
        let mut resources = vec![];
 | 
			
		||||
        let plugin_path = self.plugin_path(&llm_setting.plugin.etag)?;
 | 
			
		||||
 | 
			
		||||
        let plugin_path = self.offline_app_path(&llm_setting.app.ai_plugin_name);
 | 
			
		||||
        if !plugin_path.exists() {
 | 
			
		||||
          trace!("[LLM Resource] Plugin file not found: {:?}", plugin_path);
 | 
			
		||||
          resources.push(PendingResource::PluginRes);
 | 
			
		||||
          trace!("[LLM Resource] offline plugin not found: {:?}", plugin_path);
 | 
			
		||||
          resources.push(PendingResource::OfflineApp);
 | 
			
		||||
        }
 | 
			
		||||
 | 
			
		||||
        let chat_model = self.model_path(&llm_setting.llm_model.chat_model.file_name)?;
 | 
			
		||||
@ -271,12 +337,12 @@ impl LLMResourceController {
 | 
			
		||||
    *self.download_task.write() = Some(download_task.clone());
 | 
			
		||||
    progress_notify(download_task.tx.subscribe());
 | 
			
		||||
 | 
			
		||||
    let plugin_dir = self.user_plugin_folder()?;
 | 
			
		||||
    if !plugin_dir.exists() {
 | 
			
		||||
      fs::create_dir_all(&plugin_dir).await.map_err(|err| {
 | 
			
		||||
        FlowyError::local_ai().with_context(format!("Failed to create plugin dir: {:?}", err))
 | 
			
		||||
      })?;
 | 
			
		||||
    }
 | 
			
		||||
    // let plugin_dir = self.user_plugin_folder()?;
 | 
			
		||||
    // if !plugin_dir.exists() {
 | 
			
		||||
    //   fs::create_dir_all(&plugin_dir).await.map_err(|err| {
 | 
			
		||||
    //     FlowyError::local_ai().with_context(format!("Failed to create plugin dir: {:?}", err))
 | 
			
		||||
    //   })?;
 | 
			
		||||
    // }
 | 
			
		||||
 | 
			
		||||
    let model_dir = self.user_model_folder()?;
 | 
			
		||||
    if !model_dir.exists() {
 | 
			
		||||
@ -286,42 +352,42 @@ impl LLMResourceController {
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    tokio::spawn(async move {
 | 
			
		||||
      let plugin_file_etag_dir = plugin_dir.join(&llm_setting.plugin.etag);
 | 
			
		||||
      // let plugin_file_etag_dir = plugin_dir.join(&llm_setting.app.etag);
 | 
			
		||||
      // We use the ETag as the identifier for the plugin file. If a file with the given ETag
 | 
			
		||||
      // already exists, skip downloading it.
 | 
			
		||||
      if !plugin_file_etag_dir.exists() {
 | 
			
		||||
        let plugin_progress_tx = download_task.tx.clone();
 | 
			
		||||
        info!(
 | 
			
		||||
          "[LLM Resource] Downloading plugin: {:?}",
 | 
			
		||||
          llm_setting.plugin.etag
 | 
			
		||||
        );
 | 
			
		||||
        let file_name = format!("{}.zip", llm_setting.plugin.etag);
 | 
			
		||||
        let zip_plugin_file = download_plugin(
 | 
			
		||||
          &llm_setting.plugin.url,
 | 
			
		||||
          &plugin_dir,
 | 
			
		||||
          &file_name,
 | 
			
		||||
          Some(download_task.cancel_token.clone()),
 | 
			
		||||
          Some(Arc::new(move |downloaded, total_size| {
 | 
			
		||||
            let progress = (downloaded as f64 / total_size as f64).clamp(0.0, 1.0);
 | 
			
		||||
            let _ = plugin_progress_tx.send(format!("plugin:progress:{}", progress));
 | 
			
		||||
          })),
 | 
			
		||||
          Some(Duration::from_millis(100)),
 | 
			
		||||
        )
 | 
			
		||||
        .await?;
 | 
			
		||||
 | 
			
		||||
        // unzip file
 | 
			
		||||
        info!(
 | 
			
		||||
          "[LLM Resource] unzip {:?} to {:?}",
 | 
			
		||||
          zip_plugin_file, plugin_file_etag_dir
 | 
			
		||||
        );
 | 
			
		||||
        zip_extract(&zip_plugin_file, &plugin_file_etag_dir)?;
 | 
			
		||||
 | 
			
		||||
        // delete zip file
 | 
			
		||||
        info!("[LLM Resource] Delete zip file: {:?}", file_name);
 | 
			
		||||
        if let Err(err) = fs::remove_file(&zip_plugin_file).await {
 | 
			
		||||
          error!("Failed to delete zip file: {:?}", err);
 | 
			
		||||
        }
 | 
			
		||||
      }
 | 
			
		||||
      // if !plugin_file_etag_dir.exists() {
 | 
			
		||||
      //   let plugin_progress_tx = download_task.tx.clone();
 | 
			
		||||
      //   info!(
 | 
			
		||||
      //     "[LLM Resource] Downloading plugin: {:?}",
 | 
			
		||||
      //     llm_setting.app.etag
 | 
			
		||||
      //   );
 | 
			
		||||
      //   let file_name = format!("{}.zip", llm_setting.app.etag);
 | 
			
		||||
      //   let zip_plugin_file = download_plugin(
 | 
			
		||||
      //     &llm_setting.app.url,
 | 
			
		||||
      //     &plugin_dir,
 | 
			
		||||
      //     &file_name,
 | 
			
		||||
      //     Some(download_task.cancel_token.clone()),
 | 
			
		||||
      //     Some(Arc::new(move |downloaded, total_size| {
 | 
			
		||||
      //       let progress = (downloaded as f64 / total_size as f64).clamp(0.0, 1.0);
 | 
			
		||||
      //       let _ = plugin_progress_tx.send(format!("plugin:progress:{}", progress));
 | 
			
		||||
      //     })),
 | 
			
		||||
      //     Some(Duration::from_millis(100)),
 | 
			
		||||
      //   )
 | 
			
		||||
      //   .await?;
 | 
			
		||||
      //
 | 
			
		||||
      //   // unzip file
 | 
			
		||||
      //   info!(
 | 
			
		||||
      //     "[LLM Resource] unzip {:?} to {:?}",
 | 
			
		||||
      //     zip_plugin_file, plugin_file_etag_dir
 | 
			
		||||
      //   );
 | 
			
		||||
      //   zip_extract(&zip_plugin_file, &plugin_file_etag_dir)?;
 | 
			
		||||
      //
 | 
			
		||||
      //   // delete zip file
 | 
			
		||||
      //   info!("[LLM Resource] Delete zip file: {:?}", file_name);
 | 
			
		||||
      //   if let Err(err) = fs::remove_file(&zip_plugin_file).await {
 | 
			
		||||
      //     error!("Failed to delete zip file: {:?}", err);
 | 
			
		||||
      //   }
 | 
			
		||||
      // }
 | 
			
		||||
 | 
			
		||||
      // After download the plugin, start downloading models
 | 
			
		||||
      let chat_model_file = (
 | 
			
		||||
@ -391,7 +457,7 @@ impl LLMResourceController {
 | 
			
		||||
    Ok(())
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  #[instrument(level = "debug", skip_all, err)]
 | 
			
		||||
  #[instrument(level = "info", skip_all, err)]
 | 
			
		||||
  pub fn get_chat_config(&self, rag_enabled: bool) -> FlowyResult<AIPluginConfig> {
 | 
			
		||||
    if !self.is_resource_ready() {
 | 
			
		||||
      return Err(FlowyError::local_ai().with_context("Local AI resources are not ready"));
 | 
			
		||||
@ -405,9 +471,25 @@ impl LLMResourceController {
 | 
			
		||||
      .ok_or_else(|| FlowyError::local_ai().with_context("No local llm setting found"))?;
 | 
			
		||||
 | 
			
		||||
    let model_dir = self.user_model_folder()?;
 | 
			
		||||
    let bin_path = self
 | 
			
		||||
      .plugin_path(&llm_setting.plugin.etag)?
 | 
			
		||||
      .join(llm_setting.plugin.name);
 | 
			
		||||
    let bin_path = match get_operating_system() {
 | 
			
		||||
      OperatingSystem::MacOS => {
 | 
			
		||||
        let path = self.offline_app_path(&llm_setting.app.ai_plugin_name);
 | 
			
		||||
        if !path.exists() {
 | 
			
		||||
          return Err(FlowyError::new(
 | 
			
		||||
            ErrorCode::AIOfflineNotInstalled,
 | 
			
		||||
            format!("AppFlowy Offline not installed at path: {:?}", path),
 | 
			
		||||
          ));
 | 
			
		||||
        }
 | 
			
		||||
        path
 | 
			
		||||
      },
 | 
			
		||||
      _ => {
 | 
			
		||||
        return Err(
 | 
			
		||||
          FlowyError::local_ai_unavailable()
 | 
			
		||||
            .with_context("Local AI not available on current platform"),
 | 
			
		||||
        );
 | 
			
		||||
      },
 | 
			
		||||
    };
 | 
			
		||||
 | 
			
		||||
    let chat_model_path = model_dir.join(&llm_setting.llm_model.chat_model.file_name);
 | 
			
		||||
    let mut config = AIPluginConfig::new(bin_path, chat_model_path)?;
 | 
			
		||||
 | 
			
		||||
@ -474,16 +556,12 @@ impl LLMResourceController {
 | 
			
		||||
    Ok(selected_model)
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  fn user_plugin_folder(&self) -> FlowyResult<PathBuf> {
 | 
			
		||||
    self.resource_dir().map(|dir| dir.join(PLUGIN_DIR))
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  pub(crate) fn user_model_folder(&self) -> FlowyResult<PathBuf> {
 | 
			
		||||
    self.resource_dir().map(|dir| dir.join(LLM_MODEL_DIR))
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  fn plugin_path(&self, etag: &str) -> FlowyResult<PathBuf> {
 | 
			
		||||
    self.user_plugin_folder().map(|dir| dir.join(etag))
 | 
			
		||||
  pub(crate) fn offline_app_path(&self, plugin_name: &str) -> PathBuf {
 | 
			
		||||
    PathBuf::from(format!("/usr/local/bin/{}", plugin_name))
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  fn model_path(&self, model_file_name: &str) -> FlowyResult<PathBuf> {
 | 
			
		||||
@ -493,7 +571,19 @@ impl LLMResourceController {
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  /// Root directory for all local-AI resources (plugin binaries and models).
  ///
  /// Resolves to `<application root>/ai`. The span previously contained both
  /// sides of a merge (an old `user_data_dir()/llm` body and the new
  /// `data_root_dir()/ai` body), which is not valid Rust; this keeps the new
  /// side, matching the `data_root_dir` method on the chat user service.
  pub(crate) fn resource_dir(&self) -> FlowyResult<PathBuf> {
    let user_data_dir = self.user_service.data_root_dir()?;
    Ok(user_data_dir.join("ai"))
  }
 | 
			
		||||
}
 | 
			
		||||
/// Render a byte count as a human-readable size string.
///
/// Values of at least one gigabyte are formatted with one decimal place in
/// GB; everything smaller (including values well under a megabyte) is
/// formatted with two decimal places in MB.
fn bytes_to_readable_format(bytes: u64) -> String {
  const BYTES_IN_GIGABYTE: u64 = 1024 * 1024 * 1024;
  const BYTES_IN_MEGABYTE: u64 = 1024 * 1024;

  match bytes {
    b if b >= BYTES_IN_GIGABYTE => {
      format!("{:.1} GB", b as f64 / BYTES_IN_GIGABYTE as f64)
    },
    b => format!("{:.2} MB", b as f64 / BYTES_IN_MEGABYTE as f64),
  }
}
 | 
			
		||||
 | 
			
		||||
@ -1,3 +1,6 @@
 | 
			
		||||
pub mod local_llm_chat;
 | 
			
		||||
pub mod local_llm_resource;
 | 
			
		||||
mod model_request;
 | 
			
		||||
 | 
			
		||||
#[cfg(any(target_os = "windows", target_os = "macos", target_os = "linux"))]
 | 
			
		||||
pub mod watch;
 | 
			
		||||
 | 
			
		||||
							
								
								
									
										38
									
								
								frontend/rust-lib/flowy-chat/src/local_ai/watch.rs
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										38
									
								
								frontend/rust-lib/flowy-chat/src/local_ai/watch.rs
									
									
									
									
									
										Normal file
									
								
							@ -0,0 +1,38 @@
 | 
			
		||||
use crate::local_ai::local_llm_resource::WatchDiskEvent;
 | 
			
		||||
use flowy_error::{FlowyError, FlowyResult};
 | 
			
		||||
use notify::{Event, RecursiveMode, Watcher};
 | 
			
		||||
use std::path::PathBuf;
 | 
			
		||||
use tokio::sync::mpsc::{unbounded_channel, UnboundedReceiver};
 | 
			
		||||
use tracing::error;
 | 
			
		||||
 | 
			
		||||
/// Handle that keeps a `notify` filesystem watcher alive.
///
/// Callers must hold on to this for as long as they want to receive events;
/// dropping it drops the underlying watcher (which presumably stops event
/// delivery — confirm against `notify` docs).
pub struct WatchContext {
  // Never read after construction; held only so the watcher is not dropped.
  // The #[allow] silences the resulting dead-code warning.
  #[allow(dead_code)]
  watcher: notify::RecommendedWatcher,
  /// The path being watched.
  pub path: PathBuf,
}
 | 
			
		||||
 | 
			
		||||
pub fn watch_path(path: PathBuf) -> FlowyResult<(WatchContext, UnboundedReceiver<WatchDiskEvent>)> {
 | 
			
		||||
  let (tx, rx) = unbounded_channel();
 | 
			
		||||
  let mut watcher = notify::recommended_watcher(move |res: Result<Event, _>| match res {
 | 
			
		||||
    Ok(event) => match event.kind {
 | 
			
		||||
      notify::EventKind::Create(_) => {
 | 
			
		||||
        if let Err(err) = tx.send(WatchDiskEvent::Create) {
 | 
			
		||||
          error!("watch send error: {:?}", err)
 | 
			
		||||
        }
 | 
			
		||||
      },
 | 
			
		||||
      notify::EventKind::Remove(_) => {
 | 
			
		||||
        if let Err(err) = tx.send(WatchDiskEvent::Remove) {
 | 
			
		||||
          error!("watch send error: {:?}", err)
 | 
			
		||||
        }
 | 
			
		||||
      },
 | 
			
		||||
      _ => {},
 | 
			
		||||
    },
 | 
			
		||||
    Err(e) => error!("watch error: {:?}", e),
 | 
			
		||||
  })
 | 
			
		||||
  .map_err(|err| FlowyError::internal().with_context(err))?;
 | 
			
		||||
  watcher
 | 
			
		||||
    .watch(&path, RecursiveMode::Recursive)
 | 
			
		||||
    .map_err(|err| FlowyError::internal().with_context(err))?;
 | 
			
		||||
 | 
			
		||||
  Ok((WatchContext { watcher, path }, rx))
 | 
			
		||||
}
 | 
			
		||||
@ -52,7 +52,9 @@ impl ChatUserService for ChatUserServiceImpl {
 | 
			
		||||
    self.upgrade_user()?.get_sqlite_connection(uid)
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  fn user_data_dir(&self) -> Result<PathBuf, FlowyError> {
 | 
			
		||||
    self.upgrade_user()?.get_user_data_dir()
 | 
			
		||||
  fn data_root_dir(&self) -> Result<PathBuf, FlowyError> {
 | 
			
		||||
    Ok(PathBuf::from(
 | 
			
		||||
      self.upgrade_user()?.get_application_root_dir(),
 | 
			
		||||
    ))
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
@ -301,6 +301,9 @@ pub enum ErrorCode {
 | 
			
		||||
 | 
			
		||||
  #[error("Unsupported file format")]
 | 
			
		||||
  UnsupportedFileFormat = 104,
 | 
			
		||||
 | 
			
		||||
  #[error("AI offline not started")]
 | 
			
		||||
  AIOfflineNotInstalled = 105,
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
impl ErrorCode {
 | 
			
		||||
 | 
			
		||||
@ -217,7 +217,7 @@ where
 | 
			
		||||
    let try_get_client = self.server.try_get_client();
 | 
			
		||||
    FutureResult::new(async move {
 | 
			
		||||
      let workspaces = try_get_client?.get_workspaces().await?;
 | 
			
		||||
      to_user_workspaces(workspaces.0)
 | 
			
		||||
      to_user_workspaces(workspaces)
 | 
			
		||||
    })
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
		Loading…
	
	
			
			x
			
			
		
	
		Reference in New Issue
	
	Block a user