chore: Merge pull request #7515 from AppFlowy-IO/local_ai_opti

chore: disable input when local ai is initializing
Nathan.fooo 2025-03-12 21:20:48 +08:00 committed by GitHub
commit 3aa55f83b1
19 changed files with 364 additions and 289 deletions
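
At a high level, the change gates the chat prompt input on the local AI runtime state: while local AI is enabled but the plugin has not yet reported RunningStatePB.Running, the input is rendered read-only and its hint switches to the "local AI is initializing" message. A minimal sketch of that derivation, using only fields and locale keys that appear in the diff below (the helper names deriveEditable and deriveHintText are illustrative, not part of the codebase):

import 'package:appflowy/generated/locale_keys.g.dart';
import 'package:appflowy_backend/protobuf/flowy-ai/entities.pb.dart';
import 'package:easy_localization/easy_localization.dart';

// Illustrative helpers only; the real logic lives inside AIPromptInputBloc below.
bool deriveEditable(LocalAIPB localAIState) {
  // With local AI enabled, the prompt stays disabled until the plugin is running.
  return localAIState.enabled
      ? localAIState.state == RunningStatePB.Running
      : true;
}

String deriveHintText({required bool isLocal, required bool editable}) {
  if (!editable && isLocal) {
    // Shown while the local model is still starting up.
    return LocaleKeys.settings_aiPage_keys_localAIInitializing.tr();
  }
  return isLocal
      ? LocaleKeys.chat_inputLocalAIMessageHint.tr()
      : LocaleKeys.chat_inputMessageHint.tr();
}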

View File

@@ -1,5 +1,6 @@
import 'dart:async';
import 'package:appflowy/generated/locale_keys.g.dart';
import 'package:appflowy/plugins/ai_chat/application/chat_entity.dart';
import 'package:appflowy/workspace/application/settings/ai/local_llm_listener.dart';
import 'package:appflowy_backend/dispatch/dispatch.dart';
@@ -7,6 +8,7 @@ import 'package:appflowy_backend/log.dart';
import 'package:appflowy_backend/protobuf/flowy-ai/entities.pb.dart';
import 'package:appflowy_backend/protobuf/flowy-folder/protobuf.dart';
import 'package:appflowy_result/appflowy_result.dart';
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter_bloc/flutter_bloc.dart';
import 'package:freezed_annotation/freezed_annotation.dart';
@@ -17,14 +19,14 @@ part 'ai_prompt_input_bloc.freezed.dart';
class AIPromptInputBloc extends Bloc<AIPromptInputEvent, AIPromptInputState> {
AIPromptInputBloc({
required PredefinedFormat? predefinedFormat,
}) : _listener = LocalLLMListener(),
}) : _listener = LocalAIStateListener(),
super(AIPromptInputState.initial(predefinedFormat)) {
_dispatch();
_startListening();
_init();
}
final LocalLLMListener _listener;
final LocalAIStateListener _listener;
@override
Future<void> close() async {
@@ -41,16 +43,32 @@ class AIPromptInputBloc extends Bloc<AIPromptInputEvent, AIPromptInputState> {
bool supportChatWithFile =
aiType.isLocal && localAIState.state == RunningStatePB.Running;
// If local ai is enabled, user can only send messages when the AI is running
final editable = localAIState.enabled
? localAIState.state == RunningStatePB.Running
: true;
if (localAIState.hasLackOfResource()) {
aiType = AiType.cloud;
supportChatWithFile = false;
}
var hintText = aiType.isLocal
? LocaleKeys.chat_inputLocalAIMessageHint.tr()
: LocaleKeys.chat_inputMessageHint.tr();
if (editable == false && aiType.isLocal) {
hintText =
LocaleKeys.settings_aiPage_keys_localAIInitializing.tr();
}
emit(
state.copyWith(
aiType: aiType,
supportChatWithFile: supportChatWithFile,
localAIState: localAIState,
editable: editable,
hintText: hintText,
),
);
},
@@ -179,6 +197,8 @@ class AIPromptInputState with _$AIPromptInputState {
required LocalAIPB? localAIState,
required List<ChatFile> attachedFiles,
required List<ViewPB> mentionedPages,
required bool editable,
required String hintText,
}) = _AIPromptInputState;
factory AIPromptInputState.initial(PredefinedFormat? format) =>
@@ -190,6 +210,8 @@ class AIPromptInputState with _$AIPromptInputState {
localAIState: null,
attachedFiles: [],
mentionedPages: [],
editable: true,
hintText: '',
);
}

View File

@@ -1,11 +1,9 @@
import 'package:appflowy/ai/ai.dart';
import 'package:appflowy/generated/locale_keys.g.dart';
import 'package:appflowy/plugins/ai_chat/application/chat_input_control_cubit.dart';
import 'package:appflowy/plugins/ai_chat/presentation/layout_define.dart';
import 'package:appflowy/startup/startup.dart';
import 'package:appflowy/util/theme_extension.dart';
import 'package:appflowy_backend/protobuf/flowy-folder/protobuf.dart';
import 'package:easy_localization/easy_localization.dart';
import 'package:extended_text_field/extended_text_field.dart';
import 'package:flowy_infra/file_picker/file_picker_service.dart';
import 'package:flowy_infra_ui/flowy_infra_ui.dart';
@@ -52,7 +50,6 @@ class _DesktopPromptInputState extends State<DesktopPromptInput> {
super.initState();
textController.addListener(handleTextControllerChanged);
focusNode.addListener(
() {
if (!widget.hideDecoration) {
@@ -377,15 +374,13 @@ class _DesktopPromptInputState extends State<DesktopPromptInput> {
builder: (context, state) {
return PromptInputTextField(
key: textFieldKey,
editable: state.editable,
cubit: inputControlCubit,
textController: textController,
textFieldFocusNode: focusNode,
contentPadding:
calculateContentPadding(state.showPredefinedFormats),
hintText: switch (state.aiType) {
AiType.cloud => LocaleKeys.chat_inputMessageHint.tr(),
AiType.local => LocaleKeys.chat_inputLocalAIMessageHint.tr()
},
hintText: state.hintText,
);
},
),
@@ -491,6 +486,7 @@ class _FocusNextItemIntent extends Intent {
class PromptInputTextField extends StatelessWidget {
const PromptInputTextField({
super.key,
required this.editable,
required this.cubit,
required this.textController,
required this.textFieldFocusNode,
@@ -502,6 +498,7 @@ class PromptInputTextField extends StatelessWidget {
final TextEditingController textController;
final FocusNode textFieldFocusNode;
final EdgeInsetsGeometry contentPadding;
final bool editable;
final String hintText;
@override
@@ -509,6 +506,8 @@ class PromptInputTextField extends StatelessWidget {
return ExtendedTextField(
controller: textController,
focusNode: textFieldFocusNode,
readOnly: !editable,
enabled: editable,
decoration: InputDecoration(
border: InputBorder.none,
enabledBorder: InputBorder.none,

View File

@@ -23,11 +23,126 @@ class ChatAIMessageBloc extends Bloc<ChatAIMessageEvent, ChatAIMessageState> {
parseMetadata(refSourceJsonString),
),
) {
_dispatch();
_registerEventHandlers();
_initializeStreamListener();
_checkInitialStreamState();
}
final String chatId;
final Int64? questionId;
void _registerEventHandlers() {
on<_UpdateText>((event, emit) {
emit(
state.copyWith(
text: event.text,
messageState: const MessageState.ready(),
),
);
});
on<_ReceiveError>((event, emit) {
emit(state.copyWith(messageState: MessageState.onError(event.error)));
});
on<_Retry>((event, emit) async {
if (questionId == null) {
Log.error("Question id is not valid: $questionId");
return;
}
emit(state.copyWith(messageState: const MessageState.loading()));
final payload = ChatMessageIdPB(
chatId: chatId,
messageId: questionId,
);
final result = await AIEventGetAnswerForQuestion(payload).send();
if (!isClosed) {
result.fold(
(answer) => add(ChatAIMessageEvent.retryResult(answer.content)),
(err) {
Log.error("Failed to get answer: $err");
add(ChatAIMessageEvent.receiveError(err.toString()));
},
);
}
});
on<_RetryResult>((event, emit) {
emit(
state.copyWith(
text: event.text,
messageState: const MessageState.ready(),
),
);
});
on<_OnAIResponseLimit>((event, emit) {
emit(
state.copyWith(
messageState: const MessageState.onAIResponseLimit(),
),
);
});
on<_OnAIImageResponseLimit>((event, emit) {
emit(
state.copyWith(
messageState: const MessageState.onAIImageResponseLimit(),
),
);
});
on<_OnAIMaxRquired>((event, emit) {
emit(
state.copyWith(
messageState: MessageState.onAIMaxRequired(event.message),
),
);
});
on<_OnLocalAIInitializing>((event, emit) {
emit(
state.copyWith(
messageState: const MessageState.onInitializingLocalAI(),
),
);
});
on<_ReceiveMetadata>((event, emit) {
Log.debug("AI Steps: ${event.metadata.progress?.step}");
emit(
state.copyWith(
sources: event.metadata.sources,
progress: event.metadata.progress,
),
);
});
}
void _initializeStreamListener() {
if (state.stream != null) {
_startListening();
state.stream!.listen(
onData: (text) => _safeAdd(ChatAIMessageEvent.updateText(text)),
onError: (error) =>
_safeAdd(ChatAIMessageEvent.receiveError(error.toString())),
onAIResponseLimit: () =>
_safeAdd(const ChatAIMessageEvent.onAIResponseLimit()),
onAIImageResponseLimit: () =>
_safeAdd(const ChatAIMessageEvent.onAIImageResponseLimit()),
onMetadata: (metadata) =>
_safeAdd(ChatAIMessageEvent.receiveMetadata(metadata)),
onAIMaxRequired: (message) {
Log.info(message);
_safeAdd(ChatAIMessageEvent.onAIMaxRequired(message));
},
onLocalAIInitializing: () =>
_safeAdd(const ChatAIMessageEvent.onLocalAIInitializing()),
);
}
}
void _checkInitialStreamState() {
if (state.stream != null) {
if (state.stream!.aiLimitReached) {
add(const ChatAIMessageEvent.onAIResponseLimit());
} else if (state.stream!.error != null) {
@@ -36,130 +151,10 @@ class ChatAIMessageBloc extends Bloc<ChatAIMessageEvent, ChatAIMessageState> {
}
}
final String chatId;
final Int64? questionId;
void _dispatch() {
on<ChatAIMessageEvent>(
(event, emit) {
event.when(
updateText: (newText) {
emit(
state.copyWith(
text: newText,
messageState: const MessageState.ready(),
),
);
},
receiveError: (error) {
emit(state.copyWith(messageState: MessageState.onError(error)));
},
retry: () {
if (questionId is! Int64) {
Log.error("Question id is not Int64: $questionId");
return;
}
emit(
state.copyWith(
messageState: const MessageState.loading(),
),
);
final payload = ChatMessageIdPB(
chatId: chatId,
messageId: questionId,
);
AIEventGetAnswerForQuestion(payload).send().then((result) {
if (!isClosed) {
result.fold(
(answer) {
add(ChatAIMessageEvent.retryResult(answer.content));
},
(err) {
Log.error("Failed to get answer: $err");
add(ChatAIMessageEvent.receiveError(err.toString()));
},
);
}
});
},
retryResult: (String text) {
emit(
state.copyWith(
text: text,
messageState: const MessageState.ready(),
),
);
},
onAIResponseLimit: () {
emit(
state.copyWith(
messageState: const MessageState.onAIResponseLimit(),
),
);
},
onAIImageResponseLimit: () {
emit(
state.copyWith(
messageState: const MessageState.onAIImageResponseLimit(),
),
);
},
onAIMaxRequired: (message) {
emit(
state.copyWith(
messageState: MessageState.onAIMaxRequired(message),
),
);
},
receiveMetadata: (metadata) {
Log.debug("AI Steps: ${metadata.progress?.step}");
emit(
state.copyWith(
sources: metadata.sources,
progress: metadata.progress,
),
);
},
);
},
);
}
void _startListening() {
state.stream!.listen(
onData: (text) {
if (!isClosed) {
add(ChatAIMessageEvent.updateText(text));
}
},
onError: (error) {
if (!isClosed) {
add(ChatAIMessageEvent.receiveError(error.toString()));
}
},
onAIResponseLimit: () {
if (!isClosed) {
add(const ChatAIMessageEvent.onAIResponseLimit());
}
},
onAIImageResponseLimit: () {
if (!isClosed) {
add(const ChatAIMessageEvent.onAIImageResponseLimit());
}
},
onMetadata: (metadata) {
if (!isClosed) {
add(ChatAIMessageEvent.receiveMetadata(metadata));
}
},
onAIMaxRequired: (message) {
if (!isClosed) {
Log.info(message);
add(ChatAIMessageEvent.onAIMaxRequired(message));
}
},
);
void _safeAdd(ChatAIMessageEvent event) {
if (!isClosed) {
add(event);
}
}
}
@@ -174,6 +169,8 @@ class ChatAIMessageEvent with _$ChatAIMessageEvent {
_OnAIImageResponseLimit;
const factory ChatAIMessageEvent.onAIMaxRequired(String message) =
_OnAIMaxRquired;
const factory ChatAIMessageEvent.onLocalAIInitializing() =
_OnLocalAIInitializing;
const factory ChatAIMessageEvent.receiveMetadata(
MetadataCollection metadata,
) = _ReceiveMetadata;
@@ -209,6 +206,7 @@ class MessageState with _$MessageState {
const factory MessageState.onAIResponseLimit() = _AIResponseLimit;
const factory MessageState.onAIImageResponseLimit() = _AIImageResponseLimit;
const factory MessageState.onAIMaxRequired(String message) = _AIMaxRequired;
const factory MessageState.onInitializingLocalAI() = _LocalAIInitializing;
const factory MessageState.ready() = _Ready;
const factory MessageState.loading() = _Loading;
}

View File

@@ -4,53 +4,33 @@ import 'dart:isolate';
import 'package:appflowy/plugins/ai_chat/application/chat_message_service.dart';
/// Constants for event prefixes.
class AnswerEventPrefix {
static const data = 'data:';
static const error = 'error:';
static const metadata = 'metadata:';
static const aiResponseLimit = 'AI_RESPONSE_LIMIT';
static const aiImageResponseLimit = 'AI_IMAGE_RESPONSE_LIMIT';
static const aiMaxRequired = 'AI_MAX_REQUIRED:';
static const localAINotReady = 'LOCAL_AI_NOT_READY';
}
/// A stream that receives answer events from an isolate or external process.
/// It caches events that might occur before a listener is attached.
class AnswerStream {
AnswerStream() {
_port.handler = _controller.add;
_subscription = _controller.stream.listen(
(event) {
if (event.startsWith("data:")) {
_hasStarted = true;
final newText = event.substring(5);
_text += newText;
_onData?.call(_text);
} else if (event.startsWith("error:")) {
_error = event.substring(5);
_onError?.call(_error!);
} else if (event.startsWith("metadata:")) {
if (_onMetadata != null) {
final s = event.substring(9);
_onMetadata!(parseMetadata(s));
}
} else if (event == "AI_RESPONSE_LIMIT") {
_aiLimitReached = true;
_onAIResponseLimit?.call();
} else if (event == "AI_IMAGE_RESPONSE_LIMIT") {
_aiImageLimitReached = true;
_onAIImageResponseLimit?.call();
} else if (event.startsWith("AI_MAX_REQUIRED:")) {
final msg = event.substring(16);
// If the callback is not registered yet, add the event to the buffer.
if (_onAIMaxRequired != null) {
_onAIMaxRequired!(msg);
} else {
_pendingAIMaxRequiredEvents.add(msg);
}
}
},
onDone: () {
_onEnd?.call();
},
onError: (error) {
_error = error.toString();
_onError?.call(error.toString());
},
_handleEvent,
onDone: _onDoneCallback,
onError: _handleError,
);
}
final RawReceivePort _port = RawReceivePort();
final StreamController<String> _controller = StreamController.broadcast();
late StreamSubscription<String> _subscription;
bool _hasStarted = false;
bool _aiLimitReached = false;
bool _aiImageLimitReached = false;
@@ -62,13 +42,15 @@ class AnswerStream {
void Function()? _onStart;
void Function()? _onEnd;
void Function(String error)? _onError;
void Function()? _onLocalAIInitializing;
void Function()? _onAIResponseLimit;
void Function()? _onAIImageResponseLimit;
void Function(String message)? _onAIMaxRequired;
void Function(MetadataCollection metadataCollection)? _onMetadata;
void Function(MetadataCollection metadata)? _onMetadata;
// Buffer for events that occur before listen() is called.
// Caches for events that occur before listen() is called.
final List<String> _pendingAIMaxRequiredEvents = [];
bool _pendingLocalAINotReady = false;
int get nativePort => _port.sendPort.nativePort;
bool get hasStarted => _hasStarted;
@@ -77,12 +59,61 @@ class AnswerStream {
String? get error => _error;
String get text => _text;
/// Releases the resources used by the AnswerStream.
Future<void> dispose() async {
await _controller.close();
await _subscription.cancel();
_port.close();
}
/// Handles incoming events from the underlying stream.
void _handleEvent(String event) {
if (event.startsWith(AnswerEventPrefix.data)) {
_hasStarted = true;
final newText = event.substring(AnswerEventPrefix.data.length);
_text += newText;
_onData?.call(_text);
} else if (event.startsWith(AnswerEventPrefix.error)) {
_error = event.substring(AnswerEventPrefix.error.length);
_onError?.call(_error!);
} else if (event.startsWith(AnswerEventPrefix.metadata)) {
final s = event.substring(AnswerEventPrefix.metadata.length);
_onMetadata?.call(parseMetadata(s));
} else if (event == AnswerEventPrefix.aiResponseLimit) {
_aiLimitReached = true;
_onAIResponseLimit?.call();
} else if (event == AnswerEventPrefix.aiImageResponseLimit) {
_aiImageLimitReached = true;
_onAIImageResponseLimit?.call();
} else if (event.startsWith(AnswerEventPrefix.aiMaxRequired)) {
final msg = event.substring(AnswerEventPrefix.aiMaxRequired.length);
if (_onAIMaxRequired != null) {
_onAIMaxRequired!(msg);
} else {
_pendingAIMaxRequiredEvents.add(msg);
}
} else if (event.startsWith(AnswerEventPrefix.localAINotReady)) {
if (_onLocalAIInitializing != null) {
_onLocalAIInitializing!();
} else {
_pendingLocalAINotReady = true;
}
}
}
void _onDoneCallback() {
_onEnd?.call();
}
void _handleError(dynamic error) {
_error = error.toString();
_onError?.call(_error!);
}
/// Registers listeners for various events.
///
/// If certain events have already occurred (e.g. AI_MAX_REQUIRED or LOCAL_AI_NOT_READY),
/// they will be flushed immediately.
void listen({
void Function(String text)? onData,
void Function()? onStart,
@@ -92,6 +123,7 @@ class AnswerStream {
void Function()? onAIImageResponseLimit,
void Function(String message)? onAIMaxRequired,
void Function(MetadataCollection metadata)? onMetadata,
void Function()? onLocalAIInitializing,
}) {
_onData = onData;
_onStart = onStart;
@@ -99,10 +131,11 @@ class AnswerStream {
_onError = onError;
_onAIResponseLimit = onAIResponseLimit;
_onAIImageResponseLimit = onAIImageResponseLimit;
_onMetadata = onMetadata;
_onAIMaxRequired = onAIMaxRequired;
_onMetadata = onMetadata;
_onLocalAIInitializing = onLocalAIInitializing;
// Flush any buffered AI_MAX_REQUIRED events.
// Flush pending AI_MAX_REQUIRED events.
if (_onAIMaxRequired != null && _pendingAIMaxRequiredEvents.isNotEmpty) {
for (final msg in _pendingAIMaxRequiredEvents) {
_onAIMaxRequired!(msg);
@@ -110,6 +143,12 @@ class AnswerStream {
_pendingAIMaxRequiredEvents.clear();
}
// Flush pending LOCAL_AI_NOT_READY event.
if (_pendingLocalAINotReady && _onLocalAIInitializing != null) {
_onLocalAIInitializing!();
_pendingLocalAINotReady = false;
}
_onStart?.call();
}
}
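
For reference, the events arriving on this stream are plain strings tagged with the AnswerEventPrefix values above, e.g. "data:Hello", "metadata:{...}", "AI_MAX_REQUIRED:...", or "LOCAL_AI_NOT_READY:...". A hedged sketch of the consumer side; the surrounding chat plumbing is omitted, the wrapper function demoAnswerStream is illustrative, and the callback bodies are placeholders:

void demoAnswerStream() {
  final stream = AnswerStream();
  // stream.nativePort is handed to the backend, which then pushes the prefixed
  // events through the RawReceivePort that _handleEvent processes above.
  stream.listen(
    onData: (text) => print('partial answer so far: $text'),
    onError: (error) => print('stream error: $error'),
    onMetadata: (metadata) => print('sources: ${metadata.sources}'),
    onLocalAIInitializing: () {
      // Fired for a LOCAL_AI_NOT_READY event; if that event arrived before
      // listen() was called, it is flushed here from the pending flag.
      print('local AI is still initializing');
    },
  );
}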

View File

@@ -247,6 +247,9 @@ class _ChatContentPage extends StatelessWidget {
onChangeFormat: (format) => context
.read<ChatBloc>()
.add(ChatEvent.regenerateAnswer(message.id, format)),
onStopStream: () => context.read<ChatBloc>().add(
const ChatEvent.stopStream(),
),
);
},
);

View File

@@ -32,6 +32,7 @@ class ChatAIMessageWidget extends StatelessWidget {
required this.questionId,
required this.chatId,
required this.refSourceJsonString,
required this.onStopStream,
this.onSelectedMetadata,
this.onRegenerate,
this.onChangeFormat,
@@ -50,6 +51,7 @@ class ChatAIMessageWidget extends StatelessWidget {
final String? refSourceJsonString;
final void Function(ChatMessageRefSource metadata)? onSelectedMetadata;
final void Function()? onRegenerate;
final void Function() onStopStream;
final void Function(PredefinedFormat)? onChangeFormat;
final bool isStreaming;
final bool isLastMessage;
@@ -126,26 +128,39 @@ class ChatAIMessageWidget extends StatelessWidget {
);
},
onError: (error) {
onStopStream();
return ChatErrorMessageWidget(
errorMessage: LocaleKeys.chat_aiServerUnavailable.tr(),
);
},
onAIResponseLimit: () {
onStopStream();
return ChatErrorMessageWidget(
errorMessage:
LocaleKeys.sideBar_askOwnerToUpgradeToAIMax.tr(),
);
},
onAIImageResponseLimit: () {
onStopStream();
return ChatErrorMessageWidget(
errorMessage: LocaleKeys.sideBar_purchaseAIMax.tr(),
);
},
onAIMaxRequired: (message) {
onStopStream();
return ChatErrorMessageWidget(
errorMessage: message,
);
},
onInitializingLocalAI: () {
onStopStream();
return ChatErrorMessageWidget(
errorMessage: LocaleKeys
.settings_aiPage_keys_localAIInitializing
.tr(),
);
},
),
),
);

View File

@@ -13,7 +13,7 @@ part 'local_ai_setting_panel_bloc.freezed.dart';
class LocalAISettingPanelBloc
extends Bloc<LocalAISettingPanelEvent, LocalAISettingPanelState> {
LocalAISettingPanelBloc()
: listener = LocalLLMListener(),
: listener = LocalAIStateListener(),
super(const LocalAISettingPanelState()) {
on<LocalAISettingPanelEvent>(_handleEvent);
@@ -35,7 +35,7 @@ class LocalAISettingPanelBloc
);
}
final LocalLLMListener listener;
final LocalAIStateListener listener;
/// Handles incoming events and dispatches them to the appropriate handler.
Future<void> _handleEvent(

View File

@@ -11,8 +11,8 @@ import 'package:appflowy_result/appflowy_result.dart';
typedef PluginStateCallback = void Function(LocalAIPB state);
typedef PluginResourceCallback = void Function(LackOfAIResourcePB data);
class LocalLLMListener {
LocalLLMListener() {
class LocalAIStateListener {
LocalAIStateListener() {
_parser =
ChatNotificationParser(id: "appflowy_ai_plugin", callback: _callback);
_subscription = RustStreamReceiver.listen(

View File

@@ -11,7 +11,7 @@ part 'plugin_state_bloc.freezed.dart';
class PluginStateBloc extends Bloc<PluginStateEvent, PluginStateState> {
PluginStateBloc()
: listener = LocalLLMListener(),
: listener = LocalAIStateListener(),
super(
const PluginStateState(
action: PluginStateAction.unknown(),
@@ -33,7 +33,7 @@ class PluginStateBloc extends Bloc<PluginStateEvent, PluginStateState> {
on<PluginStateEvent>(_handleEvent);
}
final LocalLLMListener listener;
final LocalAIStateListener listener;
@override
Future<void> close() async {

View File

@@ -2,7 +2,6 @@ import 'package:flutter/material.dart';
import 'package:appflowy/generated/locale_keys.g.dart';
import 'package:appflowy/workspace/application/settings/ai/local_ai_bloc.dart';
import 'package:appflowy/workspace/application/settings/ai/settings_ai_bloc.dart';
import 'package:appflowy/workspace/presentation/settings/pages/setting_ai_view/local_ai_setting_panel.dart';
import 'package:appflowy/workspace/presentation/widgets/dialogs.dart';
import 'package:appflowy/workspace/presentation/widgets/toggle/toggle.dart';
@@ -17,64 +16,54 @@ class LocalAISetting extends StatelessWidget {
@override
Widget build(BuildContext context) {
return BlocBuilder<SettingsAIBloc, SettingsAIState>(
builder: (context, state) {
if (state.aiSettings == null) {
return const SizedBox.shrink();
}
return BlocProvider(
create: (context) =>
LocalAIToggleBloc()..add(const LocalAIToggleEvent.started()),
child: Padding(
padding: const EdgeInsets.symmetric(vertical: 6),
child: ExpandableNotifier(
child: BlocListener<LocalAIToggleBloc, LocalAIToggleState>(
listener: (context, state) {
final controller =
ExpandableController.of(context, required: true)!;
return BlocProvider(
create: (context) =>
LocalAIToggleBloc()..add(const LocalAIToggleEvent.started()),
child: Padding(
padding: const EdgeInsets.symmetric(vertical: 6),
child: ExpandableNotifier(
child: BlocListener<LocalAIToggleBloc, LocalAIToggleState>(
listener: (context, state) {
final controller =
ExpandableController.of(context, required: true)!;
state.pageIndicator.when(
error: (_) => controller.expanded = true,
isEnabled: (enabled) => controller.expanded = enabled,
loading: () => controller.expanded = true,
);
},
child: ExpandablePanel(
theme: const ExpandableThemeData(
headerAlignment: ExpandablePanelHeaderAlignment.center,
tapBodyToCollapse: false,
hasIcon: false,
tapBodyToExpand: false,
tapHeaderToExpand: false,
state.pageIndicator.when(
error: (_) => controller.expanded = true,
isEnabled: (enabled) => controller.expanded = enabled,
loading: () => controller.expanded = true,
);
},
child: ExpandablePanel(
theme: const ExpandableThemeData(
headerAlignment: ExpandablePanelHeaderAlignment.center,
tapBodyToCollapse: false,
hasIcon: false,
tapBodyToExpand: false,
tapHeaderToExpand: false,
),
header: const LocalAISettingHeader(),
collapsed: const SizedBox.shrink(),
expanded: Column(
children: [
const VSpace(6),
DecoratedBox(
decoration: BoxDecoration(
color:
Theme.of(context).colorScheme.surfaceContainerHighest,
borderRadius: const BorderRadius.all(Radius.circular(4)),
),
child: const Padding(
padding:
EdgeInsets.symmetric(horizontal: 12, vertical: 6),
child: LocalAISettingPanel(),
),
),
header: const LocalAISettingHeader(),
collapsed: const SizedBox.shrink(),
expanded: Column(
children: [
const VSpace(6),
DecoratedBox(
decoration: BoxDecoration(
color: Theme.of(context)
.colorScheme
.surfaceContainerHighest,
borderRadius:
const BorderRadius.all(Radius.circular(4)),
),
child: const Padding(
padding:
EdgeInsets.symmetric(horizontal: 12, vertical: 6),
child: LocalAISettingPanel(),
),
),
],
),
),
],
),
),
),
);
},
),
),
);
}
}
@@ -87,12 +76,8 @@ class LocalAISettingHeader extends StatelessWidget {
return BlocBuilder<LocalAIToggleBloc, LocalAIToggleState>(
builder: (context, state) {
return state.pageIndicator.when(
error: (error) {
return const SizedBox.shrink();
},
loading: () {
return const SizedBox.shrink();
},
error: (error) => SizedBox.shrink(),
loading: () => const SizedBox.shrink(),
isEnabled: (isEnabled) {
return Row(
children: [

View File

@@ -55,13 +55,7 @@ class SettingsAIView extends StatelessWidget {
];
children.add(const _AISearchToggle(value: false));
children.add(
_LocalAIOnBoarding(
userProfile: userProfile,
currentWorkspaceMemberRole: state.currentWorkspaceMemberRole!,
workspaceId: workspaceId,
),
);
children.add(const LocalAISetting());
return SettingsBody(
title: LocaleKeys.settings_aiPage_title.tr(),

View File

@@ -144,34 +144,34 @@ EXTERNAL SOURCES:
:path: Flutter/ephemeral/.symlinks/plugins/window_manager/macos
SPEC CHECKSUMS:
app_links: 9028728e32c83a0831d9db8cf91c526d16cc5468
appflowy_backend: 464aeb3e5c6966a41641a2111e5ead72ce2695f7
auto_updater_macos: 3a42f1a06be6981f1a18be37e6e7bf86aa732118
bitsdojo_window_macos: 7959fb0ca65a3ccda30095c181ecb856fae48ea9
connectivity_plus: e74b9f74717d2d99d45751750e266e55912baeb5
desktop_drop: e0b672a7d84c0a6cbc378595e82cdb15f2970a43
device_info_plus: a56e6e74dbbd2bb92f2da12c64ddd4f67a749041
file_selector_macos: 6280b52b459ae6c590af5d78fc35c7267a3c4b31
flowy_infra_ui: 8760ff42a789de40bf5007a5f176b454722a341e
app_links: 10e0a0ab602ffaf34d142cd4862f29d34b303b2a
appflowy_backend: 865496343de667fc8c600e04b9fd05234e130cf9
auto_updater_macos: 3e3462c418fe4e731917eacd8d28eef7af84086d
bitsdojo_window_macos: 44e3b8fe3dd463820e0321f6256c5b1c16bb6a00
connectivity_plus: 18d3c32514c886e046de60e9c13895109866c747
desktop_drop: 69eeff437544aa619c8db7f4481b3a65f7696898
device_info_plus: ce1b7762849d3ec103d0e0517299f2db7ad60720
file_selector_macos: cc3858c981fe6889f364731200d6232dac1d812d
flowy_infra_ui: 03301a39ad118771adbf051a664265c61c507f38
FlutterMacOS: 8f6f14fa908a6fb3fba0cd85dbd81ec4b251fb24
HotKey: 400beb7caa29054ea8d864c96f5ba7e5b4852277
hotkey_manager: b443f35f4d772162937aa73fd8995e579f8ac4e2
irondash_engine_context: 893c7d96d20ce361d7e996f39d360c4c2f9869ba
local_notifier: ebf072651e35ae5e47280ad52e2707375cb2ae4e
package_info_plus: f0052d280d17aa382b932f399edf32507174e870
path_provider_foundation: 080d55be775b7414fd5a5ef3ac137b97b097e564
hotkey_manager: c32bf0bfe8f934b7bc17ab4ad5c4c142960b023c
irondash_engine_context: da62996ee25616d2f01bbeb85dc115d813359478
local_notifier: e9506bc66fc70311e8bc7291fb70f743c081e4ff
package_info_plus: 12f1c5c2cfe8727ca46cbd0b26677728972d9a5b
path_provider_foundation: 2b6b4c569c0fb62ec74538f866245ac84301af46
ReachabilitySwift: 32793e867593cfc1177f5d16491e3a197d2fccda
screen_retriever_macos: 452e51764a9e1cdb74b3c541238795849f21557f
screen_retriever_macos: 776e0fa5d42c6163d2bf772d22478df4b302b161
Sentry: 1fe34e9c2cbba1e347623610d26db121dcb569f1
sentry_flutter: e24b397f9a61fa5bbefd8279c3b2242ca86faa90
share_plus: 510bf0af1a42cd602274b4629920c9649c52f4cc
shared_preferences_foundation: 9e1978ff2562383bd5676f64ec4e9aa8fa06a6f7
sentry_flutter: a39c2a2d67d5e5b9cb0b94a4985c76dd5b3fc737
share_plus: 1fa619de8392a4398bfaf176d441853922614e89
shared_preferences_foundation: fcdcbc04712aee1108ac7fda236f363274528f78
Sparkle: 5f8960a7a119aa7d45dacc0d5837017170bc5675
sqflite_darwin: 20b2a3a3b70e43edae938624ce550a3cbf66a3d0
super_native_extensions: c2795d6d9aedf4a79fae25cb6160b71b50549189
url_launcher_macos: 0fba8ddabfc33ce0a9afe7c5fef5aab3d8d2d673
webview_flutter_wkwebview: 44d4dee7d7056d5ad185d25b38404436d56c547c
window_manager: 1d01fa7ac65a6e6f83b965471b1a7fdd3f06166c
sqflite_darwin: 5a7236e3b501866c1c9befc6771dfd73ffb8702d
super_native_extensions: 85efee3a7495b46b04befcfc86ed12069264ebf3
url_launcher_macos: c82c93949963e55b228a30115bd219499a6fe404
webview_flutter_wkwebview: 0982481e3d9c78fd5c6f62a002fcd24fc791f1e4
window_manager: 3a1844359a6295ab1e47659b1a777e36773cd6e8
PODFILE CHECKSUM: 0532f3f001ca3110b8be345d6491fff690e95823

View File

@@ -198,7 +198,7 @@ dependencies = [
[[package]]
name = "appflowy-local-ai"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=8fea7ed2375eb54c8dfb8af6db6e32a61854fb2e#8fea7ed2375eb54c8dfb8af6db6e32a61854fb2e"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=f1b5167e9569e8a61ef50a1afb140306a5287e57#f1b5167e9569e8a61ef50a1afb140306a5287e57"
dependencies = [
"anyhow",
"appflowy-plugin",
@@ -218,7 +218,7 @@ dependencies = [
[[package]]
name = "appflowy-plugin"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=8fea7ed2375eb54c8dfb8af6db6e32a61854fb2e#8fea7ed2375eb54c8dfb8af6db6e32a61854fb2e"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=f1b5167e9569e8a61ef50a1afb140306a5287e57#f1b5167e9569e8a61ef50a1afb140306a5287e57"
dependencies = [
"anyhow",
"cfg-if",

View File

@@ -152,5 +152,5 @@ collab-importer = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFl
# To update the commit ID, run:
# scripts/tool/update_local_ai_rev.sh new_rev_id
# ⚠️⚠️⚠️️
appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "8fea7ed2375eb54c8dfb8af6db6e32a61854fb2e" }
appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "8fea7ed2375eb54c8dfb8af6db6e32a61854fb2e" }
appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "f1b5167e9569e8a61ef50a1afb140306a5287e57" }
appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "f1b5167e9569e8a61ef50a1afb140306a5287e57" }
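Per the comment above, bumping to the revision referenced here would be done with scripts/tool/update_local_ai_rev.sh f1b5167e9569e8a61ef50a1afb140306a5287e57 (assuming the script is invoked from the repository root).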

View File

@ -263,6 +263,10 @@ impl Chat {
let _ = answer_sink
.send(format!("AI_MAX_REQUIRED:{}", err.msg))
.await;
} else if err.is_local_ai_not_ready() {
let _ = answer_sink
.send(format!("LOCAL_AI_NOT_READY:{}", err.msg))
.await;
} else {
let _ = answer_sink.send(format!("error:{}", err)).await;
}

View File

@@ -72,6 +72,10 @@ impl LocalAIController {
user_service: Arc<dyn AIUserService>,
cloud_service: Arc<dyn ChatCloudService>,
) -> Self {
debug!(
"[AI Plugin] init local ai controller, thread: {:?}",
std::thread::current().id()
);
let local_ai = Arc::new(OllamaAIPlugin::new(plugin_manager));
let res_impl = LLMResourceServiceImpl {
user_service: user_service.clone(),
@@ -176,7 +180,7 @@ impl LocalAIController {
if !self.is_enabled() {
return false;
}
self.ai_plugin.get_plugin_running_state().is_ready()
self.ai_plugin.get_plugin_running_state().is_running()
}
/// Indicate whether the local AI is enabled.

View File

@@ -158,18 +158,22 @@ impl ChatCloudService for AICloudServiceMiddleware {
question_id: i64,
format: ResponseFormat,
) -> Result<StreamAnswer, FlowyError> {
if self.local_ai.is_running() {
let row = self.get_message_record(question_id)?;
match self
.local_ai
.stream_question(chat_id, &row.content, json!({}))
.await
{
Ok(stream) => Ok(QuestionStream::new(stream).boxed()),
Err(err) => {
self.handle_plugin_error(err);
Ok(stream::once(async { Err(FlowyError::local_ai_unavailable()) }).boxed())
},
if self.local_ai.is_enabled() {
if self.local_ai.is_running() {
let row = self.get_message_record(question_id)?;
match self
.local_ai
.stream_question(chat_id, &row.content, json!({}))
.await
{
Ok(stream) => Ok(QuestionStream::new(stream).boxed()),
Err(err) => {
self.handle_plugin_error(err);
Ok(stream::once(async { Err(FlowyError::local_ai_unavailable()) }).boxed())
},
}
} else {
Err(FlowyError::local_ai_not_ready())
}
} else {
self

View File

@@ -371,6 +371,9 @@ pub enum ErrorCode {
#[error("Request timeout")]
RequestTimeout = 127,
#[error("Local AI is not ready")]
LocalAINotReady = 128,
}
impl ErrorCode {

View File

@@ -95,6 +95,10 @@ impl FlowyError {
self.code == ErrorCode::AIImageResponseLimitExceeded
}
pub fn is_local_ai_not_ready(&self) -> bool {
self.code == ErrorCode::LocalAINotReady
}
pub fn is_ai_max_required(&self) -> bool {
self.code == ErrorCode::AIMaxRequired
}
@@ -151,6 +155,7 @@ impl FlowyError {
static_flowy_error!(file_storage_limit, ErrorCode::FileStorageLimitExceeded);
static_flowy_error!(view_is_locked, ErrorCode::ViewIsLocked);
static_flowy_error!(local_ai_not_ready, ErrorCode::LocalAINotReady);
}
impl std::convert::From<ErrorCode> for FlowyError {