chore: show not ready state when using ai writer with local ai

Nathan 2025-03-30 12:06:08 +08:00
parent 3c74208ab9
commit f76ce2be14
6 changed files with 65 additions and 5 deletions

View File

@@ -28,6 +28,7 @@ abstract class AIRepository {
required Future<void> Function(String text) processAssistMessage,
required Future<void> Function() onEnd,
required void Function(AIError error) onError,
required void Function() onLocalAIInitializing,
});
}
@@ -45,12 +46,14 @@ class AppFlowyAIService implements AIRepository {
required Future<void> Function(String text) processAssistMessage,
required Future<void> Function() onEnd,
required void Function(AIError error) onError,
required void Function() onLocalAIInitializing,
}) async {
final stream = AppFlowyCompletionStream(
onStart: onStart,
processMessage: processMessage,
processAssistMessage: processAssistMessage,
processError: onError,
onLocalAIInitializing: onLocalAIInitializing,
onEnd: onEnd,
);
@@ -85,6 +88,7 @@ abstract class CompletionStream {
required this.processMessage,
required this.processAssistMessage,
required this.processError,
required this.onLocalAIInitializing,
required this.onEnd,
});
@@ -92,6 +96,7 @@ abstract class CompletionStream {
final Future<void> Function(String text) processMessage;
final Future<void> Function(String text) processAssistMessage;
final void Function(AIError error) processError;
final void Function() onLocalAIInitializing;
final Future<void> Function() onEnd;
}
@@ -102,6 +107,7 @@ class AppFlowyCompletionStream extends CompletionStream {
required super.processAssistMessage,
required super.processError,
required super.onEnd,
required super.onLocalAIInitializing,
}) {
_startListening();
}
@@ -159,6 +165,10 @@ class AppFlowyCompletionStream extends CompletionStream {
await onEnd();
}
if (event.startsWith("LOCAL_AI_NOT_READY:")) {
onLocalAIInitializing();
}
if (event.startsWith("error:")) {
processError(
AIError(message: event.substring(6), code: AIErrorCode.other),
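
For orientation, this is how a caller wires the new callback alongside the existing ones — a minimal sketch, assuming the interface method is named `streamCompletion` and omitting any parameters not visible in these hunks (such as the prompt text itself):

```dart
import 'package:flutter/foundation.dart';

Future<void> complete(AIRepository repository) async {
  final buffer = StringBuffer();
  await repository.streamCompletion( // method name assumed, not shown above
    onStart: () async => debugPrint('completion started'),
    processMessage: (text) async => buffer.write(text),
    processAssistMessage: (text) async => debugPrint('assist: $text'),
    onEnd: () async => debugPrint('result: $buffer'),
    onError: (error) => debugPrint('AI error: ${error.message}'),
    // New in this commit: fired when the backend reports that the local
    // model is still warming up, instead of surfacing a hard error.
    onLocalAIInitializing: () => debugPrint('local AI is still loading'),
  );
}
```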

View File

@@ -568,6 +568,24 @@ class MainContentArea extends StatelessWidget {
),
);
}
if (state is LocalAIRunningAiWriterState) {
return Padding(
padding: EdgeInsets.all(8.0),
child: Row(
children: [
const HSpace(8.0),
Opacity(
opacity: 0.5,
child: FlowyText(
LocaleKeys.settings_aiPage_keys_localAIInitializing.tr(),
),
),
const HSpace(8.0),
const CircularProgressIndicator.adaptive(),
],
),
);
}
return const SizedBox.shrink();
},
);
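
The new branch shows a dimmed label next to a platform-adaptive spinner while the local model warms up. For reference, the same row restated as a self-contained widget — a sketch that assumes FlowyText and HSpace are thin wrappers over Text and SizedBox:

```dart
import 'package:flutter/material.dart';

class LocalAIInitializingIndicator extends StatelessWidget {
  const LocalAIInitializingIndicator({super.key, required this.message});

  final String message;

  @override
  Widget build(BuildContext context) {
    return Padding(
      padding: const EdgeInsets.all(8.0),
      child: Row(
        children: [
          const SizedBox(width: 8.0),
          // Dimmed label, mirroring the Opacity(0.5) + FlowyText above.
          Opacity(opacity: 0.5, child: Text(message)),
          const SizedBox(width: 8.0),
          // .adaptive() renders the Cupertino spinner on Apple platforms.
          const CircularProgressIndicator.adaptive(),
        ],
      ),
    );
  }
}
```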

View File

@@ -390,6 +390,9 @@ class AiWriterCubit extends Cubit<AiWriterState> {
AiWriterRecord.ai(content: _textRobot.markdownText),
);
},
onLocalAIInitializing: () {
emit(LocalAIRunningAiWriterState(command));
},
);
if (stream != null) {
@@ -481,6 +484,9 @@ class AiWriterCubit extends Cubit<AiWriterState> {
AiWriterRecord.ai(content: _textRobot.markdownText),
);
},
onLocalAIInitializing: () {
emit(LocalAIRunningAiWriterState(command));
},
);
if (stream != null) {
emit(
@@ -569,6 +575,9 @@ class AiWriterCubit extends Cubit<AiWriterState> {
AiWriterRecord.ai(content: _textRobot.markdownText),
);
},
onLocalAIInitializing: () {
emit(LocalAIRunningAiWriterState(command));
},
);
if (stream != null) {
emit(
@@ -639,6 +648,9 @@ class AiWriterCubit extends Cubit<AiWriterState> {
}
emit(ErrorAiWriterState(command, error: error));
},
onLocalAIInitializing: () {
emit(LocalAIRunningAiWriterState(command));
},
);
if (stream != null) {
emit(
@@ -714,3 +726,11 @@ class DocumentContentEmptyAiWriterState extends AiWriterState
final void Function() onConfirm;
}
class LocalAIRunningAiWriterState extends AiWriterState
with RegisteredAiWriter {
const LocalAIRunningAiWriterState(this.command);
@override
final AiWriterCommand command;
}
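
The cubit passes an identical onLocalAIInitializing closure at four call sites, each mapping the callback straight onto a LocalAIRunningAiWriterState emission. A reduced, self-contained model of that flow — the state and cubit names below are stand-ins, not the real AiWriterState hierarchy:

```dart
import 'package:bloc/bloc.dart';

sealed class WriterState {
  const WriterState();
}

class IdleWriterState extends WriterState {
  const IdleWriterState();
}

class LocalAIInitializingState extends WriterState {
  const LocalAIInitializingState(this.command);
  final String command; // the real state carries an AiWriterCommand
}

class WriterCubit extends Cubit<WriterState> {
  WriterCubit() : super(const IdleWriterState());

  // Mirrors the onLocalAIInitializing callbacks wired up in the diff above.
  void onLocalAIInitializing(String command) =>
      emit(LocalAIInitializingState(command));
}
```

Hoisting the repeated closure into one private helper on AiWriterCubit would remove the four-way duplication, though the commit keeps each call site explicit.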

View File

@@ -30,6 +30,7 @@ class _MockAIRepository extends Mock implements AppFlowyAIService {
required Future<void> Function(String text) processAssistMessage,
required Future<void> Function() onEnd,
required void Function(AIError error) onError,
required void Function() onLocalAIInitializing,
}) async {
final stream = _MockCompletionStream();
unawaited(
@@ -62,6 +63,7 @@ class _MockAIRepositoryLess extends Mock implements AppFlowyAIService {
required Future<void> Function(String text) processAssistMessage,
required Future<void> Function() onEnd,
required void Function(AIError error) onError,
required void Function() onLocalAIInitializing,
}) async {
final stream = _MockCompletionStream();
unawaited(
@@ -90,6 +92,7 @@ class _MockAIRepositoryMore extends Mock implements AppFlowyAIService {
required Future<void> Function(String text) processAssistMessage,
required Future<void> Function() onEnd,
required void Function(AIError error) onError,
required void Function() onLocalAIInitializing,
}) async {
final stream = _MockCompletionStream();
unawaited(
@@ -120,6 +123,7 @@ class _MockErrorRepository extends Mock implements AppFlowyAIService {
required Future<void> Function(String text) processAssistMessage,
required Future<void> Function() onEnd,
required void Function(AIError error) onError,
required void Function() onLocalAIInitializing,
}) async {
final stream = _MockCompletionStream();
unawaited(
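
None of the updated mocks actually invoke the new callback; they only absorb the widened signature. A sketch of a test double that would drive the new path, keeping only the callbacks visible in these hunks (the full streamCompletion signature, including its return value, is truncated above):

```dart
Future<void> fakeNotReadyCompletion({
  required Future<void> Function(String text) processAssistMessage,
  required Future<void> Function() onEnd,
  required void Function(AIError error) onError,
  required void Function() onLocalAIInitializing,
}) async {
  // Report the LOCAL_AI_NOT_READY condition straight away, so a test can
  // assert the cubit entered LocalAIRunningAiWriterState. Whether the real
  // plugin also sends a finish event in this case is not shown here.
  onLocalAIInitializing();
  await onEnd();
}
```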

View File

@@ -858,7 +858,8 @@
"localAILoading": "Local AI Chat Model is loading...",
"localAIStopped": "Local AI stopped",
"localAIRunning": "Local AI is running",
"localAIInitializing": "Local AI is loading and may take a few minutes, depending on your device",
"localAIInitializing": "Local AI is loading and may take a few seconds, depending on your device",
"localAINotReadyRetryLater": "Local AI is initializing, please retry later",
"localAINotReadyTextFieldPrompt": "You can not edit while Local AI is loading",
"failToLoadLocalAI": "Failed to start local AI",
"restartLocalAI": "Restart Local AI",

View File

@@ -12,6 +12,7 @@ use flowy_error::{FlowyError, FlowyResult};
use futures::{SinkExt, StreamExt};
use lib_infra::isolate_stream::IsolateSink;
use crate::stream_message::StreamMessage;
use crate::util::ai_available_models_key;
use flowy_sqlite::kv::KVStorePreferences;
use std::sync::{Arc, Weak};
@@ -188,12 +189,18 @@ impl CompletionTask {
 }
 }
-async fn handle_error(sink: &mut IsolateSink, error: FlowyError) {
-  if error.is_ai_response_limit_exceeded() {
+async fn handle_error(sink: &mut IsolateSink, err: FlowyError) {
+  if err.is_ai_response_limit_exceeded() {
     let _ = sink.send("AI_RESPONSE_LIMIT".to_string()).await;
-  } else if error.is_ai_image_response_limit_exceeded() {
+  } else if err.is_ai_image_response_limit_exceeded() {
     let _ = sink.send("AI_IMAGE_RESPONSE_LIMIT".to_string()).await;
+  } else if err.is_ai_max_required() {
+    let _ = sink.send(format!("AI_MAX_REQUIRED:{}", err.msg)).await;
+  } else if err.is_local_ai_not_ready() {
+    let _ = sink.send(format!("LOCAL_AI_NOT_READY:{}", err.msg)).await;
   } else {
-    let _ = sink.send(format!("error:{}", error)).await;
+    let _ = sink
+      .send(StreamMessage::OnError(err.msg.clone()).to_string())
+      .await;
   }
 }
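
On the Dart side, AppFlowyCompletionStream matches these sentinel strings by prefix; the LOCAL_AI_NOT_READY branch is in the first file of this commit. A sketch of that dispatch, extended to strip the err.msg payload the Rust side appends after the colon — the AI_MAX_REQUIRED handling is an assumption, since its Dart counterpart is not shown here:

```dart
void dispatchCompletionEvent(
  String event, {
  required void Function() onLocalAIInitializing,
  required void Function(String message) onMaxRequired,
}) {
  const notReadyPrefix = 'LOCAL_AI_NOT_READY:';
  const maxRequiredPrefix = 'AI_MAX_REQUIRED:';
  if (event.startsWith(notReadyPrefix)) {
    // The commit ignores the payload here and only flips the UI into
    // its "Local AI is loading" state.
    onLocalAIInitializing();
  } else if (event.startsWith(maxRequiredPrefix)) {
    onMaxRequired(event.substring(maxRequiredPrefix.length));
  }
}
```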