chore: Support new error code (#7311)

* chore: fetch model list

* chore: support new error code
Nathan.fooo authored 2025-02-03 20:52:08 +08:00 · committed by GitHub
parent 25a27dfa81
commit aacd09d8e2
14 changed files with 114 additions and 35 deletions

View File

@ -148,6 +148,15 @@ class CompletionStream {
);
}
if (event.startsWith("AI_MAX_REQUIRED:")) {
final msg = event.substring(16);
onError(
AIError(
message: msg,
),
);
}
if (event.startsWith("start:")) {
await onStart();
}
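
The "AI_MAX_REQUIRED:" prefix is 16 characters long, which is why the handler strips it with substring(16). A minimal Dart sketch of that parsing, using a hypothetical helper name rather than AppFlowy's API:

```dart
// Sketch only: strip the "AI_MAX_REQUIRED:" prefix from a raw stream event.
// The prefix is 16 characters, matching the substring(16) call above.
String? parseAIMaxRequired(String event) {
  const prefix = "AI_MAX_REQUIRED:";
  if (!event.startsWith(prefix)) return null;
  return event.substring(prefix.length); // the human-readable message
}

void main() {
  // Hypothetical message text, for illustration only.
  print(parseAIMaxRequired("AI_MAX_REQUIRED:Upgrade to AI Max")); // Upgrade to AI Max
  print(parseAIMaxRequired("start:"));                            // null
}
```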

View File

@ -247,6 +247,7 @@ Future<AppFlowyCloudConfiguration> configurationFromUri(
// In development mode, the app is configured to access the AppFlowy cloud server directly through specific ports.
// This setup bypasses the need for Nginx, meaning that the AppFlowy cloud should be running without an Nginx server
// in the development environment.
// If you modify the following code, please update the corresponding AppFlowy billing documentation.
if (authenticatorType == AuthenticatorType.appflowyCloudDevelop) {
return AppFlowyCloudConfiguration(
base_url: "$baseUrl:8000",

View File

@ -105,6 +105,13 @@ class ChatAIMessageBloc extends Bloc<ChatAIMessageEvent, ChatAIMessageState> {
),
);
},
onAIMaxRequired: (message) {
emit(
state.copyWith(
messageState: MessageState.onAIMaxRequired(message),
),
);
},
receiveMetadata: (metadata) {
Log.debug("AI Steps: ${metadata.progress?.step}");
emit(
@ -146,6 +153,12 @@ class ChatAIMessageBloc extends Bloc<ChatAIMessageEvent, ChatAIMessageState> {
add(ChatAIMessageEvent.receiveMetadata(metadata));
}
},
onAIMaxRequired: (message) {
if (!isClosed) {
Log.info(message);
add(ChatAIMessageEvent.onAIMaxRequired(message));
}
},
);
}
}
@ -159,6 +172,8 @@ class ChatAIMessageEvent with _$ChatAIMessageEvent {
const factory ChatAIMessageEvent.onAIResponseLimit() = _OnAIResponseLimit;
const factory ChatAIMessageEvent.onAIImageResponseLimit() =
_OnAIImageResponseLimit;
const factory ChatAIMessageEvent.onAIMaxRequired(String message) =
_OnAIMaxRequired;
const factory ChatAIMessageEvent.receiveMetadata(
MetadataCollection metadata,
) = _ReceiveMetadata;
@ -193,6 +208,7 @@ class MessageState with _$MessageState {
const factory MessageState.onError(String error) = _Error;
const factory MessageState.onAIResponseLimit() = _AIResponseLimit;
const factory MessageState.onAIImageResponseLimit() = _AIImageResponseLimit;
const factory MessageState.onAIMaxRequired(String message) = _AIMaxRequired;
const factory MessageState.ready() = _Ready;
const factory MessageState.loading() = _Loading;
}
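
Taken together, these hunks route the new error from the answer stream callback into a bloc event and then into a MessageState the UI can render. A hedged sketch of that flow in plain Dart, without flutter_bloc or freezed (class names below are illustrative, not the generated ones):

```dart
// Hedged sketch in plain Dart (no flutter_bloc/freezed): the answer stream's
// onAIMaxRequired callback forwards the message as an event only while the
// bloc is still open, and the bloc maps it to a state the widget layer can
// render as an error message.
import 'dart:async';

sealed class MessageState {}

class Ready extends MessageState {}

class AIMaxRequired extends MessageState {
  AIMaxRequired(this.message);
  final String message;
}

class FakeChatAIMessageBloc {
  final _states = StreamController<MessageState>.broadcast();
  bool isClosed = false;

  Stream<MessageState> get stream => _states.stream;

  // Mirrors the new handler: guard on isClosed before emitting.
  void onAIMaxRequired(String message) {
    if (!isClosed) _states.add(AIMaxRequired(message));
  }

  Future<void> close() async {
    isClosed = true;
    await _states.close();
  }
}

Future<void> main() async {
  final bloc = FakeChatAIMessageBloc();
  bloc.stream.listen((state) {
    if (state is AIMaxRequired) {
      print('show error widget: ${state.message}');
    }
  });
  bloc.onAIMaxRequired('Upgrade to AI Max to keep chatting'); // hypothetical text
  await bloc.close();
}
```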

View File

@ -28,6 +28,14 @@ class AnswerStream {
} else if (event == "AI_IMAGE_RESPONSE_LIMIT") {
_aiImageLimitReached = true;
_onAIImageResponseLimit?.call();
} else if (event.startsWith("AI_MAX_REQUIRED:")) {
final msg = event.substring(16);
// Deliver the message immediately if a listener is registered;
// otherwise buffer it until listen() is called.
if (_onAIMaxRequired != null) {
_onAIMaxRequired!(msg);
} else {
_pendingAIMaxRequiredEvents.add(msg);
}
}
},
onDone: () {
@ -56,8 +64,12 @@ class AnswerStream {
void Function(String error)? _onError;
void Function()? _onAIResponseLimit;
void Function()? _onAIImageResponseLimit;
void Function(String message)? _onAIMaxRequired;
void Function(MetadataCollection metadataCollection)? _onMetadata;
// Buffer for events that occur before listen() is called.
final List<String> _pendingAIMaxRequiredEvents = [];
int get nativePort => _port.sendPort.nativePort;
bool get hasStarted => _hasStarted;
bool get aiLimitReached => _aiLimitReached;
@ -78,6 +90,7 @@ class AnswerStream {
void Function(String error)? onError,
void Function()? onAIResponseLimit,
void Function()? onAIImageResponseLimit,
void Function(String message)? onAIMaxRequired,
void Function(MetadataCollection metadata)? onMetadata,
}) {
_onData = onData;
@ -87,6 +100,15 @@ class AnswerStream {
_onAIResponseLimit = onAIResponseLimit;
_onAIImageResponseLimit = onAIImageResponseLimit;
_onMetadata = onMetadata;
_onAIMaxRequired = onAIMaxRequired;
// Flush any buffered AI_MAX_REQUIRED events.
if (_onAIMaxRequired != null && _pendingAIMaxRequiredEvents.isNotEmpty) {
for (final msg in _pendingAIMaxRequiredEvents) {
_onAIMaxRequired!(msg);
}
_pendingAIMaxRequiredEvents.clear();
}
_onStart?.call();
}
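
The buffer exists because an AI_MAX_REQUIRED event can arrive from the native side before listen() has registered a callback. A self-contained sketch of the buffer-then-flush pattern, with illustrative names:

```dart
// Self-contained sketch of the buffer-then-flush pattern used above.
// Events handled before listen() registers a callback are queued and
// replayed once the callback is set.
class AIMaxRequiredBuffer {
  void Function(String message)? _onAIMaxRequired;
  final List<String> _pending = [];

  // Called from the stream event handler for "AI_MAX_REQUIRED:" events.
  void handle(String message) {
    final callback = _onAIMaxRequired;
    if (callback != null) {
      callback(message);
    } else {
      _pending.add(message); // no listener yet, keep it for later
    }
  }

  // Called from listen(): register the callback, then flush the backlog.
  void listen(void Function(String message) onAIMaxRequired) {
    _onAIMaxRequired = onAIMaxRequired;
    for (final message in _pending) {
      onAIMaxRequired(message);
    }
    _pending.clear();
  }
}

void main() {
  final buffer = AIMaxRequiredBuffer();
  buffer.handle('arrived before listen()');
  buffer.listen((message) => print('delivered: $message')); // replays the backlog
  buffer.handle('arrived after listen()'); // delivered immediately
}
```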

View File

@ -137,6 +137,11 @@ class ChatAIMessageWidget extends StatelessWidget {
errorMessage: LocaleKeys.sideBar_purchaseAIMax.tr(),
);
},
onAIMaxRequired: (message) {
return ChatErrorMessageWidget(
errorMessage: message,
);
},
),
),
);

View File

@ -22,6 +22,7 @@ class SettingsAIBloc extends Bloc<SettingsAIEvent, SettingsAIState> {
_userService = UserBackendService(userId: userProfile.id),
super(
SettingsAIState(
selectedAIModel: userProfile.aiModel,
userProfile: userProfile,
currentWorkspaceMemberRole: currentWorkspaceMemberRole,
),
@ -98,7 +99,25 @@ class SettingsAIBloc extends Bloc<SettingsAIEvent, SettingsAIState> {
Log.info("Available models: $decodedJson");
if (decodedJson is Map<String, dynamic>) {
final models = ModelList.fromJson(decodedJson).models;
emit(state.copyWith(availableModels: models));
if (models.isEmpty) {
// If no models are returned, fall back to showing only the default entry.
emit(state.copyWith(availableModels: ["Default"]));
return;
}
if (!models.contains(state.selectedAIModel)) {
// If the currently selected model is no longer available,
// fall back to the first model in the list.
emit(
state.copyWith(
availableModels: models,
selectedAIModel: models[0],
),
);
} else {
emit(state.copyWith(availableModels: models));
}
}
},
refreshMember: (member) {
@ -185,8 +204,9 @@ class SettingsAIState with _$SettingsAIState {
const factory SettingsAIState({
required UserProfilePB userProfile,
UseAISettingPB? aiSettings,
@Default("Default") String selectedAIModel,
AFRolePB? currentWorkspaceMemberRole,
@Default(["default"]) List<String> availableModels,
@Default(["Default"]) List<String> availableModels,
@Default(true) bool enableSearchIndexing,
}) = _SettingsAIState;
}
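
The selection rules added above can be summarized as a pure function: an empty list collapses to the default entry, and a stale selection falls back to the first available model. A hedged sketch (the record type and names are illustrative, not AppFlowy API):

```dart
// Hedged sketch of the fallback rules the bloc applies above, as a pure
// function.
({List<String> availableModels, String selectedAIModel}) resolveModels(
  List<String> models,
  String currentSelection,
) {
  if (models.isEmpty) {
    // Nothing came back from the server: show only the default entry.
    return (availableModels: ["Default"], selectedAIModel: "Default");
  }
  if (!models.contains(currentSelection)) {
    // The previously selected model is gone: fall back to the first one.
    return (availableModels: models, selectedAIModel: models.first);
  }
  return (availableModels: models, selectedAIModel: currentSelection);
}

void main() {
  // Model names below are made up for illustration.
  print(resolveModels([], "gpt-4o"));                   // default only
  print(resolveModels(["claude", "gpt-4o"], "local"));  // falls back to "claude"
  print(resolveModels(["claude", "gpt-4o"], "gpt-4o")); // keeps "gpt-4o"
}
```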

View File

@ -33,7 +33,7 @@ class AIModelSelection extends StatelessWidget {
onChanged: (model) => context
.read<SettingsAIBloc>()
.add(SettingsAIEvent.selectModel(model)),
selectedOption: state.userProfile.aiModel,
selectedOption: state.selectedAIModel,
options: state.availableModels
.map(
(model) => buildDropdownMenuEntry<String>(

View File

@ -163,7 +163,7 @@ checksum = "c1fd03a028ef38ba2276dce7e33fcd6369c158a1bca17946c4b1b701891c1ff7"
[[package]]
name = "app-error"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=82409199f8ffa0166f2f5d9403ccd55831890549#82409199f8ffa0166f2f5d9403ccd55831890549"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=4ed5b367eac5ae9ffd603812e2fea26b3ed3da7a#4ed5b367eac5ae9ffd603812e2fea26b3ed3da7a"
dependencies = [
"anyhow",
"bincode",
@ -183,7 +183,7 @@ dependencies = [
[[package]]
name = "appflowy-ai-client"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=82409199f8ffa0166f2f5d9403ccd55831890549#82409199f8ffa0166f2f5d9403ccd55831890549"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=4ed5b367eac5ae9ffd603812e2fea26b3ed3da7a#4ed5b367eac5ae9ffd603812e2fea26b3ed3da7a"
dependencies = [
"anyhow",
"bytes",
@ -786,7 +786,7 @@ dependencies = [
[[package]]
name = "client-api"
version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=82409199f8ffa0166f2f5d9403ccd55831890549#82409199f8ffa0166f2f5d9403ccd55831890549"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=4ed5b367eac5ae9ffd603812e2fea26b3ed3da7a#4ed5b367eac5ae9ffd603812e2fea26b3ed3da7a"
dependencies = [
"again",
"anyhow",
@ -843,7 +843,7 @@ dependencies = [
[[package]]
name = "client-api-entity"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=82409199f8ffa0166f2f5d9403ccd55831890549#82409199f8ffa0166f2f5d9403ccd55831890549"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=4ed5b367eac5ae9ffd603812e2fea26b3ed3da7a#4ed5b367eac5ae9ffd603812e2fea26b3ed3da7a"
dependencies = [
"collab-entity",
"collab-rt-entity",
@ -856,7 +856,7 @@ dependencies = [
[[package]]
name = "client-websocket"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=82409199f8ffa0166f2f5d9403ccd55831890549#82409199f8ffa0166f2f5d9403ccd55831890549"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=4ed5b367eac5ae9ffd603812e2fea26b3ed3da7a#4ed5b367eac5ae9ffd603812e2fea26b3ed3da7a"
dependencies = [
"futures-channel",
"futures-util",
@ -1128,7 +1128,7 @@ dependencies = [
[[package]]
name = "collab-rt-entity"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=82409199f8ffa0166f2f5d9403ccd55831890549#82409199f8ffa0166f2f5d9403ccd55831890549"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=4ed5b367eac5ae9ffd603812e2fea26b3ed3da7a#4ed5b367eac5ae9ffd603812e2fea26b3ed3da7a"
dependencies = [
"anyhow",
"bincode",
@ -1153,7 +1153,7 @@ dependencies = [
[[package]]
name = "collab-rt-protocol"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=82409199f8ffa0166f2f5d9403ccd55831890549#82409199f8ffa0166f2f5d9403ccd55831890549"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=4ed5b367eac5ae9ffd603812e2fea26b3ed3da7a#4ed5b367eac5ae9ffd603812e2fea26b3ed3da7a"
dependencies = [
"anyhow",
"async-trait",
@ -1400,7 +1400,7 @@ dependencies = [
"cssparser-macros",
"dtoa-short",
"itoa",
"phf 0.11.2",
"phf 0.8.0",
"smallvec",
]
@ -1548,7 +1548,7 @@ checksum = "c2e66c9d817f1720209181c316d28635c050fa304f9c79e47a520882661b7308"
[[package]]
name = "database-entity"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=82409199f8ffa0166f2f5d9403ccd55831890549#82409199f8ffa0166f2f5d9403ccd55831890549"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=4ed5b367eac5ae9ffd603812e2fea26b3ed3da7a#4ed5b367eac5ae9ffd603812e2fea26b3ed3da7a"
dependencies = [
"anyhow",
"app-error",
@ -2970,7 +2970,7 @@ dependencies = [
[[package]]
name = "gotrue"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=82409199f8ffa0166f2f5d9403ccd55831890549#82409199f8ffa0166f2f5d9403ccd55831890549"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=4ed5b367eac5ae9ffd603812e2fea26b3ed3da7a#4ed5b367eac5ae9ffd603812e2fea26b3ed3da7a"
dependencies = [
"anyhow",
"futures-util",
@ -2987,7 +2987,7 @@ dependencies = [
[[package]]
name = "gotrue-entity"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=82409199f8ffa0166f2f5d9403ccd55831890549#82409199f8ffa0166f2f5d9403ccd55831890549"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=4ed5b367eac5ae9ffd603812e2fea26b3ed3da7a#4ed5b367eac5ae9ffd603812e2fea26b3ed3da7a"
dependencies = [
"anyhow",
"app-error",
@ -3598,7 +3598,7 @@ dependencies = [
[[package]]
name = "infra"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=82409199f8ffa0166f2f5d9403ccd55831890549#82409199f8ffa0166f2f5d9403ccd55831890549"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=4ed5b367eac5ae9ffd603812e2fea26b3ed3da7a#4ed5b367eac5ae9ffd603812e2fea26b3ed3da7a"
dependencies = [
"anyhow",
"bytes",
@ -4624,7 +4624,7 @@ version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3dfb61232e34fcb633f43d12c58f83c1df82962dcdfa565a4e866ffc17dafe12"
dependencies = [
"phf_macros 0.8.0",
"phf_macros",
"phf_shared 0.8.0",
"proc-macro-hack",
]
@ -4644,7 +4644,6 @@ version = "0.11.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ade2d8b8f33c7333b51bcf0428d37e217e9f32192ae4772156f65063b8ce03dc"
dependencies = [
"phf_macros 0.11.3",
"phf_shared 0.11.2",
]
@ -4712,19 +4711,6 @@ dependencies = [
"syn 1.0.109",
]
[[package]]
name = "phf_macros"
version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f84ac04429c13a7ff43785d75ad27569f2951ce0ffd30a3321230db2fc727216"
dependencies = [
"phf_generator 0.11.2",
"phf_shared 0.11.2",
"proc-macro2",
"quote",
"syn 2.0.94",
]
[[package]]
name = "phf_shared"
version = "0.8.0"
@ -6154,7 +6140,7 @@ dependencies = [
[[package]]
name = "shared-entity"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=82409199f8ffa0166f2f5d9403ccd55831890549#82409199f8ffa0166f2f5d9403ccd55831890549"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=4ed5b367eac5ae9ffd603812e2fea26b3ed3da7a#4ed5b367eac5ae9ffd603812e2fea26b3ed3da7a"
dependencies = [
"anyhow",
"app-error",

View File

@ -103,8 +103,8 @@ dashmap = "6.0.1"
# Run the script:
# scripts/tool/update_client_api_rev.sh new_rev_id
# ⚠️⚠️⚠️️
client-api = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "82409199f8ffa0166f2f5d9403ccd55831890549" }
client-api-entity = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "82409199f8ffa0166f2f5d9403ccd55831890549" }
client-api = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "4ed5b367eac5ae9ffd603812e2fea26b3ed3da7a" }
client-api-entity = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "4ed5b367eac5ae9ffd603812e2fea26b3ed3da7a" }
[profile.dev]
opt-level = 0

View File

@ -259,6 +259,10 @@ impl Chat {
let _ = answer_sink
.send("AI_IMAGE_RESPONSE_LIMIT".to_string())
.await;
} else if err.is_ai_max_required() {
let _ = answer_sink
.send(format!("AI_MAX_REQUIRED:{}", err.msg))
.await;
} else {
let _ = answer_sink.send(format!("error:{}", err)).await;
}

View File

@ -13,6 +13,7 @@ use flowy_ai_pub::cloud::{ChatMessageMetadata, ChatMessageType, ChatRAGData, Con
use flowy_error::{ErrorCode, FlowyError, FlowyResult};
use lib_dispatch::prelude::{data_result_ok, AFPluginData, AFPluginState, DataResult};
use lib_infra::isolate_stream::IsolateSink;
use serde_json::json;
use std::sync::{Arc, Weak};
use tracing::trace;
use validator::Validate;
@ -112,7 +113,14 @@ pub(crate) async fn get_available_model_list_handler(
ai_manager: AFPluginState<Weak<AIManager>>,
) -> DataResult<ModelConfigPB, FlowyError> {
let ai_manager = upgrade_ai_manager(ai_manager)?;
let models = serde_json::to_string(&ai_manager.get_available_models().await?)?;
let available_models = ai_manager.get_available_models().await?;
let models = available_models
.models
.into_iter()
.map(|m| m.name)
.collect::<Vec<String>>();
let models = serde_json::to_string(&json!({"models": models}))?;
data_result_ok(ModelConfigPB { models })
}
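
The handler now serializes only the model names, so the payload the Dart side receives is a JSON object with a single "models" array. A hedged sketch of decoding that shape with plain jsonDecode (SettingsAIBloc parses it via ModelList.fromJson instead; the model names below are made up):

```dart
// Hedged sketch of the payload shape the handler above produces: a JSON
// object with a single "models" array of model names.
import 'dart:convert';

List<String> decodeModelList(String payload) {
  final decoded = jsonDecode(payload);
  if (decoded is Map<String, dynamic>) {
    final models = decoded['models'];
    if (models is List) return models.cast<String>();
  }
  return const [];
}

void main() {
  const payload = '{"models":["model-a","model-b"]}';
  print(decodeModelList(payload)); // [model-a, model-b]
}
```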

View File

@ -362,6 +362,9 @@ pub enum ErrorCode {
#[error("AI Image Response limit exceeded")]
AIImageResponseLimitExceeded = 124,
#[error("AI Max Required")]
AIMaxRequired = 125,
}
impl ErrorCode {

View File

@ -13,7 +13,7 @@ use crate::code::ErrorCode;
pub type FlowyResult<T> = anyhow::Result<T, FlowyError>;
#[derive(Debug, Default, Clone, ProtoBuf, Error)]
#[error("{msg}")]
#[error("code:{code}, message:{msg}")]
pub struct FlowyError {
#[pb(index = 1)]
pub code: ErrorCode,
@ -95,6 +95,10 @@ impl FlowyError {
self.code == ErrorCode::AIImageResponseLimitExceeded
}
pub fn is_ai_max_required(&self) -> bool {
self.code == ErrorCode::AIMaxRequired
}
static_flowy_error!(internal, ErrorCode::Internal);
static_flowy_error!(record_not_found, ErrorCode::RecordNotFound);
static_flowy_error!(workspace_initialize, ErrorCode::WorkspaceInitializeError);

View File

@ -25,6 +25,7 @@ impl From<AppResponseError> for FlowyError {
AppErrorCode::WorkspaceMemberLimitExceeded => ErrorCode::WorkspaceMemberLimitExceeded,
AppErrorCode::AIResponseLimitExceeded => ErrorCode::AIResponseLimitExceeded,
AppErrorCode::AIImageResponseLimitExceeded => ErrorCode::AIImageResponseLimitExceeded,
AppErrorCode::AIMaxRequired => ErrorCode::AIMaxRequired,
AppErrorCode::FileStorageLimitExceeded => ErrorCode::FileStorageLimitExceeded,
AppErrorCode::SingleUploadLimitExceeded => ErrorCode::SingleUploadLimitExceeded,
AppErrorCode::CustomNamespaceDisabled => ErrorCode::CustomNamespaceRequirePlanUpgrade,