chore: remove default model name

Nathan 2025-03-24 21:59:42 +08:00
parent 682a50da53
commit 35081fd311
9 changed files with 90 additions and 80 deletions

View File

@@ -739,6 +739,7 @@ class _SelectModelButtonState extends State<SelectModelButton> {
);
},
child: _CurrentModelButton(
key: ValueKey(state.availableModels?.selectedModel.name),
modelName: state.availableModels?.selectedModel.name ?? "",
onTap: () => popoverController.show(),
),
@@ -810,6 +811,7 @@ class _CurrentModelButton extends StatelessWidget {
const _CurrentModelButton({
required this.modelName,
required this.onTap,
super.key,
});
final String modelName;

View File

@@ -87,13 +87,13 @@ class AiSettingsGroup extends StatelessWidget {
children: availableModels
.mapIndexed(
(index, model) => FlowyOptionTile.checkbox(
text: model,
text: model.name,
showTopBorder: index == 0,
isSelected: state.selectedAIModel == model,
isSelected: state.selectedAIModel == model.name,
onTap: () {
context
.read<SettingsAIBloc>()
.add(SettingsAIEvent.selectModel(model));
.add(SettingsAIEvent.selectModel(model.name));
context.pop();
},
),

View File

@@ -1,9 +1,8 @@
import 'dart:convert';
import 'package:appflowy/user/application/user_listener.dart';
import 'package:appflowy/user/application/user_service.dart';
import 'package:appflowy_backend/dispatch/dispatch.dart';
import 'package:appflowy_backend/log.dart';
import 'package:appflowy_backend/protobuf/flowy-ai/entities.pb.dart';
import 'package:appflowy_backend/protobuf/flowy-error/errors.pb.dart';
import 'package:appflowy_backend/protobuf/flowy-user/protobuf.dart';
import 'package:appflowy_result/appflowy_result.dart';
@@ -95,32 +94,22 @@ class SettingsAIBloc extends Bloc<SettingsAIEvent, SettingsAIState> {
),
);
},
didLoadAvailableModels: (String models) {
final dynamic decodedJson = jsonDecode(models);
Log.info("Available models: $decodedJson");
if (decodedJson is Map<String, dynamic>) {
final models = ModelList.fromJson(decodedJson).models;
if (models.isEmpty) {
// If available models is empty, then we just show the
// Default
emit(state.copyWith(availableModels: ["Default"]));
return;
}
if (!models.contains(state.selectedAIModel)) {
// Use first model as default model if current selected model
// is not available
final selectedModel = models[0];
_updateUserWorkspaceSetting(model: selectedModel);
emit(
state.copyWith(
availableModels: models,
selectedAIModel: selectedModel,
),
);
} else {
emit(state.copyWith(availableModels: models));
}
didLoadAvailableModels: (List<AvailableModelPB> models) {
if (state.selectedAIModel.isEmpty) {
final m = models.firstWhere((model) => model.isDefault);
_updateUserWorkspaceSetting(model: m.name);
emit(
state.copyWith(
availableModels: models,
selectedAIModel: m.name,
),
);
} else {
emit(
state.copyWith(
availableModels: models,
),
);
}
},
refreshMember: (member) {
@@ -203,7 +192,7 @@ class SettingsAIEvent with _$SettingsAIEvent {
) = _DidReceiveUserProfile;
const factory SettingsAIEvent.didLoadAvailableModels(
String models,
List<AvailableModelPB> models,
) = _DidLoadAvailableModels;
}
@@ -214,7 +203,7 @@ class SettingsAIState with _$SettingsAIState {
UseAISettingPB? aiSettings,
@Default("Default") String selectedAIModel,
AFRolePB? currentWorkspaceMemberRole,
@Default(["Default"]) List<String> availableModels,
@Default([]) List<AvailableModelPB> availableModels,
@Default(true) bool enableSearchIndexing,
}) = _SettingsAIState;
}
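As a rough sketch of the selection policy the bloc now follows (keep the current choice if one is set, otherwise fall back to the model the server flags as default), written here in Rust with an illustrative Model type that only mirrors AvailableModelPB's name/is_default fields; the fallback to the first entry is an extra safety net in this sketch, not something the Dart handler above does:

#[derive(Clone, Debug)]
struct Model {
    name: String,
    is_default: bool,
}

// Pick the model name to select: keep the current one if non-empty,
// otherwise take the server-flagged default (or, failing that, the first model).
fn resolve_selected(current: &str, models: &[Model]) -> Option<String> {
    if !current.is_empty() {
        return Some(current.to_string());
    }
    models
        .iter()
        .find(|m| m.is_default)
        .or_else(|| models.first())
        .map(|m| m.name.clone())
}

fn main() {
    let models = vec![
        Model { name: "model-a".into(), is_default: false },
        Model { name: "model-b".into(), is_default: true },
    ];
    assert_eq!(resolve_selected("", &models), Some("model-b".to_string()));
    assert_eq!(resolve_selected("model-a", &models), Some("model-a".to_string()));
}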

View File

@@ -38,8 +38,8 @@ class AIModelSelection extends StatelessWidget {
.map(
(model) => buildDropdownMenuEntry<String>(
context,
value: model,
label: model,
value: model.name,
label: model.name,
),
)
.toList(),

View File

@@ -144,34 +144,34 @@ EXTERNAL SOURCES:
:path: Flutter/ephemeral/.symlinks/plugins/window_manager/macos
SPEC CHECKSUMS:
app_links: 9028728e32c83a0831d9db8cf91c526d16cc5468
appflowy_backend: 464aeb3e5c6966a41641a2111e5ead72ce2695f7
auto_updater_macos: 3a42f1a06be6981f1a18be37e6e7bf86aa732118
bitsdojo_window_macos: 7959fb0ca65a3ccda30095c181ecb856fae48ea9
connectivity_plus: e74b9f74717d2d99d45751750e266e55912baeb5
desktop_drop: e0b672a7d84c0a6cbc378595e82cdb15f2970a43
device_info_plus: a56e6e74dbbd2bb92f2da12c64ddd4f67a749041
file_selector_macos: 6280b52b459ae6c590af5d78fc35c7267a3c4b31
flowy_infra_ui: 8760ff42a789de40bf5007a5f176b454722a341e
app_links: 10e0a0ab602ffaf34d142cd4862f29d34b303b2a
appflowy_backend: 865496343de667fc8c600e04b9fd05234e130cf9
auto_updater_macos: 3e3462c418fe4e731917eacd8d28eef7af84086d
bitsdojo_window_macos: 44e3b8fe3dd463820e0321f6256c5b1c16bb6a00
connectivity_plus: 18d3c32514c886e046de60e9c13895109866c747
desktop_drop: 69eeff437544aa619c8db7f4481b3a65f7696898
device_info_plus: ce1b7762849d3ec103d0e0517299f2db7ad60720
file_selector_macos: cc3858c981fe6889f364731200d6232dac1d812d
flowy_infra_ui: 03301a39ad118771adbf051a664265c61c507f38
FlutterMacOS: 8f6f14fa908a6fb3fba0cd85dbd81ec4b251fb24
HotKey: 400beb7caa29054ea8d864c96f5ba7e5b4852277
hotkey_manager: b443f35f4d772162937aa73fd8995e579f8ac4e2
irondash_engine_context: 893c7d96d20ce361d7e996f39d360c4c2f9869ba
local_notifier: ebf072651e35ae5e47280ad52e2707375cb2ae4e
package_info_plus: f0052d280d17aa382b932f399edf32507174e870
path_provider_foundation: 080d55be775b7414fd5a5ef3ac137b97b097e564
hotkey_manager: c32bf0bfe8f934b7bc17ab4ad5c4c142960b023c
irondash_engine_context: da62996ee25616d2f01bbeb85dc115d813359478
local_notifier: e9506bc66fc70311e8bc7291fb70f743c081e4ff
package_info_plus: 12f1c5c2cfe8727ca46cbd0b26677728972d9a5b
path_provider_foundation: 2b6b4c569c0fb62ec74538f866245ac84301af46
ReachabilitySwift: 32793e867593cfc1177f5d16491e3a197d2fccda
screen_retriever_macos: 452e51764a9e1cdb74b3c541238795849f21557f
screen_retriever_macos: 776e0fa5d42c6163d2bf772d22478df4b302b161
Sentry: 1fe34e9c2cbba1e347623610d26db121dcb569f1
sentry_flutter: e24b397f9a61fa5bbefd8279c3b2242ca86faa90
share_plus: 510bf0af1a42cd602274b4629920c9649c52f4cc
shared_preferences_foundation: 9e1978ff2562383bd5676f64ec4e9aa8fa06a6f7
sentry_flutter: a39c2a2d67d5e5b9cb0b94a4985c76dd5b3fc737
share_plus: 1fa619de8392a4398bfaf176d441853922614e89
shared_preferences_foundation: fcdcbc04712aee1108ac7fda236f363274528f78
Sparkle: 5f8960a7a119aa7d45dacc0d5837017170bc5675
sqflite_darwin: 20b2a3a3b70e43edae938624ce550a3cbf66a3d0
super_native_extensions: c2795d6d9aedf4a79fae25cb6160b71b50549189
url_launcher_macos: 0fba8ddabfc33ce0a9afe7c5fef5aab3d8d2d673
webview_flutter_wkwebview: 44d4dee7d7056d5ad185d25b38404436d56c547c
window_manager: 1d01fa7ac65a6e6f83b965471b1a7fdd3f06166c
sqflite_darwin: 5a7236e3b501866c1c9befc6771dfd73ffb8702d
super_native_extensions: 85efee3a7495b46b04befcfc86ed12069264ebf3
url_launcher_macos: c82c93949963e55b228a30115bd219499a6fe404
webview_flutter_wkwebview: 0982481e3d9c78fd5c6f62a002fcd24fc791f1e4
window_manager: 3a1844359a6295ab1e47659b1a777e36773cd6e8
PODFILE CHECKSUM: 0532f3f001ca3110b8be345d6491fff690e95823

View File

@@ -20,6 +20,7 @@ use crate::util::ai_available_models_key;
use collab_integrate::persistence::collab_metadata_sql::{
batch_insert_collab_metadata, batch_select_collab_metadata, AFCollabMetadata,
};
use flowy_ai_pub::cloud::ai_dto::AvailableModel;
use flowy_storage_pub::storage::StorageService;
use lib_infra::async_trait::async_trait;
use lib_infra::util::timestamp;
@@ -57,7 +58,7 @@ pub trait AIExternalService: Send + Sync + 'static {
#[derive(Debug, Default)]
struct ServerModelsCache {
models: Vec<String>,
models: Vec<AvailableModel>,
timestamp: Option<i64>,
}
@@ -274,10 +275,9 @@ impl AIManager {
Ok(model)
}
pub async fn get_server_available_models(&self) -> FlowyResult<Vec<String>> {
pub async fn get_server_available_models(&self) -> FlowyResult<Vec<AvailableModel>> {
let workspace_id = self.user_service.workspace_id()?;
let now = timestamp(); // This is safer than using SystemTime which could fail
let now = timestamp();
// First, try reading from the cache with expiration check
let should_fetch = {
@@ -298,16 +298,9 @@
.await
{
Ok(list) => {
let models = list
.models
.into_iter()
.map(|m| m.name)
.collect::<Vec<String>>();
// Update the cache with new timestamp - handle potential errors
let models = list.models;
if let Err(err) = self.update_models_cache(&models, now).await {
error!("Failed to update models cache: {}", err);
// Still return the fetched models even if caching failed
}
Ok(models)
@@ -328,7 +321,11 @@
}
}
async fn update_models_cache(&self, models: &[String], timestamp: i64) -> FlowyResult<()> {
async fn update_models_cache(
&self,
models: &[AvailableModel],
timestamp: i64,
) -> FlowyResult<()> {
match self.server_models.try_write() {
Ok(mut cache) => {
cache.models = models.to_vec();
@@ -360,8 +357,8 @@
.get_server_available_models()
.await?
.into_iter()
.map(|name| AIModelPB {
name,
.map(|m| AIModelPB {
name: m.name,
is_local: false,
})
.collect();
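For context, a minimal self-contained sketch of the timestamp-based cache guard that get_server_available_models relies on; ModelCache, CACHE_TTL_SECS and the String model type are illustrative stand-ins here, not the actual types from this crate:

use std::sync::RwLock;

const CACHE_TTL_SECS: i64 = 300; // illustrative TTL, not the real value

#[derive(Default)]
struct ModelCache {
    models: Vec<String>, // stands in for Vec<AvailableModel>
    timestamp: Option<i64>,
}

// Decide whether the server should be queried again.
fn should_fetch(cache: &RwLock<ModelCache>, now: i64) -> bool {
    match cache.try_read() {
        Ok(guard) => match guard.timestamp {
            // Refetch when the cache is empty or older than the TTL.
            Some(ts) => guard.models.is_empty() || now - ts >= CACHE_TTL_SECS,
            None => true,
        },
        // If the lock is contended, fall back to fetching fresh data.
        Err(_) => true,
    }
}

fn main() {
    let cache = RwLock::new(ModelCache::default());
    assert!(should_fetch(&cache, 1_000));
    {
        let mut guard = cache.try_write().unwrap();
        guard.models = vec!["model-a".to_string()];
        guard.timestamp = Some(1_000);
    }
    assert!(!should_fetch(&cache, 1_100));
}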

View File

@@ -3,6 +3,7 @@ use std::collections::HashMap;
use crate::local_ai::controller::LocalAISetting;
use crate::local_ai::resource::PendingResource;
use flowy_ai_pub::cloud::ai_dto::AvailableModel;
use flowy_ai_pub::cloud::{
AIModel, ChatMessage, ChatMessageMetadata, ChatMessageType, CompletionMessage, LLMModel,
OutputContent, OutputLayout, RelatedQuestion, RepeatedChatMessage, RepeatedRelatedQuestion,
@@ -186,7 +187,29 @@ pub struct ChatMessageListPB {
#[derive(Default, ProtoBuf, Clone, Debug)]
pub struct ServerAvailableModelsPB {
#[pb(index = 1)]
pub models: String,
pub models: Vec<AvailableModelPB>,
}
#[derive(Default, ProtoBuf, Clone, Debug)]
pub struct AvailableModelPB {
#[pb(index = 1)]
pub name: String,
#[pb(index = 2)]
pub is_default: bool,
}
impl From<AvailableModel> for AvailableModelPB {
fn from(value: AvailableModel) -> Self {
let is_default = value
.metadata
.and_then(|v| v.get("is_default").map(|v| v.as_bool().unwrap_or(false)))
.unwrap_or(false);
Self {
name: value.name,
is_default,
}
}
}
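A standalone sketch of the metadata lookup performed in the From impl above (requires the serde_json crate); the Model struct is a hypothetical stand-in for flowy_ai_pub's AvailableModel, assumed here to carry an optional serde_json metadata blob:

use serde_json::{json, Value};

// Hypothetical stand-in for the cloud-side AvailableModel.
struct Model {
    name: String,
    metadata: Option<Value>,
}

// Read the "is_default" flag from the optional metadata, defaulting to false.
fn is_default(model: &Model) -> bool {
    model
        .metadata
        .as_ref()
        .and_then(|v| v.get("is_default").map(|v| v.as_bool().unwrap_or(false)))
        .unwrap_or(false)
}

fn main() {
    let m = Model {
        name: "model-a".to_string(),
        metadata: Some(json!({ "is_default": true })),
    };
    println!("{} is_default: {}", m.name, is_default(&m));
}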
#[derive(Default, ProtoBuf, Validate, Clone, Debug)]

View File

@@ -7,7 +7,6 @@ use crate::entities::*;
use flowy_ai_pub::cloud::{ChatMessageMetadata, ChatMessageType, ChatRAGData, ContextLoader};
use flowy_error::{ErrorCode, FlowyError, FlowyResult};
use lib_dispatch::prelude::{data_result_ok, AFPluginData, AFPluginState, DataResult};
use serde_json::json;
use std::sync::{Arc, Weak};
use tracing::trace;
use validator::Validate;
@@ -108,7 +107,10 @@ pub(crate) async fn get_server_model_list_handler(
) -> DataResult<ServerAvailableModelsPB, FlowyError> {
let ai_manager = upgrade_ai_manager(ai_manager)?;
let models = ai_manager.get_server_available_models().await?;
let models = serde_json::to_string(&json!({"models": models}))?;
let models = models
.into_iter()
.map(AvailableModelPB::from)
.collect::<Vec<_>>();
data_result_ok(ServerAvailableModelsPB { models })
}

View File

@@ -77,10 +77,7 @@ impl From<UserProfile> for UserProfilePB {
EncryptionType::NoEncryption => ("".to_string(), EncryptionTypePB::NoEncryption),
EncryptionType::SelfEncryption(sign) => (sign, EncryptionTypePB::Symmetric),
};
let mut ai_model = user_profile.ai_model;
if ai_model.is_empty() {
ai_model = "Default".to_string();
}
let ai_model = user_profile.ai_model;
Self {
id: user_profile.uid,
email: user_profile.email,