chore: import user data

This commit is contained in:
Nathan 2025-04-24 10:23:45 +08:00
parent 276e6503f7
commit b23f6be47d
14 changed files with 311 additions and 65 deletions

View File

@ -135,6 +135,24 @@ fn select_user_table_row(uid: i64, conn: &mut SqliteConnection) -> Result<UserTa
Ok(row)
}
/// Returns the id of the first row in `user_table`.
///
/// The id column is stored as text in sqlite, so the raw value is parsed
/// into an `i64`; a malformed id surfaces as an internal [`FlowyError`].
pub fn select_user_id(conn: &mut SqliteConnection) -> Result<i64, FlowyError> {
  let raw_id: String = user_table::dsl::user_table
    .select(user_table::id)
    .first(conn)?;
  raw_id
    .parse::<i64>()
    .map_err(|err| FlowyError::internal().with_context(err))
}
/// Looks up the stored name of the user identified by `uid`.
///
/// The id column is text, so `uid` is stringified before filtering.
pub fn select_user_name(uid: i64, conn: &mut SqliteConnection) -> Result<String, FlowyError> {
  let uid_key = uid.to_string();
  let name = user_table::dsl::user_table
    .select(user_table::name)
    .filter(user_table::id.eq(&uid_key))
    .first(conn)?;
  Ok(name)
}
pub fn select_user_profile(
uid: i64,
workspace_id: &str,

View File

@ -14,3 +14,47 @@ pub struct ImportAppFlowyDataPB {
#[pb(index = 3, one_of)]
pub parent_view_id: Option<String>,
}
/// Request payload carrying the file-system path of an AppFlowy user data
/// folder to be previewed.
#[derive(ProtoBuf, Validate, Default)]
pub struct UserDataPathPB {
  /// Path to the data folder; validated to be a non-empty string.
  #[pb(index = 1)]
  #[validate(custom(function = "required_not_empty_str"))]
  pub path: String,
}
/// Request payload for importing user data from another AppFlowy data folder.
#[derive(ProtoBuf, Validate, Default)]
pub struct ImportUserDataPB {
  /// Path to the source data folder; validated to be a non-empty string.
  #[pb(index = 1)]
  #[validate(custom(function = "required_not_empty_str"))]
  pub path: String,
  /// Optional destination view; when absent the import falls back to the
  /// current workspace id (see `import_user_data`).
  #[pb(index = 2, one_of)]
  pub parent_view_id: Option<String>,
  /// Workspaces selected for import, as returned by the preview step.
  #[pb(index = 3)]
  pub workspaces: Vec<WorkspaceDataPreviewPB>,
}
/// Summary of one workspace found inside an external data folder.
#[derive(ProtoBuf, Validate, Default)]
pub struct WorkspaceDataPreviewPB {
  /// Human-readable workspace name.
  #[pb(index = 1)]
  pub name: String,
  /// Creation time as a unix timestamp (seconds).
  #[pb(index = 2)]
  pub created_at: i64,
  /// Workspace identifier in the imported database.
  #[pb(index = 3)]
  pub workspace_id: String,
  /// Identifier of the workspace's database collab object.
  #[pb(index = 4)]
  pub workspace_database_id: String,
}
/// Result of previewing an external data folder: the owning user's name and
/// the workspaces available for import.
#[derive(ProtoBuf, Validate, Default)]
pub struct UserDataPreviewPB {
  /// Name of the user who owns the imported data folder.
  #[pb(index = 1)]
  pub user_name: String,
  /// Workspaces discovered in the imported folder.
  #[pb(index = 2)]
  pub workspaces: Vec<WorkspaceDataPreviewPB>,
}

View File

@ -3,7 +3,7 @@ use crate::notification::{send_notification, UserNotification};
use crate::services::cloud_config::{
get_cloud_config, get_or_create_cloud_config, save_cloud_config,
};
use crate::services::data_import::prepare_import;
use crate::services::data_import::{prepare_import, user_data_preview};
use crate::user_manager::UserManager;
use flowy_error::{ErrorCode, FlowyError, FlowyResult};
use flowy_sqlite::kv::KVStorePreferences;
@ -292,6 +292,24 @@ pub async fn import_appflowy_data_folder_handler(
Ok(())
}
/// Handler: builds a read-only preview (user name + workspace list) of the
/// AppFlowy data folder at `data.path` without importing anything.
///
/// NOTE(review): `UserDataPathPB` derives `Validate`, but `validate()` is
/// not called here — confirm `try_into_inner` runs validation, otherwise an
/// empty path reaches `user_data_preview` unchecked.
#[tracing::instrument(level = "debug", skip_all, err)]
pub async fn preview_user_data_folder_handler(
  data: AFPluginData<UserDataPathPB>,
) -> DataResult<UserDataPreviewPB, FlowyError> {
  let data = data.try_into_inner()?;
  // Opens the folder's sqlite db and reads user name + workspaces; any
  // failure propagates via `?`.
  let preview = user_data_preview(&data.path)?;
  data_result_ok(preview)
}
/// Handler stub for importing user data from another AppFlowy data folder.
///
/// NOTE(review): currently a no-op — `data` is parsed and then dropped, and
/// `manager` is never used. The actual import pipeline
/// (`get_import_user_data` / `import_user_data`) is not wired in yet.
#[tracing::instrument(level = "debug", skip_all, err)]
pub async fn import_user_data_folder_handler(
  data: AFPluginData<ImportUserDataPB>,
  manager: AFPluginState<Weak<UserManager>>,
) -> Result<(), FlowyError> {
  let data = data.try_into_inner()?;
  Ok(())
}
#[tracing::instrument(level = "debug", skip_all, err)]
pub async fn get_user_setting(
manager: AFPluginState<Weak<UserManager>>,

View File

@ -80,6 +80,8 @@ pub fn init(user_manager: Weak<UserManager>) -> AFPlugin {
.event(UserEvent::GetWorkspaceSetting, get_workspace_setting_handler)
.event(UserEvent::NotifyDidSwitchPlan, notify_did_switch_plan_handler)
.event(UserEvent::PasscodeSignIn, sign_in_with_passcode_handler)
.event(UserEvent::PreviewAppFlowyUserData, preview_user_data_folder_handler)
.event(UserEvent::ImportAppFlowyUserData, import_user_data_folder_handler)
}
#[derive(Clone, Copy, PartialEq, Eq, Debug, Display, Hash, ProtoBuf_Enum, Flowy_Event)]
@ -273,6 +275,12 @@ pub enum UserEvent {
#[event(input = "PasscodeSignInPB", output = "GotrueTokenResponsePB")]
PasscodeSignIn = 65,
#[event(input = "UserDataPathPB")]
PreviewAppFlowyUserData = 66,
#[event(input = "ImportUserDataPB")]
ImportAppFlowyUserData = 67,
}
#[async_trait]

View File

@ -34,7 +34,8 @@ impl UserDataMigration for AnonUserWorkspaceTableMigration {
#[instrument(name = "AnonUserWorkspaceTableMigration", skip_all, err)]
fn run(
&self,
user: &Session,
uid: i64,
workspace_id: &str,
_collab_db: &Weak<CollabKVDB>,
user_auth_type: &AuthType,
db: &mut SqliteConnection,
@ -46,7 +47,7 @@ impl UserDataMigration for AnonUserWorkspaceTableMigration {
if let Some(mut user_workspace) = get_session_workspace(store_preferences) {
if select_user_workspace(&user_workspace.id, db).ok().is_none() {
user_workspace.workspace_type = AuthType::Local;
upsert_user_workspace(user.user_id, *user_auth_type, user_workspace, db)?;
upsert_user_workspace(uid, *user_auth_type, user_workspace, db)?;
}
}
}

View File

@ -39,7 +39,8 @@ impl UserDataMigration for CollabDocKeyWithWorkspaceIdMigration {
#[instrument(name = "CollabDocKeyWithWorkspaceIdMigration", skip_all, err)]
fn run(
&self,
user: &Session,
uid: i64,
workspace_id: &str,
collab_db: &Weak<CollabKVDB>,
_user_auth_type: &AuthType,
_db: &mut SqliteConnection,
@ -48,9 +49,9 @@ impl UserDataMigration for CollabDocKeyWithWorkspaceIdMigration {
let collab_db = collab_db
.upgrade()
.ok_or_else(|| FlowyError::internal().with_context("Failed to upgrade DB object"))?;
trace!("migrate key with workspace id:{}", user.workspace_id);
trace!("migrate key with workspace id:{}", workspace_id);
collab_db.with_write_txn(|txn| {
migrate_old_keys(txn, &user.workspace_id)?;
migrate_old_keys(txn, &workspace_id)?;
Ok(())
})?;
Ok(())

View File

@ -41,7 +41,8 @@ impl UserDataMigration for HistoricalEmptyDocumentMigration {
#[instrument(name = "HistoricalEmptyDocumentMigration", skip_all, err)]
fn run(
&self,
user: &Session,
uid: i64,
workspace_id: &str,
collab_db: &Weak<CollabKVDB>,
user_auth_type: &AuthType,
_db: &mut SqliteConnection,
@ -57,27 +58,20 @@ impl UserDataMigration for HistoricalEmptyDocumentMigration {
.upgrade()
.ok_or_else(|| FlowyError::internal().with_context("Failed to upgrade DB object"))?;
collab_db.with_write_txn(|write_txn| {
let origin = CollabOrigin::Client(CollabClient::new(user.user_id, "phantom"));
let folder_collab = match load_collab(
user.user_id,
write_txn,
&user.workspace_id,
&user.workspace_id,
) {
let origin = CollabOrigin::Client(CollabClient::new(uid, "phantom"));
let folder_collab = match load_collab(uid, write_txn, workspace_id, workspace_id) {
Ok(fc) => fc,
Err(_) => return Ok(()),
};
let folder = Folder::open(user.user_id, folder_collab, None)
let folder = Folder::open(uid, folder_collab, None)
.map_err(|err| PersistenceError::Internal(err.into()))?;
if let Some(workspace_id) = folder.get_workspace_id() {
let migration_views = folder.get_views_belong_to(&workspace_id);
// For historical reasons, the first level documents are empty. So migrate them by inserting
// the default document data.
for view in migration_views {
if migrate_empty_document(write_txn, &origin, &view, user.user_id, &user.workspace_id)
.is_err()
{
if migrate_empty_document(write_txn, &origin, &view, uid, &workspace_id).is_err() {
event!(
tracing::Level::ERROR,
"Failed to migrate document {}",

View File

@ -17,7 +17,8 @@ use tracing::info;
pub const FIRST_TIME_INSTALL_VERSION: &str = "first_install_version";
pub struct UserLocalDataMigration {
session: Session,
uid: i64,
workspace_id: String,
collab_db: Weak<CollabKVDB>,
sqlite_pool: Arc<ConnectionPool>,
kv: Arc<KVStorePreferences>,
@ -25,13 +26,15 @@ pub struct UserLocalDataMigration {
impl UserLocalDataMigration {
pub fn new(
session: Session,
uid: i64,
workspace_id: String,
collab_db: Weak<CollabKVDB>,
sqlite_pool: Arc<ConnectionPool>,
kv: Arc<KVStorePreferences>,
) -> Self {
Self {
session,
uid,
workspace_id,
collab_db,
sqlite_pool,
kv,
@ -76,7 +79,8 @@ impl UserLocalDataMigration {
let migration_name = migration.name().to_string();
if !duplicated_names.contains(&migration_name) {
migration.run(
&self.session,
self.uid,
&self.workspace_id,
&self.collab_db,
user_auth_type,
&mut conn,
@ -102,7 +106,8 @@ pub trait UserDataMigration {
fn run_when(&self, first_installed_version: &Option<Version>, current_version: &Version) -> bool;
fn run(
&self,
user: &Session,
uid: i64,
workspace_id: &str,
collab_db: &Weak<CollabKVDB>,
user_auth_type: &AuthType,
db: &mut SqliteConnection,

View File

@ -39,7 +39,8 @@ impl UserDataMigration for FavoriteV1AndWorkspaceArrayMigration {
#[instrument(name = "FavoriteV1AndWorkspaceArrayMigration", skip_all, err)]
fn run(
&self,
user: &Session,
uid: i64,
workspace_id: &str,
collab_db: &Weak<CollabKVDB>,
_user_auth_type: &AuthType,
_db: &mut SqliteConnection,
@ -49,14 +50,9 @@ impl UserDataMigration for FavoriteV1AndWorkspaceArrayMigration {
.upgrade()
.ok_or_else(|| FlowyError::internal().with_context("Failed to upgrade DB object"))?;
collab_db.with_write_txn(|write_txn| {
if let Ok(collab) = load_collab(
user.user_id,
write_txn,
&user.workspace_id,
&user.workspace_id,
) {
let mut folder = Folder::open(user.user_id, collab, None)
.map_err(|err| PersistenceError::Internal(err.into()))?;
if let Ok(collab) = load_collab(uid, write_txn, workspace_id, workspace_id) {
let mut folder =
Folder::open(uid, collab, None).map_err(|err| PersistenceError::Internal(err.into()))?;
folder
.body
.migrate_workspace_to_view(&mut folder.collab.transact_mut());
@ -75,9 +71,9 @@ impl UserDataMigration for FavoriteV1AndWorkspaceArrayMigration {
.encode_collab()
.map_err(|err| PersistenceError::Internal(err.into()))?;
write_txn.flush_doc(
user.user_id,
&user.workspace_id,
&user.workspace_id,
uid,
workspace_id,
workspace_id,
encode.state_vector.to_vec(),
encode.doc_state.to_vec(),
)?;

View File

@ -37,7 +37,8 @@ impl UserDataMigration for WorkspaceTrashMapToSectionMigration {
#[instrument(name = "WorkspaceTrashMapToSectionMigration", skip_all, err)]
fn run(
&self,
user: &Session,
uid: i64,
workspace_id: &str,
collab_db: &Weak<CollabKVDB>,
_user_auth_type: &AuthType,
_db: &mut SqliteConnection,
@ -47,14 +48,9 @@ impl UserDataMigration for WorkspaceTrashMapToSectionMigration {
.upgrade()
.ok_or_else(|| FlowyError::internal().with_context("Failed to upgrade DB object"))?;
collab_db.with_write_txn(|write_txn| {
if let Ok(collab) = load_collab(
user.user_id,
write_txn,
&user.workspace_id,
&user.workspace_id,
) {
let mut folder = Folder::open(user.user_id, collab, None)
.map_err(|err| PersistenceError::Internal(err.into()))?;
if let Ok(collab) = load_collab(uid, write_txn, workspace_id, workspace_id) {
let mut folder =
Folder::open(uid, collab, None).map_err(|err| PersistenceError::Internal(err.into()))?;
let trash_ids = folder
.get_trash_v1()
.into_iter()
@ -69,9 +65,9 @@ impl UserDataMigration for WorkspaceTrashMapToSectionMigration {
.encode_collab()
.map_err(|err| PersistenceError::Internal(err.into()))?;
write_txn.flush_doc(
user.user_id,
&user.workspace_id,
&user.workspace_id,
uid,
workspace_id,
workspace_id,
encode.state_vector.to_vec(),
encode.doc_state.to_vec(),
)?;

View File

@ -34,13 +34,18 @@ use flowy_user_pub::session::Session;
use rayon::prelude::*;
use std::collections::{HashMap, HashSet};
use crate::entities::{ImportUserDataPB, UserDataPreviewPB, WorkspaceDataPreviewPB};
use collab_document::blocks::TextDelta;
use collab_document::document::Document;
use flowy_user_pub::sql::{select_user_auth_type, select_user_profile, select_user_workspace};
use flowy_sqlite::Database;
use flowy_user_pub::sql::{
select_all_user_workspace, select_user_auth_type, select_user_id, select_user_name,
select_user_profile, select_user_workspace,
};
use semver::Version;
use serde_json::json;
use std::ops::{Deref, DerefMut};
use std::path::Path;
use std::path::{Path, PathBuf};
use std::sync::{Arc, Weak};
use tracing::{error, event, info, instrument, warn};
use uuid::Uuid;
@ -123,7 +128,8 @@ pub(crate) fn prepare_import(
.or_else(|_| select_user_auth_type(imported_session.user_id, &mut conn))?;
run_data_migration(
&imported_session,
imported_session.user_id,
imported_session.workspace_id.clone(),
&imported_user_auth_type,
Arc::downgrade(&imported_collab_db),
imported_sqlite_db.get_pool(),
@ -141,18 +147,6 @@ pub(crate) fn prepare_import(
})
}
#[allow(dead_code)]
/// Records the old→new user-id mapping so user-awareness objects can later
/// be re-keyed from the imported user's id to the current user's id.
fn migrate_user_awareness(
  old_to_new_id_map: &mut OldToNewIdMap,
  old_user_session: &Session,
  new_user_session: &Session,
) -> Result<(), PersistenceError> {
  let old_uid = old_user_session.user_id;
  let new_uid = new_user_session.user_id;
  // Only the id mapping is stored; no awareness data is rewritten here.
  old_to_new_id_map.insert(old_uid.to_string(), new_uid.to_string());
  Ok(())
}
/// This path refers to the directory where AppFlowy stores its data. The directory structure is as follows:
/// root folder:
/// - cache.db

View File

@ -2,5 +2,7 @@ mod appflowy_data_import;
pub use appflowy_data_import::*;
pub(crate) mod importer;
pub use importer::load_collab_by_object_id;
mod user_data_import;
pub use importer::load_collab_by_object_id;
pub use importer::load_collab_by_object_ids;

View File

@ -0,0 +1,168 @@
use crate::migrations::session_migration::{get_session_workspace, migrate_session};
use std::collections::{HashMap, HashSet};
use anyhow::anyhow;
use collab_integrate::CollabKVDB;
use flowy_user_pub::entities::AuthType;
use crate::entities::{ImportUserDataPB, UserDataPreviewPB, WorkspaceDataPreviewPB};
use flowy_error::ErrorCode;
use flowy_sqlite::Database;
use flowy_user_pub::sql::{
select_all_user_workspace, select_user_auth_type, select_user_id, select_user_name,
};
use semver::Version;
use std::path::{Path, PathBuf};
use std::sync::Arc;
/// Everything an import run needs from an external AppFlowy data folder:
/// the owning user, the opened databases, and the original request payload.
pub(crate) struct ImportedUserData {
  /// User id read from the imported sqlite db.
  pub uid: i64,
  /// Auth type of the imported user (local/cloud).
  pub user_auth_type: AuthType,
  /// The import request this data was opened for.
  pub data: ImportUserDataPB,
  /// App version recorded for the import, used by callers.
  pub app_version: Version,
  /// Opened collab key-value db from the imported folder.
  pub collab_db: Arc<CollabKVDB>,
  /// Opened sqlite db (`flowy-database.db`) from the imported folder.
  pub sqlite_db: Database,
}
/// Per-workspace outcome of an import attempt.
pub struct ImportedUserWorkspaceResult {
  // Identifier of the workspace that was imported.
  workspace_id: String,
  // Display name of the workspace, for reporting.
  workspace_name: String,
  // Whether this workspace imported successfully.
  success: bool,
  // Error code describing the failure when `success` is false.
  error_code: ErrorCode,
}
/// Aggregate outcome of an import run: one entry per attempted workspace.
pub struct ImportedUserDataResult {
  // Results in the same order as the requested workspaces.
  results: Vec<ImportedUserWorkspaceResult>,
}
/// Copies a single imported workspace into the current user's collab store.
///
/// NOTE(review): unimplemented — the body only sets up bookkeeping
/// collections and then hits `todo!()`, which panics at runtime. All
/// parameters are currently unused.
fn import_user_workspace(
  current_uid: i64,
  current_collab_db: &Arc<CollabKVDB>,
  imported_uid: i64,
  imported_collab_db: &Arc<CollabKVDB>,
  imported_sqlite_db: &Database,
  imported_workspace: &WorkspaceDataPreviewPB,
  import_to_view_id: &str,
) -> ImportedUserWorkspaceResult {
  // Planned bookkeeping: database view ids grouped by database id, plus the
  // object-id sets for rows, documents, and databases to be re-keyed.
  let mut database_view_ids_by_database_id: HashMap<String, Vec<String>> = HashMap::new();
  let mut row_object_ids = HashSet::new();
  let mut document_object_ids = HashSet::new();
  let mut database_object_ids = HashSet::new();
  todo!()
}
pub fn import_user_data(
current_uid: i64,
current_workspace_id: &str,
current_collab_db: &Arc<CollabKVDB>,
data: ImportedUserData,
) -> anyhow::Result<ImportedUserDataResult> {
let imported_uid = data.uid;
let imported_collab_db = data.collab_db;
let imported_sqlite_db = data.sqlite_db;
let imported_user_data = data.data;
let import_to_view_id = imported_user_data
.parent_view_id
.unwrap_or_else(|| current_workspace_id.to_string());
let mut results = vec![];
for workspace in imported_user_data.workspaces {
results.push(import_user_workspace(
current_uid,
current_collab_db,
imported_uid,
&imported_collab_db,
&imported_sqlite_db,
&workspace,
&import_to_view_id,
));
}
Ok(ImportedUserDataResult { results })
}
/// Opens the AppFlowy data folder at `path` read-only and returns a preview
/// containing the owning user's name and the workspaces available for import.
///
/// # Errors
/// Fails when the folder (or its `flowy-database.db` / `collab_db` entries)
/// is missing, or when the sqlite db cannot be opened or queried.
pub(crate) fn user_data_preview(path: &str) -> anyhow::Result<UserDataPreviewPB> {
  if !Path::new(path).exists() {
    // Fixed message grammar: was "is not exist".
    return Err(anyhow!("The path: {} does not exist", path));
  }
  let sqlite_db_path = PathBuf::from(path).join("flowy-database.db");
  if !sqlite_db_path.exists() {
    return Err(anyhow!(
      "Can not find flowy-database.db at path: {}",
      sqlite_db_path.display()
    ));
  }
  let collab_db_path = PathBuf::from(path).join("collab_db");
  if !collab_db_path.exists() {
    return Err(anyhow!(
      "Can not find collab_db at path: {}",
      collab_db_path.display()
    ));
  }
  // Fixed copy-paste error message: this opens the sqlite db, not the
  // collab db.
  let imported_sqlite_db = flowy_sqlite::init(sqlite_db_path)
    .map_err(|err| anyhow!("[AppflowyData]: open import sqlite db failed: {:?}", err))?;
  let mut conn = imported_sqlite_db.get_connection()?;
  // The imported folder belongs to exactly one user; read its id and name.
  let uid = select_user_id(&mut conn)?;
  let user_name = select_user_name(uid, &mut conn)?;
  let workspaces = select_all_user_workspace(uid, &mut conn)?
    .into_iter()
    .map(|w| WorkspaceDataPreviewPB {
      name: w.name,
      created_at: w.created_at.timestamp(),
      workspace_id: w.id,
      workspace_database_id: w.workspace_database_id,
    })
    .collect::<Vec<_>>();
  Ok(UserDataPreviewPB {
    user_name,
    workspaces,
  })
}
/// Validates the folder referenced by `user_data.path` and opens its sqlite
/// and collab databases, bundling everything needed to run an import.
///
/// # Errors
/// Fails when `flowy-database.db` or `collab_db` is missing from the folder,
/// or when either database cannot be opened.
pub(crate) fn get_import_user_data(
  user_data: ImportUserDataPB,
  app_version: &Version,
) -> anyhow::Result<ImportedUserData> {
  let sqlite_db_path = PathBuf::from(&user_data.path).join("flowy-database.db");
  if !sqlite_db_path.exists() {
    return Err(anyhow!(
      "Can not find flowy-database.db at path: {}",
      sqlite_db_path.display()
    ));
  }
  let collab_db_path = PathBuf::from(&user_data.path).join("collab_db");
  if !collab_db_path.exists() {
    return Err(anyhow!(
      "Can not find collab_db at path: {}",
      collab_db_path.display()
    ));
  }
  // Fixed copy-paste error message: this opens the sqlite db, not the
  // collab db.
  let sqlite_db = flowy_sqlite::init(sqlite_db_path)
    .map_err(|err| anyhow!("[AppflowyData]: open import sqlite db failed: {:?}", err))?;
  let collab_db = Arc::new(
    CollabKVDB::open(collab_db_path)
      .map_err(|err| anyhow!("[AppflowyData]: open import collab db failed: {:?}", err))?,
  );
  // Identify the imported user so callers can re-key data to the current one.
  let mut conn = sqlite_db.get_connection()?;
  let uid = select_user_id(&mut conn)?;
  let user_auth_type = select_user_auth_type(uid, &mut conn)?;
  Ok(ImportedUserData {
    uid,
    user_auth_type,
    data: user_data,
    app_version: app_version.clone(),
    collab_db,
    sqlite_db,
  })
}

View File

@ -850,7 +850,8 @@ fn mark_all_migrations_as_applied(sqlite_pool: &Arc<ConnectionPool>) {
}
pub(crate) fn run_data_migration(
session: &Session,
uid: i64,
workspace_id: String,
user_auth_type: &AuthType,
collab_db: Weak<CollabKVDB>,
sqlite_pool: Arc<ConnectionPool>,
@ -858,7 +859,7 @@ pub(crate) fn run_data_migration(
app_version: &Version,
) {
let migrations = collab_migration_list();
match UserLocalDataMigration::new(session.clone(), collab_db, sqlite_pool, kv).run(
match UserLocalDataMigration::new(uid, workspace_id, collab_db, sqlite_pool, kv).run(
migrations,
user_auth_type,
app_version,