
feat: Data sync on signup (#3283)

* chore: sync all data

* chore: sync database row and document in the row

* chore: sync inline view id

* chore: sync row and document in row

* fix: tests

* fix: migrate document in row

* chore: retry when creating a collab fails

* fix: invalid secret causing the application to rerun

* chore: fix clippy warnings
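
The "retry when creating a collab fails" change is not visible in the diffs below (it lands in the Supabase collab storage code). Purely as an illustration of the idea, a generic async retry helper could look like the sketch below; the function name, attempt count, and delay are hypothetical and not taken from this PR.

use std::time::Duration;

// Hypothetical sketch only: retry an async, fallible operation a few times
// before giving up, backing off a little longer after each failure.
async fn retry_async<F, Fut, T, E>(mut op: F, attempts: u32) -> Result<T, E>
where
  F: FnMut() -> Fut,
  Fut: std::future::Future<Output = Result<T, E>>,
{
  let mut last_err = None;
  for attempt in 0..attempts {
    match op().await {
      Ok(value) => return Ok(value),
      Err(err) => {
        last_err = Some(err);
        if attempt + 1 < attempts {
          // Wait a bit longer before each new attempt (200ms, 400ms, ...).
          tokio::time::sleep(Duration::from_millis(200 * (attempt as u64 + 1))).await;
        }
      },
    }
  }
  // `attempts` is assumed to be at least 1.
  Err(last_err.expect("attempts must be >= 1"))
}
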
Nathan.fooo, 1 year ago
commit 4e67282f2b
66 files changed, 1491 additions and 512 deletions
  1. + 21 - 13  frontend/appflowy_flutter/lib/user/presentation/sign_in_screen.dart
  2. + 11 - 0  frontend/appflowy_flutter/lib/workspace/presentation/settings/widgets/setting_third_party_login.dart
  3. + 1 - 1  frontend/appflowy_flutter/packages/flowy_infra_ui/lib/style_widget/snap_bar.dart
  4. + 9 - 9  frontend/appflowy_tauri/src-tauri/Cargo.toml
  5. + 1 - 0  frontend/resources/translations/en.json
  6. + 17 - 14  frontend/rust-lib/Cargo.lock
  7. + 8 - 8  frontend/rust-lib/Cargo.toml
  8. + 8 - 3  frontend/rust-lib/flowy-core/src/deps_resolve/folder_deps.rs
  9. + 18 - 10  frontend/rust-lib/flowy-core/src/lib.rs
  10. + 3 - 3  frontend/rust-lib/flowy-database2/src/entities/database_entities.rs
  11. + 1 - 5  frontend/rust-lib/flowy-database2/src/entities/group_entities/group.rs
  12. + 16 - 16  frontend/rust-lib/flowy-database2/src/entities/row_entities.rs
  13. + 1 - 1  frontend/rust-lib/flowy-database2/src/entities/view_entities.rs
  14. + 1 - 1  frontend/rust-lib/flowy-database2/src/event_handler.rs
  15. + 8 - 8  frontend/rust-lib/flowy-database2/src/manager.rs
  16. + 17 - 31  frontend/rust-lib/flowy-database2/src/services/database/database_editor.rs
  17. + 6 - 6  frontend/rust-lib/flowy-database2/src/services/database/entities.rs
  18. + 7 - 7  frontend/rust-lib/flowy-database2/src/services/database_view/view_editor.rs
  19. + 5 - 4  frontend/rust-lib/flowy-database2/src/services/field_settings/mod.rs
  20. + 4 - 4  frontend/rust-lib/flowy-database2/src/services/filter/controller.rs
  21. + 3 - 3  frontend/rust-lib/flowy-database2/src/services/group/controller.rs
  22. + 2 - 2  frontend/rust-lib/flowy-database2/src/services/group/controller_impls/checkbox_controller.rs
  23. + 2 - 2  frontend/rust-lib/flowy-database2/src/services/group/controller_impls/date_controller.rs
  24. + 2 - 2  frontend/rust-lib/flowy-database2/src/services/group/controller_impls/select_option_controller/util.rs
  25. + 2 - 2  frontend/rust-lib/flowy-database2/src/services/group/controller_impls/url_controller.rs
  26. + 1 - 1  frontend/rust-lib/flowy-database2/tests/database/block_test/script.rs
  27. + 1 - 0  frontend/rust-lib/flowy-database2/tests/database/field_settings_test/script.rs
  28. + 3 - 1  frontend/rust-lib/flowy-document2/src/event_handler.rs
  29. + 13 - 14  frontend/rust-lib/flowy-document2/src/manager.rs
  30. + 3 - 1  frontend/rust-lib/flowy-document2/tests/document/document_redo_undo_test.rs
  31. + 5 - 4  frontend/rust-lib/flowy-document2/tests/document/document_test.rs
  32. + 1 - 0  frontend/rust-lib/flowy-document2/tests/document/util.rs
  33. + 4 - 0  frontend/rust-lib/flowy-folder-deps/src/cloud.rs
  34. + 3 - 3  frontend/rust-lib/flowy-folder2/src/entities/view.rs
  35. + 49 - 59  frontend/rust-lib/flowy-folder2/src/manager.rs
  36. + 5 - 3  frontend/rust-lib/flowy-folder2/src/test_helper.rs
  37. + 2 - 5  frontend/rust-lib/flowy-folder2/src/view_operation.rs
  38. + 1 - 1  frontend/rust-lib/flowy-server/src/local_server/impls/user.rs
  39. + 9 - 5  frontend/rust-lib/flowy-server/src/supabase/api/collab_storage.rs
  40. + 2 - 0  frontend/rust-lib/flowy-server/src/supabase/api/document.rs
  41. + 1 - 1  frontend/rust-lib/flowy-server/src/supabase/api/request.rs
  42. + 69 - 22  frontend/rust-lib/flowy-server/src/supabase/api/user.rs
  43. + 2 - 2  frontend/rust-lib/flowy-server/tests/supabase_test/user_test.rs
  44. + 5 - 3  frontend/rust-lib/flowy-test/src/document/document_event.rs
  45. + 3 - 3  frontend/rust-lib/flowy-test/src/lib.rs
  46. + 5 - 1  frontend/rust-lib/flowy-test/tests/document/supabase_test/helper.rs
  47. + 6 - 2  frontend/rust-lib/flowy-test/tests/user/migration_test/document_test.rs
  48. + 4 - 0  frontend/rust-lib/flowy-test/tests/user/migration_test/history_user_db/README.md
  49. + 0 - 1  frontend/rust-lib/flowy-test/tests/user/migration_test/mod.rs
  50. + 0 - 47  frontend/rust-lib/flowy-test/tests/user/migration_test/util.rs
  51. + 6 - 2  frontend/rust-lib/flowy-test/tests/user/migration_test/version_test.rs
  52. + 270 - 1  frontend/rust-lib/flowy-test/tests/user/supabase_test/auth_test.rs
  53. + 4 - 0  frontend/rust-lib/flowy-test/tests/user/supabase_test/history_user_db/README.md
  54. BIN  frontend/rust-lib/flowy-test/tests/user/supabase_test/history_user_db/workspace_sync.zip
  55. + 41 - 0  frontend/rust-lib/flowy-test/tests/util.rs
  56. + 1 - 1  frontend/rust-lib/flowy-user-deps/src/cloud.rs
  57. + 3 - 2  frontend/rust-lib/flowy-user-deps/src/entities.rs
  58. + 3 - 0  frontend/rust-lib/flowy-user/Cargo.toml
  59. + 2 - 2  frontend/rust-lib/flowy-user/src/event_map.rs
  60. + 25 - 20  frontend/rust-lib/flowy-user/src/manager.rs
  61. + 0 - 143  frontend/rust-lib/flowy-user/src/migrations/local_user_to_cloud.rs
  62. + 430 - 0  frontend/rust-lib/flowy-user/src/migrations/migrate_to_new_user.rs
  63. + 4 - 3  frontend/rust-lib/flowy-user/src/migrations/mod.rs
  64. + 327 - 0  frontend/rust-lib/flowy-user/src/migrations/sync_new_user.rs
  65. + 1 - 1  frontend/rust-lib/flowy-user/src/services/entities.rs
  66. + 3 - 3  frontend/rust-lib/flowy-user/src/services/user_workspace_sql.rs

+ 21 - 13
frontend/appflowy_flutter/lib/user/presentation/sign_in_screen.dart

@@ -73,19 +73,27 @@ class SignInScreen extends StatelessWidget {
 }
 
 void handleOpenWorkspaceError(BuildContext context, FlowyError error) {
-  if (error.code == ErrorCode.WorkspaceDataNotSync) {
-    final userFolder = UserFolderPB.fromBuffer(error.payload);
-    getIt<AuthRouter>().pushWorkspaceErrorScreen(context, userFolder, error);
-  } else {
-    Log.error(error);
-    showSnapBar(
-      context,
-      error.msg,
-      onClosed: () {
-        getIt<AuthService>().signOut();
-        runAppFlowy();
-      },
-    );
+  Log.error(error);
+  switch (error.code) {
+    case ErrorCode.WorkspaceDataNotSync:
+      final userFolder = UserFolderPB.fromBuffer(error.payload);
+      getIt<AuthRouter>().pushWorkspaceErrorScreen(context, userFolder, error);
+      break;
+    case ErrorCode.InvalidEncryptSecret:
+      showSnapBar(
+        context,
+        error.msg,
+      );
+      break;
+    default:
+      showSnapBar(
+        context,
+        error.msg,
+        onClosed: () {
+          getIt<AuthService>().signOut();
+          runAppFlowy();
+        },
+      );
   }
 }
 

+ 11 - 0
frontend/appflowy_flutter/lib/workspace/presentation/settings/widgets/setting_third_party_login.dart

@@ -31,6 +31,14 @@ class SettingThirdPartyLogin extends StatelessWidget {
           final indicator = state.isSubmitting
               ? const CircularProgressIndicator.adaptive()
               : const SizedBox.shrink();
+
+          final promptMessage = state.isSubmitting
+              ? FlowyText.medium(
+                  LocaleKeys.signIn_syncPromptMessage.tr(),
+                  maxLines: null,
+                )
+              : const SizedBox.shrink();
+
           return Column(
             crossAxisAlignment: CrossAxisAlignment.start,
             children: [
@@ -45,9 +53,12 @@ class SettingThirdPartyLogin extends StatelessWidget {
                 ],
               ),
               const VSpace(6),
+              promptMessage,
+              const VSpace(6),
               const ThirdPartySignInButtons(
                 mainAxisAlignment: MainAxisAlignment.start,
               ),
+              const VSpace(6),
             ],
           );
         },

+ 1 - 1
frontend/appflowy_flutter/packages/flowy_infra_ui/lib/style_widget/snap_bar.dart

@@ -7,7 +7,7 @@ void showSnapBar(BuildContext context, String title, {VoidCallback? onClosed}) {
   ScaffoldMessenger.of(context)
       .showSnackBar(
         SnackBar(
-          duration: const Duration(milliseconds: 10000),
+          duration: const Duration(milliseconds: 8000),
           content: WillPopScope(
             onWillPop: () async {
               ScaffoldMessenger.of(context).removeCurrentSnackBar();

+ 9 - 9
frontend/appflowy_tauri/src-tauri/Cargo.toml

@@ -34,15 +34,15 @@ default = ["custom-protocol"]
 custom-protocol = ["tauri/custom-protocol"]
 
 [patch.crates-io]
-collab = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "b1f6737" }
-collab-folder = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "b1f6737" }
-collab-persistence = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "b1f6737" }
-collab-document = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "b1f6737" }
-collab-database = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "b1f6737" }
-appflowy-integrate = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "b1f6737" }
-collab-plugins = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "b1f6737" }
-collab-user = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "b1f6737" }
-collab-define = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "b1f6737" }
+collab = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "1b297c" }
+collab-folder = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "1b297c" }
+collab-persistence = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "1b297c" }
+collab-document = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "1b297c" }
+collab-database = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "1b297c" }
+appflowy-integrate = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "1b297c" }
+collab-plugins = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "1b297c" }
+collab-user = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "1b297c" }
+collab-define = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "1b297c" }
 
 #collab = { path = "../../../../AppFlowy-Collab/collab" }
 #collab-folder = { path = "../../../../AppFlowy-Collab/collab-folder" }

+ 1 - 0
frontend/resources/translations/en.json

@@ -41,6 +41,7 @@
     "dontHaveAnAccount": "Don't have an account?",
     "repeatPasswordEmptyError": "Repeat password can't be empty",
     "unmatchedPasswordError": "Repeat password is not the same as password",
+    "syncPromptMessage": "Syncing the data might take a while. Please don't close this page",
     "signInWith": "Sign in with:"
   },
   "workspace": {

+ 17 - 14
frontend/rust-lib/Cargo.lock

@@ -113,14 +113,14 @@ dependencies = [
 
 [[package]]
 name = "anyhow"
-version = "1.0.72"
+version = "1.0.75"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3b13c32d80ecc7ab747b80c3784bce54ee8a7a0cc4fbda9bf4cda2cf6fe90854"
+checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6"
 
 [[package]]
 name = "appflowy-integrate"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=b1f6737#b1f67375e39c67e32c502b2968749bedf61d6a46"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=1b297c#1b297c2ed75aa33b964f0da546d771b00805be62"
 dependencies = [
  "anyhow",
  "collab",
@@ -611,7 +611,7 @@ dependencies = [
 [[package]]
 name = "collab"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=b1f6737#b1f67375e39c67e32c502b2968749bedf61d6a46"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=1b297c#1b297c2ed75aa33b964f0da546d771b00805be62"
 dependencies = [
  "anyhow",
  "bytes",
@@ -629,7 +629,7 @@ dependencies = [
 [[package]]
 name = "collab-client-ws"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=b1f6737#b1f67375e39c67e32c502b2968749bedf61d6a46"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=1b297c#1b297c2ed75aa33b964f0da546d771b00805be62"
 dependencies = [
  "bytes",
  "collab-sync",
@@ -647,7 +647,7 @@ dependencies = [
 [[package]]
 name = "collab-database"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=b1f6737#b1f67375e39c67e32c502b2968749bedf61d6a46"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=1b297c#1b297c2ed75aa33b964f0da546d771b00805be62"
 dependencies = [
  "anyhow",
  "async-trait",
@@ -676,7 +676,7 @@ dependencies = [
 [[package]]
 name = "collab-define"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=b1f6737#b1f67375e39c67e32c502b2968749bedf61d6a46"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=1b297c#1b297c2ed75aa33b964f0da546d771b00805be62"
 dependencies = [
  "uuid",
 ]
@@ -684,7 +684,7 @@ dependencies = [
 [[package]]
 name = "collab-derive"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=b1f6737#b1f67375e39c67e32c502b2968749bedf61d6a46"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=1b297c#1b297c2ed75aa33b964f0da546d771b00805be62"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -696,7 +696,7 @@ dependencies = [
 [[package]]
 name = "collab-document"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=b1f6737#b1f67375e39c67e32c502b2968749bedf61d6a46"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=1b297c#1b297c2ed75aa33b964f0da546d771b00805be62"
 dependencies = [
  "anyhow",
  "collab",
@@ -715,7 +715,7 @@ dependencies = [
 [[package]]
 name = "collab-folder"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=b1f6737#b1f67375e39c67e32c502b2968749bedf61d6a46"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=1b297c#1b297c2ed75aa33b964f0da546d771b00805be62"
 dependencies = [
  "anyhow",
  "chrono",
@@ -735,7 +735,7 @@ dependencies = [
 [[package]]
 name = "collab-persistence"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=b1f6737#b1f67375e39c67e32c502b2968749bedf61d6a46"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=1b297c#1b297c2ed75aa33b964f0da546d771b00805be62"
 dependencies = [
  "bincode",
  "chrono",
@@ -755,7 +755,7 @@ dependencies = [
 [[package]]
 name = "collab-plugins"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=b1f6737#b1f67375e39c67e32c502b2968749bedf61d6a46"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=1b297c#1b297c2ed75aa33b964f0da546d771b00805be62"
 dependencies = [
  "anyhow",
  "async-trait",
@@ -784,7 +784,7 @@ dependencies = [
 [[package]]
 name = "collab-sync"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=b1f6737#b1f67375e39c67e32c502b2968749bedf61d6a46"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=1b297c#1b297c2ed75aa33b964f0da546d771b00805be62"
 dependencies = [
  "bytes",
  "collab",
@@ -806,7 +806,7 @@ dependencies = [
 [[package]]
 name = "collab-user"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=b1f6737#b1f67375e39c67e32c502b2968749bedf61d6a46"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=1b297c#1b297c2ed75aa33b964f0da546d771b00805be62"
 dependencies = [
  "anyhow",
  "collab",
@@ -1738,11 +1738,13 @@ dependencies = [
 name = "flowy-user"
 version = "0.1.0"
 dependencies = [
+ "anyhow",
  "appflowy-integrate",
  "base64 0.21.2",
  "bytes",
  "chrono",
  "collab",
+ "collab-database",
  "collab-document",
  "collab-folder",
  "collab-user",
@@ -1754,6 +1756,7 @@ dependencies = [
  "flowy-derive",
  "flowy-encrypt",
  "flowy-error",
+ "flowy-folder-deps",
  "flowy-notification",
  "flowy-server-config",
  "flowy-sqlite",

+ 8 - 8
frontend/rust-lib/Cargo.toml

@@ -39,14 +39,14 @@ opt-level = 3
 incremental = false
 
 [patch.crates-io]
-collab = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "b1f6737" }
-collab-folder = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "b1f6737" }
-collab-document = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "b1f6737" }
-collab-database = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "b1f6737" }
-appflowy-integrate = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "b1f6737" }
-collab-plugins = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "b1f6737" }
-collab-user = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "b1f6737" }
-collab-define = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "b1f6737" }
+collab = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "1b297c" }
+collab-folder = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "1b297c" }
+collab-document = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "1b297c" }
+collab-database = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "1b297c" }
+appflowy-integrate = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "1b297c" }
+collab-plugins = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "1b297c" }
+collab-user = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "1b297c" }
+collab-define = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "1b297c" }
 
 #collab = { path = "../AppFlowy-Collab/collab" }
 #collab-folder = { path = "../AppFlowy-Collab/collab-folder" }

+ 8 - 3
frontend/rust-lib/flowy-core/src/deps_resolve/folder_deps.rs

@@ -111,6 +111,7 @@ impl FolderOperationHandler for DocumentFolderOperation {
           let document_pb = JsonToDocumentParser::json_str_to_document(json_str).unwrap();
           manager
             .create_document(uid, &view.parent_view.id, Some(document_pb.into()))
+            .await
             .unwrap();
           view
         })
@@ -165,7 +166,9 @@ impl FolderOperationHandler for DocumentFolderOperation {
     let manager = self.0.clone();
     FutureResult::new(async move {
       let data = DocumentDataPB::try_from(Bytes::from(data))?;
-      manager.create_document(user_id, &view_id, Some(data.into()))?;
+      manager
+        .create_document(user_id, &view_id, Some(data.into()))
+        .await?;
       Ok(())
     })
   }
@@ -182,7 +185,7 @@ impl FolderOperationHandler for DocumentFolderOperation {
     let view_id = view_id.to_string();
     let manager = self.0.clone();
     FutureResult::new(async move {
-      manager.create_document(user_id, &view_id, None)?;
+      manager.create_document(user_id, &view_id, None).await?;
       Ok(())
     })
   }
@@ -199,7 +202,9 @@ impl FolderOperationHandler for DocumentFolderOperation {
     let manager = self.0.clone();
     FutureResult::new(async move {
       let data = DocumentDataPB::try_from(Bytes::from(bytes))?;
-      manager.create_document(uid, &view_id, Some(data.into()))?;
+      manager
+        .create_document(uid, &view_id, Some(data.into()))
+        .await?;
       Ok(())
     })
   }

+ 18 - 10
frontend/rust-lib/flowy-core/src/lib.rs

@@ -16,10 +16,10 @@ use tokio::sync::RwLock;
 use flowy_database2::DatabaseManager;
 use flowy_document2::manager::DocumentManager;
 use flowy_error::FlowyResult;
-use flowy_folder2::manager::{FolderInitializeData, FolderManager};
+use flowy_folder2::manager::{FolderInitializeDataSource, FolderManager};
 use flowy_sqlite::kv::StorePreferences;
 use flowy_task::{TaskDispatcher, TaskRunner};
-use flowy_user::event_map::{SignUpContext, UserCloudServiceProvider, UserStatusCallback};
+use flowy_user::event_map::{UserCloudServiceProvider, UserStatusCallback};
 use flowy_user::manager::{UserManager, UserSessionConfig};
 use flowy_user_deps::cloud::UserCloudConfig;
 use flowy_user_deps::entities::{AuthType, UserProfile, UserWorkspace};
@@ -316,13 +316,19 @@ impl UserStatusCallback for UserStatusCallbackImpl {
     to_fut(async move {
       collab_builder.initialize(user_workspace.id.clone());
       folder_manager
-        .initialize(user_id, &user_workspace.id, FolderInitializeData::Empty)
+        .initialize(
+          user_id,
+          &user_workspace.id,
+          FolderInitializeDataSource::LocalDisk {
+            create_if_not_exist: false,
+          },
+        )
         .await?;
       database_manager
         .initialize(
           user_id,
           user_workspace.id.clone(),
-          user_workspace.database_storage_id,
+          user_workspace.database_views_aggregate_id,
         )
         .await?;
       document_manager
@@ -352,7 +358,7 @@ impl UserStatusCallback for UserStatusCallbackImpl {
         .initialize(
           user_id,
           user_workspace.id.clone(),
-          user_workspace.database_storage_id,
+          user_workspace.database_views_aggregate_id,
         )
         .await?;
       document_manager
@@ -364,7 +370,7 @@ impl UserStatusCallback for UserStatusCallbackImpl {
 
   fn did_sign_up(
     &self,
-    context: SignUpContext,
+    is_new_user: bool,
     user_profile: &UserProfile,
     user_workspace: &UserWorkspace,
     _device_id: &str,
@@ -380,8 +386,10 @@ impl UserStatusCallback for UserStatusCallbackImpl {
         .initialize_with_new_user(
           user_profile.uid,
           &user_profile.token,
-          context.is_new,
-          context.local_folder,
+          is_new_user,
+          FolderInitializeDataSource::LocalDisk {
+            create_if_not_exist: true,
+          },
           &user_workspace.id,
         )
         .await?;
@@ -389,7 +397,7 @@ impl UserStatusCallback for UserStatusCallbackImpl {
         .initialize_with_new_user(
           user_profile.uid,
           user_workspace.id.clone(),
-          user_workspace.database_storage_id,
+          user_workspace.database_views_aggregate_id,
         )
         .await?;
 
@@ -425,7 +433,7 @@ impl UserStatusCallback for UserStatusCallbackImpl {
         .initialize(
           user_id,
           user_workspace.id.clone(),
-          user_workspace.database_storage_id,
+          user_workspace.database_views_aggregate_id,
         )
         .await?;
       document_manager

+ 3 - 3
frontend/rust-lib/flowy-database2/src/entities/database_entities.rs

@@ -1,6 +1,6 @@
 use collab::core::collab_state::SyncState;
 use collab_database::rows::RowId;
-use collab_database::user::DatabaseRecord;
+use collab_database::user::DatabaseWithViews;
 use collab_database::views::DatabaseLayout;
 
 use flowy_derive::ProtoBuf;
@@ -197,8 +197,8 @@ pub struct DatabaseDescriptionPB {
   pub database_id: String,
 }
 
-impl From<DatabaseRecord> for DatabaseDescriptionPB {
-  fn from(data: DatabaseRecord) -> Self {
+impl From<DatabaseWithViews> for DatabaseDescriptionPB {
+  fn from(data: DatabaseWithViews) -> Self {
     Self {
       name: data.name,
       database_id: data.database_id,

+ 1 - 5
frontend/rust-lib/flowy-database2/src/entities/group_entities/group.rs

@@ -94,11 +94,7 @@ impl std::convert::From<GroupData> for GroupPB {
       field_id: group_data.field_id,
       group_id: group_data.id,
       group_name: group_data.name,
-      rows: group_data
-        .rows
-        .into_iter()
-        .map(|row_detail| RowMetaPB::from(row_detail.meta))
-        .collect(),
+      rows: group_data.rows.into_iter().map(RowMetaPB::from).collect(),
       is_default: group_data.is_default,
       is_visible: group_data.is_visible,
     }

+ 16 - 16
frontend/rust-lib/flowy-database2/src/entities/row_entities.rs

@@ -1,6 +1,6 @@
 use std::collections::HashMap;
 
-use collab_database::rows::{Row, RowId, RowMeta};
+use collab_database::rows::{Row, RowDetail, RowId};
 use collab_database::views::RowOrder;
 
 use flowy_derive::ProtoBuf;
@@ -61,27 +61,27 @@ pub struct RowMetaPB {
   pub cover: Option<String>,
 }
 
-impl std::convert::From<&RowMeta> for RowMetaPB {
-  fn from(row_meta: &RowMeta) -> Self {
+impl std::convert::From<&RowDetail> for RowMetaPB {
+  fn from(row_detail: &RowDetail) -> Self {
     Self {
-      id: row_meta.row_id.clone(),
-      document_id: row_meta.document_id.clone(),
-      icon: row_meta.icon_url.clone(),
-      cover: row_meta.cover_url.clone(),
+      id: row_detail.row.id.to_string(),
+      document_id: row_detail.document_id.clone(),
+      icon: row_detail.meta.icon_url.clone(),
+      cover: row_detail.meta.cover_url.clone(),
     }
   }
 }
-
-impl std::convert::From<RowMeta> for RowMetaPB {
-  fn from(row_meta: RowMeta) -> Self {
+impl std::convert::From<RowDetail> for RowMetaPB {
+  fn from(row_detail: RowDetail) -> Self {
     Self {
-      id: row_meta.row_id,
-      document_id: row_meta.document_id,
-      icon: row_meta.icon_url,
-      cover: row_meta.cover_url,
+      id: row_detail.row.id.to_string(),
+      document_id: row_detail.document_id,
+      icon: row_detail.meta.icon_url,
+      cover: row_detail.meta.cover_url,
     }
   }
 }
+//
 
 #[derive(Debug, Default, Clone, ProtoBuf)]
 pub struct UpdateRowMetaChangesetPB {
@@ -251,7 +251,7 @@ impl std::convert::From<RowMetaPB> for InsertedRowPB {
 impl From<InsertedRow> for InsertedRowPB {
   fn from(data: InsertedRow) -> Self {
     Self {
-      row_meta: data.row_meta.into(),
+      row_meta: data.row_detail.into(),
       index: data.index,
       is_new: data.is_new,
     }
@@ -274,7 +274,7 @@ pub struct UpdatedRowPB {
 
 impl From<UpdatedRow> for UpdatedRowPB {
   fn from(data: UpdatedRow) -> Self {
-    let row_meta = data.row_meta.map(RowMetaPB::from);
+    let row_meta = data.row_detail.map(RowMetaPB::from);
     Self {
       row_id: data.row_id,
       field_ids: data.field_ids,
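
The two conversions above read row_detail.row.id, row_detail.document_id, and row_detail.meta.icon_url / cover_url, so the RowDetail that replaces RowMeta throughout this PR has roughly the shape sketched below. This is inferred from the diff, with stand-in field types; the real RowDetail, Row, and RowMeta live in the collab-database crate.

// Inferred sketch only; stand-ins for the real collab-database types.
pub struct Row {
  pub id: String, // a RowId in the real type, stringified for RowMetaPB.id
}

pub struct RowMeta {
  pub icon_url: Option<String>,
  pub cover_url: Option<String>,
}

// RowDetail bundles the row itself, its meta, and the id of the document
// embedded in the row, which is why the standalone RowMeta conversions
// above could be replaced.
pub struct RowDetail {
  pub row: Row,
  pub meta: RowMeta,
  pub document_id: String,
}
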

+ 1 - 1
frontend/rust-lib/flowy-database2/src/entities/view_entities.rs

@@ -88,7 +88,7 @@ impl From<RowDetail> for DidFetchRowPB {
       visibility: value.row.visibility,
       created_at: value.row.created_at,
       modified_at: value.row.modified_at,
-      meta: RowMetaPB::from(value.meta),
+      meta: RowMetaPB::from(value),
     }
   }
 }

+ 1 - 1
frontend/rust-lib/flowy-database2/src/event_handler.rs

@@ -461,7 +461,7 @@ pub(crate) async fn create_row_handler(
     .await?
   {
     None => Err(FlowyError::internal().with_context("Create row fail")),
-    Some(row) => data_result_ok(RowMetaPB::from(row.meta)),
+    Some(row) => data_result_ok(RowMetaPB::from(row)),
   }
 }
 

+ 8 - 8
frontend/rust-lib/flowy-database2/src/manager.rs

@@ -76,7 +76,7 @@ impl DatabaseManager {
     &self,
     uid: i64,
     _workspace_id: String,
-    database_storage_id: String,
+    database_views_aggregate_id: String,
   ) -> FlowyResult<()> {
     let collab_db = self.user.collab_db(uid)?;
     let collab_builder = UserDatabaseCollabServiceImpl {
@@ -87,11 +87,11 @@ impl DatabaseManager {
     let mut collab_raw_data = CollabRawData::default();
 
     // If the workspace database not exist in disk, try to fetch from remote.
-    if !self.is_collab_exist(uid, &collab_db, &database_storage_id) {
+    if !self.is_collab_exist(uid, &collab_db, &database_views_aggregate_id) {
       tracing::trace!("workspace database not exist, try to fetch from remote");
       match self
         .cloud_service
-        .get_collab_update(&database_storage_id, CollabType::WorkspaceDatabase)
+        .get_collab_update(&database_views_aggregate_id, CollabType::WorkspaceDatabase)
         .await
       {
         Ok(updates) => {
@@ -100,17 +100,17 @@ impl DatabaseManager {
         Err(err) => {
           return Err(FlowyError::record_not_found().with_context(format!(
             "get workspace database :{} failed: {}",
-            database_storage_id, err,
+            database_views_aggregate_id, err,
           )));
         },
       }
     }
 
     // Construct the workspace database.
-    tracing::trace!("open workspace database: {}", &database_storage_id);
+    tracing::trace!("open workspace database: {}", &database_views_aggregate_id);
     let collab = collab_builder.build_collab_with_config(
       uid,
-      &database_storage_id,
+      &database_views_aggregate_id,
       CollabType::WorkspaceDatabase,
       collab_db.clone(),
       collab_raw_data,
@@ -130,10 +130,10 @@ impl DatabaseManager {
     &self,
     user_id: i64,
     workspace_id: String,
-    database_storage_id: String,
+    database_views_aggregate_id: String,
   ) -> FlowyResult<()> {
     self
-      .initialize(user_id, workspace_id, database_storage_id)
+      .initialize(user_id, workspace_id, database_views_aggregate_id)
       .await?;
     Ok(())
   }

+ 17 - 31
frontend/rust-lib/flowy-database2/src/services/database/database_editor.rs

@@ -411,8 +411,8 @@ impl DatabaseEditor {
 
   pub async fn move_row(&self, view_id: &str, from: RowId, to: RowId) {
     let database = self.database.lock();
-    if let (Some(row_meta), Some(from_index), Some(to_index)) = (
-      database.get_row_meta(&from),
+    if let (Some(row_detail), Some(from_index), Some(to_index)) = (
+      database.get_row_detail(&from),
       database.index_of_row(view_id, &from),
       database.index_of_row(view_id, &to),
     ) {
@@ -422,7 +422,7 @@ impl DatabaseEditor {
       drop(database);
 
       let delete_row_id = from.into_inner();
-      let insert_row = InsertedRowPB::new(RowMetaPB::from(&row_meta)).with_index(to_index as i32);
+      let insert_row = InsertedRowPB::new(RowMetaPB::from(row_detail)).with_index(to_index as i32);
       let changes = RowsChangePB::from_move(vec![delete_row_id], vec![insert_row]);
       send_notification(view_id, DatabaseNotification::DidUpdateViewRows)
         .payload(changes)
@@ -442,10 +442,8 @@ impl DatabaseEditor {
     let result = self.database.lock().create_row_in_view(view_id, params);
     if let Some((index, row_order)) = result {
       tracing::trace!("create row: {:?} at {}", row_order, index);
-      let row = self.database.lock().get_row(&row_order.id);
-      let row_meta = self.database.lock().get_row_meta(&row_order.id);
-      if let Some(meta) = row_meta {
-        let row_detail = RowDetail { row, meta };
+      let row_detail = self.database.lock().get_row_detail(&row_order.id);
+      if let Some(row_detail) = row_detail {
         for view in self.database_views.editors().await {
           view.v_did_create_row(&row_detail, &group_id, index).await;
         }
@@ -545,9 +543,10 @@ impl DatabaseEditor {
   pub fn get_row_meta(&self, view_id: &str, row_id: &RowId) -> Option<RowMetaPB> {
     if self.database.lock().views.is_row_exist(view_id, row_id) {
       let row_meta = self.database.lock().get_row_meta(row_id)?;
+      let row_document_id = self.database.lock().get_row_document_id(row_id)?;
       Some(RowMetaPB {
         id: row_id.clone().into_inner(),
-        document_id: row_meta.document_id,
+        document_id: row_document_id,
         icon: row_meta.icon_url,
         cover: row_meta.cover_url,
       })
@@ -559,9 +558,7 @@ impl DatabaseEditor {
 
   pub fn get_row_detail(&self, view_id: &str, row_id: &RowId) -> Option<RowDetail> {
     if self.database.lock().views.is_row_exist(view_id, row_id) {
-      let meta = self.database.lock().get_row_meta(row_id)?;
-      let row = self.database.lock().get_row(row_id);
-      Some(RowDetail { row, meta })
+      self.database.lock().get_row_detail(row_id)
     } else {
       tracing::warn!("the row:{} is exist in view:{}", row_id.as_str(), view_id);
       None
@@ -587,15 +584,15 @@ impl DatabaseEditor {
     });
 
     // Use the temporary row meta to get rid of the lock that not implement the `Send` or 'Sync' trait.
-    let row_meta = self.database.lock().get_row_meta(row_id);
-    if let Some(row_meta) = row_meta {
+    let row_detail = self.database.lock().get_row_detail(row_id);
+    if let Some(row_detail) = row_detail {
       for view in self.database_views.editors().await {
-        view.v_did_update_row_meta(row_id, &row_meta).await;
+        view.v_did_update_row_meta(row_id, &row_detail).await;
       }
 
       // Notifies the client that the row meta has been updated.
       send_notification(row_id.as_str(), DatabaseNotification::DidUpdateRowMeta)
-        .payload(RowMetaPB::from(&row_meta))
+        .payload(RowMetaPB::from(&row_detail))
         .send();
     }
   }
@@ -1084,7 +1081,7 @@ impl DatabaseEditor {
 
     let rows = rows
       .into_iter()
-      .map(|row_detail| RowMetaPB::from(&row_detail.meta))
+      .map(|row_detail| RowMetaPB::from(row_detail.as_ref()))
       .collect::<Vec<RowMetaPB>>();
     Ok(DatabasePB {
       id: database_id,
@@ -1245,17 +1242,10 @@ impl DatabaseViewData for DatabaseViewDataImpl {
 
   fn get_row(&self, view_id: &str, row_id: &RowId) -> Fut<Option<(usize, Arc<RowDetail>)>> {
     let index = self.database.lock().index_of_row(view_id, row_id);
-    let row = self.database.lock().get_row(row_id);
-    let row_meta = self.database.lock().get_row_meta(row_id);
+    let row_detail = self.database.lock().get_row_detail(row_id);
     to_fut(async move {
-      match (index, row_meta) {
-        (Some(index), Some(row_meta)) => {
-          let row_detail = RowDetail {
-            row,
-            meta: row_meta,
-          };
-          Some((index, Arc::new(row_detail)))
-        },
+      match (index, row_detail) {
+        (Some(index), Some(row_detail)) => Some((index, Arc::new(row_detail))),
         _ => None,
       }
     })
@@ -1266,11 +1256,7 @@ impl DatabaseViewData for DatabaseViewDataImpl {
     let rows = database.get_rows_for_view(view_id);
     let row_details = rows
       .into_iter()
-      .flat_map(|row| {
-        database
-          .get_row_meta(&row.id)
-          .map(|meta| RowDetail { row, meta })
-      })
+      .flat_map(|row| database.get_row_detail(&row.id))
       .collect::<Vec<RowDetail>>();
 
     to_fut(async move { row_details.into_iter().map(Arc::new).collect() })

+ 6 - 6
frontend/rust-lib/flowy-database2/src/services/database/entities.rs

@@ -1,4 +1,4 @@
-use collab_database::rows::{RowId, RowMeta};
+use collab_database::rows::{RowDetail, RowId};
 use collab_database::views::DatabaseLayout;
 
 #[derive(Debug, Clone)]
@@ -14,7 +14,7 @@ pub enum DatabaseRowEvent {
 
 #[derive(Debug, Clone)]
 pub struct InsertedRow {
-  pub row_meta: RowMeta,
+  pub row_detail: RowDetail,
   pub index: Option<i32>,
   pub is_new: bool,
 }
@@ -29,7 +29,7 @@ pub struct UpdatedRow {
   pub field_ids: Vec<String>,
 
   /// The meta of row was updated if this is Some.
-  pub row_meta: Option<RowMeta>,
+  pub row_detail: Option<RowDetail>,
 }
 
 impl UpdatedRow {
@@ -38,7 +38,7 @@ impl UpdatedRow {
       row_id: row_id.to_string(),
       height: None,
       field_ids: vec![],
-      row_meta: None,
+      row_detail: None,
     }
   }
 
@@ -52,8 +52,8 @@ impl UpdatedRow {
     self
   }
 
-  pub fn with_row_meta(mut self, row_meta: RowMeta) -> Self {
-    self.row_meta = Some(row_meta);
+  pub fn with_row_meta(mut self, row_detail: RowDetail) -> Self {
+    self.row_detail = Some(row_detail);
     self
   }
 }

+ 7 - 7
frontend/rust-lib/flowy-database2/src/services/database_view/view_editor.rs

@@ -4,7 +4,7 @@ use std::sync::Arc;
 
 use collab_database::database::{gen_database_filter_id, gen_database_sort_id, MutexDatabase};
 use collab_database::fields::{Field, TypeOptionData};
-use collab_database::rows::{Cells, Row, RowCell, RowDetail, RowId, RowMeta};
+use collab_database::rows::{Cells, Row, RowCell, RowDetail, RowId};
 use collab_database::views::{DatabaseLayout, DatabaseView, LayoutSetting};
 use tokio::sync::{broadcast, RwLock};
 
@@ -216,8 +216,8 @@ impl DatabaseViewEditor {
       .await;
   }
 
-  pub async fn v_did_update_row_meta(&self, row_id: &RowId, row_meta: &RowMeta) {
-    let update_row = UpdatedRow::new(row_id.as_str()).with_row_meta(row_meta.clone());
+  pub async fn v_did_update_row_meta(&self, row_id: &RowId, row_detail: &RowDetail) {
+    let update_row = UpdatedRow::new(row_id.as_str()).with_row_meta(row_detail.clone());
     let changeset = RowsChangePB::from_update(update_row.into());
     send_notification(&self.view_id, DatabaseNotification::DidUpdateViewRows)
       .payload(changeset)
@@ -234,7 +234,7 @@ impl DatabaseViewEditor {
     // Send the group notification if the current view has groups
     match group_id.as_ref() {
       None => {
-        let row = InsertedRowPB::new(RowMetaPB::from(&row_detail.meta)).with_index(index as i32);
+        let row = InsertedRowPB::new(RowMetaPB::from(row_detail)).with_index(index as i32);
         changes = RowsChangePB::from_insert(row);
       },
       Some(group_id) => {
@@ -246,7 +246,7 @@ impl DatabaseViewEditor {
           .await;
 
         let inserted_row = InsertedRowPB {
-          row_meta: RowMetaPB::from(&row_detail.meta),
+          row_meta: RowMetaPB::from(row_detail),
           index: Some(index as i32),
           is_new: true,
         };
@@ -790,7 +790,7 @@ impl DatabaseViewEditor {
 
     let (_, row_detail) = self.delegate.get_row(&self.view_id, &row_id).await?;
     Some(CalendarEventPB {
-      row_meta: RowMetaPB::from(&row_detail.meta),
+      row_meta: RowMetaPB::from(row_detail.as_ref()),
       date_field_id: date_field.id.clone(),
       title,
       timestamp,
@@ -853,7 +853,7 @@ impl DatabaseViewEditor {
 
       let (_, row_detail) = self.delegate.get_row(&self.view_id, &row_id).await?;
       let event = CalendarEventPB {
-        row_meta: RowMetaPB::from(&row_detail.meta),
+        row_meta: RowMetaPB::from(row_detail.as_ref()),
         date_field_id: calendar_setting.field_id.clone(),
         title,
         timestamp,

+ 5 - 4
frontend/rust-lib/flowy-database2/src/services/field_settings/mod.rs

@@ -1,7 +1,8 @@
-mod entities;
-mod field_settings;
-mod field_settings_builder;
-
 pub use entities::*;
 pub use field_settings::*;
 pub use field_settings_builder::*;
+
+mod entities;
+#[allow(clippy::module_inception)]
+mod field_settings;
+mod field_settings_builder;

+ 4 - 4
frontend/rust-lib/flowy-database2/src/services/filter/controller.rs

@@ -161,9 +161,9 @@ impl FilterController {
       ) {
         if is_visible {
           if let Some((index, _row)) = self.delegate.get_row(&self.view_id, &row_id).await {
-            notification
-              .visible_rows
-              .push(InsertedRowPB::new(RowMetaPB::from(&row_detail.meta)).with_index(index as i32))
+            notification.visible_rows.push(
+              InsertedRowPB::new(RowMetaPB::from(row_detail.as_ref())).with_index(index as i32),
+            )
           }
         } else {
           notification.invisible_rows.push(row_id);
@@ -197,7 +197,7 @@ impl FilterController {
         &self.cell_filter_cache,
       ) {
         if is_visible {
-          let row_meta = RowMetaPB::from(&row_detail.meta);
+          let row_meta = RowMetaPB::from(row_detail.as_ref());
           visible_rows.push(InsertedRowPB::new(row_meta).with_index(index as i32))
         } else {
           invisible_rows.push(row_id);

+ 3 - 3
frontend/rust-lib/flowy-database2/src/services/group/controller.rs

@@ -165,7 +165,7 @@ where
     if !no_status_group_rows.is_empty() {
       changeset
         .inserted_rows
-        .push(InsertedRowPB::new(RowMetaPB::from(&row_detail.meta)));
+        .push(InsertedRowPB::new(RowMetaPB::from(row_detail)));
       no_status_group.add_row(row_detail.clone());
     }
 
@@ -190,7 +190,7 @@ where
 
     let mut deleted_row_ids = vec![];
     for row_detail in &no_status_group.rows {
-      let row_id = row_detail.meta.row_id.clone();
+      let row_id = row_detail.row.id.to_string();
       if default_group_deleted_rows
         .iter()
         .any(|deleted_row| deleted_row.row_meta.id == row_id)
@@ -200,7 +200,7 @@ where
     }
     no_status_group
       .rows
-      .retain(|row_detail| !deleted_row_ids.contains(&row_detail.meta.row_id));
+      .retain(|row_detail| !deleted_row_ids.contains(&row_detail.row.id));
     changeset.deleted_rows.extend(deleted_row_ids);
     Some(changeset)
   }

+ 2 - 2
frontend/rust-lib/flowy-database2/src/services/group/controller_impls/checkbox_controller.rs

@@ -69,7 +69,7 @@ impl GroupCustomize for CheckboxGroupController {
           if is_not_contained {
             changeset
               .inserted_rows
-              .push(InsertedRowPB::new(RowMetaPB::from(&row_detail.meta)));
+              .push(InsertedRowPB::new(RowMetaPB::from(row_detail)));
             group.add_row(row_detail.clone());
           }
         }
@@ -87,7 +87,7 @@ impl GroupCustomize for CheckboxGroupController {
           if is_not_contained {
             changeset
               .inserted_rows
-              .push(InsertedRowPB::new(RowMetaPB::from(&row_detail.meta)));
+              .push(InsertedRowPB::new(RowMetaPB::from(row_detail)));
             group.add_row(row_detail.clone());
           }
         }

+ 2 - 2
frontend/rust-lib/flowy-database2/src/services/group/controller_impls/date_controller.rs

@@ -112,7 +112,7 @@ impl GroupCustomize for DateGroupController {
         &setting_content,
       );
       let mut new_group = self.context.add_new_group(group)?;
-      new_group.group.rows.push(RowMetaPB::from(&row_detail.meta));
+      new_group.group.rows.push(RowMetaPB::from(row_detail));
       inserted_group = Some(new_group);
     }
 
@@ -164,7 +164,7 @@ impl GroupCustomize for DateGroupController {
         if !group.contains_row(&row_detail.row.id) {
           changeset
             .inserted_rows
-            .push(InsertedRowPB::new(RowMetaPB::from(&row_detail.meta)));
+            .push(InsertedRowPB::new(RowMetaPB::from(row_detail)));
           group.add_row(row_detail.clone());
         }
       } else if group.contains_row(&row_detail.row.id) {

+ 2 - 2
frontend/rust-lib/flowy-database2/src/services/group/controller_impls/select_option_controller/util.rs

@@ -31,7 +31,7 @@ pub fn add_or_remove_select_option_row(
         if !group.contains_row(&row_detail.row.id) {
           changeset
             .inserted_rows
-            .push(InsertedRowPB::new(RowMetaPB::from(&row_detail.meta)));
+            .push(InsertedRowPB::new(RowMetaPB::from(row_detail)));
           group.add_row(row_detail.clone());
         }
       } else if group.contains_row(&row_detail.row.id) {
@@ -104,7 +104,7 @@ pub fn move_group_row(
   }
 
   if group.id == *to_group_id {
-    let mut inserted_row = InsertedRowPB::new(RowMetaPB::from(&row_detail.meta));
+    let mut inserted_row = InsertedRowPB::new(RowMetaPB::from((*row_detail).clone()));
     match to_index {
       None => {
         changeset.inserted_rows.push(inserted_row);

+ 2 - 2
frontend/rust-lib/flowy-database2/src/services/group/controller_impls/url_controller.rs

@@ -58,7 +58,7 @@ impl GroupCustomize for URLGroupController {
       let cell_data: URLCellData = _cell_data.clone().into();
       let group = make_group_from_url_cell(&cell_data);
       let mut new_group = self.context.add_new_group(group)?;
-      new_group.group.rows.push(RowMetaPB::from(&row_detail.meta));
+      new_group.group.rows.push(RowMetaPB::from(row_detail));
       inserted_group = Some(new_group);
     }
 
@@ -99,7 +99,7 @@ impl GroupCustomize for URLGroupController {
         if !group.contains_row(&row_detail.row.id) {
           changeset
             .inserted_rows
-            .push(InsertedRowPB::new(RowMetaPB::from(&row_detail.meta)));
+            .push(InsertedRowPB::new(RowMetaPB::from(row_detail)));
           group.add_row(row_detail.clone());
         }
       } else if group.contains_row(&row_detail.row.id) {

+ 1 - 1
frontend/rust-lib/flowy-database2/tests/database/block_test/script.rs

@@ -43,7 +43,7 @@ impl DatabaseRowTest {
           .unwrap();
         self
           .row_by_row_id
-          .insert(row_detail.row.id.to_string(), row_detail.meta.into());
+          .insert(row_detail.row.id.to_string(), row_detail.into());
         self.row_details = self.get_rows().await;
       },
       RowScript::UpdateTextCell { row_id, content } => {

+ 1 - 0
frontend/rust-lib/flowy-database2/tests/database/field_settings_test/script.rs

@@ -3,6 +3,7 @@ use flowy_database2::services::field_settings::FieldSettingsChangesetParams;
 
 use crate::database::database_editor::DatabaseEditorTest;
 
+#[allow(clippy::enum_variant_names)]
 pub enum FieldSettingsScript {
   AssertFieldSettings {
     field_id: String,

+ 3 - 1
frontend/rust-lib/flowy-document2/src/event_handler.rs

@@ -34,7 +34,9 @@ pub(crate) async fn create_document_handler(
   let manager = upgrade_document(manager)?;
   let params: CreateDocumentParams = data.into_inner().try_into()?;
   let uid = manager.user.user_id()?;
-  manager.create_document(uid, &params.document_id, params.initial_data)?;
+  manager
+    .create_document(uid, &params.document_id, params.initial_data)
+    .await?;
   Ok(())
 }
 

+ 13 - 14
frontend/rust-lib/flowy-document2/src/manager.rs

@@ -57,7 +57,7 @@ impl DocumentManager {
   ///
   /// if the document already exists, return the existing document.
   /// if the data is None, will create a document with default data.
-  pub fn create_document(
+  pub async fn create_document(
     &self,
     uid: i64,
     doc_id: &str,
@@ -65,13 +65,19 @@ impl DocumentManager {
   ) -> FlowyResult<Arc<MutexDocument>> {
     tracing::trace!("create a document: {:?}", doc_id);
     let collab = self.collab_for_document(uid, doc_id, vec![])?;
-    let data = data.unwrap_or_else(default_document_data);
-    let document = Arc::new(MutexDocument::create_with_data(collab, data)?);
-    Ok(document)
+
+    match self.get_document(doc_id).await {
+      Ok(document) => Ok(document),
+      Err(_) => {
+        let data = data.unwrap_or_else(default_document_data);
+        let document = Arc::new(MutexDocument::create_with_data(collab, data)?);
+        Ok(document)
+      },
+    }
   }
 
   /// Return the document
-  #[tracing::instrument(level = "debug", skip_all)]
+  #[tracing::instrument(level = "debug", skip(self), err)]
   pub async fn get_document(&self, doc_id: &str) -> FlowyResult<Arc<MutexDocument>> {
     if let Some(doc) = self.documents.read().get(doc_id) {
       return Ok(doc.clone());
@@ -83,10 +89,7 @@ impl DocumentManager {
     }
 
     let uid = self.user.user_id()?;
-    let db = self.user.collab_db(uid)?;
-    let collab = self
-      .collab_builder
-      .build(uid, doc_id, CollabType::Document, updates, db)?;
+    let collab = self.collab_for_document(uid, doc_id, updates)?;
     let document = Arc::new(MutexDocument::open(doc_id, collab)?);
 
     // save the document to the memory and read it from the memory if we open the same document again.
@@ -101,11 +104,7 @@ impl DocumentManager {
   pub async fn get_document_data(&self, doc_id: &str) -> FlowyResult<DocumentData> {
     let mut updates = vec![];
     if !self.is_doc_exist(doc_id)? {
-      if let Ok(document_updates) = self.cloud_service.get_document_updates(doc_id).await {
-        updates = document_updates;
-      } else {
-        return Err(FlowyError::collab_not_sync());
-      }
+      updates = self.cloud_service.get_document_updates(doc_id).await?;
     }
     let uid = self.user.user_id()?;
     let collab = self.collab_for_document(uid, doc_id, updates)?;
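
Because create_document is now async and returns the existing document when one is already present, call sites simply await it (see the handlers and tests below). A minimal usage sketch, assuming placeholder bindings for manager, uid, and doc_id:

use std::sync::Arc;

use flowy_document2::manager::DocumentManager;
use flowy_error::FlowyResult;

// Hypothetical helper: re-running this during data sync is safe, because
// create_document returns the existing document instead of overwriting it.
async fn ensure_document(
  manager: &Arc<DocumentManager>,
  uid: i64,
  doc_id: &str,
) -> FlowyResult<()> {
  let _document = manager.create_document(uid, doc_id, None).await?;
  Ok(())
}
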

+ 3 - 1
frontend/rust-lib/flowy-document2/tests/document/document_redo_undo_test.rs

@@ -13,7 +13,9 @@ async fn undo_redo_test() {
   let data = default_document_data();
 
   // create a document
-  _ = test.create_document(test.user.user_id().unwrap(), &doc_id, Some(data.clone()));
+  _ = test
+    .create_document(test.user.user_id().unwrap(), &doc_id, Some(data.clone()))
+    .await;
 
   // open a document
   let document = test.get_document(&doc_id).await.unwrap();

+ 5 - 4
frontend/rust-lib/flowy-document2/tests/document/document_test.rs

@@ -16,6 +16,7 @@ async fn restore_document() {
   let uid = test.user.user_id().unwrap();
   let document_a = test
     .create_document(uid, &doc_id, Some(data.clone()))
+    .await
     .unwrap();
   let data_a = document_a.lock().get_document_data().unwrap();
   assert_eq!(data_a, data);
@@ -33,7 +34,7 @@ async fn restore_document() {
   assert_eq!(data_b, data);
 
   // restore
-  _ = test.create_document(uid, &doc_id, Some(data.clone()));
+  _ = test.create_document(uid, &doc_id, Some(data.clone())).await;
   // open a document
   let data_b = test
     .get_document(&doc_id)
@@ -56,7 +57,7 @@ async fn document_apply_insert_action() {
   let data = default_document_data();
 
   // create a document
-  _ = test.create_document(uid, &doc_id, Some(data.clone()));
+  _ = test.create_document(uid, &doc_id, Some(data.clone())).await;
 
   // open a document
   let document = test.get_document(&doc_id).await.unwrap();
@@ -107,7 +108,7 @@ async fn document_apply_update_page_action() {
   let data = default_document_data();
 
   // create a document
-  _ = test.create_document(uid, &doc_id, Some(data.clone()));
+  _ = test.create_document(uid, &doc_id, Some(data.clone())).await;
 
   // open a document
   let document = test.get_document(&doc_id).await.unwrap();
@@ -148,7 +149,7 @@ async fn document_apply_update_action() {
   let data = default_document_data();
 
   // create a document
-  _ = test.create_document(uid, &doc_id, Some(data.clone()));
+  _ = test.create_document(uid, &doc_id, Some(data.clone())).await;
 
   // open a document
   let document = test.get_document(&doc_id).await.unwrap();

+ 1 - 0
frontend/rust-lib/flowy-document2/tests/document/util.rs

@@ -94,6 +94,7 @@ pub async fn create_and_open_empty_document() -> (DocumentTest, Arc<MutexDocumen
   // create a document
   _ = test
     .create_document(uid, &doc_id, Some(data.clone()))
+    .await
     .unwrap();
 
   let document = test.get_document(&doc_id).await.unwrap();

+ 4 - 0
frontend/rust-lib/flowy-folder-deps/src/cloud.rs

@@ -31,3 +31,7 @@ pub struct FolderSnapshot {
 pub fn gen_workspace_id() -> Uuid {
   uuid::Uuid::new_v4()
 }
+
+pub fn gen_view_id() -> Uuid {
+  uuid::Uuid::new_v4()
+}

+ 3 - 3
frontend/rust-lib/flowy-folder2/src/entities/view.rs

@@ -5,12 +5,12 @@ use std::sync::Arc;
 
 use collab_folder::core::{View, ViewLayout};
 
-use crate::entities::icon::ViewIconPB;
 use flowy_derive::{ProtoBuf, ProtoBuf_Enum};
 use flowy_error::ErrorCode;
+use flowy_folder_deps::cloud::gen_view_id;
 
+use crate::entities::icon::ViewIconPB;
 use crate::entities::parser::view::{ViewDesc, ViewIdentify, ViewName, ViewThumbnail};
-use crate::view_operation::gen_view_id;
 
 #[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
 pub struct ChildViewUpdatePB {
@@ -226,7 +226,7 @@ impl TryInto<CreateViewParams> for CreateViewPayloadPB {
   fn try_into(self) -> Result<CreateViewParams, Self::Error> {
     let name = ViewName::parse(self.name)?.0;
     let parent_view_id = ViewIdentify::parse(self.parent_view_id)?.0;
-    let view_id = gen_view_id();
+    let view_id = gen_view_id().to_string();
 
     Ok(CreateViewParams {
       parent_view_id,

+ 49 - 59
frontend/rust-lib/flowy-folder2/src/manager.rs

@@ -16,7 +16,7 @@ use tokio_stream::StreamExt;
 use tracing::{event, Level};
 
 use flowy_error::{ErrorCode, FlowyError, FlowyResult};
-use flowy_folder_deps::cloud::FolderCloudService;
+use flowy_folder_deps::cloud::{gen_view_id, FolderCloudService};
 
 use crate::entities::icon::UpdateViewIconParams;
 use crate::entities::{
@@ -31,9 +31,7 @@ use crate::notification::{
 };
 use crate::share::ImportParams;
 use crate::user_default::DefaultFolderBuilder;
-use crate::view_operation::{
-  create_view, gen_view_id, FolderOperationHandler, FolderOperationHandlers,
-};
+use crate::view_operation::{create_view, FolderOperationHandler, FolderOperationHandlers};
 
 /// [FolderUser] represents the user for folder.
 pub trait FolderUser: Send + Sync {
@@ -129,10 +127,9 @@ impl FolderManager {
   }
 
   pub async fn get_workspace_views(&self, workspace_id: &str) -> FlowyResult<Vec<ViewPB>> {
-    let views = self.with_folder(
-      || vec![],
-      |folder| get_workspace_view_pbs(workspace_id, folder),
-    );
+    let views = self.with_folder(std::vec::Vec::new, |folder| {
+      get_workspace_view_pbs(workspace_id, folder)
+    });
 
     Ok(views)
   }
@@ -143,7 +140,7 @@ impl FolderManager {
     &self,
     uid: i64,
     workspace_id: &str,
-    initial_data: FolderInitializeData,
+    initial_data: FolderInitializeDataSource,
   ) -> FlowyResult<()> {
     *self.workspace_id.write() = Some(workspace_id.to_string());
     let workspace_id = workspace_id.to_string();
@@ -156,25 +153,34 @@ impl FolderManager {
       };
 
       let folder = match initial_data {
-        FolderInitializeData::Empty => {
+        FolderInitializeDataSource::LocalDisk {
+          create_if_not_exist,
+        } => {
           let is_exist = is_exist_in_local_disk(&self.user, &workspace_id).unwrap_or(false);
-          if !is_exist {
+          if is_exist {
+            let collab = self.collab_for_folder(uid, &workspace_id, collab_db, vec![])?;
+            Folder::open(collab, Some(folder_notifier))
+          } else if create_if_not_exist {
+            let folder_data =
+              DefaultFolderBuilder::build(uid, workspace_id.to_string(), &self.operation_handlers)
+                .await;
+            let collab = self.collab_for_folder(uid, &workspace_id, collab_db, vec![])?;
+            Folder::create(collab, Some(folder_notifier), Some(folder_data))
+          } else {
             return Err(FlowyError::new(
               ErrorCode::RecordNotFound,
               "Can't find any workspace data",
             ));
           }
-          let collab = self.collab_for_folder(uid, &workspace_id, collab_db, vec![])?;
-          Folder::open(collab, Some(folder_notifier))
         },
-        FolderInitializeData::Raw(raw_data) => {
+        FolderInitializeDataSource::Cloud(raw_data) => {
           if raw_data.is_empty() {
             return Err(workspace_data_not_sync_error(uid, &workspace_id));
           }
           let collab = self.collab_for_folder(uid, &workspace_id, collab_db, raw_data)?;
           Folder::open(collab, Some(folder_notifier))
         },
-        FolderInitializeData::Data(folder_data) => {
+        FolderInitializeDataSource::FolderData(folder_data) => {
           let collab = self.collab_for_folder(uid, &workspace_id, collab_db, vec![])?;
           Folder::create(collab, Some(folder_notifier), Some(folder_data))
         },
@@ -239,7 +245,7 @@ impl FolderManager {
       .initialize(
         user_id,
         workspace_id,
-        FolderInitializeData::Raw(folder_updates),
+        FolderInitializeDataSource::Cloud(folder_updates),
       )
       .await?;
     Ok(())
@@ -252,27 +258,13 @@ impl FolderManager {
     user_id: i64,
     _token: &str,
     is_new: bool,
-    folder_data: Option<FolderData>,
+    data_source: FolderInitializeDataSource,
     workspace_id: &str,
   ) -> FlowyResult<()> {
     // Create the default workspace if the user is new
     tracing::info!("initialize_when_sign_up: is_new: {}", is_new);
     if is_new {
-      let folder_data = match folder_data {
-        None => {
-          DefaultFolderBuilder::build(user_id, workspace_id.to_string(), &self.operation_handlers)
-            .await
-        },
-        Some(folder_data) => folder_data,
-      };
-
-      self
-        .initialize(
-          user_id,
-          workspace_id,
-          FolderInitializeData::Data(folder_data),
-        )
-        .await?;
+      self.initialize(user_id, workspace_id, data_source).await?;
     } else {
       // The folder updates should not be empty, as the folder data is stored
       // when the user signs up for the first time.
@@ -290,7 +282,7 @@ impl FolderManager {
         .initialize(
           user_id,
           workspace_id,
-          FolderInitializeData::Raw(folder_updates),
+          FolderInitializeDataSource::Cloud(folder_updates),
         )
         .await?;
     }
@@ -376,7 +368,9 @@ impl FolderManager {
   }
 
   pub async fn get_all_workspaces(&self) -> Vec<Workspace> {
-    self.with_folder(|| vec![], |folder| folder.workspaces.get_all_workspaces())
+    self.with_folder(std::vec::Vec::new, |folder| {
+      folder.workspaces.get_all_workspaces()
+    })
   }
 
   pub async fn create_view_with_params(&self, params: CreateViewParams) -> FlowyResult<View> {
@@ -628,10 +622,9 @@ impl FolderManager {
   /// Return a list of views that belong to the given parent view id.
   #[tracing::instrument(level = "debug", skip(self, parent_view_id), err)]
   pub async fn get_views_belong_to(&self, parent_view_id: &str) -> FlowyResult<Vec<Arc<View>>> {
-    let views = self.with_folder(
-      || vec![],
-      |folder| folder.views.get_views_belong_to(parent_view_id),
-    );
+    let views = self.with_folder(std::vec::Vec::new, |folder| {
+      folder.views.get_views_belong_to(parent_view_id)
+    });
     Ok(views)
   }
 
@@ -686,7 +679,7 @@ impl FolderManager {
       desc: view.desc.clone(),
       layout: view.layout.clone().into(),
       initial_data: view_data.to_vec(),
-      view_id: gen_view_id(),
+      view_id: gen_view_id().to_string(),
       meta: Default::default(),
       set_as_current: true,
       index,
@@ -757,25 +750,22 @@ impl FolderManager {
 
   #[tracing::instrument(level = "trace", skip(self))]
   pub(crate) async fn get_all_favorites(&self) -> Vec<FavoritesInfo> {
-    self.with_folder(
-      || vec![],
-      |folder| {
-        let trash_ids = folder
-          .get_all_trash()
-          .into_iter()
-          .map(|trash| trash.id)
-          .collect::<Vec<String>>();
+    self.with_folder(std::vec::Vec::new, |folder| {
+      let trash_ids = folder
+        .get_all_trash()
+        .into_iter()
+        .map(|trash| trash.id)
+        .collect::<Vec<String>>();
 
-        let mut views = folder.get_all_favorites();
-        views.retain(|view| !trash_ids.contains(&view.id));
-        views
-      },
-    )
+      let mut views = folder.get_all_favorites();
+      views.retain(|view| !trash_ids.contains(&view.id));
+      views
+    })
   }
 
   #[tracing::instrument(level = "trace", skip(self))]
   pub(crate) async fn get_all_trash(&self) -> Vec<TrashInfo> {
-    self.with_folder(|| vec![], |folder| folder.get_all_trash())
+    self.with_folder(std::vec::Vec::new, |folder| folder.get_all_trash())
   }
 
   #[tracing::instrument(level = "trace", skip(self))]
@@ -804,7 +794,7 @@ impl FolderManager {
   /// Delete all the trash permanently.
   #[tracing::instrument(level = "trace", skip(self))]
   pub(crate) async fn delete_all_trash(&self) {
-    let deleted_trash = self.with_folder(|| vec![], |folder| folder.get_all_trash());
+    let deleted_trash = self.with_folder(std::vec::Vec::new, |folder| folder.get_all_trash());
     for trash in deleted_trash {
       let _ = self.delete_trash(&trash.id).await;
     }
@@ -843,7 +833,7 @@ impl FolderManager {
     }
 
     let handler = self.get_handler(&import_data.view_layout)?;
-    let view_id = gen_view_id();
+    let view_id = gen_view_id().to_string();
     let uid = self.user.user_id()?;
     if let Some(data) = import_data.data {
       handler
@@ -1238,13 +1228,13 @@ impl Deref for MutexFolder {
 unsafe impl Sync for MutexFolder {}
 unsafe impl Send for MutexFolder {}
 
-pub enum FolderInitializeData {
+pub enum FolderInitializeDataSource {
   /// It means using the data stored on local disk to initialize the folder
-  Empty,
+  LocalDisk { create_if_not_exist: bool },
   /// If there is no data stored on local disk, we will use the data from the server to initialize the folder
-  Raw(CollabRawData),
+  Cloud(CollabRawData),
   /// If the user is new, we use the [DefaultFolderBuilder] to create the default folder.
-  Data(FolderData),
+  FolderData(FolderData),
 }
 
 fn is_exist_in_local_disk(user: &Arc<dyn FolderUser>, doc_id: &str) -> FlowyResult<bool> {
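The renamed `FolderInitializeDataSource` variants cover three start-up paths: build a brand-new folder from `FolderData`, hydrate from raw updates fetched from the cloud, or open whatever is already on local disk. A minimal sketch of how a caller might pick a source when invoking `initialize` (the call shape is inferred from the hunks above; `manager`, `uid`, `workspace_id`, `folder_data`, and `folder_updates` are assumed to be in scope):

// New user: create the folder from prebuilt FolderData.
manager
  .initialize(uid, workspace_id, FolderInitializeDataSource::FolderData(folder_data))
  .await?;

// Returning user with no local data: apply the raw updates pulled from the cloud.
manager
  .initialize(uid, workspace_id, FolderInitializeDataSource::Cloud(folder_updates))
  .await?;

// Local start-up: open the data on disk, optionally creating it if nothing exists yet.
manager
  .initialize(
    uid,
    workspace_id,
    FolderInitializeDataSource::LocalDisk { create_if_not_exist: true },
  )
  .await?;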

+ 5 - 3
frontend/rust-lib/flowy-folder2/src/test_helper.rs

@@ -1,7 +1,9 @@
+use std::collections::HashMap;
+
+use flowy_folder_deps::cloud::gen_view_id;
+
 use crate::entities::{CreateViewParams, ViewLayoutPB};
 use crate::manager::FolderManager;
-use crate::view_operation::gen_view_id;
-use std::collections::HashMap;
 
 #[cfg(feature = "test_helper")]
 impl FolderManager {
@@ -34,7 +36,7 @@ impl FolderManager {
     layout: ViewLayoutPB,
     ext: HashMap<String, String>,
   ) -> String {
-    let view_id = gen_view_id();
+    let view_id = gen_view_id().to_string();
     let params = CreateViewParams {
       parent_view_id: app_id.to_string(),
       name: name.to_string(),

+ 2 - 5
frontend/rust-lib/flowy-folder2/src/view_operation.rs

@@ -8,6 +8,7 @@ use collab_folder::core::{RepeatedViewIdentifier, ViewIcon, ViewIdentifier, View
 use tokio::sync::RwLock;
 
 use flowy_error::FlowyError;
+use flowy_folder_deps::cloud::gen_view_id;
 use lib_infra::future::FutureResult;
 use lib_infra::util::timestamp;
 
@@ -62,7 +63,7 @@ impl ViewBuilder {
   pub fn new(parent_view_id: String) -> Self {
     Self {
       parent_view_id,
-      view_id: gen_view_id(),
+      view_id: gen_view_id().to_string(),
       name: Default::default(),
       desc: Default::default(),
       layout: ViewLayout::Document,
@@ -260,10 +261,6 @@ pub(crate) fn create_view(params: CreateViewParams, layout: ViewLayout) -> View
   }
 }
 
-pub fn gen_view_id() -> String {
-  uuid::Uuid::new_v4().to_string()
-}
-
 #[cfg(test)]
 mod tests {
   use crate::view_operation::{FlattedViews, WorkspaceViewBuilder};
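The `.to_string()` calls added above suggest that the relocated `gen_view_id` in `flowy_folder_deps::cloud` now returns a `uuid::Uuid` instead of the `String` the deleted helper produced. A plausible shape, stated as an assumption rather than taken from this diff:

// Assumed signature of the relocated helper; only the return type differs
// from the removed gen_view_id above.
pub fn gen_view_id() -> uuid::Uuid {
  uuid::Uuid::new_v4()
}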

+ 1 - 1
frontend/rust-lib/flowy-server/src/local_server/impls/user.rs

@@ -136,6 +136,6 @@ fn make_user_workspace() -> UserWorkspace {
     id: uuid::Uuid::new_v4().to_string(),
     name: "My Workspace".to_string(),
     created_at: Default::default(),
-    database_storage_id: uuid::Uuid::new_v4().to_string(),
+    database_views_aggregate_id: uuid::Uuid::new_v4().to_string(),
   }
 }

+ 9 - 5
frontend/rust-lib/flowy-server/src/supabase/api/collab_storage.rs

@@ -141,9 +141,9 @@ where
     update: Vec<u8>,
   ) -> Result<(), Error> {
     if let Some(postgrest) = self.server.get_postgrest() {
-      let workspace_id = object
-        .get_workspace_id()
-        .ok_or(anyhow::anyhow!("Invalid workspace id"))?;
+      let workspace_id = object.get_workspace_id().ok_or(anyhow::anyhow!(
+        "Can't get the workspace id in CollabObject"
+      ))?;
       send_update(workspace_id, object, update, &postgrest, &self.secret()).await?;
     }
 
@@ -198,7 +198,6 @@ pub(crate) async fn flush_collab_with_update(
 ) -> Result<(), Error> {
   // 2. Merge the updates into one and then delete the merged updates
   let merge_result = spawn_blocking(move || merge_updates(update_items, update)).await??;
-  tracing::trace!("Merged updates count: {}", merge_result.merged_keys.len());
 
   let workspace_id = object
     .get_workspace_id()
@@ -207,7 +206,12 @@ pub(crate) async fn flush_collab_with_update(
   let value_size = merge_result.new_update.len() as i32;
   let md5 = md5(&merge_result.new_update);
 
-  tracing::trace!("Flush collab id:{} type:{}", object.object_id, object.ty);
+  tracing::trace!(
+    "Flush collab id:{} type:{} is_encrypt: {}",
+    object.object_id,
+    object.ty,
+    secret.is_some()
+  );
   let (new_update, encrypt) =
     SupabaseBinaryColumnEncoder::encode(merge_result.new_update, &secret)?;
   let params = InsertParamsBuilder::new()

+ 2 - 0
frontend/rust-lib/flowy-server/src/supabase/api/document.rs

@@ -26,6 +26,7 @@ impl<T> DocumentCloudService for SupabaseDocumentServiceImpl<T>
 where
   T: SupabaseServerService,
 {
+  #[tracing::instrument(level = "debug", skip(self))]
   fn get_document_updates(&self, document_id: &str) -> FutureResult<Vec<Vec<u8>>, Error> {
     let try_get_postgrest = self.server.try_get_weak_postgrest();
     let document_id = document_id.to_string();
@@ -70,6 +71,7 @@ where
     })
   }
 
+  #[tracing::instrument(level = "debug", skip(self))]
   fn get_document_data(&self, document_id: &str) -> FutureResult<Option<DocumentData>, Error> {
     let try_get_postgrest = self.server.try_get_weak_postgrest();
     let document_id = document_id.to_string();

+ 1 - 1
frontend/rust-lib/flowy-server/src/supabase/api/request.rs

@@ -415,7 +415,7 @@ pub struct UpdateItem {
   pub value: Vec<u8>,
 }
 
-pub struct RetryCondition(Weak<PostgresWrapper>);
+pub struct RetryCondition(pub Weak<PostgresWrapper>);
 impl Condition<anyhow::Error> for RetryCondition {
   fn should_retry(&mut self, _error: &anyhow::Error) -> bool {
     self.0.upgrade().is_some()

+ 69 - 22
frontend/rust-lib/flowy-server/src/supabase/api/user.rs

@@ -1,5 +1,9 @@
+use std::future::Future;
+use std::iter::Take;
+use std::pin::Pin;
 use std::str::FromStr;
 use std::sync::{Arc, Weak};
+use std::time::Duration;
 
 use anyhow::Error;
 use collab::core::collab::MutexCollab;
@@ -8,6 +12,8 @@ use collab_plugins::cloud_storage::CollabObject;
 use parking_lot::RwLock;
 use serde_json::Value;
 use tokio::sync::oneshot::channel;
+use tokio_retry::strategy::FixedInterval;
+use tokio_retry::{Action, RetryIf};
 use uuid::Uuid;
 
 use flowy_folder_deps::cloud::{Folder, Workspace};
@@ -18,13 +24,13 @@ use lib_infra::box_any::BoxAny;
 use lib_infra::future::FutureResult;
 use lib_infra::util::timestamp;
 
-use crate::supabase::api::request::{get_updates_from_server, FetchObjectUpdateAction};
+use crate::supabase::api::request::{
+  get_updates_from_server, FetchObjectUpdateAction, RetryCondition,
+};
 use crate::supabase::api::util::{
   ExtendedResponse, InsertParamsBuilder, RealtimeBinaryColumnDecoder, SupabaseBinaryColumnDecoder,
 };
-use crate::supabase::api::{
-  flush_collab_with_update, send_update, PostgresWrapper, SupabaseServerService,
-};
+use crate::supabase::api::{flush_collab_with_update, PostgresWrapper, SupabaseServerService};
 use crate::supabase::define::*;
 use crate::supabase::entities::UserProfileResponse;
 use crate::supabase::entities::{GetUserProfileParams, RealtimeUserEvent};
@@ -273,7 +279,7 @@ where
   }
 
   fn reset_workspace(&self, collab_object: CollabObject) -> FutureResult<(), Error> {
-    let collab_object = collab_object.clone();
+    let collab_object = collab_object;
 
     let try_get_postgrest = self.server.try_get_weak_postgrest();
     let (tx, rx) = channel();
@@ -308,7 +314,7 @@ where
   fn create_collab_object(
     &self,
     collab_object: &CollabObject,
-    data: Vec<u8>,
+    update: Vec<u8>,
   ) -> FutureResult<(), Error> {
     let try_get_postgrest = self.server.try_get_weak_postgrest();
     let cloned_collab_object = collab_object.clone();
@@ -316,29 +322,70 @@ where
     tokio::spawn(async move {
       tx.send(
         async move {
-          let workspace_id = cloned_collab_object
-            .get_workspace_id()
-            .ok_or(anyhow::anyhow!("Invalid workspace id"))?;
+          CreateCollabAction::new(cloned_collab_object, try_get_postgrest?, update)
+            .run()
+            .await?;
+          Ok(())
+        }
+        .await,
+      )
+    });
+    FutureResult::new(async { rx.await? })
+  }
+}
 
-          let postgrest = try_get_postgrest?
-            .upgrade()
-            .ok_or(anyhow::anyhow!("postgrest is not available"))?;
+pub struct CreateCollabAction {
+  collab_object: CollabObject,
+  postgrest: Weak<PostgresWrapper>,
+  update: Vec<u8>,
+}
 
-          let encryption_secret = postgrest.secret();
-          send_update(
-            workspace_id,
+impl CreateCollabAction {
+  pub fn new(
+    collab_object: CollabObject,
+    postgrest: Weak<PostgresWrapper>,
+    update: Vec<u8>,
+  ) -> Self {
+    Self {
+      collab_object,
+      postgrest,
+      update,
+    }
+  }
+
+  pub fn run(self) -> RetryIf<Take<FixedInterval>, CreateCollabAction, RetryCondition> {
+    let postgrest = self.postgrest.clone();
+    let retry_strategy = FixedInterval::new(Duration::from_secs(2)).take(3);
+    RetryIf::spawn(retry_strategy, self, RetryCondition(postgrest))
+  }
+}
+
+impl Action for CreateCollabAction {
+  type Future = Pin<Box<dyn Future<Output = Result<Self::Item, Self::Error>> + Send>>;
+  type Item = ();
+  type Error = anyhow::Error;
+
+  fn run(&mut self) -> Self::Future {
+    let weak_postgres = self.postgrest.clone();
+    let cloned_collab_object = self.collab_object.clone();
+    let cloned_update = self.update.clone();
+    Box::pin(async move {
+      match weak_postgres.upgrade() {
+        None => Ok(()),
+        Some(postgrest) => {
+          let secret = postgrest.secret();
+          flush_collab_with_update(
             &cloned_collab_object,
-            data,
+            vec![],
             &postgrest,
-            &encryption_secret,
+            cloned_update,
+            secret,
           )
           .await?;
           Ok(())
-        }
-        .await,
-      )
-    });
-    FutureResult::new(async { rx.await? })
+        },
+      }
+    })
   }
 }
 

+ 2 - 2
frontend/rust-lib/flowy-server/tests/supabase_test/user_test.rs

@@ -20,7 +20,7 @@ async fn supabase_user_sign_up_test() {
   let user: SignUpResponse = user_service.sign_up(BoxAny::new(params)).await.unwrap();
   assert!(!user.latest_workspace.id.is_empty());
   assert!(!user.user_workspaces.is_empty());
-  assert!(!user.latest_workspace.database_storage_id.is_empty());
+  assert!(!user.latest_workspace.database_views_aggregate_id.is_empty());
 }
 
 #[tokio::test]
@@ -37,7 +37,7 @@ async fn supabase_user_sign_up_with_existing_uuid_test() {
     .unwrap();
   let user: SignUpResponse = user_service.sign_up(BoxAny::new(params)).await.unwrap();
   assert!(!user.latest_workspace.id.is_empty());
-  assert!(!user.latest_workspace.database_storage_id.is_empty());
+  assert!(!user.latest_workspace.database_views_aggregate_id.is_empty());
   assert!(!user.user_workspaces.is_empty());
 }
 

+ 5 - 3
frontend/rust-lib/flowy-test/src/document/document_event.rs

@@ -1,11 +1,12 @@
-use crate::document::utils::{gen_id, gen_text_block_data};
-use crate::event_builder::EventBuilder;
-use crate::FlowyCoreTest;
 use flowy_document2::entities::*;
 use flowy_document2::event_map::DocumentEvent;
 use flowy_folder2::entities::{CreateViewPayloadPB, ViewLayoutPB, ViewPB};
 use flowy_folder2::event_map::FolderEvent;
 
+use crate::document::utils::{gen_id, gen_text_block_data};
+use crate::event_builder::EventBuilder;
+use crate::FlowyCoreTest;
+
 const TEXT_BLOCK_TY: &str = "paragraph";
 
 pub struct DocumentEventTest {
@@ -138,6 +139,7 @@ impl DocumentEventTest {
   }
 
   /// Insert a new text block at the index of parent's children.
+  /// Returns the new block id.
   pub async fn insert_index(
     &self,
     document_id: &str,

+ 3 - 3
frontend/rust-lib/flowy-test/src/lib.rs

@@ -61,7 +61,7 @@ impl FlowyCoreTest {
 
   pub fn new_with_user_data_path(path: PathBuf, name: String) -> Self {
     let config = AppFlowyCoreConfig::new(path.to_str().unwrap(), name).log_filter(
-      "info",
+      "debug",
       vec!["flowy_test".to_string(), "lib_dispatch".to_string()],
     );
 
@@ -259,12 +259,12 @@ impl FlowyCoreTest {
   pub async fn create_document(
     &self,
     parent_id: &str,
-    name: &str,
+    name: String,
     initial_data: Vec<u8>,
   ) -> ViewPB {
     let payload = CreateViewPayloadPB {
       parent_view_id: parent_id.to_string(),
-      name: name.to_string(),
+      name,
       desc: "".to_string(),
       thumbnail: None,
       layout: ViewLayoutPB::Document,

+ 5 - 1
frontend/rust-lib/flowy-test/tests/document/supabase_test/helper.rs

@@ -33,7 +33,11 @@ impl FlowySupabaseDocumentTest {
     let current_workspace = self.inner.get_current_workspace().await;
     self
       .inner
-      .create_document(&current_workspace.workspace.id, "my document", vec![])
+      .create_document(
+        &current_workspace.workspace.id,
+        "my document".to_string(),
+        vec![],
+      )
       .await
   }
 

+ 6 - 2
frontend/rust-lib/flowy-test/tests/user/migration_test/document_test.rs

@@ -2,11 +2,15 @@ use flowy_core::DEFAULT_NAME;
 use flowy_folder2::entities::ViewLayoutPB;
 use flowy_test::FlowyCoreTest;
 
-use crate::user::migration_test::util::unzip_history_user_db;
+use crate::util::unzip_history_user_db;
 
 #[tokio::test]
 async fn migrate_historical_empty_document_test() {
-  let (cleaner, user_db_path) = unzip_history_user_db("historical_empty_document").unwrap();
+  let (cleaner, user_db_path) = unzip_history_user_db(
+    "./tests/user/migration_test/history_user_db",
+    "historical_empty_document",
+  )
+  .unwrap();
   let test = FlowyCoreTest::new_with_user_data_path(user_db_path, DEFAULT_NAME.to_string());
 
   let views = test.get_all_workspace_views().await;

+ 4 - 0
frontend/rust-lib/flowy-test/tests/user/migration_test/history_user_db/README.md

@@ -0,0 +1,4 @@
+
+## Don't modify the zip files in this folder
+
+The zip files in this folder are used for integration tests. If the tests fail, it means that users upgrading to this version of AppFlowy will encounter issues.

+ 0 - 1
frontend/rust-lib/flowy-test/tests/user/migration_test/mod.rs

@@ -1,3 +1,2 @@
 mod document_test;
-mod util;
 mod version_test;

+ 0 - 47
frontend/rust-lib/flowy-test/tests/user/migration_test/util.rs

@@ -1,47 +0,0 @@
-use flowy_test::Cleaner;
-use nanoid::nanoid;
-use std::fs::{create_dir_all, File};
-use std::io::copy;
-use std::path::{Path, PathBuf};
-use zip::ZipArchive;
-
-pub fn unzip_history_user_db(folder_name: &str) -> std::io::Result<(Cleaner, PathBuf)> {
-  // Open the zip file
-  let zip_file_path = format!(
-    "./tests/user/migration_test/history_user_db/{}.zip",
-    folder_name
-  );
-  let reader = File::open(zip_file_path)?;
-  let output_folder_path = format!(
-    "./tests/user/migration_test/history_user_db/unit_test_{}",
-    nanoid!(6)
-  );
-
-  // Create a ZipArchive from the file
-  let mut archive = ZipArchive::new(reader)?;
-
-  // Iterate through each file in the zip
-  for i in 0..archive.len() {
-    let mut file = archive.by_index(i)?;
-    let output_path = Path::new(&output_folder_path).join(file.mangled_name());
-
-    if file.name().ends_with('/') {
-      // Create directory
-      create_dir_all(&output_path)?;
-    } else {
-      // Write file
-      if let Some(p) = output_path.parent() {
-        if !p.exists() {
-          create_dir_all(p)?;
-        }
-      }
-      let mut outfile = File::create(&output_path)?;
-      copy(&mut file, &mut outfile)?;
-    }
-  }
-  let path = format!("{}/{}", output_folder_path, folder_name);
-  Ok((
-    Cleaner::new(PathBuf::from(output_folder_path)),
-    PathBuf::from(path),
-  ))
-}

+ 6 - 2
frontend/rust-lib/flowy-test/tests/user/migration_test/version_test.rs

@@ -2,11 +2,15 @@ use flowy_core::DEFAULT_NAME;
 use flowy_folder2::entities::ViewLayoutPB;
 use flowy_test::FlowyCoreTest;
 
-use crate::user::migration_test::util::unzip_history_user_db;
+use crate::util::unzip_history_user_db;
 
 #[tokio::test]
 async fn migrate_020_historical_empty_document_test() {
-  let (cleaner, user_db_path) = unzip_history_user_db("020_historical_user_data").unwrap();
+  let (cleaner, user_db_path) = unzip_history_user_db(
+    "./tests/user/migration_test/history_user_db",
+    "020_historical_user_data",
+  )
+  .unwrap();
   let test = FlowyCoreTest::new_with_user_data_path(user_db_path, DEFAULT_NAME.to_string());
 
   let mut views = test.get_all_workspace_views().await;

+ 270 - 1
frontend/rust-lib/flowy-test/tests/user/supabase_test/auth_test.rs

@@ -1,9 +1,16 @@
 use std::collections::HashMap;
 
+use assert_json_diff::assert_json_eq;
+use collab_database::rows::database_row_document_id_from_row_id;
+use collab_document::blocks::DocumentData;
+use collab_folder::core::FolderData;
 use nanoid::nanoid;
+use serde_json::json;
 
+use flowy_core::DEFAULT_NAME;
 use flowy_encrypt::decrypt_text;
-use flowy_server::supabase::define::{USER_EMAIL, USER_UUID};
+use flowy_server::supabase::define::{CollabType, USER_EMAIL, USER_UUID};
+use flowy_test::document::document_event::DocumentEventTest;
 use flowy_test::event_builder::EventBuilder;
 use flowy_test::FlowyCoreTest;
 use flowy_user::entities::{
@@ -259,3 +266,265 @@ async fn update_user_profile_with_existing_email_test() {
     assert_eq!(error.code, ErrorCode::Conflict);
   }
 }
+
+#[tokio::test]
+async fn migrate_anon_document_on_cloud_signup() {
+  if get_supabase_config().is_some() {
+    let test = FlowyCoreTest::new();
+    let user_profile = test.sign_up_as_guest().await.user_profile;
+
+    let view = test
+      .create_view(&user_profile.workspace_id, "My first view".to_string())
+      .await;
+    let document_event = DocumentEventTest::new_with_core(test.clone());
+    let block_id = document_event
+      .insert_index(&view.id, "hello world", 1, None)
+      .await;
+
+    let _ = test.supabase_party_sign_up().await;
+
+    // After sign-up, the documents should have been migrated to the cloud,
+    // so we can fetch the document data from the cloud.
+    let data: DocumentData = test
+      .document_manager
+      .get_cloud_service()
+      .get_document_data(&view.id)
+      .await
+      .unwrap()
+      .unwrap();
+    let block = data.blocks.get(&block_id).unwrap();
+    assert_json_eq!(
+      block.data,
+      json!({
+        "delta": [
+          {
+            "insert": "hello world"
+          }
+        ]
+      })
+    );
+  }
+}
+
+#[tokio::test]
+async fn migrate_anon_data_on_cloud_signup() {
+  if get_supabase_config().is_some() {
+    let (cleaner, user_db_path) = unzip_history_user_db(
+      "./tests/user/supabase_test/history_user_db",
+      "workspace_sync",
+    )
+    .unwrap();
+    let test = FlowyCoreTest::new_with_user_data_path(user_db_path, DEFAULT_NAME.to_string());
+    let user_profile = test.supabase_party_sign_up().await;
+
+    // Get the folder data from remote
+    let folder_data: FolderData = test
+      .folder_manager
+      .get_cloud_service()
+      .get_folder_data(&user_profile.workspace_id)
+      .await
+      .unwrap()
+      .unwrap();
+
+    let expected_folder_data = expected_workspace_sync_folder_data();
+
+    if folder_data.workspaces.len() != expected_folder_data.workspaces.len() {
+      dbg!(&folder_data.workspaces);
+    }
+
+    assert_eq!(
+      folder_data.workspaces.len(),
+      expected_folder_data.workspaces.len()
+    );
+    assert_eq!(folder_data.views.len(), expected_folder_data.views.len());
+
+    // After migration, the ids in folder_data should differ from those in expected_folder_data
+    for i in 0..folder_data.views.len() {
+      let left_view = &folder_data.views[i];
+      let right_view = &expected_folder_data.views[i];
+      assert_ne!(left_view.id, right_view.id);
+      assert_ne!(left_view.parent_view_id, right_view.parent_view_id);
+      assert_eq!(left_view.name, right_view.name);
+    }
+
+    assert_ne!(
+      folder_data.current_workspace_id,
+      expected_folder_data.current_workspace_id
+    );
+    assert_ne!(folder_data.current_view, expected_folder_data.current_view);
+
+    let database_views = folder_data
+      .views
+      .iter()
+      .filter(|view| view.layout.is_database())
+      .collect::<Vec<_>>();
+
+    // Try to load the database from the cloud.
+    for (i, database_view) in database_views.iter().enumerate() {
+      let cloud_service = test.database_manager.get_cloud_service();
+      let database_id = test
+        .database_manager
+        .get_database_id_with_view_id(&database_view.id)
+        .await
+        .unwrap();
+      let editor = test
+        .database_manager
+        .get_database(&database_id)
+        .await
+        .unwrap();
+
+      // The database view setting should be loaded by the view id
+      let _ = editor
+        .get_database_view_setting(&database_view.id)
+        .await
+        .unwrap();
+
+      let rows = editor.get_rows(&database_view.id).await.unwrap();
+      assert_eq!(rows.len(), 3);
+
+      if i == 0 {
+        let first_row = rows.first().unwrap().as_ref();
+        let icon_url = first_row.meta.icon_url.clone().unwrap();
+        assert_eq!(icon_url, "😄");
+
+        let document_id = database_row_document_id_from_row_id(&first_row.row.id);
+        let document_data: DocumentData = test
+          .document_manager
+          .get_cloud_service()
+          .get_document_data(&document_id)
+          .await
+          .unwrap()
+          .unwrap();
+
+        let editor = test
+          .document_manager
+          .get_document(&document_id)
+          .await
+          .unwrap();
+        let expected_document_data = editor.lock().get_document_data().unwrap();
+
+        // let expected_document_data = test
+        //   .document_manager
+        //   .get_document_data(&document_id)
+        //   .await
+        //   .unwrap();
+        assert_eq!(document_data, expected_document_data);
+        let json = json!(document_data);
+        assert_eq!(
+          json["blocks"]["LPMpo0Qaab"]["data"]["delta"][0]["insert"],
+          json!("Row document")
+        );
+      }
+
+      assert!(cloud_service
+        .get_collab_update(&database_id, CollabType::Database)
+        .await
+        .is_ok());
+    }
+
+    drop(cleaner);
+  }
+}
+
+fn expected_workspace_sync_folder_data() -> FolderData {
+  serde_json::from_value::<FolderData>(json!({
+    "current_view": "e0811131-9928-4541-a174-20b7553d9e4c",
+    "current_workspace_id": "8df7f755-fa5d-480e-9f8e-48ea0fed12b3",
+    "views": [
+      {
+        "children": {
+          "items": [
+            {
+              "id": "e0811131-9928-4541-a174-20b7553d9e4c"
+            },
+            {
+              "id": "53333949-c262-447b-8597-107589697059"
+            }
+          ]
+        },
+        "created_at": 1693147093,
+        "desc": "",
+        "icon": null,
+        "id": "e203afb3-de5d-458a-8380-33cd788a756e",
+        "is_favorite": false,
+        "layout": 0,
+        "name": "⭐️ Getting started",
+        "parent_view_id": "8df7f755-fa5d-480e-9f8e-48ea0fed12b3"
+      },
+      {
+        "children": {
+          "items": [
+            {
+              "id": "11c697ba-5ed1-41c0-adfc-576db28ad27b"
+            },
+            {
+              "id": "4a5c25e2-a734-440c-973b-4c0e7ab0039c"
+            }
+          ]
+        },
+        "created_at": 1693147096,
+        "desc": "",
+        "icon": null,
+        "id": "e0811131-9928-4541-a174-20b7553d9e4c",
+        "is_favorite": false,
+        "layout": 1,
+        "name": "database",
+        "parent_view_id": "e203afb3-de5d-458a-8380-33cd788a756e"
+      },
+      {
+        "children": {
+          "items": []
+        },
+        "created_at": 1693147124,
+        "desc": "",
+        "icon": null,
+        "id": "11c697ba-5ed1-41c0-adfc-576db28ad27b",
+        "is_favorite": false,
+        "layout": 3,
+        "name": "calendar",
+        "parent_view_id": "e0811131-9928-4541-a174-20b7553d9e4c"
+      },
+      {
+        "children": {
+          "items": []
+        },
+        "created_at": 1693147125,
+        "desc": "",
+        "icon": null,
+        "id": "4a5c25e2-a734-440c-973b-4c0e7ab0039c",
+        "is_favorite": false,
+        "layout": 2,
+        "name": "board",
+        "parent_view_id": "e0811131-9928-4541-a174-20b7553d9e4c"
+      },
+      {
+        "children": {
+          "items": []
+        },
+        "created_at": 1693147133,
+        "desc": "",
+        "icon": null,
+        "id": "53333949-c262-447b-8597-107589697059",
+        "is_favorite": false,
+        "layout": 0,
+        "name": "document",
+        "parent_view_id": "e203afb3-de5d-458a-8380-33cd788a756e"
+      }
+    ],
+    "workspaces": [
+      {
+        "child_views": {
+          "items": [
+            {
+              "id": "e203afb3-de5d-458a-8380-33cd788a756e"
+            }
+          ]
+        },
+        "created_at": 1693147093,
+        "id": "8df7f755-fa5d-480e-9f8e-48ea0fed12b3",
+        "name": "Workspace"
+      }
+    ]
+  }))
+  .unwrap()
+}

+ 4 - 0
frontend/rust-lib/flowy-test/tests/user/supabase_test/history_user_db/README.md

@@ -0,0 +1,4 @@
+
+## Don't modify the zip files in this folder
+
+The zip files in this folder are used for integration tests. If the tests fail, it means that users upgrading to this version of AppFlowy will encounter issues.

BIN
frontend/rust-lib/flowy-test/tests/user/supabase_test/history_user_db/workspace_sync.zip


+ 41 - 0
frontend/rust-lib/flowy-test/tests/util.rs

@@ -1,12 +1,17 @@
+use std::fs::{create_dir_all, File};
+use std::io::copy;
 use std::ops::Deref;
+use std::path::{Path, PathBuf};
 use std::sync::Arc;
 use std::time::Duration;
 
 use anyhow::Error;
 use collab_folder::core::FolderData;
 use collab_plugins::cloud_storage::RemoteCollabStorage;
+use nanoid::nanoid;
 use tokio::sync::mpsc::Receiver;
 use tokio::time::timeout;
+use zip::ZipArchive;
 
 use flowy_database_deps::cloud::DatabaseCloudService;
 use flowy_folder_deps::cloud::{FolderCloudService, FolderSnapshot};
@@ -14,6 +19,7 @@ use flowy_server::supabase::api::*;
 use flowy_server::{AppFlowyEncryption, EncryptionImpl};
 use flowy_server_config::supabase_config::SupabaseConfiguration;
 use flowy_test::event_builder::EventBuilder;
+use flowy_test::Cleaner;
 use flowy_test::FlowyCoreTest;
 use flowy_user::entities::{AuthTypePB, UpdateUserProfilePayloadPB, UserCredentialsPB};
 use flowy_user::errors::FlowyError;
@@ -170,3 +176,38 @@ pub fn appflowy_server(
   let server = Arc::new(RESTfulPostgresServer::new(config, encryption));
   (SupabaseServerServiceImpl::new(server), encryption_impl)
 }
+
+pub fn unzip_history_user_db(root: &str, folder_name: &str) -> std::io::Result<(Cleaner, PathBuf)> {
+  // Open the zip file
+  let zip_file_path = format!("{}/{}.zip", root, folder_name);
+  let reader = File::open(zip_file_path)?;
+  let output_folder_path = format!("{}/unit_test_{}", root, nanoid!(6));
+
+  // Create a ZipArchive from the file
+  let mut archive = ZipArchive::new(reader)?;
+
+  // Iterate through each file in the zip
+  for i in 0..archive.len() {
+    let mut file = archive.by_index(i)?;
+    let output_path = Path::new(&output_folder_path).join(file.mangled_name());
+
+    if file.name().ends_with('/') {
+      // Create directory
+      create_dir_all(&output_path)?;
+    } else {
+      // Write file
+      if let Some(p) = output_path.parent() {
+        if !p.exists() {
+          create_dir_all(p)?;
+        }
+      }
+      let mut outfile = File::create(&output_path)?;
+      copy(&mut file, &mut outfile)?;
+    }
+  }
+  let path = format!("{}/{}", output_folder_path, folder_name);
+  Ok((
+    Cleaner::new(PathBuf::from(output_folder_path)),
+    PathBuf::from(path),
+  ))
+}

+ 1 - 1
frontend/rust-lib/flowy-user-deps/src/cloud.rs

@@ -58,7 +58,7 @@ impl Display for UserCloudConfig {
 
 /// Provide the generic interface for the user cloud service
 /// The user cloud service is responsible for the user authentication and user profile management
-pub trait UserCloudService: Send + Sync {
+pub trait UserCloudService: Send + Sync + 'static {
   /// Sign up a new account.
   /// The type of the params is defined by this trait's implementation.
   /// Use the `unbox_or_error` of the [BoxAny] to get the params.
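Tightening the bound to `Send + Sync + 'static` lets `Arc<dyn UserCloudService>` trait objects be moved into spawned tasks, which the new sign-up sync path does. A minimal illustration (the helper function is hypothetical):

use std::sync::Arc;

use flowy_user_deps::cloud::UserCloudService;

// Hypothetical helper: moving an Arc<dyn UserCloudService> into a tokio task
// requires the trait object to be Send + Sync + 'static.
fn spawn_with_service(service: Arc<dyn UserCloudService>) {
  tokio::spawn(async move {
    // The service now lives for as long as the spawned task does.
    drop(service);
  });
}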

+ 3 - 2
frontend/rust-lib/flowy-user-deps/src/entities.rs

@@ -167,7 +167,8 @@ pub struct UserWorkspace {
   pub name: String,
   pub created_at: DateTime<Utc>,
   /// The database storage id is used for indexing all the databases in the current workspace.
-  pub database_storage_id: String,
+  #[serde(rename = "database_storage_id")]
+  pub database_views_aggregate_id: String,
 }
 
 impl UserWorkspace {
@@ -176,7 +177,7 @@ impl UserWorkspace {
       id: workspace_id.to_string(),
       name: "".to_string(),
       created_at: Utc::now(),
-      database_storage_id: uuid::Uuid::new_v4().to_string(),
+      database_views_aggregate_id: uuid::Uuid::new_v4().to_string(),
     }
   }
 }
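The `#[serde(rename = "database_storage_id")]` attribute keeps the serialized key unchanged while the Rust field gets the clearer name, so previously stored workspaces still round-trip. A quick sketch with a trimmed-down struct (an illustration, not the real `UserWorkspace`):

use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize)]
struct WorkspaceOnWire {
  #[serde(rename = "database_storage_id")]
  database_views_aggregate_id: String,
}

fn round_trip() -> serde_json::Result<()> {
  // Payloads written with the old field name still parse.
  let parsed: WorkspaceOnWire = serde_json::from_str(r#"{"database_storage_id":"abc"}"#)?;
  assert_eq!(parsed.database_views_aggregate_id, "abc");
  // Serialization also writes the old key back out.
  assert!(serde_json::to_string(&parsed)?.contains("database_storage_id"));
  Ok(())
}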

+ 3 - 0
frontend/rust-lib/flowy-user/Cargo.toml

@@ -10,6 +10,7 @@ flowy-derive = { path = "../../../shared-lib/flowy-derive" }
 flowy-sqlite = { path = "../flowy-sqlite", optional = true }
 flowy-encrypt = { path = "../flowy-encrypt" }
 flowy-error = { path = "../flowy-error", features = ["impl_from_sqlite", "impl_from_dispatch_error"] }
+flowy-folder-deps = { path = "../flowy-folder-deps" }
 lib-infra = { path = "../../../shared-lib/lib-infra" }
 flowy-notification = { path = "../flowy-notification" }
 flowy-server-config = { path = "../flowy-server-config" }
@@ -18,8 +19,10 @@ appflowy-integrate = { version = "0.1.0" }
 collab = { version = "0.1.0" }
 collab-folder = { version = "0.1.0" }
 collab-document = { version = "0.1.0" }
+collab-database = { version = "0.1.0" }
 collab-user = { version = "0.1.0" }
 flowy-user-deps = { path = "../flowy-user-deps" }
+anyhow = "1.0.75"
 
 tracing = { version = "0.1", features = ["log"] }
 bytes = "1.4"

+ 2 - 2
frontend/rust-lib/flowy-user/src/event_map.rs

@@ -88,7 +88,7 @@ pub trait UserStatusCallback: Send + Sync + 'static {
   /// Will be called after the user signed up.
   fn did_sign_up(
     &self,
-    context: SignUpContext,
+    is_new_user: bool,
     user_profile: &UserProfile,
     user_workspace: &UserWorkspace,
     device_id: &str,
@@ -163,7 +163,7 @@ impl UserStatusCallback for DefaultUserStatusCallback {
 
   fn did_sign_up(
     &self,
-    _context: SignUpContext,
+    _is_new_user: bool,
     _user_profile: &UserProfile,
     _user_workspace: &UserWorkspace,
     _device_id: &str,

+ 25 - 20
frontend/rust-lib/flowy-user/src/manager.rs

@@ -3,7 +3,6 @@ use std::sync::{Arc, Weak};
 
 use appflowy_integrate::collab_builder::AppFlowyCollabBuilder;
 use appflowy_integrate::RocksCollabDB;
-use collab_folder::core::FolderData;
 use collab_user::core::MutexUserAwareness;
 use serde_json::Value;
 use tokio::sync::{Mutex, RwLock};
@@ -19,12 +18,11 @@ use flowy_user_deps::entities::*;
 use lib_infra::box_any::BoxAny;
 
 use crate::entities::{AuthStateChangedPB, AuthStatePB, UserProfilePB, UserSettingPB};
-use crate::event_map::{
-  DefaultUserStatusCallback, SignUpContext, UserCloudServiceProvider, UserStatusCallback,
-};
+use crate::event_map::{DefaultUserStatusCallback, UserCloudServiceProvider, UserStatusCallback};
 use crate::migrations::historical_document::HistoricalEmptyDocumentMigration;
-use crate::migrations::local_user_to_cloud::migration_user_to_cloud;
+use crate::migrations::migrate_to_new_user::migration_local_user_on_sign_up;
 use crate::migrations::migration::UserLocalDataMigration;
+use crate::migrations::sync_new_user::sync_user_data_to_cloud;
 use crate::migrations::MigrationUser;
 use crate::services::cloud_config::get_cloud_config;
 use crate::services::database::UserDB;
@@ -305,10 +303,7 @@ impl UserManager {
     } else {
       UserAwarenessDataSource::Remote
     };
-    let mut sign_up_context = SignUpContext {
-      is_new: response.is_new_user,
-      local_folder: None,
-    };
+
     if response.is_new_user {
       if let Some(old_user) = migration_user {
         let new_user = MigrationUser {
@@ -320,10 +315,9 @@ impl UserManager {
           old_user.user_profile.uid,
           new_user.user_profile.uid
         );
-        match self.migrate_local_user_to_cloud(&old_user, &new_user).await {
-          Ok(folder_data) => sign_up_context.local_folder = folder_data,
-          Err(e) => tracing::error!("{:?}", e),
-        }
+        self
+          .migrate_local_user_to_cloud(&old_user, &new_user)
+          .await?;
         let _ = self.database.close(old_user.session.user_id);
       }
     }
@@ -331,20 +325,20 @@ impl UserManager {
       .initialize_user_awareness(&new_session, user_awareness_source)
       .await;
 
+    self
+      .save_auth_data(&response, auth_type, &new_session)
+      .await?;
     self
       .user_status_callback
       .read()
       .await
       .did_sign_up(
-        sign_up_context,
+        response.is_new_user,
         user_profile,
         &new_session.user_workspace,
         &new_session.device_id,
       )
       .await?;
-    self
-      .save_auth_data(&response, auth_type, &new_session)
-      .await?;
 
     send_auth_state_notification(AuthStateChangedPB {
       state: AuthStatePB::AuthStateSignIn,
@@ -596,17 +590,28 @@ impl UserManager {
     &self,
     old_user: &MigrationUser,
     new_user: &MigrationUser,
-  ) -> Result<Option<FolderData>, FlowyError> {
+  ) -> Result<(), FlowyError> {
     let old_collab_db = self.database.get_collab_db(old_user.session.user_id)?;
     let new_collab_db = self.database.get_collab_db(new_user.session.user_id)?;
-    let folder_data = migration_user_to_cloud(old_user, &old_collab_db, new_user, &new_collab_db)?;
+    migration_local_user_on_sign_up(old_user, &old_collab_db, new_user, &new_collab_db)?;
+
+    if let Err(err) = sync_user_data_to_cloud(
+      self.cloud_services.get_user_service()?,
+      new_user,
+      &new_collab_db,
+    )
+    .await
+    {
+      tracing::error!("Sync user data to cloud failed: {:?}", err);
+    }
+
     // Save the old user workspace setting.
     save_user_workspaces(
       old_user.session.user_id,
       self.database.get_pool(old_user.session.user_id)?,
       &[old_user.session.user_workspace.clone()],
     )?;
-    Ok(folder_data)
+    Ok(())
   }
 }
 

+ 0 - 143
frontend/rust-lib/flowy-user/src/migrations/local_user_to_cloud.rs

@@ -1,143 +0,0 @@
-use std::sync::Arc;
-
-use appflowy_integrate::{PersistenceError, RocksCollabDB, YrsDocAction};
-use collab::core::collab::{CollabRawData, MutexCollab};
-use collab::core::origin::{CollabClient, CollabOrigin};
-use collab::preclude::Collab;
-use collab_folder::core::{Folder, FolderData};
-
-use flowy_error::{ErrorCode, FlowyError, FlowyResult};
-
-use crate::migrations::MigrationUser;
-
-/// Migration the collab objects of the old user to new user. Currently, it only happens when
-/// the user is a local user and try to use AppFlowy cloud service.
-pub fn migration_user_to_cloud(
-  old_user: &MigrationUser,
-  old_collab_db: &Arc<RocksCollabDB>,
-  new_user: &MigrationUser,
-  new_collab_db: &Arc<RocksCollabDB>,
-) -> FlowyResult<Option<FolderData>> {
-  let mut folder_data = None;
-  new_collab_db
-    .with_write_txn(|w_txn| {
-      let read_txn = old_collab_db.read_txn();
-      if let Ok(object_ids) = read_txn.get_all_docs() {
-        // Migration of all objects
-        for object_id in object_ids {
-          tracing::debug!("migrate object: {:?}", object_id);
-          if let Ok(updates) = read_txn.get_all_updates(old_user.session.user_id, &object_id) {
-            // If the object is a folder, migrate the folder data
-            if object_id == old_user.session.user_workspace.id {
-              folder_data = migrate_folder(
-                old_user.session.user_id,
-                &object_id,
-                &new_user.session.user_workspace.id,
-                updates,
-              );
-            } else if object_id == old_user.session.user_workspace.database_storage_id {
-              migrate_database_storage(
-                old_user.session.user_id,
-                &object_id,
-                new_user.session.user_id,
-                &new_user.session.user_workspace.database_storage_id,
-                updates,
-                w_txn,
-              );
-            } else {
-              migrate_object(
-                old_user.session.user_id,
-                new_user.session.user_id,
-                &object_id,
-                updates,
-                w_txn,
-              );
-            }
-          }
-        }
-      }
-      Ok(())
-    })
-    .map_err(|err| FlowyError::new(ErrorCode::Internal, err))?;
-  Ok(folder_data)
-}
-
-fn migrate_database_storage<'a, W>(
-  old_uid: i64,
-  old_object_id: &str,
-  new_uid: i64,
-  new_object_id: &str,
-  updates: CollabRawData,
-  w_txn: &'a W,
-) where
-  W: YrsDocAction<'a>,
-  PersistenceError: From<W::Error>,
-{
-  let origin = CollabOrigin::Client(CollabClient::new(old_uid, "phantom"));
-  match Collab::new_with_raw_data(origin, old_object_id, updates, vec![]) {
-    Ok(collab) => {
-      let txn = collab.transact();
-      if let Err(err) = w_txn.create_new_doc(new_uid, new_object_id, &txn) {
-        tracing::error!("🔴migrate database storage failed: {:?}", err);
-      }
-    },
-    Err(err) => tracing::error!("🔴construct migration database storage failed: {:?} ", err),
-  }
-}
-
-fn migrate_object<'a, W>(
-  old_uid: i64,
-  new_uid: i64,
-  object_id: &str,
-  updates: CollabRawData,
-  w_txn: &'a W,
-) where
-  W: YrsDocAction<'a>,
-  PersistenceError: From<W::Error>,
-{
-  let origin = CollabOrigin::Client(CollabClient::new(old_uid, "phantom"));
-  match Collab::new_with_raw_data(origin, object_id, updates, vec![]) {
-    Ok(collab) => {
-      let txn = collab.transact();
-      if let Err(err) = w_txn.create_new_doc(new_uid, object_id, &txn) {
-        tracing::error!("🔴migrate collab failed: {:?}", err);
-      }
-    },
-    Err(err) => tracing::error!("🔴construct migration collab failed: {:?} ", err),
-  }
-}
-
-fn migrate_folder(
-  old_uid: i64,
-  old_object_id: &str,
-  new_workspace_id: &str,
-  updates: CollabRawData,
-) -> Option<FolderData> {
-  let origin = CollabOrigin::Client(CollabClient::new(old_uid, "phantom"));
-  let old_folder_collab = Collab::new_with_raw_data(origin, old_object_id, updates, vec![]).ok()?;
-  let mutex_collab = Arc::new(MutexCollab::from_collab(old_folder_collab));
-  let old_folder = Folder::open(mutex_collab, None);
-
-  let mut folder_data = old_folder.get_folder_data()?;
-  let old_workspace_id = folder_data.current_workspace_id;
-  folder_data.current_workspace_id = new_workspace_id.to_string();
-
-  let mut workspace = folder_data.workspaces.pop()?;
-  if folder_data.workspaces.len() > 1 {
-    tracing::error!("🔴migrate folder: more than one workspace");
-  }
-  workspace.id = new_workspace_id.to_string();
-
-  // Only take one workspace
-  folder_data.workspaces.clear();
-  folder_data.workspaces.push(workspace);
-
-  // Update the view's parent view id to new workspace id
-  folder_data.views.iter_mut().for_each(|view| {
-    if view.parent_view_id == old_workspace_id {
-      view.parent_view_id = new_workspace_id.to_string();
-    }
-  });
-
-  Some(folder_data)
-}

+ 430 - 0
frontend/rust-lib/flowy-user/src/migrations/migrate_to_new_user.rs

@@ -0,0 +1,430 @@
+use std::collections::{HashMap, HashSet};
+use std::ops::{Deref, DerefMut};
+use std::sync::Arc;
+
+use anyhow::anyhow;
+use appflowy_integrate::{PersistenceError, RocksCollabDB, YrsDocAction};
+use collab::core::collab::MutexCollab;
+use collab::core::origin::{CollabClient, CollabOrigin};
+use collab::preclude::Collab;
+use collab_database::database::{
+  is_database_collab, mut_database_views_with_collab, reset_inline_view_id,
+};
+use collab_database::rows::{database_row_document_id_from_row_id, mut_row_with_collab, RowId};
+use collab_database::user::DatabaseWithViewsArray;
+use collab_folder::core::Folder;
+use parking_lot::{Mutex, RwLock};
+
+use flowy_error::{ErrorCode, FlowyError, FlowyResult};
+use flowy_folder_deps::cloud::gen_view_id;
+
+use crate::migrations::MigrationUser;
+
+/// Migrates the collab objects of the old user to the new user. Currently, this only happens
+/// when the user is a local user and tries to use the AppFlowy cloud service.
+pub fn migration_local_user_on_sign_up(
+  old_user: &MigrationUser,
+  old_collab_db: &Arc<RocksCollabDB>,
+  new_user: &MigrationUser,
+  new_collab_db: &Arc<RocksCollabDB>,
+) -> FlowyResult<()> {
+  new_collab_db
+    .with_write_txn(|new_collab_w_txn| {
+      let old_collab_r_txn = old_collab_db.read_txn();
+      let old_to_new_id_map = Arc::new(Mutex::new(OldToNewIdMap::new()));
+
+      migrate_user_awareness(old_to_new_id_map.lock().deref_mut(), old_user, new_user)?;
+
+      migrate_database_with_views_object(
+        &mut old_to_new_id_map.lock(),
+        old_user,
+        &old_collab_r_txn,
+        new_user,
+        new_collab_w_txn,
+      )?;
+
+      let mut object_ids = old_collab_r_txn
+        .get_all_docs()
+        .map(|iter| iter.collect::<Vec<String>>())
+        .unwrap_or_default();
+
+      // Migrate all objects except the folder and database_with_views; those are handled separately
+      object_ids.retain(|id| {
+        id != &old_user.session.user_workspace.id
+          && id != &old_user.session.user_workspace.database_views_aggregate_id
+      });
+
+      tracing::info!("migrate collab objects: {:?}", object_ids.len());
+      let collab_by_oid = make_collab_by_oid(old_user, &old_collab_r_txn, &object_ids);
+      migrate_databases(
+        &old_to_new_id_map,
+        new_user,
+        new_collab_w_txn,
+        &mut object_ids,
+        &collab_by_oid,
+      )?;
+
+      // Migrate the folder, replacing every view ID that belonged to the old user
+      // with a new ID generated for the new user.
+      migrate_workspace_folder(
+        &mut old_to_new_id_map.lock(),
+        old_user,
+        &old_collab_r_txn,
+        new_user,
+        new_collab_w_txn,
+      )?;
+
+      // Migrate other collab objects
+      for object_id in &object_ids {
+        if let Some(collab) = collab_by_oid.get(object_id) {
+          let new_object_id = old_to_new_id_map.lock().get_new_id(object_id);
+          tracing::debug!("migrate from: {}, to: {}", object_id, new_object_id,);
+          migrate_collab_object(
+            collab,
+            new_user.session.user_id,
+            &new_object_id,
+            new_collab_w_txn,
+          );
+        }
+      }
+
+      Ok(())
+    })
+    .map_err(|err| FlowyError::new(ErrorCode::Internal, err))?;
+
+  Ok(())
+}
+
+#[derive(Default)]
+pub struct OldToNewIdMap(HashMap<String, String>);
+
+impl OldToNewIdMap {
+  fn new() -> Self {
+    Self::default()
+  }
+  fn get_new_id(&mut self, old_id: &str) -> String {
+    let view_id = self
+      .0
+      .entry(old_id.to_string())
+      .or_insert(gen_view_id().to_string());
+    (*view_id).clone()
+  }
+}
+
+impl Deref for OldToNewIdMap {
+  type Target = HashMap<String, String>;
+
+  fn deref(&self) -> &Self::Target {
+    &self.0
+  }
+}
+
+impl DerefMut for OldToNewIdMap {
+  fn deref_mut(&mut self) -> &mut Self::Target {
+    &mut self.0
+  }
+}
+
+fn migrate_database_with_views_object<'a, W>(
+  old_to_new_id_map: &mut OldToNewIdMap,
+  old_user: &MigrationUser,
+  old_collab_r_txn: &'a W,
+  new_user: &MigrationUser,
+  new_collab_w_txn: &'a W,
+) -> Result<(), PersistenceError>
+where
+  W: YrsDocAction<'a>,
+  PersistenceError: From<W::Error>,
+{
+  let database_with_views_collab = Collab::new(
+    old_user.session.user_id,
+    &old_user.session.user_workspace.database_views_aggregate_id,
+    "phantom",
+    vec![],
+  );
+  database_with_views_collab.with_origin_transact_mut(|txn| {
+    old_collab_r_txn.load_doc(
+      old_user.session.user_id,
+      &old_user.session.user_workspace.database_views_aggregate_id,
+      txn,
+    )
+  })?;
+
+  let new_uid = new_user.session.user_id;
+  let new_object_id = &new_user.session.user_workspace.database_views_aggregate_id;
+
+  let array = DatabaseWithViewsArray::from_collab(&database_with_views_collab);
+  for database_view in array.get_all_databases() {
+    array.update_database(&database_view.database_id, |update| {
+      let new_linked_views = update
+        .linked_views
+        .iter()
+        .map(|view_id| old_to_new_id_map.get_new_id(view_id))
+        .collect();
+      update.database_id = old_to_new_id_map.get_new_id(&update.database_id);
+      update.linked_views = new_linked_views;
+    })
+  }
+
+  let txn = database_with_views_collab.transact();
+  if let Err(err) = new_collab_w_txn.create_new_doc(new_uid, new_object_id, &txn) {
+    tracing::error!("🔴migrate database storage failed: {:?}", err);
+  }
+  drop(txn);
+  Ok(())
+}
+
+fn migrate_collab_object<'a, W>(collab: &Collab, new_uid: i64, new_object_id: &str, w_txn: &'a W)
+where
+  W: YrsDocAction<'a>,
+  PersistenceError: From<W::Error>,
+{
+  let txn = collab.transact();
+  if let Err(err) = w_txn.create_new_doc(new_uid, &new_object_id, &txn) {
+    tracing::error!("🔴migrate collab failed: {:?}", err);
+  }
+}
+
+fn migrate_workspace_folder<'a, W>(
+  old_to_new_id_map: &mut OldToNewIdMap,
+  old_user: &MigrationUser,
+  old_collab_r_txn: &'a W,
+  new_user: &MigrationUser,
+  new_collab_w_txn: &'a W,
+) -> Result<(), PersistenceError>
+where
+  W: YrsDocAction<'a>,
+  PersistenceError: From<W::Error>,
+{
+  let old_uid = old_user.session.user_id;
+  let old_workspace_id = &old_user.session.user_workspace.id;
+  let new_uid = new_user.session.user_id;
+  let new_workspace_id = &new_user.session.user_workspace.id;
+
+  let old_folder_collab = Collab::new(old_uid, old_workspace_id, "phantom", vec![]);
+  old_folder_collab
+    .with_origin_transact_mut(|txn| old_collab_r_txn.load_doc(old_uid, old_workspace_id, txn))?;
+  let old_folder = Folder::open(Arc::new(MutexCollab::from_collab(old_folder_collab)), None);
+  let mut folder_data = old_folder
+    .get_folder_data()
+    .ok_or(PersistenceError::Internal(
+      anyhow!("Can't migrate the folder data").into(),
+    ))?;
+
+  old_to_new_id_map
+    .0
+    .insert(old_workspace_id.to_string(), new_workspace_id.to_string());
+
+  // 1. Replace the workspace views id to new id
+  debug_assert!(folder_data.workspaces.len() == 1);
+
+  folder_data
+    .workspaces
+    .iter_mut()
+    .enumerate()
+    .for_each(|(index, workspace)| {
+      if index == 0 {
+        workspace.id = new_workspace_id.to_string();
+      } else {
+        tracing::warn!("🔴migrate folder: more than one workspace");
+        workspace.id = old_to_new_id_map.get_new_id(&workspace.id);
+      }
+      workspace
+        .child_views
+        .iter_mut()
+        .for_each(|view_identifier| {
+          view_identifier.id = old_to_new_id_map.get_new_id(&view_identifier.id);
+        });
+    });
+
+  folder_data.views.iter_mut().for_each(|view| {
+    // 2. replace the old parent view id of the view
+    view.parent_view_id = old_to_new_id_map.get_new_id(&view.parent_view_id);
+
+    // 3. replace the old id of the view
+    view.id = old_to_new_id_map.get_new_id(&view.id);
+
+    // 4. replace the old id of the children views
+    view.children.iter_mut().for_each(|view_identifier| {
+      view_identifier.id = old_to_new_id_map.get_new_id(&view_identifier.id);
+    });
+  });
+
+  match old_to_new_id_map.get(&folder_data.current_workspace_id) {
+    Some(new_workspace_id) => {
+      folder_data.current_workspace_id = new_workspace_id.clone();
+    },
+    None => {
+      tracing::error!("🔴migrate folder: current workspace id not found");
+    },
+  }
+
+  match old_to_new_id_map.get(&folder_data.current_view) {
+    Some(new_view_id) => {
+      folder_data.current_view = new_view_id.clone();
+    },
+    None => {
+      tracing::error!("🔴migrate folder: current view id not found");
+      folder_data.current_view = "".to_string();
+    },
+  }
+
+  let origin = CollabOrigin::Client(CollabClient::new(new_uid, "phantom"));
+  let new_folder_collab = Collab::new_with_raw_data(origin, new_workspace_id, vec![], vec![])
+    .map_err(|err| PersistenceError::Internal(Box::new(err)))?;
+  let mutex_collab = Arc::new(MutexCollab::from_collab(new_folder_collab));
+  let _ = Folder::create(mutex_collab.clone(), None, Some(folder_data));
+
+  {
+    let mutex_collab = mutex_collab.lock();
+    let txn = mutex_collab.transact();
+    if let Err(err) = new_collab_w_txn.create_new_doc(new_uid, new_workspace_id, &txn) {
+      tracing::error!("🔴migrate folder failed: {:?}", err);
+    }
+  }
+  Ok(())
+}
+
+fn migrate_user_awareness(
+  old_to_new_id_map: &mut OldToNewIdMap,
+  old_user: &MigrationUser,
+  new_user: &MigrationUser,
+) -> Result<(), PersistenceError> {
+  let old_uid = old_user.session.user_id;
+  let new_uid = new_user.session.user_id;
+  tracing::debug!("migrate user awareness from: {}, to: {}", old_uid, new_uid);
+  old_to_new_id_map.insert(old_uid.to_string(), new_uid.to_string());
+  Ok(())
+}
+
+fn migrate_databases<'a, W>(
+  old_to_new_id_map: &Arc<Mutex<OldToNewIdMap>>,
+  new_user: &MigrationUser,
+  new_collab_w_txn: &'a W,
+  object_ids: &mut Vec<String>,
+  collab_by_oid: &HashMap<String, Collab>,
+) -> Result<(), PersistenceError>
+where
+  W: YrsDocAction<'a>,
+  PersistenceError: From<W::Error>,
+{
+  // Migrate databases
+  let mut database_object_ids = vec![];
+  let database_row_object_ids = RwLock::new(HashSet::new());
+
+  for object_id in &mut *object_ids {
+    if let Some(collab) = collab_by_oid.get(object_id) {
+      if !is_database_collab(collab) {
+        continue;
+      }
+
+      database_object_ids.push(object_id.clone());
+      reset_inline_view_id(collab, |old_inline_view_id| {
+        old_to_new_id_map.lock().get_new_id(&old_inline_view_id)
+      });
+
+      mut_database_views_with_collab(collab, |database_view| {
+        let new_view_id = old_to_new_id_map.lock().get_new_id(&database_view.id);
+        let new_database_id = old_to_new_id_map
+          .lock()
+          .get_new_id(&database_view.database_id);
+
+        tracing::trace!(
+          "migrate database view id from: {}, to: {}",
+          database_view.id,
+          new_view_id,
+        );
+        tracing::trace!(
+          "migrate database view database id from: {}, to: {}",
+          database_view.database_id,
+          new_database_id,
+        );
+
+        database_view.id = new_view_id;
+        database_view.database_id = new_database_id;
+        database_view.row_orders.iter_mut().for_each(|row_order| {
+          let old_row_id = String::from(row_order.id.clone());
+          let old_row_document_id = database_row_document_id_from_row_id(&old_row_id);
+          let new_row_id = old_to_new_id_map.lock().get_new_id(&old_row_id);
+          let new_row_document_id = database_row_document_id_from_row_id(&new_row_id);
+          tracing::debug!("migrate row id: {} to {}", row_order.id, new_row_id);
+          tracing::debug!(
+            "migrate row document id: {} to {}",
+            old_row_document_id,
+            new_row_document_id
+          );
+          old_to_new_id_map
+            .lock()
+            .insert(old_row_document_id, new_row_document_id);
+
+          row_order.id = RowId::from(new_row_id);
+          database_row_object_ids.write().insert(old_row_id);
+        });
+      });
+
+      let new_object_id = old_to_new_id_map.lock().get_new_id(object_id);
+      tracing::debug!(
+        "migrate database from: {}, to: {}",
+        object_id,
+        new_object_id,
+      );
+      migrate_collab_object(
+        collab,
+        new_user.session.user_id,
+        &new_object_id,
+        new_collab_w_txn,
+      );
+    }
+  }
+  object_ids.retain(|id| !database_object_ids.contains(id));
+
+  let database_row_object_ids = database_row_object_ids.read();
+  for object_id in &*database_row_object_ids {
+    if let Some(collab) = collab_by_oid.get(object_id) {
+      let new_object_id = old_to_new_id_map.lock().get_new_id(object_id);
+      tracing::info!(
+        "migrate database row from: {}, to: {}",
+        object_id,
+        new_object_id,
+      );
+      mut_row_with_collab(collab, |row_update| {
+        row_update.set_row_id(RowId::from(new_object_id.clone()));
+      });
+      migrate_collab_object(
+        collab,
+        new_user.session.user_id,
+        &new_object_id,
+        new_collab_w_txn,
+      );
+    }
+  }
+  object_ids.retain(|id| !database_row_object_ids.contains(id));
+
+  Ok(())
+}
+
+fn make_collab_by_oid<'a, R>(
+  old_user: &MigrationUser,
+  old_collab_r_txn: &R,
+  object_ids: &[String],
+) -> HashMap<String, Collab>
+where
+  R: YrsDocAction<'a>,
+  PersistenceError: From<R::Error>,
+{
+  let mut collab_by_oid = HashMap::new();
+  for object_id in object_ids {
+    let collab = Collab::new(old_user.session.user_id, object_id, "phantom", vec![]);
+    match collab.with_origin_transact_mut(|txn| {
+      old_collab_r_txn.load_doc(old_user.session.user_id, &object_id, txn)
+    }) {
+      Ok(_) => {
+        collab_by_oid.insert(object_id.clone(), collab);
+      },
+      Err(err) => tracing::error!("🔴Initialize migration collab failed: {:?} ", err),
+    }
+  }
+
+  collab_by_oid
+}
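The remapping above relies on `OldToNewIdMap::get_new_id` handing back the same freshly generated id every time a given old id is seen, which keeps parent/child and linked-view references consistent across the migrated objects. A test-style sketch of that property (assuming the types defined in this file):

#[test]
fn old_to_new_id_map_is_stable() {
  let mut map = OldToNewIdMap::new();
  let first = map.get_new_id("old-view-id");
  let second = map.get_new_id("old-view-id");
  // The same old id always resolves to the same newly generated id.
  assert_eq!(first, second);
  assert_ne!(first, "old-view-id");
}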

+ 4 - 3
frontend/rust-lib/flowy-user/src/migrations/mod.rs

@@ -1,6 +1,7 @@
+pub use define::*;
+
 mod define;
 pub mod historical_document;
-pub mod local_user_to_cloud;
+pub mod migrate_to_new_user;
 pub mod migration;
-
-pub use define::*;
+pub mod sync_new_user;

+ 327 - 0
frontend/rust-lib/flowy-user/src/migrations/sync_new_user.rs

@@ -0,0 +1,327 @@
+use std::future::Future;
+use std::ops::Deref;
+use std::pin::Pin;
+use std::sync::Arc;
+
+use anyhow::{anyhow, Error};
+use appflowy_integrate::{CollabObject, CollabType, PersistenceError, RocksCollabDB, YrsDocAction};
+use collab::core::collab::MutexCollab;
+use collab::preclude::Collab;
+use collab_database::database::get_database_row_ids;
+use collab_database::rows::database_row_document_id_from_row_id;
+use collab_database::user::{get_database_with_views, DatabaseWithViews};
+use collab_folder::core::{Folder, View, ViewLayout};
+use parking_lot::Mutex;
+
+use flowy_error::FlowyResult;
+use flowy_user_deps::cloud::UserCloudService;
+
+use crate::migrations::MigrationUser;
+
+#[tracing::instrument(level = "info", skip_all, err)]
+pub async fn sync_user_data_to_cloud(
+  user_service: Arc<dyn UserCloudService>,
+  new_user: &MigrationUser,
+  collab_db: &Arc<RocksCollabDB>,
+) -> FlowyResult<()> {
+  let workspace_id = new_user.session.user_workspace.id.clone();
+  let uid = new_user.session.user_id;
+  let folder = Arc::new(sync_folder(uid, &workspace_id, collab_db, user_service.clone()).await?);
+
+  let database_records = sync_database_views(
+    uid,
+    &workspace_id,
+    &new_user.session.user_workspace.database_views_aggregate_id,
+    collab_db,
+    user_service.clone(),
+  )
+  .await;
+
+  let views = folder.lock().get_current_workspace_views();
+  for view in views {
+    let view_id = view.id.clone();
+    if let Err(err) = sync_views(
+      uid,
+      folder.clone(),
+      database_records.clone(),
+      workspace_id.to_string(),
+      view,
+      collab_db.clone(),
+      user_service.clone(),
+    )
+    .await
+    {
+      tracing::error!("🔴sync {} failed: {:?}", view_id, err);
+    }
+  }
+  Ok(())
+}
+
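+/// Recursively syncs a view and its child views. Document views upload a single
+/// collab update; database views additionally upload every row and, when present,
+/// the document stored in each row. The future is boxed so the function can recurse.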
+fn sync_views(
+  uid: i64,
+  folder: Arc<MutexFolder>,
+  database_records: Vec<Arc<DatabaseWithViews>>,
+  workspace_id: String,
+  view: Arc<View>,
+  collab_db: Arc<RocksCollabDB>,
+  user_service: Arc<dyn UserCloudService>,
+) -> Pin<Box<dyn Future<Output = Result<(), Error>> + Send + Sync>> {
+  Box::pin(async move {
+    let collab_type = collab_type_from_view_layout(&view.layout);
+    let object_id = object_id_from_view(&view, &database_records)?;
+    tracing::debug!(
+      "sync view: {:?}:{} with object_id: {}",
+      view.layout,
+      view.id,
+      object_id
+    );
+
+    let collab_object =
+      CollabObject::new(uid, object_id, collab_type).with_workspace_id(workspace_id.to_string());
+
+    match view.layout {
+      ViewLayout::Document => {
+        let update = get_collab_init_update(uid, &collab_object, &collab_db)?;
+        tracing::info!(
+          "sync object: {} with update: {}",
+          collab_object,
+          update.len()
+        );
+        user_service
+          .create_collab_object(&collab_object, update)
+          .await?;
+      },
+      ViewLayout::Grid | ViewLayout::Board | ViewLayout::Calendar => {
+        let (database_update, row_ids) = get_database_init_update(uid, &collab_object, &collab_db)?;
+        tracing::info!(
+          "sync object: {} with update: {}",
+          collab_object,
+          database_update.len()
+        );
+        user_service
+          .create_collab_object(&collab_object, database_update)
+          .await?;
+
+        // sync the database's rows
+        for row_id in row_ids {
+          tracing::debug!("sync row: {}", row_id);
+          let document_id = database_row_document_id_from_row_id(&row_id);
+
+          let database_row_collab_object = CollabObject::new(uid, row_id, CollabType::DatabaseRow)
+            .with_workspace_id(workspace_id.to_string());
+          let database_row_update =
+            get_collab_init_update(uid, &database_row_collab_object, &collab_db)?;
+          tracing::info!(
+            "sync object: {} with update: {}",
+            database_row_collab_object,
+            database_row_update.len()
+          );
+
+          let _ = user_service
+            .create_collab_object(&database_row_collab_object, database_row_update)
+            .await;
+
+          let database_row_document = CollabObject::new(uid, document_id, CollabType::Document)
+            .with_workspace_id(workspace_id.to_string());
+          // sync the document in the row, if one exists
+          if let Ok(document_update) =
+            get_collab_init_update(uid, &database_row_document, &collab_db)
+          {
+            tracing::info!(
+              "sync database row document: {} with update: {}",
+              database_row_document,
+              document_update.len()
+            );
+            let _ = user_service
+              .create_collab_object(&database_row_document, document_update)
+              .await;
+          }
+        }
+      },
+    }
+
+    let child_views = folder.lock().views.get_views_belong_to(&view.id);
+    for child_view in child_views {
+      let cloned_child_view = child_view.clone();
+      if let Err(err) = Box::pin(sync_views(
+        uid,
+        folder.clone(),
+        database_records.clone(),
+        workspace_id.clone(),
+        child_view,
+        collab_db.clone(),
+        user_service.clone(),
+      ))
+      .await
+      {
+        tracing::error!(
+          "🔴sync {:?}:{} failed: {:?}",
+          cloned_child_view.layout,
+          cloned_child_view.id,
+          err
+        )
+      }
+    }
+    Ok(())
+  })
+}
+
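+/// Loads the collab identified by `collab_object` from the local collab database and
+/// encodes it as one v1 update, returning an error if the update is empty.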
+fn get_collab_init_update(
+  uid: i64,
+  collab_object: &CollabObject,
+  collab_db: &Arc<RocksCollabDB>,
+) -> Result<Vec<u8>, PersistenceError> {
+  let collab = Collab::new(uid, &collab_object.object_id, "phantom", vec![]);
+  let _ = collab.with_origin_transact_mut(|txn| {
+    collab_db
+      .read_txn()
+      .load_doc(uid, &collab_object.object_id, txn)
+  })?;
+  let update = collab.encode_as_update_v1().0;
+  if update.is_empty() {
+    return Err(PersistenceError::UnexpectedEmptyUpdates);
+  }
+
+  Ok(update)
+}
+
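+/// Like `get_collab_init_update`, but also returns the database's row ids so each
+/// row can be synced as a separate collab object.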
+fn get_database_init_update(
+  uid: i64,
+  collab_object: &CollabObject,
+  collab_db: &Arc<RocksCollabDB>,
+) -> Result<(Vec<u8>, Vec<String>), PersistenceError> {
+  let collab = Collab::new(uid, &collab_object.object_id, "phantom", vec![]);
+  let _ = collab.with_origin_transact_mut(|txn| {
+    collab_db
+      .read_txn()
+      .load_doc(uid, &collab_object.object_id, txn)
+  })?;
+
+  let row_ids = get_database_row_ids(&collab).unwrap_or_default();
+  let update = collab.encode_as_update_v1().0;
+  if update.is_empty() {
+    return Err(PersistenceError::UnexpectedEmptyUpdates);
+  }
+
+  Ok((update, row_ids))
+}
+
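+/// Loads the workspace folder from the local collab database, uploads its update to
+/// the cloud (failures are logged but do not abort the sync), and returns the opened
+/// folder so the caller can traverse its views.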
+async fn sync_folder(
+  uid: i64,
+  workspace_id: &str,
+  collab_db: &Arc<RocksCollabDB>,
+  user_service: Arc<dyn UserCloudService>,
+) -> Result<MutexFolder, Error> {
+  let (folder, update) = {
+    let collab = Collab::new(uid, workspace_id, "phantom", vec![]);
+    // Use the temporary result to shorten the lifetime of the TransactionMut
+    collab.with_origin_transact_mut(|txn| collab_db.read_txn().load_doc(uid, workspace_id, txn))?;
+    let update = collab.encode_as_update_v1().0;
+    (
+      MutexFolder::new(Folder::open(
+        Arc::new(MutexCollab::from_collab(collab)),
+        None,
+      )),
+      update,
+    )
+  };
+
+  let collab_object = CollabObject::new(uid, workspace_id.to_string(), CollabType::Folder)
+    .with_workspace_id(workspace_id.to_string());
+  tracing::info!(
+    "sync object: {} with update: {}",
+    collab_object,
+    update.len()
+  );
+  if let Err(err) = user_service
+    .create_collab_object(&collab_object, update)
+    .await
+  {
+    tracing::error!("🔴sync folder failed: {:?}", err);
+  }
+
+  Ok(folder)
+}
+
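+/// Uploads the workspace database aggregate collab and returns the database records
+/// used to map linked database views to their database id. Returns an empty list if
+/// the aggregate collab cannot be loaded.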
+async fn sync_database_views(
+  uid: i64,
+  workspace_id: &str,
+  database_views_aggregate_id: &str,
+  collab_db: &Arc<RocksCollabDB>,
+  user_service: Arc<dyn UserCloudService>,
+) -> Vec<Arc<DatabaseWithViews>> {
+  let collab_object = CollabObject::new(
+    uid,
+    database_views_aggregate_id.to_string(),
+    CollabType::WorkspaceDatabase,
+  )
+  .with_workspace_id(workspace_id.to_string());
+
+  // Use the temporary result to shorten the lifetime of the TransactionMut
+  let result = {
+    let collab = Collab::new(uid, database_views_aggregate_id, "phantom", vec![]);
+    collab
+      .with_origin_transact_mut(|txn| {
+        collab_db
+          .read_txn()
+          .load_doc(uid, database_views_aggregate_id, txn)
+      })
+      .map(|_| {
+        (
+          get_database_with_views(&collab),
+          collab.encode_as_update_v1().0,
+        )
+      })
+  };
+
+  if let Ok((records, update)) = result {
+    let _ = user_service
+      .create_collab_object(&collab_object, update)
+      .await;
+    records.into_iter().map(Arc::new).collect()
+  } else {
+    vec![]
+  }
+}
+
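+/// Mutex wrapper around `Folder` so it can be shared with the recursive boxed sync
+/// future; `Send` and `Sync` are asserted manually.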
+struct MutexFolder(Mutex<Folder>);
+impl MutexFolder {
+  pub fn new(folder: Folder) -> Self {
+    Self(Mutex::new(folder))
+  }
+}
+impl Deref for MutexFolder {
+  type Target = Mutex<Folder>;
+  fn deref(&self) -> &Self::Target {
+    &self.0
+  }
+}
+unsafe impl Sync for MutexFolder {}
+unsafe impl Send for MutexFolder {}
+
+fn collab_type_from_view_layout(view_layout: &ViewLayout) -> CollabType {
+  match view_layout {
+    ViewLayout::Document => CollabType::Document,
+    ViewLayout::Grid | ViewLayout::Board | ViewLayout::Calendar => CollabType::Database,
+  }
+}
+
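+/// Resolves the collab object id for a view: a database view maps to the id of the
+/// database it links to, while any other view uses its own view id.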
+fn object_id_from_view(
+  view: &Arc<View>,
+  database_records: &[Arc<DatabaseWithViews>],
+) -> Result<String, Error> {
+  if view.layout.is_database() {
+    match database_records
+      .iter()
+      .find(|record| record.linked_views.contains(&view.id))
+    {
+      None => Err(anyhow!(
+        "🔴sync view: {} failed: no database for this view",
+        view.id
+      )),
+      Some(record) => Ok(record.database_id.clone()),
+    }
+  } else {
+    Ok(view.id.clone())
+  }
+}
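
For orientation, a minimal sketch of how this entry point might be wired into the sign-up flow. The `cloud_service`, `authenticate_user`, and `new_user` handles below are assumptions for illustration and are not part of this commit:

    // Sketch only: run the sync in the background so sign-up is not blocked on uploads.
    let user_service: Arc<dyn UserCloudService> = cloud_service.clone();     // assumed handle
    let collab_db: Arc<RocksCollabDB> = authenticate_user.collab_db.clone(); // assumed handle
    tokio::spawn(async move {
      if let Err(err) = sync_user_data_to_cloud(user_service, &new_user, &collab_db).await {
        tracing::error!("🔴sync user data to cloud failed: {:?}", err);
      }
    });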

+ 1 - 1
frontend/rust-lib/flowy-user/src/services/entities.rs

@@ -66,7 +66,7 @@ impl<'de> Visitor<'de> for SessionVisitor {
           name: "My Workspace".to_string(),
           created_at: Utc::now(),
           // For historical reasons, the database_storage_id is constructed from the user_id.
-          database_storage_id: STANDARD.encode(format!("{}:user:database", user_id)),
+          database_views_aggregate_id: STANDARD.encode(format!("{}:user:database", user_id)),
         })
       }
     }

+ 3 - 3
frontend/rust-lib/flowy-user/src/services/user_workspace_sql.rs

@@ -23,7 +23,7 @@ impl TryFrom<(i64, &UserWorkspace)> for UserWorkspaceTable {
     if value.1.id.is_empty() {
       return Err(FlowyError::invalid_data().with_context("The id is empty"));
     }
-    if value.1.database_storage_id.is_empty() {
+    if value.1.database_views_aggregate_id.is_empty() {
       return Err(FlowyError::invalid_data().with_context("The database storage id is empty"));
     }
 
@@ -32,7 +32,7 @@ impl TryFrom<(i64, &UserWorkspace)> for UserWorkspaceTable {
       name: value.1.name.clone(),
       uid: value.0,
       created_at: value.1.created_at.timestamp(),
-      database_storage_id: value.1.database_storage_id.clone(),
+      database_storage_id: value.1.database_views_aggregate_id.clone(),
     })
   }
 }
@@ -46,7 +46,7 @@ impl From<UserWorkspaceTable> for UserWorkspace {
         .timestamp_opt(value.created_at, 0)
         .single()
         .unwrap_or_default(),
-      database_storage_id: value.database_storage_id,
+      database_views_aggregate_id: value.database_storage_id,
     }
   }
 }