add server folder manager

appflowy 3 years ago
commit 1e66aae051
59 changed files with 1987 additions and 941 deletions
  1. backend/src/services/document/persistence.rs (+2 -2)
  2. backend/src/services/document/ws_receiver.rs (+10 -8)
  3. backend/tests/api_test/workspace_test.rs (+1 -1)
  4. backend/tests/document_test/edit_script.rs (+1 -1)
  5. backend/tests/util/helper.rs (+1 -1)
  6. frontend/app_flowy/packages/flowy_sdk/lib/protobuf/flowy-collaboration/document_info.pb.dart (+1 -1)
  7. frontend/app_flowy/packages/flowy_sdk/lib/protobuf/flowy-collaboration/document_info.pbenum.dart (+7 -0)
  8. frontend/app_flowy/packages/flowy_sdk/lib/protobuf/flowy-collaboration/document_info.pbjson.dart (+1 -1)
  9. frontend/app_flowy/packages/flowy_sdk/lib/protobuf/flowy-collaboration/document_info.pbserver.dart (+9 -0)
  10. frontend/app_flowy/packages/flowy_sdk/lib/protobuf/flowy-collaboration/folder_info.pb.dart (+101 -0)
  11. frontend/app_flowy/packages/flowy_sdk/lib/protobuf/flowy-collaboration/folder_info.pbenum.dart (+1 -1)
  12. frontend/app_flowy/packages/flowy_sdk/lib/protobuf/flowy-collaboration/folder_info.pbjson.dart (+23 -0)
  13. frontend/app_flowy/packages/flowy_sdk/lib/protobuf/flowy-collaboration/folder_info.pbserver.dart (+2 -2)
  14. frontend/app_flowy/packages/flowy_sdk/lib/protobuf/flowy-collaboration/protobuf.dart (+3 -2)
  15. frontend/app_flowy/packages/flowy_sdk/lib/protobuf/flowy-collaboration/ws_data.pb.dart (+3 -3)
  16. frontend/app_flowy/packages/flowy_sdk/lib/protobuf/flowy-collaboration/ws_data.pbenum.dart (+1 -1)
  17. frontend/app_flowy/packages/flowy_sdk/lib/protobuf/flowy-collaboration/ws_data.pbjson.dart (+1 -1)
  18. frontend/app_flowy/packages/flowy_sdk/lib/protobuf/flowy-collaboration/ws_data.pbserver.dart (+2 -2)
  19. frontend/app_flowy/packages/flowy_sdk/lib/protobuf/flowy-core-data-model/trash.pb.dart (+116 -116)
  20. frontend/app_flowy/packages/flowy_sdk/lib/protobuf/flowy-core-data-model/trash.pbjson.dart (+22 -22)
  21. frontend/rust-lib/flowy-core/src/services/folder_editor.rs (+1 -1)
  22. frontend/rust-lib/flowy-core/src/services/persistence/mod.rs (+1 -1)
  23. frontend/rust-lib/flowy-core/src/services/view/controller.rs (+1 -1)
  24. frontend/rust-lib/flowy-core/src/services/view/event_handler.rs (+1 -1)
  25. frontend/rust-lib/flowy-core/src/services/web_socket.rs (+75 -33)
  26. frontend/rust-lib/flowy-document/src/controller.rs (+2 -2)
  27. frontend/rust-lib/flowy-document/src/core/editor.rs (+2 -2)
  28. frontend/rust-lib/flowy-document/src/core/queue.rs (+4 -33)
  29. frontend/rust-lib/flowy-document/src/core/web_socket.rs (+16 -34)
  30. frontend/rust-lib/flowy-document/src/lib.rs (+1 -1)
  31. frontend/rust-lib/flowy-net/src/http_server/document.rs (+1 -1)
  32. frontend/rust-lib/flowy-net/src/local_server/persistence.rs (+73 -18)
  33. frontend/rust-lib/flowy-net/src/local_server/server.rs (+53 -4)
  34. frontend/rust-lib/flowy-sync/src/conflict_resolve.rs (+63 -65)
  35. frontend/rust-lib/flowy-sync/src/ws_manager.rs (+7 -8)
  36. frontend/rust-lib/flowy-test/src/helper.rs (+1 -1)
  37. shared-lib/flowy-collaboration/src/client_document/document_pad.rs (+4 -1)
  38. shared-lib/flowy-collaboration/src/entities/document_info.rs (+1 -25)
  39. shared-lib/flowy-collaboration/src/entities/folder_info.rs (+19 -0)
  40. shared-lib/flowy-collaboration/src/entities/mod.rs (+2 -1)
  41. shared-lib/flowy-collaboration/src/entities/ws_data.rs (+6 -6)
  42. shared-lib/flowy-collaboration/src/folder/folder_pad.rs (+32 -13)
  43. shared-lib/flowy-collaboration/src/lib.rs (+1 -0)
  44. shared-lib/flowy-collaboration/src/protobuf/model/document_info.rs (+22 -22)
  45. shared-lib/flowy-collaboration/src/protobuf/model/folder_info.rs (+327 -0)
  46. shared-lib/flowy-collaboration/src/protobuf/model/mod.rs (+7 -4)
  47. shared-lib/flowy-collaboration/src/protobuf/model/ws_data.rs (+28 -28)
  48. shared-lib/flowy-collaboration/src/protobuf/proto/document_info.proto (+0 -0)
  49. shared-lib/flowy-collaboration/src/protobuf/proto/folder_info.proto (+8 -0)
  50. shared-lib/flowy-collaboration/src/protobuf/proto/ws_data.proto (+0 -0)
  51. shared-lib/flowy-collaboration/src/server_document/document_manager.rs (+71 -69)
  52. shared-lib/flowy-collaboration/src/server_folder/folder_manager.rs (+332 -0)
  53. shared-lib/flowy-collaboration/src/server_folder/folder_pad.rs (+38 -0)
  54. shared-lib/flowy-collaboration/src/server_folder/mod.rs (+4 -0)
  55. shared-lib/flowy-collaboration/src/synchronizer.rs (+31 -31)
  56. shared-lib/flowy-collaboration/src/util.rs (+86 -7)
  57. shared-lib/flowy-core-data-model/src/protobuf/model/trash.rs (+338 -344)
  58. shared-lib/flowy-core-data-model/src/protobuf/proto/trash.proto (+8 -8)
  59. shared-lib/flowy-derive/src/derive_cache/derive_cache.rs (+12 -11)

+ 2 - 2
backend/src/services/document/persistence.rs

@@ -15,7 +15,7 @@ use flowy_collaboration::{
        Revision as RevisionPB,
    },
    server_document::ServerDocumentManager,
-    util::make_doc_from_revisions,
+    util::make_document_info_pb_from_revisions_pb,
};

use protobuf::Message;
@@ -39,7 +39,7 @@ pub async fn read_document(
) -> Result<DocumentInfo, ServerError> {
    let _ = Uuid::parse_str(&params.doc_id).context("Parse document id to uuid failed")?;
    let revisions = kv_store.get_revisions(&params.doc_id, None).await?;
-    match make_doc_from_revisions(&params.doc_id, revisions) {
+    match make_document_info_pb_from_revisions_pb(&params.doc_id, revisions) {
        Ok(Some(document_info)) => Ok(document_info),
        Ok(None) => Err(ServerError::record_not_found().context(format!("{} not exist", params.doc_id))),
        Err(e) => Err(ServerError::internal().context(e)),
         Err(e) => Err(ServerError::internal().context(e)),

+ 10 - 8
backend/src/services/document/ws_receiver.rs

@@ -10,7 +10,7 @@ use crate::{
};
use backend_service::errors::ServerError;
use flowy_collaboration::{
-    entities::doc::DocumentInfo,
+    entities::document_info::DocumentInfo,
    errors::CollaborateError,
    protobuf::{
        CreateDocParams as CreateDocParamsPB,
@@ -19,7 +19,7 @@ use flowy_collaboration::{
        Revision as RevisionPB,
    },
    server_document::{DocumentCloudPersistence, ServerDocumentManager},
-    util::repeated_revision_from_repeated_revision_pb,
+    util::make_document_info_from_revisions_pb,
};
use lib_infra::future::BoxResultFuture;
use std::{
@@ -103,12 +103,11 @@ impl DocumentCloudPersistence for HttpDocumentCloudPersistence {
        &self,
        doc_id: &str,
        repeated_revision: RepeatedRevisionPB,
-    ) -> BoxResultFuture<DocumentInfo, CollaborateError> {
+    ) -> BoxResultFuture<Option<DocumentInfo>, CollaborateError> {
        let kv_store = self.0.clone();
        let doc_id = doc_id.to_owned();
        Box::pin(async move {
-            let revisions = repeated_revision_from_repeated_revision_pb(repeated_revision.clone())?.into_inner();
-            let doc = DocumentInfo::from_revisions(&doc_id, revisions)?;
+            let document_info = make_document_info_from_revisions_pb(&doc_id, repeated_revision.clone())?;
            let doc_id = doc_id.to_owned();
            let mut params = CreateDocParamsPB::new();
            params.set_id(doc_id);
@@ -116,11 +115,11 @@ impl DocumentCloudPersistence for HttpDocumentCloudPersistence {
            let _ = create_document(&kv_store, params)
                .await
                .map_err(server_error_to_collaborate_error)?;
-            Ok(doc)
+            Ok(document_info)
        })
    }

-    fn read_revisions(
+    fn read_document_revisions(
        &self,
        doc_id: &str,
        rev_ids: Option<Vec<i64>>,
@@ -135,7 +134,10 @@ impl DocumentCloudPersistence for HttpDocumentCloudPersistence {
        Box::pin(async move { f().await.map_err(server_error_to_collaborate_error) })
    }

-    fn save_revisions(&self, mut repeated_revision: RepeatedRevisionPB) -> BoxResultFuture<(), CollaborateError> {
+    fn save_document_revisions(
+        &self,
+        mut repeated_revision: RepeatedRevisionPB,
+    ) -> BoxResultFuture<(), CollaborateError> {
        let kv_store = self.0.clone();
        let f = || async move {
            let revisions = repeated_revision.take_items().into();

+ 1 - 1
backend/tests/api_test/workspace_test.rs

@@ -4,7 +4,7 @@ use crate::util::helper::{ViewTest, *};
use flowy_collaboration::{
    client_document::{ClientDocument, PlainDoc},
    entities::{
-        doc::{CreateDocParams, DocumentId},
+        document_info::{CreateDocParams, DocumentId},
        revision::{md5, RepeatedRevision, Revision},
    },
};

+ 1 - 1
backend/tests/document_test/edit_script.rs

@@ -10,7 +10,7 @@ use std::sync::Arc;
use bytes::Bytes;
use tokio::time::{sleep, Duration};
use crate::util::helper::{spawn_server, TestServer};
-use flowy_collaboration::{entities::doc::DocumentId, protobuf::ResetDocumentParams as ResetDocumentParamsPB};
+use flowy_collaboration::{entities::document_info::DocumentId, protobuf::ResetDocumentParams as ResetDocumentParamsPB};
use lib_ot::rich_text::{RichTextAttribute, RichTextDelta};
use parking_lot::RwLock;
use backend::services::document::persistence::{read_document, reset_document};

+ 1 - 1
backend/tests/util/helper.rs

@@ -9,7 +9,7 @@ use backend_service::{
};
use flowy_collaboration::{
    client_document::default::initial_delta_string,
-    entities::doc::{CreateDocParams, DocumentId, DocumentInfo},
+    entities::document_info::{CreateDocParams, DocumentId, DocumentInfo},
};
use flowy_core_data_model::entities::prelude::*;
use flowy_net::http_server::{

+ 1 - 1
frontend/app_flowy/packages/flowy_sdk/lib/protobuf/flowy-collaboration/doc.pb.dart → frontend/app_flowy/packages/flowy_sdk/lib/protobuf/flowy-collaboration/document_info.pb.dart

@@ -1,6 +1,6 @@
///
//  Generated code. Do not modify.
-//  source: doc.proto
+//  source: document_info.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields

+ 7 - 0
frontend/app_flowy/packages/flowy_sdk/lib/protobuf/flowy-collaboration/document_info.pbenum.dart

@@ -0,0 +1,7 @@
+///
+//  Generated code. Do not modify.
+//  source: document_info.proto
+//
+// @dart = 2.12
+// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields
+

+ 1 - 1
frontend/app_flowy/packages/flowy_sdk/lib/protobuf/flowy-collaboration/doc.pbjson.dart → frontend/app_flowy/packages/flowy_sdk/lib/protobuf/flowy-collaboration/document_info.pbjson.dart

@@ -1,6 +1,6 @@
///
//  Generated code. Do not modify.
-//  source: doc.proto
+//  source: document_info.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields,deprecated_member_use_from_same_package

+ 9 - 0
frontend/app_flowy/packages/flowy_sdk/lib/protobuf/flowy-collaboration/document_info.pbserver.dart

@@ -0,0 +1,9 @@
+///
+//  Generated code. Do not modify.
+//  source: document_info.proto
+//
+// @dart = 2.12
+// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields,deprecated_member_use_from_same_package
+
+export 'document_info.pb.dart';
+

+ 101 - 0
frontend/app_flowy/packages/flowy_sdk/lib/protobuf/flowy-collaboration/folder_info.pb.dart

@@ -0,0 +1,101 @@
+///
+//  Generated code. Do not modify.
+//  source: folder_info.proto
+//
+// @dart = 2.12
+// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields
+
+import 'dart:core' as $core;
+
+import 'package:fixnum/fixnum.dart' as $fixnum;
+import 'package:protobuf/protobuf.dart' as $pb;
+
+class FolderInfo extends $pb.GeneratedMessage {
+  static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'FolderInfo', createEmptyInstance: create)
+    ..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'folderId')
+    ..aOS(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'text')
+    ..aInt64(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'revId')
+    ..aInt64(4, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'baseRevId')
+    ..hasRequiredFields = false
+  ;
+
+  FolderInfo._() : super();
+  factory FolderInfo({
+    $core.String? folderId,
+    $core.String? text,
+    $fixnum.Int64? revId,
+    $fixnum.Int64? baseRevId,
+  }) {
+    final _result = create();
+    if (folderId != null) {
+      _result.folderId = folderId;
+    }
+    if (text != null) {
+      _result.text = text;
+    }
+    if (revId != null) {
+      _result.revId = revId;
+    }
+    if (baseRevId != null) {
+      _result.baseRevId = baseRevId;
+    }
+    return _result;
+  }
+  factory FolderInfo.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
+  factory FolderInfo.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
+  @$core.Deprecated(
+  'Using this can add significant overhead to your binary. '
+  'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
+  'Will be removed in next major version')
+  FolderInfo clone() => FolderInfo()..mergeFromMessage(this);
+  @$core.Deprecated(
+  'Using this can add significant overhead to your binary. '
+  'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
+  'Will be removed in next major version')
+  FolderInfo copyWith(void Function(FolderInfo) updates) => super.copyWith((message) => updates(message as FolderInfo)) as FolderInfo; // ignore: deprecated_member_use
+  $pb.BuilderInfo get info_ => _i;
+  @$core.pragma('dart2js:noInline')
+  static FolderInfo create() => FolderInfo._();
+  FolderInfo createEmptyInstance() => create();
+  static $pb.PbList<FolderInfo> createRepeated() => $pb.PbList<FolderInfo>();
+  @$core.pragma('dart2js:noInline')
+  static FolderInfo getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<FolderInfo>(create);
+  static FolderInfo? _defaultInstance;
+
+  @$pb.TagNumber(1)
+  $core.String get folderId => $_getSZ(0);
+  @$pb.TagNumber(1)
+  set folderId($core.String v) { $_setString(0, v); }
+  @$pb.TagNumber(1)
+  $core.bool hasFolderId() => $_has(0);
+  @$pb.TagNumber(1)
+  void clearFolderId() => clearField(1);
+
+  @$pb.TagNumber(2)
+  $core.String get text => $_getSZ(1);
+  @$pb.TagNumber(2)
+  set text($core.String v) { $_setString(1, v); }
+  @$pb.TagNumber(2)
+  $core.bool hasText() => $_has(1);
+  @$pb.TagNumber(2)
+  void clearText() => clearField(2);
+
+  @$pb.TagNumber(3)
+  $fixnum.Int64 get revId => $_getI64(2);
+  @$pb.TagNumber(3)
+  set revId($fixnum.Int64 v) { $_setInt64(2, v); }
+  @$pb.TagNumber(3)
+  $core.bool hasRevId() => $_has(2);
+  @$pb.TagNumber(3)
+  void clearRevId() => clearField(3);
+
+  @$pb.TagNumber(4)
+  $fixnum.Int64 get baseRevId => $_getI64(3);
+  @$pb.TagNumber(4)
+  set baseRevId($fixnum.Int64 v) { $_setInt64(3, v); }
+  @$pb.TagNumber(4)
+  $core.bool hasBaseRevId() => $_has(3);
+  @$pb.TagNumber(4)
+  void clearBaseRevId() => clearField(4);
+}
+

+ 1 - 1
frontend/app_flowy/packages/flowy_sdk/lib/protobuf/flowy-collaboration/doc.pbenum.dart → frontend/app_flowy/packages/flowy_sdk/lib/protobuf/flowy-collaboration/folder_info.pbenum.dart

@@ -1,6 +1,6 @@
///
//  Generated code. Do not modify.
-//  source: doc.proto
+//  source: folder_info.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields

+ 23 - 0
frontend/app_flowy/packages/flowy_sdk/lib/protobuf/flowy-collaboration/folder_info.pbjson.dart

@@ -0,0 +1,23 @@
+///
+//  Generated code. Do not modify.
+//  source: folder_info.proto
+//
+// @dart = 2.12
+// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields,deprecated_member_use_from_same_package
+
+import 'dart:core' as $core;
+import 'dart:convert' as $convert;
+import 'dart:typed_data' as $typed_data;
+@$core.Deprecated('Use folderInfoDescriptor instead')
+const FolderInfo$json = const {
+  '1': 'FolderInfo',
+  '2': const [
+    const {'1': 'folder_id', '3': 1, '4': 1, '5': 9, '10': 'folderId'},
+    const {'1': 'text', '3': 2, '4': 1, '5': 9, '10': 'text'},
+    const {'1': 'rev_id', '3': 3, '4': 1, '5': 3, '10': 'revId'},
+    const {'1': 'base_rev_id', '3': 4, '4': 1, '5': 3, '10': 'baseRevId'},
+  ],
+};
+
+/// Descriptor for `FolderInfo`. Decode as a `google.protobuf.DescriptorProto`.
+final $typed_data.Uint8List folderInfoDescriptor = $convert.base64Decode('CgpGb2xkZXJJbmZvEhsKCWZvbGRlcl9pZBgBIAEoCVIIZm9sZGVySWQSEgoEdGV4dBgCIAEoCVIEdGV4dBIVCgZyZXZfaWQYAyABKANSBXJldklkEh4KC2Jhc2VfcmV2X2lkGAQgASgDUgliYXNlUmV2SWQ=');

+ 2 - 2
frontend/app_flowy/packages/flowy_sdk/lib/protobuf/flowy-collaboration/doc.pbserver.dart → frontend/app_flowy/packages/flowy_sdk/lib/protobuf/flowy-collaboration/folder_info.pbserver.dart

@@ -1,9 +1,9 @@
///
//  Generated code. Do not modify.
-//  source: doc.proto
+//  source: folder_info.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields,deprecated_member_use_from_same_package

-export 'doc.pb.dart';
+export 'folder_info.pb.dart';


+ 3 - 2
frontend/app_flowy/packages/flowy_sdk/lib/protobuf/flowy-collaboration/protobuf.dart

@@ -1,4 +1,5 @@
// Auto-generated, do not edit
-export './ws.pb.dart';
+export './folder_info.pb.dart';
+export './ws_data.pb.dart';
export './revision.pb.dart';
-export './doc.pb.dart';
+export './document_info.pb.dart';

+ 3 - 3
frontend/app_flowy/packages/flowy_sdk/lib/protobuf/flowy-collaboration/ws.pb.dart → frontend/app_flowy/packages/flowy_sdk/lib/protobuf/flowy-collaboration/ws_data.pb.dart

@@ -1,6 +1,6 @@
///
//  Generated code. Do not modify.
-//  source: ws.proto
+//  source: ws_data.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields
@@ -11,9 +11,9 @@ import 'package:protobuf/protobuf.dart' as $pb;

import 'revision.pb.dart' as $0;

-import 'ws.pbenum.dart';
+import 'ws_data.pbenum.dart';

-export 'ws.pbenum.dart';
+export 'ws_data.pbenum.dart';

class ClientRevisionWSData extends $pb.GeneratedMessage {
  static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'ClientRevisionWSData', createEmptyInstance: create)

+ 1 - 1
frontend/app_flowy/packages/flowy_sdk/lib/protobuf/flowy-collaboration/ws.pbenum.dart → frontend/app_flowy/packages/flowy_sdk/lib/protobuf/flowy-collaboration/ws_data.pbenum.dart

@@ -1,6 +1,6 @@
///
//  Generated code. Do not modify.
-//  source: ws.proto
+//  source: ws_data.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields

+ 1 - 1
frontend/app_flowy/packages/flowy_sdk/lib/protobuf/flowy-collaboration/ws.pbjson.dart → frontend/app_flowy/packages/flowy_sdk/lib/protobuf/flowy-collaboration/ws_data.pbjson.dart

@@ -1,6 +1,6 @@
///
//  Generated code. Do not modify.
-//  source: ws.proto
+//  source: ws_data.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields,deprecated_member_use_from_same_package

+ 2 - 2
frontend/app_flowy/packages/flowy_sdk/lib/protobuf/flowy-collaboration/ws.pbserver.dart → frontend/app_flowy/packages/flowy_sdk/lib/protobuf/flowy-collaboration/ws_data.pbserver.dart

@@ -1,9 +1,9 @@
///
//  Generated code. Do not modify.
-//  source: ws.proto
+//  source: ws_data.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields,deprecated_member_use_from_same_package

-export 'ws.pb.dart';
+export 'ws_data.pb.dart';


+ 116 - 116
frontend/app_flowy/packages/flowy_sdk/lib/protobuf/flowy-core-data-model/trash.pb.dart

@@ -14,122 +14,6 @@ import 'trash.pbenum.dart';

export 'trash.pbenum.dart';

-class RepeatedTrashId extends $pb.GeneratedMessage {
-  static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'RepeatedTrashId', createEmptyInstance: create)
-    ..pc<TrashId>(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'items', $pb.PbFieldType.PM, subBuilder: TrashId.create)
-    ..aOB(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'deleteAll')
-    ..hasRequiredFields = false
-  ;
-
-  RepeatedTrashId._() : super();
-  factory RepeatedTrashId({
-    $core.Iterable<TrashId>? items,
-    $core.bool? deleteAll,
-  }) {
-    final _result = create();
-    if (items != null) {
-      _result.items.addAll(items);
-    }
-    if (deleteAll != null) {
-      _result.deleteAll = deleteAll;
-    }
-    return _result;
-  }
-  factory RepeatedTrashId.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
-  factory RepeatedTrashId.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
-  @$core.Deprecated(
-  'Using this can add significant overhead to your binary. '
-  'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
-  'Will be removed in next major version')
-  RepeatedTrashId clone() => RepeatedTrashId()..mergeFromMessage(this);
-  @$core.Deprecated(
-  'Using this can add significant overhead to your binary. '
-  'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
-  'Will be removed in next major version')
-  RepeatedTrashId copyWith(void Function(RepeatedTrashId) updates) => super.copyWith((message) => updates(message as RepeatedTrashId)) as RepeatedTrashId; // ignore: deprecated_member_use
-  $pb.BuilderInfo get info_ => _i;
-  @$core.pragma('dart2js:noInline')
-  static RepeatedTrashId create() => RepeatedTrashId._();
-  RepeatedTrashId createEmptyInstance() => create();
-  static $pb.PbList<RepeatedTrashId> createRepeated() => $pb.PbList<RepeatedTrashId>();
-  @$core.pragma('dart2js:noInline')
-  static RepeatedTrashId getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<RepeatedTrashId>(create);
-  static RepeatedTrashId? _defaultInstance;
-
-  @$pb.TagNumber(1)
-  $core.List<TrashId> get items => $_getList(0);
-
-  @$pb.TagNumber(2)
-  $core.bool get deleteAll => $_getBF(1);
-  @$pb.TagNumber(2)
-  set deleteAll($core.bool v) { $_setBool(1, v); }
-  @$pb.TagNumber(2)
-  $core.bool hasDeleteAll() => $_has(1);
-  @$pb.TagNumber(2)
-  void clearDeleteAll() => clearField(2);
-}
-
-class TrashId extends $pb.GeneratedMessage {
-  static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'TrashId', createEmptyInstance: create)
-    ..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'id')
-    ..e<TrashType>(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'ty', $pb.PbFieldType.OE, defaultOrMaker: TrashType.Unknown, valueOf: TrashType.valueOf, enumValues: TrashType.values)
-    ..hasRequiredFields = false
-  ;
-
-  TrashId._() : super();
-  factory TrashId({
-    $core.String? id,
-    TrashType? ty,
-  }) {
-    final _result = create();
-    if (id != null) {
-      _result.id = id;
-    }
-    if (ty != null) {
-      _result.ty = ty;
-    }
-    return _result;
-  }
-  factory TrashId.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
-  factory TrashId.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
-  @$core.Deprecated(
-  'Using this can add significant overhead to your binary. '
-  'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
-  'Will be removed in next major version')
-  TrashId clone() => TrashId()..mergeFromMessage(this);
-  @$core.Deprecated(
-  'Using this can add significant overhead to your binary. '
-  'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
-  'Will be removed in next major version')
-  TrashId copyWith(void Function(TrashId) updates) => super.copyWith((message) => updates(message as TrashId)) as TrashId; // ignore: deprecated_member_use
-  $pb.BuilderInfo get info_ => _i;
-  @$core.pragma('dart2js:noInline')
-  static TrashId create() => TrashId._();
-  TrashId createEmptyInstance() => create();
-  static $pb.PbList<TrashId> createRepeated() => $pb.PbList<TrashId>();
-  @$core.pragma('dart2js:noInline')
-  static TrashId getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<TrashId>(create);
-  static TrashId? _defaultInstance;
-
-  @$pb.TagNumber(1)
-  $core.String get id => $_getSZ(0);
-  @$pb.TagNumber(1)
-  set id($core.String v) { $_setString(0, v); }
-  @$pb.TagNumber(1)
-  $core.bool hasId() => $_has(0);
-  @$pb.TagNumber(1)
-  void clearId() => clearField(1);
-
-  @$pb.TagNumber(2)
-  TrashType get ty => $_getN(1);
-  @$pb.TagNumber(2)
-  set ty(TrashType v) { setField(2, v); }
-  @$pb.TagNumber(2)
-  $core.bool hasTy() => $_has(1);
-  @$pb.TagNumber(2)
-  void clearTy() => clearField(2);
-}
-
class Trash extends $pb.GeneratedMessage {
  static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'Trash', createEmptyInstance: create)
    ..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'id')
@@ -274,3 +158,119 @@
  $core.List<Trash> get items => $_getList(0);
}

+class RepeatedTrashId extends $pb.GeneratedMessage {
+  static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'RepeatedTrashId', createEmptyInstance: create)
+    ..pc<TrashId>(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'items', $pb.PbFieldType.PM, subBuilder: TrashId.create)
+    ..aOB(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'deleteAll')
+    ..hasRequiredFields = false
+  ;
+
+  RepeatedTrashId._() : super();
+  factory RepeatedTrashId({
+    $core.Iterable<TrashId>? items,
+    $core.bool? deleteAll,
+  }) {
+    final _result = create();
+    if (items != null) {
+      _result.items.addAll(items);
+    }
+    if (deleteAll != null) {
+      _result.deleteAll = deleteAll;
+    }
+    return _result;
+  }
+  factory RepeatedTrashId.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
+  factory RepeatedTrashId.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
+  @$core.Deprecated(
+  'Using this can add significant overhead to your binary. '
+  'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
+  'Will be removed in next major version')
+  RepeatedTrashId clone() => RepeatedTrashId()..mergeFromMessage(this);
+  @$core.Deprecated(
+  'Using this can add significant overhead to your binary. '
+  'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
+  'Will be removed in next major version')
+  RepeatedTrashId copyWith(void Function(RepeatedTrashId) updates) => super.copyWith((message) => updates(message as RepeatedTrashId)) as RepeatedTrashId; // ignore: deprecated_member_use
+  $pb.BuilderInfo get info_ => _i;
+  @$core.pragma('dart2js:noInline')
+  static RepeatedTrashId create() => RepeatedTrashId._();
+  RepeatedTrashId createEmptyInstance() => create();
+  static $pb.PbList<RepeatedTrashId> createRepeated() => $pb.PbList<RepeatedTrashId>();
+  @$core.pragma('dart2js:noInline')
+  static RepeatedTrashId getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<RepeatedTrashId>(create);
+  static RepeatedTrashId? _defaultInstance;
+
+  @$pb.TagNumber(1)
+  $core.List<TrashId> get items => $_getList(0);
+
+  @$pb.TagNumber(2)
+  $core.bool get deleteAll => $_getBF(1);
+  @$pb.TagNumber(2)
+  set deleteAll($core.bool v) { $_setBool(1, v); }
+  @$pb.TagNumber(2)
+  $core.bool hasDeleteAll() => $_has(1);
+  @$pb.TagNumber(2)
+  void clearDeleteAll() => clearField(2);
+}
+
+class TrashId extends $pb.GeneratedMessage {
+  static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'TrashId', createEmptyInstance: create)
+    ..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'id')
+    ..e<TrashType>(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'ty', $pb.PbFieldType.OE, defaultOrMaker: TrashType.Unknown, valueOf: TrashType.valueOf, enumValues: TrashType.values)
+    ..hasRequiredFields = false
+  ;
+
+  TrashId._() : super();
+  factory TrashId({
+    $core.String? id,
+    TrashType? ty,
+  }) {
+    final _result = create();
+    if (id != null) {
+      _result.id = id;
+    }
+    if (ty != null) {
+      _result.ty = ty;
+    }
+    return _result;
+  }
+  factory TrashId.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
+  factory TrashId.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
+  @$core.Deprecated(
+  'Using this can add significant overhead to your binary. '
+  'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
+  'Will be removed in next major version')
+  TrashId clone() => TrashId()..mergeFromMessage(this);
+  @$core.Deprecated(
+  'Using this can add significant overhead to your binary. '
+  'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
+  'Will be removed in next major version')
+  TrashId copyWith(void Function(TrashId) updates) => super.copyWith((message) => updates(message as TrashId)) as TrashId; // ignore: deprecated_member_use
+  $pb.BuilderInfo get info_ => _i;
+  @$core.pragma('dart2js:noInline')
+  static TrashId create() => TrashId._();
+  TrashId createEmptyInstance() => create();
+  static $pb.PbList<TrashId> createRepeated() => $pb.PbList<TrashId>();
+  @$core.pragma('dart2js:noInline')
+  static TrashId getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<TrashId>(create);
+  static TrashId? _defaultInstance;
+
+  @$pb.TagNumber(1)
+  $core.String get id => $_getSZ(0);
+  @$pb.TagNumber(1)
+  set id($core.String v) { $_setString(0, v); }
+  @$pb.TagNumber(1)
+  $core.bool hasId() => $_has(0);
+  @$pb.TagNumber(1)
+  void clearId() => clearField(1);
+
+  @$pb.TagNumber(2)
+  TrashType get ty => $_getN(1);
+  @$pb.TagNumber(2)
+  set ty(TrashType v) { setField(2, v); }
+  @$pb.TagNumber(2)
+  $core.bool hasTy() => $_has(1);
+  @$pb.TagNumber(2)
+  void clearTy() => clearField(2);
+}
+

+ 22 - 22
frontend/app_flowy/packages/flowy_sdk/lib/protobuf/flowy-core-data-model/trash.pbjson.dart

@@ -20,28 +20,6 @@ const TrashType$json = const {

/// Descriptor for `TrashType`. Decode as a `google.protobuf.EnumDescriptorProto`.
final $typed_data.Uint8List trashTypeDescriptor = $convert.base64Decode('CglUcmFzaFR5cGUSCwoHVW5rbm93bhAAEggKBFZpZXcQARIHCgNBcHAQAg==');
-@$core.Deprecated('Use repeatedTrashIdDescriptor instead')
-const RepeatedTrashId$json = const {
-  '1': 'RepeatedTrashId',
-  '2': const [
-    const {'1': 'items', '3': 1, '4': 3, '5': 11, '6': '.TrashId', '10': 'items'},
-    const {'1': 'delete_all', '3': 2, '4': 1, '5': 8, '10': 'deleteAll'},
-  ],
-};
-
-/// Descriptor for `RepeatedTrashId`. Decode as a `google.protobuf.DescriptorProto`.
-final $typed_data.Uint8List repeatedTrashIdDescriptor = $convert.base64Decode('Cg9SZXBlYXRlZFRyYXNoSWQSHgoFaXRlbXMYASADKAsyCC5UcmFzaElkUgVpdGVtcxIdCgpkZWxldGVfYWxsGAIgASgIUglkZWxldGVBbGw=');
-@$core.Deprecated('Use trashIdDescriptor instead')
-const TrashId$json = const {
-  '1': 'TrashId',
-  '2': const [
-    const {'1': 'id', '3': 1, '4': 1, '5': 9, '10': 'id'},
-    const {'1': 'ty', '3': 2, '4': 1, '5': 14, '6': '.TrashType', '10': 'ty'},
-  ],
-};
-
-/// Descriptor for `TrashId`. Decode as a `google.protobuf.DescriptorProto`.
-final $typed_data.Uint8List trashIdDescriptor = $convert.base64Decode('CgdUcmFzaElkEg4KAmlkGAEgASgJUgJpZBIaCgJ0eRgCIAEoDjIKLlRyYXNoVHlwZVICdHk=');
@$core.Deprecated('Use trashDescriptor instead')
const Trash$json = const {
  '1': 'Trash',
@@ -66,3 +44,25 @@ const RepeatedTrash$json = const {

/// Descriptor for `RepeatedTrash`. Decode as a `google.protobuf.DescriptorProto`.
final $typed_data.Uint8List repeatedTrashDescriptor = $convert.base64Decode('Cg1SZXBlYXRlZFRyYXNoEhwKBWl0ZW1zGAEgAygLMgYuVHJhc2hSBWl0ZW1z');
+@$core.Deprecated('Use repeatedTrashIdDescriptor instead')
+const RepeatedTrashId$json = const {
+  '1': 'RepeatedTrashId',
+  '2': const [
+    const {'1': 'items', '3': 1, '4': 3, '5': 11, '6': '.TrashId', '10': 'items'},
+    const {'1': 'delete_all', '3': 2, '4': 1, '5': 8, '10': 'deleteAll'},
+  ],
+};
+
+/// Descriptor for `RepeatedTrashId`. Decode as a `google.protobuf.DescriptorProto`.
+final $typed_data.Uint8List repeatedTrashIdDescriptor = $convert.base64Decode('Cg9SZXBlYXRlZFRyYXNoSWQSHgoFaXRlbXMYASADKAsyCC5UcmFzaElkUgVpdGVtcxIdCgpkZWxldGVfYWxsGAIgASgIUglkZWxldGVBbGw=');
+@$core.Deprecated('Use trashIdDescriptor instead')
+const TrashId$json = const {
+  '1': 'TrashId',
+  '2': const [
+    const {'1': 'id', '3': 1, '4': 1, '5': 9, '10': 'id'},
+    const {'1': 'ty', '3': 2, '4': 1, '5': 14, '6': '.TrashType', '10': 'ty'},
+  ],
+};
+
+/// Descriptor for `TrashId`. Decode as a `google.protobuf.DescriptorProto`.
+final $typed_data.Uint8List trashIdDescriptor = $convert.base64Decode('CgdUcmFzaElkEg4KAmlkGAEgASgJUgJpZBIaCgJ0eRgCIAEoDjIKLlRyYXNoVHlwZVICdHk=');

+ 1 - 1
frontend/rust-lib/flowy-core/src/services/folder_editor.rs

@@ -39,7 +39,7 @@ impl FolderEditor {
        });
        let folder_pad = Arc::new(RwLock::new(rev_manager.load::<FolderPadBuilder>(cloud).await?));
        let rev_manager = Arc::new(rev_manager);
-        let ws_manager = make_folder_ws_manager(rev_manager.clone(), web_socket, folder_pad.clone()).await;
+        let ws_manager = make_folder_ws_manager(user_id, rev_manager.clone(), web_socket, folder_pad.clone()).await;

        let user_id = user_id.to_owned();
        Ok(Self {

+ 1 - 1
frontend/rust-lib/flowy-core/src/services/persistence/mod.rs

@@ -11,7 +11,7 @@ use tokio::sync::RwLock;
pub use version_1::{app_sql::*, trash_sql::*, v1_impl::V1Transaction, view_sql::*, workspace_sql::*};

use crate::{
-    module::{WorkspaceDatabase, WorkspaceUser},
+    module::WorkspaceDatabase,
    services::{folder_editor::FolderEditor, persistence::migration::FolderMigration},
};
use flowy_core_data_model::entities::{

+ 1 - 1
frontend/rust-lib/flowy-core/src/services/view/controller.rs

@@ -1,6 +1,6 @@
use bytes::Bytes;
use flowy_collaboration::entities::{
-    doc::{DocumentDelta, DocumentId},
+    document_info::{DocumentDelta, DocumentId},
    revision::{RepeatedRevision, Revision},
};


+ 1 - 1
frontend/rust-lib/flowy-core/src/services/view/event_handler.rs

@@ -15,7 +15,7 @@ use crate::{
    errors::FlowyError,
    services::{TrashController, ViewController},
};
-use flowy_collaboration::entities::doc::DocumentDelta;
+use flowy_collaboration::entities::document_info::DocumentDelta;
use flowy_core_data_model::entities::share::{ExportData, ExportParams, ExportRequest};
use lib_dispatch::prelude::{data_result, Data, DataResult, Unit};
use std::{convert::TryInto, sync::Arc};

+ 75 - 33
frontend/rust-lib/flowy-core/src/services/web_socket.rs

@@ -8,31 +8,32 @@ use flowy_collaboration::{
    folder::FolderPad,
};
use flowy_error::FlowyError;
-use flowy_sync::{
-    CompositeWSSinkDataProvider,
-    RevisionManager,
-    RevisionWSSinkDataProvider,
-    RevisionWSSteamConsumer,
-    RevisionWebSocket,
-    RevisionWebSocketManager,
-};
-use lib_infra::future::FutureResult;
+use flowy_sync::*;
+use lib_infra::future::{BoxResultFuture, FutureResult};
+use lib_ot::core::{Delta, OperationTransformable, PlainDelta, PlainTextAttributes};
use parking_lot::RwLock;
use std::{sync::Arc, time::Duration};

pub(crate) async fn make_folder_ws_manager(
+    user_id: &str,
    rev_manager: Arc<RevisionManager>,
    web_socket: Arc<dyn RevisionWebSocket>,
    folder_pad: Arc<RwLock<FolderPad>>,
) -> Arc<RevisionWebSocketManager> {
    let object_id = FOLDER_ID;
    let composite_sink_provider = Arc::new(CompositeWSSinkDataProvider::new(object_id, rev_manager.clone()));
-    let ws_stream_consumer = Arc::new(FolderWSStreamConsumerAdapter {
-        object_id: object_id.to_string(),
-        folder_pad,
+    let resolve_target = Arc::new(FolderRevisionResolveTarget { folder_pad });
+    let resolver = RevisionConflictResolver::<PlainTextAttributes>::new(
+        user_id,
+        resolve_target,
+        Arc::new(composite_sink_provider.clone()),
        rev_manager,
-        sink_provider: composite_sink_provider.clone(),
+    );
+
+    let ws_stream_consumer = Arc::new(FolderWSStreamConsumerAdapter {
+        resolver: Arc::new(resolver),
    });
+
    let sink_provider = Arc::new(FolderWSSinkDataProviderAdapter(composite_sink_provider));
    let ping_duration = Duration::from_millis(2000);
    Arc::new(RevisionWebSocketManager::new(
@@ -52,34 +53,75 @@ impl RevisionWSSinkDataProvider for FolderWSSinkDataProviderAdapter {
    }
}

-struct FolderWSStreamConsumerAdapter {
-    object_id: String,
+struct FolderRevisionResolveTarget {
    folder_pad: Arc<RwLock<FolderPad>>,
-    rev_manager: Arc<RevisionManager>,
-    sink_provider: Arc<CompositeWSSinkDataProvider>,
+}
+
+impl ResolverTarget<PlainTextAttributes> for FolderRevisionResolveTarget {
+    fn compose_delta(&self, delta: Delta<PlainTextAttributes>) -> BoxResultFuture<DeltaMD5, FlowyError> {
+        let folder_pad = self.folder_pad.clone();
+        Box::pin(async move {
+            let md5 = folder_pad.write().compose_remote_delta(delta)?;
+            Ok(md5)
+        })
+    }
+
+    fn transform_delta(
+        &self,
+        delta: Delta<PlainTextAttributes>,
+    ) -> BoxResultFuture<TransformDeltas<PlainTextAttributes>, FlowyError> {
+        let folder_pad = self.folder_pad.clone();
+        Box::pin(async move {
+            let read_guard = folder_pad.read();
+            let mut server_prime: Option<PlainDelta> = None;
+            let client_prime: PlainDelta;
+            if read_guard.is_empty() {
+                // Do nothing
+                client_prime = delta;
+            } else {
+                let (s_prime, c_prime) = read_guard.delta().transform(&delta)?;
+                client_prime = c_prime;
+                server_prime = Some(s_prime);
+            }
+            drop(read_guard);
+            Ok(TransformDeltas {
+                client_prime,
+                server_prime,
+            })
+        })
+    }
+
+    fn reset_delta(&self, delta: Delta<PlainTextAttributes>) -> BoxResultFuture<DeltaMD5, FlowyError> {
+        let folder_pad = self.folder_pad.clone();
+        Box::pin(async move {
+            let md5 = folder_pad.write().reset_folder(delta)?;
+            Ok(md5)
+        })
+    }
+}
+
+struct FolderWSStreamConsumerAdapter {
+    resolver: Arc<RevisionConflictResolver<PlainTextAttributes>>,
}

impl RevisionWSSteamConsumer for FolderWSStreamConsumerAdapter {
-    fn receive_push_revision(&self, bytes: Bytes) -> FutureResult<(), FlowyError> { todo!() }
+    fn receive_push_revision(&self, bytes: Bytes) -> BoxResultFuture<(), FlowyError> {
+        let resolver = self.resolver.clone();
+        Box::pin(async move { resolver.receive_bytes(bytes).await })
+    }

-    fn receive_ack(&self, id: String, ty: ServerRevisionWSDataType) -> FutureResult<(), FlowyError> {
-        let sink_provider = self.sink_provider.clone();
-        FutureResult::new(async move { sink_provider.ack_data(id, ty).await })
+    fn receive_ack(&self, id: String, ty: ServerRevisionWSDataType) -> BoxResultFuture<(), FlowyError> {
+        let resolver = self.resolver.clone();
+        Box::pin(async move { resolver.ack_revision(id, ty).await })
    }

-    fn receive_new_user_connect(&self, _new_user: NewDocumentUser) -> FutureResult<(), FlowyError> {
-        FutureResult::new(async move { Ok(()) })
+    fn receive_new_user_connect(&self, _new_user: NewDocumentUser) -> BoxResultFuture<(), FlowyError> {
+        // Do nothing by now, just a placeholder for future extension.
+        Box::pin(async move { Ok(()) })
    }

-    fn pull_revisions_in_range(&self, range: RevisionRange) -> FutureResult<(), FlowyError> {
-        let rev_manager = self.rev_manager.clone();
-        let sink_provider = self.sink_provider.clone();
-        let object_id = self.object_id.clone();
-        FutureResult::new(async move {
-            let revisions = rev_manager.get_revisions_in_range(range).await?;
-            let data = ClientRevisionWSData::from_revisions(&object_id, revisions);
-            sink_provider.push_data(data).await;
-            Ok(())
-        })
+    fn pull_revisions_in_range(&self, range: RevisionRange) -> BoxResultFuture<(), FlowyError> {
+        let resolver = self.resolver.clone();
+        Box::pin(async move { resolver.send_revisions(range).await })
    }
}

+ 2 - 2
frontend/rust-lib/flowy-document/src/controller.rs

@@ -3,7 +3,7 @@ use async_trait::async_trait;
use bytes::Bytes;
use dashmap::DashMap;
use flowy_collaboration::entities::{
-    doc::{DocumentDelta, DocumentId},
+    document_info::{DocumentDelta, DocumentId},
    revision::{md5, RepeatedRevision, Revision},
    ws_data::ServerRevisionWSData,
};
@@ -100,7 +100,7 @@ impl DocumentController {
        let result: Result<ServerRevisionWSData, protobuf::ProtobufError> = data.try_into();
        match result {
            Ok(data) => match self.ws_receivers.get(&data.object_id) {
-                None => tracing::error!("Can't find any source handler for {:?}", data.object_id),
+                None => tracing::error!("Can't find any source handler for {:?}-{:?}", data.object_id, data.ty),
                Some(handler) => match handler.receive_ws_data(data).await {
                    Ok(_) => {},
                    Err(e) => tracing::error!("{}", e),

+ 2 - 2
frontend/rust-lib/flowy-document/src/core/editor.rs

@@ -6,7 +6,7 @@ use crate::{
};
use bytes::Bytes;
use flowy_collaboration::{
-    entities::{doc::DocumentInfo, revision::Revision},
+    entities::{document_info::DocumentInfo, revision::Revision},
    errors::CollaborateResult,
    util::make_delta_from_revisions,
};
@@ -184,7 +184,7 @@ fn spawn_edit_queue(
#[cfg(feature = "flowy_unit_test")]
impl ClientDocumentEditor {
    pub async fn doc_json(&self) -> FlowyResult<String> {
-        let (ret, rx) = oneshot::channel::<CollaborateResult<crate::core::DocumentMD5>>();
+        let (ret, rx) = oneshot::channel::<CollaborateResult<String>>();
        let msg = EditorCommand::ReadDocumentAsJson { ret };
        let _ = self.edit_cmd_tx.send(msg).await;
        let s = rx.await.map_err(internal_error)??;

+ 4 - 33
frontend/rust-lib/flowy-document/src/core/queue.rs

@@ -1,10 +1,9 @@
use crate::{context::DocumentUser, core::web_socket::EditorCommandReceiver};
use async_stream::stream;
use flowy_collaboration::{
-    client_document::{history::UndoResult, ClientDocument, NewlineDoc},
-    entities::revision::{RepeatedRevision, RevId, Revision},
+    client_document::{history::UndoResult, ClientDocument},
+    entities::revision::{RevId, Revision},
    errors::CollaborateError,
-    util::make_delta_from_revisions,
};
use flowy_error::FlowyError;
use flowy_sync::{DeltaMD5, RevisionManager, TransformDeltas};
@@ -91,8 +90,8 @@ impl EditorCommandQueue {
                    let read_guard = self.document.read().await;
                    let mut server_prime: Option<RichTextDelta> = None;
                    let client_prime: RichTextDelta;
-                    // The document is empty if its text is equal to the initial text.
-                    if read_guard.is_empty::<NewlineDoc>() {
+
+                    if read_guard.is_empty() {
                        // Do nothing
                        client_prime = delta;
                    } else {
@@ -189,35 +188,7 @@ impl EditorCommandQueue {
    }
}

-fn make_client_and_server_revision(
-    doc_id: &str,
-    user_id: &str,
-    base_rev_id: i64,
-    rev_id: i64,
-    client_delta: RichTextDelta,
-    server_delta: Option<RichTextDelta>,
-    md5: DocumentMD5,
-) -> (Revision, Option<Revision>) {
-    let client_revision = Revision::new(
-        &doc_id,
-        base_rev_id,
-        rev_id,
-        client_delta.to_bytes(),
-        &user_id,
-        md5.clone(),
-    );
-
-    match server_delta {
-        None => (client_revision, None),
-        Some(server_delta) => {
-            let server_revision = Revision::new(&doc_id, base_rev_id, rev_id, server_delta.to_bytes(), &user_id, md5);
-            (client_revision, Some(server_revision))
-        },
-    }
-}
-
pub(crate) type Ret<T> = oneshot::Sender<Result<T, CollaborateError>>;
-pub(crate) type DocumentMD5 = String;

pub(crate) enum EditorCommand {
    ComposeLocalDelta {

+ 16 - 34
frontend/rust-lib/flowy-document/src/core/web_socket.rs

@@ -12,18 +12,7 @@ use flowy_collaboration::{
    errors::CollaborateResult,
};
use flowy_error::{internal_error, FlowyError};
-use flowy_sync::{
-    CompositeWSSinkDataProvider,
-    DeltaMD5,
-    ResolverTarget,
-    RevisionConflictResolver,
-    RevisionManager,
-    RevisionWSSinkDataProvider,
-    RevisionWSSteamConsumer,
-    RevisionWebSocket,
-    RevisionWebSocketManager,
-    TransformDeltas,
-};
+use flowy_sync::*;
use lib_infra::future::{BoxResultFuture, FutureResult};
use lib_ot::{core::Delta, rich_text::RichTextAttributes};
use lib_ws::WSConnectState;
@@ -45,15 +34,14 @@ pub(crate) async fn make_document_ws_manager(
     web_socket: Arc<dyn RevisionWebSocket>,
 ) -> Arc<RevisionWebSocketManager> {
     let composite_sink_provider = Arc::new(CompositeWSSinkDataProvider::new(&doc_id, rev_manager.clone()));
-    let resolver_target = Arc::new(DocumentRevisionResolver { edit_cmd_tx });
+    let resolve_target = Arc::new(DocumentRevisionResolveTarget { edit_cmd_tx });
     let resolver = RevisionConflictResolver::<RichTextAttributes>::new(
         &user_id,
-        resolver_target,
+        resolve_target,
         Arc::new(composite_sink_provider.clone()),
-        rev_manager.clone(),
+        rev_manager,
     );
     let ws_stream_consumer = Arc::new(DocumentWSSteamConsumerAdapter {
-        object_id: doc_id.clone(),
         resolver: Arc::new(resolver),
     });
 
@@ -66,16 +54,11 @@ pub(crate) async fn make_document_ws_manager(
         ws_stream_consumer,
         ping_duration,
     ));
-    listen_document_ws_state(&user_id, &doc_id, ws_manager.scribe_state(), rev_manager);
+    listen_document_ws_state(&user_id, &doc_id, ws_manager.scribe_state());
     ws_manager
 }
 
-fn listen_document_ws_state(
-    _user_id: &str,
-    _doc_id: &str,
-    mut subscriber: broadcast::Receiver<WSConnectState>,
-    _rev_manager: Arc<RevisionManager>,
-) {
+fn listen_document_ws_state(_user_id: &str, _doc_id: &str, mut subscriber: broadcast::Receiver<WSConnectState>) {
     tokio::spawn(async move {
         while let Ok(state) = subscriber.recv().await {
             match state {
@@ -89,29 +72,28 @@ fn listen_document_ws_state(
 }
 
 pub(crate) struct DocumentWSSteamConsumerAdapter {
-    object_id: String,
     resolver: Arc<RevisionConflictResolver<RichTextAttributes>>,
 }
 
 impl RevisionWSSteamConsumer for DocumentWSSteamConsumerAdapter {
-    fn receive_push_revision(&self, bytes: Bytes) -> FutureResult<(), FlowyError> {
+    fn receive_push_revision(&self, bytes: Bytes) -> BoxResultFuture<(), FlowyError> {
         let resolver = self.resolver.clone();
-        FutureResult::new(async move { resolver.receive_bytes(bytes).await })
+        Box::pin(async move { resolver.receive_bytes(bytes).await })
     }
 
-    fn receive_ack(&self, id: String, ty: ServerRevisionWSDataType) -> FutureResult<(), FlowyError> {
+    fn receive_ack(&self, id: String, ty: ServerRevisionWSDataType) -> BoxResultFuture<(), FlowyError> {
         let resolver = self.resolver.clone();
-        FutureResult::new(async move { resolver.ack_revision(id, ty).await })
+        Box::pin(async move { resolver.ack_revision(id, ty).await })
     }
 
-    fn receive_new_user_connect(&self, _new_user: NewDocumentUser) -> FutureResult<(), FlowyError> {
+    fn receive_new_user_connect(&self, _new_user: NewDocumentUser) -> BoxResultFuture<(), FlowyError> {
         // Do nothing by now, just a placeholder for future extension.
-        FutureResult::new(async move { Ok(()) })
+        Box::pin(async move { Ok(()) })
     }
 
-    fn pull_revisions_in_range(&self, range: RevisionRange) -> FutureResult<(), FlowyError> {
+    fn pull_revisions_in_range(&self, range: RevisionRange) -> BoxResultFuture<(), FlowyError> {
         let resolver = self.resolver.clone();
-        FutureResult::new(async move { resolver.send_revisions(range).await })
+        Box::pin(async move { resolver.send_revisions(range).await })
     }
 }
 
@@ -123,11 +105,11 @@ impl RevisionWSSinkDataProvider for DocumentWSSinkDataProviderAdapter {
     }
 }
 
-struct DocumentRevisionResolver {
+struct DocumentRevisionResolveTarget {
     edit_cmd_tx: EditorCommandSender,
 }
 
-impl ResolverTarget<RichTextAttributes> for DocumentRevisionResolver {
+impl ResolverTarget<RichTextAttributes> for DocumentRevisionResolveTarget {
     fn compose_delta(&self, delta: Delta<RichTextAttributes>) -> BoxResultFuture<DeltaMD5, FlowyError> {
         let tx = self.edit_cmd_tx.clone();
         Box::pin(async move {

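The adapter methods above switch from wrapping their bodies in `FutureResult::new(async { ... })` to returning `Box::pin(async move { ... })` directly, matching the `BoxResultFuture` return type the `RevisionWSSteamConsumer` trait now requires. Below is a self-contained sketch of that shape, under the assumption that `BoxResultFuture<T, E>` is essentially a pinned, boxed, sendable future of `Result<T, E>`; the alias is a local stand-in rather than lib_infra's actual definition, and the `futures` crate's `block_on` is used only to drive the example.

use std::future::Future;
use std::pin::Pin;

// Local stand-in for lib_infra's alias (assumed shape).
type BoxResultFuture<T, E> = Pin<Box<dyn Future<Output = Result<T, E>> + Send>>;

trait PushConsumer: Send + Sync {
    fn receive_push(&self, bytes: Vec<u8>) -> BoxResultFuture<usize, String>;
}

struct Adapter;

impl PushConsumer for Adapter {
    fn receive_push(&self, bytes: Vec<u8>) -> BoxResultFuture<usize, String> {
        // Box::pin turns the anonymous async block into the trait's boxed future,
        // the same move the DocumentWSSteamConsumerAdapter methods make above.
        Box::pin(async move { Ok(bytes.len()) })
    }
}

fn main() {
    let adapter = Adapter;
    let fut = adapter.receive_push(vec![1, 2, 3]);
    let n = futures::executor::block_on(fut).unwrap();
    assert_eq!(n, 3);
}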
+ 1 - 1
frontend/rust-lib/flowy-document/src/lib.rs

@@ -9,7 +9,7 @@ pub mod errors {
 }
 
 use crate::errors::FlowyError;
-use flowy_collaboration::entities::doc::{CreateDocParams, DocumentId, DocumentInfo, ResetDocumentParams};
+use flowy_collaboration::entities::document_info::{CreateDocParams, DocumentId, DocumentInfo, ResetDocumentParams};
 use lib_infra::future::FutureResult;
 
 pub trait DocumentCloudService: Send + Sync {

+ 1 - 1
frontend/rust-lib/flowy-net/src/http_server/document.rs

@@ -3,7 +3,7 @@ use backend_service::{
     request::{HttpRequestBuilder, ResponseMiddleware},
     response::FlowyResponse,
 };
-use flowy_collaboration::entities::doc::{CreateDocParams, DocumentId, DocumentInfo, ResetDocumentParams};
+use flowy_collaboration::entities::document_info::{CreateDocParams, DocumentId, DocumentInfo, ResetDocumentParams};
 use flowy_document::DocumentCloudService;
 use flowy_error::FlowyError;
 use lazy_static::lazy_static;

+ 73 - 18
frontend/rust-lib/flowy-net/src/local_server/persistence.rs

@@ -1,13 +1,13 @@
 use flowy_collaboration::{
-    entities::doc::DocumentInfo,
+    entities::{document_info::DocumentInfo, folder_info::FolderInfo},
     errors::CollaborateError,
     protobuf::{RepeatedRevision as RepeatedRevisionPB, Revision as RevisionPB},
     server_document::*,
-    util::{make_doc_from_revisions, repeated_revision_from_repeated_revision_pb},
+    server_folder::FolderCloudPersistence,
+    util::{make_document_info_from_revisions_pb, make_folder_from_revisions_pb},
 };
 use lib_infra::future::BoxResultFuture;
 use std::{
-    convert::TryInto,
     fmt::{Debug, Formatter},
     sync::Arc,
 };
@@ -45,21 +45,78 @@ impl std::default::Default for LocalDocumentCloudPersistence {
     }
 }
 
+impl FolderCloudPersistence for LocalDocumentCloudPersistence {
+    fn read_folder(&self, _user_id: &str, folder_id: &str) -> BoxResultFuture<FolderInfo, CollaborateError> {
+        let storage = self.storage.clone();
+        let folder_id = folder_id.to_owned();
+        Box::pin(async move {
+            let repeated_revision = storage.get_revisions(&folder_id, None).await?;
+            match make_folder_from_revisions_pb(&folder_id, repeated_revision)? {
+                Some(folder_info) => Ok(folder_info),
+                None => Err(CollaborateError::record_not_found()),
+            }
+        })
+    }
+
+    fn create_folder(
+        &self,
+        _user_id: &str,
+        folder_id: &str,
+        repeated_revision: RepeatedRevisionPB,
+    ) -> BoxResultFuture<Option<FolderInfo>, CollaborateError> {
+        let folder_id = folder_id.to_owned();
+        let storage = self.storage.clone();
+        Box::pin(async move {
+            let _ = storage.set_revisions(repeated_revision.clone()).await?;
+            make_folder_from_revisions_pb(&folder_id, repeated_revision)
+        })
+    }
+
+    fn save_folder_revisions(&self, repeated_revision: RepeatedRevisionPB) -> BoxResultFuture<(), CollaborateError> {
+        let storage = self.storage.clone();
+        Box::pin(async move {
+            let _ = storage.set_revisions(repeated_revision).await?;
+            Ok(())
+        })
+    }
+
+    fn read_folder_revisions(
+        &self,
+        folder_id: &str,
+        rev_ids: Option<Vec<i64>>,
+    ) -> BoxResultFuture<Vec<RevisionPB>, CollaborateError> {
+        let folder_id = folder_id.to_owned();
+        let storage = self.storage.clone();
+        Box::pin(async move {
+            let mut repeated_revision = storage.get_revisions(&folder_id, rev_ids).await?;
+            let revisions: Vec<RevisionPB> = repeated_revision.take_items().into();
+            Ok(revisions)
+        })
+    }
+
+    fn reset_folder(
+        &self,
+        folder_id: &str,
+        repeated_revision: RepeatedRevisionPB,
+    ) -> BoxResultFuture<(), CollaborateError> {
+        let storage = self.storage.clone();
+        let folder_id = folder_id.to_owned();
+        Box::pin(async move {
+            let _ = storage.reset_object(&folder_id, repeated_revision).await?;
+            Ok(())
+        })
+    }
+}
+
 impl DocumentCloudPersistence for LocalDocumentCloudPersistence {
     fn read_document(&self, doc_id: &str) -> BoxResultFuture<DocumentInfo, CollaborateError> {
         let storage = self.storage.clone();
         let doc_id = doc_id.to_owned();
         Box::pin(async move {
             let repeated_revision = storage.get_revisions(&doc_id, None).await?;
-            match make_doc_from_revisions(&doc_id, repeated_revision) {
-                Ok(Some(mut document_info_pb)) => {
-                    let document_info: DocumentInfo = (&mut document_info_pb)
-                        .try_into()
-                        .map_err(|e| CollaborateError::internal().context(e))?;
-                    Ok(document_info)
-                },
-                Ok(None) => Err(CollaborateError::record_not_found()),
-                Err(e) => Err(CollaborateError::internal().context(e)),
+            match make_document_info_from_revisions_pb(&doc_id, repeated_revision)? {
+                Some(document_info) => Ok(document_info),
+                None => Err(CollaborateError::record_not_found()),
             }
         })
     }
@@ -68,18 +125,16 @@ impl DocumentCloudPersistence for LocalDocumentCloudPersistence {
         &self,
         doc_id: &str,
         repeated_revision: RepeatedRevisionPB,
-    ) -> BoxResultFuture<DocumentInfo, CollaborateError> {
+    ) -> BoxResultFuture<Option<DocumentInfo>, CollaborateError> {
         let doc_id = doc_id.to_owned();
         let storage = self.storage.clone();
         Box::pin(async move {
             let _ = storage.set_revisions(repeated_revision.clone()).await?;
-            let repeated_revision = repeated_revision_from_repeated_revision_pb(repeated_revision)?;
-            let document_info = DocumentInfo::from_revisions(&doc_id, repeated_revision.into_inner())?;
-            Ok(document_info)
+            make_document_info_from_revisions_pb(&doc_id, repeated_revision)
         })
     }
 
-    fn read_revisions(
+    fn read_document_revisions(
         &self,
         doc_id: &str,
         rev_ids: Option<Vec<i64>>,
@@ -93,7 +148,7 @@ impl DocumentCloudPersistence for LocalDocumentCloudPersistence {
         })
     }
 
-    fn save_revisions(&self, repeated_revision: RepeatedRevisionPB) -> BoxResultFuture<(), CollaborateError> {
+    fn save_document_revisions(&self, repeated_revision: RepeatedRevisionPB) -> BoxResultFuture<(), CollaborateError> {
         let storage = self.storage.clone();
         Box::pin(async move {
             let _ = storage.set_revisions(repeated_revision).await?;

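Both the new `FolderCloudPersistence` impl and the existing `DocumentCloudPersistence` impl above reduce to one storage contract: fetch the stored revisions for an object id, append new ones, or reset the whole object. The in-memory sketch below is a simplified stand-in for the `storage` field used by `LocalDocumentCloudPersistence`, with revisions reduced to raw byte blobs keyed by object id instead of the crate's protobuf types.

use std::collections::HashMap;
use std::sync::{Arc, Mutex};

// Simplified stand-in: one revision = an opaque byte blob.
type RevisionBlob = Vec<u8>;

#[derive(Default, Clone)]
struct InMemoryRevisionStore {
    inner: Arc<Mutex<HashMap<String, Vec<RevisionBlob>>>>,
}

impl InMemoryRevisionStore {
    // Append revisions for an object (document or folder alike).
    fn set_revisions(&self, object_id: &str, revisions: Vec<RevisionBlob>) {
        let mut map = self.inner.lock().unwrap();
        map.entry(object_id.to_owned()).or_default().extend(revisions);
    }

    // Read every stored revision; None when the object was never written.
    fn get_revisions(&self, object_id: &str) -> Option<Vec<RevisionBlob>> {
        self.inner.lock().unwrap().get(object_id).cloned()
    }

    // Drop the old history and replace it wholesale, as reset_folder/reset_object do.
    fn reset_object(&self, object_id: &str, revisions: Vec<RevisionBlob>) {
        self.inner.lock().unwrap().insert(object_id.to_owned(), revisions);
    }
}

fn main() {
    let store = InMemoryRevisionStore::default();
    store.set_revisions("folder:alice", vec![b"rev-1".to_vec()]);
    store.set_revisions("folder:alice", vec![b"rev-2".to_vec()]);
    assert_eq!(store.get_revisions("folder:alice").unwrap().len(), 2);
    store.reset_object("folder:alice", vec![b"rev-3".to_vec()]);
    assert_eq!(store.get_revisions("folder:alice").unwrap().len(), 1);
    assert!(store.get_revisions("doc:missing").is_none());
}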
+ 53 - 4
frontend/rust-lib/flowy-net/src/local_server/server.rs

@@ -4,12 +4,13 @@ use bytes::Bytes;
 use flowy_collaboration::{
     client_document::default::initial_delta_string,
     entities::{
-        doc::{CreateDocParams, DocumentId, DocumentInfo, ResetDocumentParams},
+        document_info::{CreateDocParams, DocumentId, DocumentInfo, ResetDocumentParams},
         ws_data::{ClientRevisionWSData, ClientRevisionWSDataType},
     },
     errors::CollaborateError,
     protobuf::ClientRevisionWSData as ClientRevisionWSDataPB,
     server_document::ServerDocumentManager,
+    server_folder::ServerFolderManager,
     synchronizer::{RevisionSyncResponse, RevisionUser},
 };
 use flowy_core::module::{FolderCouldServiceV1, FolderCouldServiceV2};
@@ -26,6 +27,7 @@ use tokio::sync::{broadcast, mpsc, mpsc::UnboundedSender};
 
 
 pub struct LocalServer {
     doc_manager: Arc<ServerDocumentManager>,
+    folder_manager: Arc<ServerFolderManager>,
     stop_tx: RwLock<Option<mpsc::Sender<()>>>,
     client_ws_sender: mpsc::UnboundedSender<WebSocketRawMessage>,
     client_ws_receiver: broadcast::Sender<WebSocketRawMessage>,
@@ -37,11 +39,13 @@ impl LocalServer {
         client_ws_receiver: broadcast::Sender<WebSocketRawMessage>,
     ) -> Self {
         let persistence = Arc::new(LocalDocumentCloudPersistence::default());
-        let doc_manager = Arc::new(ServerDocumentManager::new(persistence));
+        let doc_manager = Arc::new(ServerDocumentManager::new(persistence.clone()));
+        let folder_manager = Arc::new(ServerFolderManager::new(persistence));
         let stop_tx = RwLock::new(None);
 
         LocalServer {
             doc_manager,
+            folder_manager,
             stop_tx,
             client_ws_sender,
             client_ws_receiver,
@@ -59,6 +63,7 @@ impl LocalServer {
         *self.stop_tx.write() = Some(stop_tx);
         let runner = LocalWebSocketRunner {
             doc_manager: self.doc_manager.clone(),
+            folder_manager: self.folder_manager.clone(),
             stop_rx: Some(stop_rx),
             client_ws_sender: self.client_ws_sender.clone(),
             client_ws_receiver: Some(self.client_ws_receiver.subscribe()),
@@ -69,6 +74,7 @@ impl LocalServer {
 
 
 struct LocalWebSocketRunner {
     doc_manager: Arc<ServerDocumentManager>,
+    folder_manager: Arc<ServerFolderManager>,
     stop_rx: Option<mpsc::Receiver<()>>,
     client_ws_sender: mpsc::UnboundedSender<WebSocketRawMessage>,
     client_ws_receiver: Option<broadcast::Receiver<WebSocketRawMessage>>,
@@ -107,11 +113,54 @@ impl LocalWebSocketRunner {
     async fn handle_message(&self, message: WebSocketRawMessage) -> Result<(), FlowyError> {
         let bytes = Bytes::from(message.data);
         let client_data = ClientRevisionWSData::try_from(bytes).map_err(internal_error)?;
-        let _ = self.handle_client_data(client_data, "".to_owned()).await?;
+        match message.module {
+            WSModule::Doc => {
+                let _ = self.handle_document_client_data(client_data, "".to_owned()).await?;
+                Ok(())
+            },
+            WSModule::Folder => {
+                let _ = self.handle_folder_client_data(client_data, "".to_owned()).await?;
+                Ok(())
+            },
+        }
+    }
+
+    pub async fn handle_folder_client_data(
+        &self,
+        client_data: ClientRevisionWSData,
+        user_id: String,
+    ) -> Result<(), CollaborateError> {
+        tracing::trace!(
+            "[LocalFolderServer] receive: {}:{}-{:?} ",
+            client_data.object_id,
+            client_data.id(),
+            client_data.ty,
+        );
+        let client_ws_sender = self.client_ws_sender.clone();
+        let user = Arc::new(LocalRevisionUser {
+            user_id,
+            client_ws_sender,
+        });
+        let ty = client_data.ty.clone();
+        let document_client_data: ClientRevisionWSDataPB = client_data.try_into().unwrap();
+        match ty {
+            ClientRevisionWSDataType::ClientPushRev => {
+                let _ = self
+                    .folder_manager
+                    .handle_client_revisions(user, document_client_data)
+                    .await?;
+            },
+            ClientRevisionWSDataType::ClientPing => {
+                let _ = self
+                    .folder_manager
+                    .handle_client_ping(user, document_client_data)
+                    .await?;
+            },
+        }
         Ok(())
         Ok(())
     }
 
-    pub async fn handle_client_data(
+    pub async fn handle_document_client_data(
         &self,
         client_data: ClientRevisionWSData,
         user_id: String,

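The key behavioural change in `LocalWebSocketRunner::handle_message` is that incoming frames are now routed by `message.module`: `WSModule::Doc` goes to the `ServerDocumentManager` and `WSModule::Folder` to the new `ServerFolderManager`. The sketch below shows only that dispatch shape with local stand-in types; the real managers, `ClientRevisionWSData`, and error handling are omitted.

// Local stand-ins mirroring the two modules the runner now distinguishes.
#[derive(Debug, Clone, Copy)]
enum WSModule {
    Doc,
    Folder,
}

struct RawMessage {
    module: WSModule,
    data: Vec<u8>,
}

struct Runner;

impl Runner {
    fn handle_document(&self, data: &[u8]) -> Result<(), String> {
        println!("document manager got {} bytes", data.len());
        Ok(())
    }

    fn handle_folder(&self, data: &[u8]) -> Result<(), String> {
        println!("folder manager got {} bytes", data.len());
        Ok(())
    }

    // Same shape as handle_message above: decode once, then branch per module.
    fn handle_message(&self, message: RawMessage) -> Result<(), String> {
        match message.module {
            WSModule::Doc => self.handle_document(&message.data),
            WSModule::Folder => self.handle_folder(&message.data),
        }
    }
}

fn main() {
    let runner = Runner;
    runner.handle_message(RawMessage { module: WSModule::Folder, data: vec![0; 16] }).unwrap();
    runner.handle_message(RawMessage { module: WSModule::Doc, data: vec![0; 8] }).unwrap();
}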
+ 63 - 65
frontend/rust-lib/flowy-sync/src/conflict_resolve.rs

@@ -3,7 +3,7 @@ use bytes::Bytes;
 use flowy_collaboration::{
     entities::{
         revision::{RepeatedRevision, Revision, RevisionRange},
-        ws_data::{ClientRevisionWSData, ServerRevisionWSDataType},
+        ws_data::ServerRevisionWSDataType,
     },
     util::make_delta_from_revisions,
 };
@@ -12,7 +12,6 @@ use lib_infra::future::BoxResultFuture;
 use lib_ot::core::{Attributes, Delta};
 use serde::de::DeserializeOwned;
 use std::{convert::TryFrom, sync::Arc};
-use tokio::sync::oneshot;
 
 pub type DeltaMD5 = String;
 
@@ -25,7 +24,7 @@ where
     fn reset_delta(&self, delta: Delta<T>) -> BoxResultFuture<DeltaMD5, FlowyError>;
 }
 
-pub trait ResolverRevisionSink: Send + Sync {
+pub trait ResolverRevisionSink: Send + Sync + 'static {
     fn send(&self, revisions: Vec<Revision>) -> BoxResultFuture<(), FlowyError>;
     fn ack(&self, rev_id: String, ty: ServerRevisionWSDataType) -> BoxResultFuture<(), FlowyError>;
 }
@@ -35,19 +34,18 @@ where
     T: Attributes + Send + Sync,
 {
     user_id: String,
-    target: Arc<dyn ResolverTarget<T>>,
+    target: Arc<dyn ResolverTarget<T> + Send + Sync>,
     rev_sink: Arc<dyn ResolverRevisionSink>,
     rev_manager: Arc<RevisionManager>,
 }
 
 impl<T> RevisionConflictResolver<T>
 where
-    T: Attributes + Send + Sync,
-    // DeserializeOwned + serde::Serialize,
+    T: Attributes + Send + Sync + DeserializeOwned + serde::Serialize,
 {
     pub fn new(
         user_id: &str,
-        target: Arc<dyn ResolverTarget<T>>,
+        target: Arc<dyn ResolverTarget<T> + Send + Sync>,
         rev_sink: Arc<dyn ResolverRevisionSink>,
         rev_manager: Arc<RevisionManager>,
     ) -> Self {
@@ -66,75 +64,75 @@ where
             return Ok(());
         }
 
-        // match self.handle_revision(repeated_revision).await? {
-        //     None => {},
-        //     Some(server_revision) => {
-        //         self.rev_sink.send(vec![server_revision]);
-        //     },
-        // }
+        match self.handle_revision(repeated_revision).await? {
+            None => {},
+            Some(server_revision) => {
+                self.rev_sink.send(vec![server_revision]).await?;
+            },
+        }
         Ok(())
     }
 
     pub async fn ack_revision(&self, rev_id: String, ty: ServerRevisionWSDataType) -> FlowyResult<()> {
-        self.rev_sink.ack(rev_id, ty).await
+        let _ = self.rev_sink.ack(rev_id, ty).await?;
+        Ok(())
     }
 
     pub async fn send_revisions(&self, range: RevisionRange) -> FlowyResult<()> {
         let revisions = self.rev_manager.get_revisions_in_range(range).await?;
-        self.rev_sink.send(revisions).await;
+        let _ = self.rev_sink.send(revisions).await?;
         Ok(())
     }
 
-    // async fn handle_revision(&self, repeated_revision: RepeatedRevision) ->
-    // FlowyResult<Option<Revision>> {     let mut revisions =
-    // repeated_revision.into_inner();     let first_revision =
-    // revisions.first().unwrap();     if let Some(local_revision) =
-    // self.rev_manager.get_revision(first_revision.rev_id).await {         if
-    // local_revision.md5 == first_revision.md5 {             // The local
-    // revision is equal to the pushed revision. Just ignore it.
-    // revisions = revisions.split_off(1);             if revisions.is_empty() {
-    //                 return Ok(None);
-    //             }
-    //         } else {
-    //             return Ok(None);
-    //         }
-    //     }
-    //
-    //     let new_delta = make_delta_from_revisions(revisions.clone())?;
-    //
-    //     let TransformDeltas {
-    //         client_prime,
-    //         server_prime,
-    //     } = self.target.transform_delta(new_delta).await?;
-    //
-    //     match server_prime {
-    //         None => {
-    //             // The server_prime is None means the client local revisions
-    // conflict with the             // server, and it needs to override the
-    // client delta.             let md5 =
-    // self.target.reset_delta(client_prime).await?;             let
-    // repeated_revision = RepeatedRevision::new(revisions);
-    // assert_eq!(repeated_revision.last().unwrap().md5, md5);             let _
-    // = self.rev_manager.reset_object(repeated_revision).await?;
-    // Ok(None)         },
-    //         Some(server_prime) => {
-    //             let md5 = self.target.compose_delta(client_prime.clone()).await?;
-    //             for revision in &revisions {
-    //                 let _ =
-    // self.rev_manager.add_remote_revision(revision).await?;             }
-    //             let (client_revision, server_revision) =
-    // make_client_and_server_revision(                 &self.user_id,
-    //                 &self.rev_manager,
-    //                 client_prime,
-    //                 Some(server_prime),
-    //                 md5,
-    //             );
-    //             let _ =
-    // self.rev_manager.add_remote_revision(&client_revision).await?;
-    //             Ok(server_revision)
-    //         },
-    //     }
-    // }
+    async fn handle_revision(&self, repeated_revision: RepeatedRevision) -> FlowyResult<Option<Revision>> {
+        let mut revisions = repeated_revision.into_inner();
+        let first_revision = revisions.first().unwrap();
+        if let Some(local_revision) = self.rev_manager.get_revision(first_revision.rev_id).await {
+            if local_revision.md5 == first_revision.md5 {
+                // The local revision is equal to the pushed revision. Just ignore it.
+                revisions = revisions.split_off(1);
+                if revisions.is_empty() {
+                    return Ok(None);
+                }
+            } else {
+                return Ok(None);
+            }
+        }
+
+        let new_delta = make_delta_from_revisions(revisions.clone())?;
+
+        let TransformDeltas {
+            client_prime,
+            server_prime,
+        } = self.target.transform_delta(new_delta).await?;
+
+        match server_prime {
+            None => {
+                // A server_prime of None means the client's local revisions conflict with the
+                // server, and the server needs to override the client delta.
+                let md5 = self.target.reset_delta(client_prime).await?;
+                let repeated_revision = RepeatedRevision::new(revisions);
+                assert_eq!(repeated_revision.last().unwrap().md5, md5);
+                let _ = self.rev_manager.reset_object(repeated_revision).await?;
+                Ok(None)
+            },
+            Some(server_prime) => {
+                let md5 = self.target.compose_delta(client_prime.clone()).await?;
+                for revision in &revisions {
+                    let _ = self.rev_manager.add_remote_revision(revision).await?;
+                }
+                let (client_revision, server_revision) = make_client_and_server_revision(
+                    &self.user_id,
+                    &self.rev_manager,
+                    client_prime,
+                    Some(server_prime),
+                    md5,
+                );
+                let _ = self.rev_manager.add_remote_revision(&client_revision).await?;
+                Ok(server_revision)
+            },
+        }
+    }
 }
 
 fn make_client_and_server_revision<T>(

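The re-enabled `handle_revision` above makes two decisions worth spelling out: if the first pushed revision carries the same md5 as the locally stored one it is dropped (`split_off(1)`) because the client already has it, and the transformed result then either composes onto the local state (`server_prime` present) or resets it (`server_prime` absent). The snippet below demonstrates only the first, de-duplication step with plain structs; the md5 values and revision ids are made up for the example.

#[derive(Debug, Clone, PartialEq)]
struct Revision {
    rev_id: i64,
    md5: String,
}

// Mirrors the head-of-list check in handle_revision: when the first pushed
// revision matches what is already stored locally, skip it and keep the rest.
fn drop_already_known(mut pushed: Vec<Revision>, local_head: Option<&Revision>) -> Vec<Revision> {
    if let (Some(local), Some(first)) = (local_head, pushed.first()) {
        if local.rev_id == first.rev_id && local.md5 == first.md5 {
            pushed = pushed.split_off(1);
        }
    }
    pushed
}

fn main() {
    let local = Revision { rev_id: 10, md5: "abc".into() };
    let pushed = vec![
        Revision { rev_id: 10, md5: "abc".into() }, // duplicate of the local head
        Revision { rev_id: 11, md5: "def".into() },
    ];
    let remaining = drop_already_known(pushed, Some(&local));
    assert_eq!(remaining, vec![Revision { rev_id: 11, md5: "def".into() }]);
}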
+ 7 - 8
frontend/rust-lib/flowy-sync/src/ws_manager.rs

@@ -5,7 +5,7 @@ use flowy_collaboration::entities::{
     revision::{RevId, Revision, RevisionRange},
     ws_data::{ClientRevisionWSData, NewDocumentUser, ServerRevisionWSData, ServerRevisionWSDataType},
 };
-use flowy_error::{internal_error, FlowyError, FlowyResult};
+use flowy_error::{FlowyError, FlowyResult};
 use futures_util::stream::StreamExt;
 use lib_infra::future::{BoxResultFuture, FutureResult};
 use lib_ws::WSConnectState;
@@ -17,16 +17,15 @@ use tokio::{
         mpsc::{Receiver, Sender},
         RwLock,
     },
-    task::spawn_blocking,
     time::{interval, Duration},
 };
 
 // The consumer consumes the messages pushed by the web socket.
 pub trait RevisionWSSteamConsumer: Send + Sync {
-    fn receive_push_revision(&self, bytes: Bytes) -> FutureResult<(), FlowyError>;
-    fn receive_ack(&self, id: String, ty: ServerRevisionWSDataType) -> FutureResult<(), FlowyError>;
-    fn receive_new_user_connect(&self, new_user: NewDocumentUser) -> FutureResult<(), FlowyError>;
-    fn pull_revisions_in_range(&self, range: RevisionRange) -> FutureResult<(), FlowyError>;
+    fn receive_push_revision(&self, bytes: Bytes) -> BoxResultFuture<(), FlowyError>;
+    fn receive_ack(&self, id: String, ty: ServerRevisionWSDataType) -> BoxResultFuture<(), FlowyError>;
+    fn receive_new_user_connect(&self, new_user: NewDocumentUser) -> BoxResultFuture<(), FlowyError>;
+    fn pull_revisions_in_range(&self, range: RevisionRange) -> BoxResultFuture<(), FlowyError>;
 }
 
 // The sink provides the data that will be sent through the web socket to the
@@ -173,9 +172,9 @@ impl RevisionWSStream {
     }
 
     async fn handle_message(&self, msg: ServerRevisionWSData) -> FlowyResult<()> {
-        let ServerRevisionWSData { object_id: _, ty, data } = msg;
+        let ServerRevisionWSData { object_id, ty, data } = msg;
         let bytes = Bytes::from(data);
-        tracing::trace!("[RevisionWSStream]: new message: {:?}", ty);
+        tracing::trace!("[RevisionWSStream]: new message: {}:{:?}", object_id, ty);
         match ty {
             ServerRevisionWSDataType::ServerPushRev => {
                 let _ = self.consumer.receive_push_revision(bytes).await?;

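Two small things change in `RevisionWSStream::handle_message`: the `object_id` is no longer discarded (it now appears in the trace line, which matters once folder and document streams share the same pipeline), and every consumer call goes through the boxed futures defined above. A compact stand-alone sketch of that dispatch, with `println!` standing in for `tracing::trace!` and plain branches standing in for the consumer trait calls:

#[derive(Debug)]
enum ServerDataType {
    ServerPushRev,
    ServerPullRev,
    ServerAck,
}

struct ServerData {
    object_id: String,
    ty: ServerDataType,
    data: Vec<u8>,
}

// Same dispatch shape as handle_message above.
fn handle_message(msg: ServerData) -> Result<(), String> {
    println!("[RevisionWSStream]: new message: {}:{:?}", msg.object_id, msg.ty);
    match msg.ty {
        ServerDataType::ServerPushRev => {
            println!("hand {} bytes to receive_push_revision", msg.data.len());
            Ok(())
        }
        ServerDataType::ServerPullRev => {
            println!("ask the consumer to pull the requested revision range");
            Ok(())
        }
        ServerDataType::ServerAck => {
            println!("ack the pending revision identified by the payload");
            Ok(())
        }
    }
}

fn main() {
    handle_message(ServerData {
        object_id: "doc:readme".to_owned(),
        ty: ServerDataType::ServerAck,
        data: vec![],
    })
    .unwrap();
}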
+ 1 - 1
frontend/rust-lib/flowy-test/src/helper.rs

@@ -1,5 +1,5 @@
 use crate::prelude::*;
-use flowy_collaboration::entities::doc::DocumentInfo;
+use flowy_collaboration::entities::document_info::DocumentInfo;
 use flowy_core::{
     entities::{
         app::*,

+ 4 - 1
shared-lib/flowy-collaboration/src/client_document/document_pad.rs

@@ -181,7 +181,10 @@ impl ClientDocument {
         }
     }
 
-    pub fn is_empty<C: InitialDocumentText>(&self) -> bool { self.delta == C::initial_delta() }
+    pub fn is_empty(&self) -> bool {
+        // The document is empty if its text is equal to the initial text.
+        self.delta == NewlineDoc::initial_delta()
+    }
 }
 
 impl ClientDocument {

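`is_empty` loses its type parameter above and is pinned to `NewlineDoc`: a client document counts as empty when its delta still equals the initial, newline-only delta. The idea, reduced to plain strings (the real comparison is between `RichTextDelta` values, not strings):

// Stand-in for NewlineDoc::initial_delta(): a fresh document holds a single newline.
fn initial_text() -> String {
    "\n".to_owned()
}

struct ClientDoc {
    text: String,
}

impl ClientDoc {
    fn new() -> Self {
        ClientDoc { text: initial_text() }
    }

    // Mirrors ClientDocument::is_empty: "empty" means "still equal to the initial content".
    fn is_empty(&self) -> bool {
        self.text == initial_text()
    }
}

fn main() {
    let mut doc = ClientDoc::new();
    assert!(doc.is_empty());
    doc.text.insert_str(0, "hello");
    assert!(!doc.is_empty());
}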
+ 1 - 25
shared-lib/flowy-collaboration/src/entities/doc.rs → shared-lib/flowy-collaboration/src/entities/document_info.rs

@@ -3,7 +3,7 @@ use crate::{
     errors::CollaborateError,
 };
 use flowy_derive::ProtoBuf;
-use lib_ot::{core::OperationTransformable, errors::OTError, rich_text::RichTextDelta};
+use lib_ot::{errors::OTError, rich_text::RichTextDelta};
 
 #[derive(ProtoBuf, Default, Debug, Clone)]
 pub struct CreateDocParams {
@@ -34,30 +34,6 @@ impl DocumentInfo {
         let delta = RichTextDelta::from_bytes(&self.text)?;
         Ok(delta)
     }
-
-    pub fn from_revisions(doc_id: &str, revisions: Vec<Revision>) -> Result<Self, CollaborateError> {
-        let mut document_delta = RichTextDelta::new();
-        let mut base_rev_id = 0;
-        let mut rev_id = 0;
-
-        for revision in revisions {
-            base_rev_id = revision.base_rev_id;
-            rev_id = revision.rev_id;
-            let delta = RichTextDelta::from_bytes(revision.delta_data)
-                .map_err(|e| CollaborateError::internal().context(format!("Parser revision failed. {:?}", e)))?;
-            document_delta = document_delta
-                .compose(&delta)
-                .map_err(|e| CollaborateError::internal().context(format!("Compose delta failed. {:?}", e)))?;
-        }
-        let text = document_delta.to_json();
-
-        Ok(DocumentInfo {
-            doc_id: doc_id.to_string(),
-            text,
-            rev_id,
-            base_rev_id,
-        })
-    }
 }
 
 impl std::convert::TryFrom<Revision> for DocumentInfo {

+ 19 - 0
shared-lib/flowy-collaboration/src/entities/folder_info.rs

@@ -0,0 +1,19 @@
+use flowy_derive::ProtoBuf;
+use lib_ot::core::PlainDelta;
+
+pub type FolderDelta = PlainDelta;
+
+#[derive(ProtoBuf, Default, Debug, Clone, Eq, PartialEq)]
+pub struct FolderInfo {
+    #[pb(index = 1)]
+    pub folder_id: String,
+
+    #[pb(index = 2)]
+    pub text: String,
+
+    #[pb(index = 3)]
+    pub rev_id: i64,
+
+    #[pb(index = 4)]
+    pub base_rev_id: i64,
+}

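`FolderInfo` mirrors `DocumentInfo` field-for-field, just keyed by `folder_id` instead of `doc_id` and carrying the folder's serialized delta in `text`. A plain-struct illustration of the payload shape (without the `ProtoBuf` derive, whose generated wire format appears further down in this commit):

// Plain mirror of the FolderInfo entity; the real struct derives ProtoBuf.
#[derive(Debug, Default, Clone, PartialEq, Eq)]
struct FolderInfo {
    folder_id: String,
    text: String,
    rev_id: i64,
    base_rev_id: i64,
}

fn main() {
    let info = FolderInfo {
        folder_id: "folder:alice".to_owned(),
        // The folder delta serialized as JSON text, e.g. the default empty folder.
        text: r#"{"workspaces":[],"trash":[]}"#.to_owned(),
        rev_id: 1,
        base_rev_id: 0,
    };
    assert_ne!(info, FolderInfo::default());
    println!("{:?}", info);
}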
+ 2 - 1
shared-lib/flowy-collaboration/src/entities/mod.rs

@@ -1,4 +1,5 @@
-pub mod doc;
+pub mod document_info;
+pub mod folder_info;
 pub mod parser;
 pub mod revision;
 pub mod ws_data;

+ 6 - 6
shared-lib/flowy-collaboration/src/entities/ws_data.rs

@@ -93,29 +93,29 @@ pub struct ServerRevisionWSData {
 
 
 pub struct ServerRevisionWSDataBuilder();
 impl ServerRevisionWSDataBuilder {
-    pub fn build_push_message(doc_id: &str, repeated_revision: RepeatedRevision) -> ServerRevisionWSData {
+    pub fn build_push_message(object_id: &str, repeated_revision: RepeatedRevision) -> ServerRevisionWSData {
         let bytes: Bytes = repeated_revision.try_into().unwrap();
         ServerRevisionWSData {
-            object_id: doc_id.to_string(),
+            object_id: object_id.to_string(),
             ty: ServerRevisionWSDataType::ServerPushRev,
             data: bytes.to_vec(),
         }
     }
 
-    pub fn build_pull_message(doc_id: &str, range: RevisionRange) -> ServerRevisionWSData {
+    pub fn build_pull_message(object_id: &str, range: RevisionRange) -> ServerRevisionWSData {
         let bytes: Bytes = range.try_into().unwrap();
         ServerRevisionWSData {
-            object_id: doc_id.to_string(),
+            object_id: object_id.to_string(),
             ty: ServerRevisionWSDataType::ServerPullRev,
             data: bytes.to_vec(),
         }
     }
 
-    pub fn build_ack_message(doc_id: &str, rev_id: i64) -> ServerRevisionWSData {
+    pub fn build_ack_message(object_id: &str, rev_id: i64) -> ServerRevisionWSData {
         let rev_id: RevId = rev_id.into();
         let bytes: Bytes = rev_id.try_into().unwrap();
         ServerRevisionWSData {
-            object_id: doc_id.to_string(),
+            object_id: object_id.to_string(),
             ty: ServerRevisionWSDataType::ServerAck,
             data: bytes.to_vec(),
         }

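Renaming the `doc_id` parameter to `object_id` in `ServerRevisionWSDataBuilder` is what lets the same push/pull/ack frames serve both documents and the new folder objects. A stand-alone sketch of that builder shape, with `Vec<u8>` payloads in place of the protobuf-encoded `RepeatedRevision`, `RevisionRange`, and `RevId` bodies:

#[derive(Debug)]
enum ServerDataType {
    ServerPushRev,
    ServerPullRev,
    ServerAck,
}

#[derive(Debug)]
struct ServerData {
    object_id: String,
    ty: ServerDataType,
    data: Vec<u8>,
}

struct ServerDataBuilder;

impl ServerDataBuilder {
    // All three builders take a generic object id, so "doc-1" and "folder-1"
    // travel through exactly the same frame format.
    fn build_push_message(object_id: &str, payload: Vec<u8>) -> ServerData {
        ServerData { object_id: object_id.to_owned(), ty: ServerDataType::ServerPushRev, data: payload }
    }

    fn build_pull_message(object_id: &str, payload: Vec<u8>) -> ServerData {
        ServerData { object_id: object_id.to_owned(), ty: ServerDataType::ServerPullRev, data: payload }
    }

    fn build_ack_message(object_id: &str, rev_id: i64) -> ServerData {
        ServerData { object_id: object_id.to_owned(), ty: ServerDataType::ServerAck, data: rev_id.to_be_bytes().to_vec() }
    }
}

fn main() {
    let ack = ServerDataBuilder::build_ack_message("folder-1", 7);
    println!("{:?}", ack);
    let push = ServerDataBuilder::build_push_message("doc-1", vec![1, 2, 3]);
    println!("{:?}", push);
}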
+ 32 - 13
shared-lib/flowy-collaboration/src/folder/folder_pad.rs

@@ -1,11 +1,14 @@
 use crate::{
-    entities::revision::{md5, Revision},
+    entities::{
+        folder_info::FolderDelta,
+        revision::{md5, Revision},
+    },
     errors::{CollaborateError, CollaborateResult},
     folder::builder::FolderPadBuilder,
 };
 use dissimilar::*;
 use flowy_core_data_model::entities::{app::App, trash::Trash, view::View, workspace::Workspace};
-use lib_ot::core::{Delta, FlowyStr, OperationTransformable, PlainDelta, PlainDeltaBuilder, PlainTextAttributes};
+use lib_ot::core::{Delta, FlowyStr, OperationTransformable, PlainDeltaBuilder, PlainTextAttributes};
 use serde::{Deserialize, Serialize};
 use std::sync::Arc;
 
@@ -14,10 +17,10 @@ pub struct FolderPad {
     pub(crate) workspaces: Vec<Arc<Workspace>>,
     pub(crate) trash: Vec<Arc<Trash>>,
     #[serde(skip)]
-    pub(crate) root: PlainDelta,
+    pub(crate) root: FolderDelta,
 }
 
-pub fn default_folder_delta() -> PlainDelta {
+pub fn default_folder_delta() -> FolderDelta {
     PlainDeltaBuilder::new()
         .insert(r#"{"workspaces":[],"trash":[]}"#)
         .build()
@@ -34,7 +37,7 @@ impl std::default::Default for FolderPad {
 }
 
 pub struct FolderChange {
-    pub delta: PlainDelta,
+    pub delta: FolderDelta,
     /// md5: the md5 of the FolderPad's delta after applying the change.
     pub md5: String,
 }
@@ -51,9 +54,25 @@ impl FolderPad {
         FolderPadBuilder::new().build_with_revisions(revisions)
     }
 
-    pub fn from_delta(delta: PlainDelta) -> CollaborateResult<Self> { FolderPadBuilder::new().build_with_delta(delta) }
+    pub fn from_delta(delta: FolderDelta) -> CollaborateResult<Self> { FolderPadBuilder::new().build_with_delta(delta) }
 
-    pub fn delta(&self) -> &PlainDelta { &self.root }
+    pub fn delta(&self) -> &FolderDelta { &self.root }
+
+    pub fn reset_folder(&mut self, delta: FolderDelta) -> CollaborateResult<String> {
+        let folder = FolderPad::from_delta(delta)?;
+        self.workspaces = folder.workspaces;
+        self.trash = folder.trash;
+        self.root = folder.root;
+
+        Ok(self.md5())
+    }
+
+    pub fn compose_remote_delta(&mut self, delta: FolderDelta) -> CollaborateResult<String> {
+        let composed_delta = self.root.compose(&delta)?;
+        self.reset_folder(composed_delta)
+    }
+
+    pub fn is_empty(&self) -> bool { self.workspaces.is_empty() && self.trash.is_empty() }
 
 
     pub fn create_workspace(&mut self, workspace: Workspace) -> CollaborateResult<Option<FolderChange>> {
         let workspace = Arc::new(workspace);
@@ -374,7 +393,7 @@ fn cal_diff(old: String, new: String) -> Delta<PlainTextAttributes> {
 #[cfg(test)]
 mod tests {
     #![allow(clippy::all)]
-    use crate::folder::folder_pad::FolderPad;
+    use crate::{entities::folder_info::FolderDelta, folder::folder_pad::FolderPad};
     use chrono::Utc;
     use flowy_core_data_model::entities::{app::App, trash::Trash, view::View, workspace::Workspace};
     use lib_ot::core::{OperationTransformable, PlainDelta, PlainDeltaBuilder};
@@ -689,7 +708,7 @@ mod tests {
         );
     }
 
-    fn test_folder() -> (FolderPad, PlainDelta, Workspace) {
+    fn test_folder() -> (FolderPad, FolderDelta, Workspace) {
         let mut folder = FolderPad::default();
         let folder_json = serde_json::to_string(&folder).unwrap();
         let mut delta = PlainDeltaBuilder::new().insert(&folder_json).build();
@@ -705,7 +724,7 @@ mod tests {
         (folder, delta, workspace)
     }
 
-    fn test_app_folder() -> (FolderPad, PlainDelta, App) {
+    fn test_app_folder() -> (FolderPad, FolderDelta, App) {
         let (mut folder, mut initial_delta, workspace) = test_folder();
         let mut app = App::default();
         app.workspace_id = workspace.id;
@@ -718,7 +737,7 @@ mod tests {
         (folder, initial_delta, app)
     }
 
-    fn test_view_folder() -> (FolderPad, PlainDelta, View) {
+    fn test_view_folder() -> (FolderPad, FolderDelta, View) {
         let (mut folder, mut initial_delta, app) = test_app_folder();
         let mut view = View::default();
         view.belong_to_id = app.id.clone();
@@ -731,7 +750,7 @@ mod tests {
         (folder, initial_delta, view)
     }
 
-    fn test_trash() -> (FolderPad, PlainDelta, Trash) {
+    fn test_trash() -> (FolderPad, FolderDelta, Trash) {
         let mut folder = FolderPad::default();
         let folder_json = serde_json::to_string(&folder).unwrap();
         let mut delta = PlainDeltaBuilder::new().insert(&folder_json).build();
@@ -747,7 +766,7 @@ mod tests {
         (folder, delta, trash)
     }
 
-    fn make_folder_from_delta(mut initial_delta: PlainDelta, deltas: Vec<PlainDelta>) -> FolderPad {
+    fn make_folder_from_delta(mut initial_delta: FolderDelta, deltas: Vec<PlainDelta>) -> FolderPad {
         for delta in deltas {
             initial_delta = initial_delta.compose(&delta).unwrap();
         }

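The new `compose_remote_delta` and `reset_folder` methods above both funnel through the same idea: apply an incoming delta to the serialized `root`, then rebuild `workspaces` and `trash` from the result and report the new md5. The toy model below keeps that flow but treats a "delta" as a full replacement JSON snapshot and uses the serialized JSON itself as the checksum, so it stays free of the crate's delta and md5 machinery; `serde` and `serde_json` are assumed as dependencies.

use serde::Deserialize;

#[derive(Debug, Default, Deserialize)]
struct FolderPadLite {
    workspaces: Vec<String>,
    trash: Vec<String>,
    #[serde(skip)]
    root: String, // serialized form of the folder, like FolderPad::root
}

impl FolderPadLite {
    // reset_folder: rebuild the whole state from a delta (here: a JSON snapshot).
    fn reset_folder(&mut self, delta_json: &str) -> Result<String, serde_json::Error> {
        let rebuilt: FolderPadLite = serde_json::from_str(delta_json)?;
        self.workspaces = rebuilt.workspaces;
        self.trash = rebuilt.trash;
        self.root = delta_json.to_owned();
        Ok(self.checksum())
    }

    // compose_remote_delta: the real pad composes the incoming delta onto `root`
    // first; with snapshot "deltas" composing degenerates to replacing.
    fn compose_remote_delta(&mut self, delta_json: &str) -> Result<String, serde_json::Error> {
        self.reset_folder(delta_json)
    }

    fn is_empty(&self) -> bool {
        self.workspaces.is_empty() && self.trash.is_empty()
    }

    // Stand-in for the md5 the real methods return.
    fn checksum(&self) -> String {
        self.root.clone()
    }
}

fn main() -> Result<(), serde_json::Error> {
    let mut pad = FolderPadLite::default();
    assert!(pad.is_empty());
    pad.compose_remote_delta(r#"{"workspaces":["w1"],"trash":[]}"#)?;
    assert!(!pad.is_empty());
    Ok(())
}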
+ 1 - 0
shared-lib/flowy-collaboration/src/lib.rs

@@ -4,6 +4,7 @@ pub mod errors;
 pub mod folder;
 pub mod protobuf;
 pub mod server_document;
+pub mod server_folder;
 pub mod synchronizer;
 pub mod util;
 

+ 22 - 22
shared-lib/flowy-collaboration/src/protobuf/model/doc.rs → shared-lib/flowy-collaboration/src/protobuf/model/document_info.rs

@@ -17,7 +17,7 @@
 #![allow(trivial_casts)]
 #![allow(unused_imports)]
 #![allow(unused_results)]
-//! Generated file from `doc.proto`
+//! Generated file from `document_info.proto`
 
 /// Generated files are compatible only with the same version
 /// of protobuf runtime.
@@ -1323,27 +1323,27 @@ impl ::protobuf::reflect::ProtobufValue for DocumentId {
 }
 
 static file_descriptor_proto_data: &'static [u8] = b"\
-    \n\tdoc.proto\x1a\x0erevision.proto\"R\n\x0fCreateDocParams\x12\x0e\n\
-    \x02id\x18\x01\x20\x01(\tR\x02id\x12/\n\trevisions\x18\x02\x20\x01(\x0b2\
-    \x11.RepeatedRevisionR\trevisions\"p\n\x0cDocumentInfo\x12\x15\n\x06doc_\
-    id\x18\x01\x20\x01(\tR\x05docId\x12\x12\n\x04text\x18\x02\x20\x01(\tR\
-    \x04text\x12\x15\n\x06rev_id\x18\x03\x20\x01(\x03R\x05revId\x12\x1e\n\
-    \x0bbase_rev_id\x18\x04\x20\x01(\x03R\tbaseRevId\"]\n\x13ResetDocumentPa\
-    rams\x12\x15\n\x06doc_id\x18\x01\x20\x01(\tR\x05docId\x12/\n\trevisions\
-    \x18\x02\x20\x01(\x0b2\x11.RepeatedRevisionR\trevisions\"E\n\rDocumentDe\
-    lta\x12\x15\n\x06doc_id\x18\x01\x20\x01(\tR\x05docId\x12\x1d\n\ndelta_js\
-    on\x18\x02\x20\x01(\tR\tdeltaJson\"S\n\nNewDocUser\x12\x17\n\x07user_id\
-    \x18\x01\x20\x01(\tR\x06userId\x12\x15\n\x06rev_id\x18\x02\x20\x01(\x03R\
-    \x05revId\x12\x15\n\x06doc_id\x18\x03\x20\x01(\tR\x05docId\"#\n\nDocumen\
-    tId\x12\x15\n\x06doc_id\x18\x01\x20\x01(\tR\x05docIdJ\xaf\x07\n\x06\x12\
-    \x04\0\0\x1c\x01\n\x08\n\x01\x0c\x12\x03\0\0\x12\n\t\n\x02\x03\0\x12\x03\
-    \x01\0\x18\n\n\n\x02\x04\0\x12\x04\x03\0\x06\x01\n\n\n\x03\x04\0\x01\x12\
-    \x03\x03\x08\x17\n\x0b\n\x04\x04\0\x02\0\x12\x03\x04\x04\x12\n\x0c\n\x05\
-    \x04\0\x02\0\x05\x12\x03\x04\x04\n\n\x0c\n\x05\x04\0\x02\0\x01\x12\x03\
-    \x04\x0b\r\n\x0c\n\x05\x04\0\x02\0\x03\x12\x03\x04\x10\x11\n\x0b\n\x04\
-    \x04\0\x02\x01\x12\x03\x05\x04#\n\x0c\n\x05\x04\0\x02\x01\x06\x12\x03\
-    \x05\x04\x14\n\x0c\n\x05\x04\0\x02\x01\x01\x12\x03\x05\x15\x1e\n\x0c\n\
-    \x05\x04\0\x02\x01\x03\x12\x03\x05!\"\n\n\n\x02\x04\x01\x12\x04\x07\0\
+    \n\x13document_info.proto\x1a\x0erevision.proto\"R\n\x0fCreateDocParams\
+    \x12\x0e\n\x02id\x18\x01\x20\x01(\tR\x02id\x12/\n\trevisions\x18\x02\x20\
+    \x01(\x0b2\x11.RepeatedRevisionR\trevisions\"p\n\x0cDocumentInfo\x12\x15\
+    \n\x06doc_id\x18\x01\x20\x01(\tR\x05docId\x12\x12\n\x04text\x18\x02\x20\
+    \x01(\tR\x04text\x12\x15\n\x06rev_id\x18\x03\x20\x01(\x03R\x05revId\x12\
+    \x1e\n\x0bbase_rev_id\x18\x04\x20\x01(\x03R\tbaseRevId\"]\n\x13ResetDocu\
+    mentParams\x12\x15\n\x06doc_id\x18\x01\x20\x01(\tR\x05docId\x12/\n\trevi\
+    sions\x18\x02\x20\x01(\x0b2\x11.RepeatedRevisionR\trevisions\"E\n\rDocum\
+    entDelta\x12\x15\n\x06doc_id\x18\x01\x20\x01(\tR\x05docId\x12\x1d\n\ndel\
+    ta_json\x18\x02\x20\x01(\tR\tdeltaJson\"S\n\nNewDocUser\x12\x17\n\x07use\
+    r_id\x18\x01\x20\x01(\tR\x06userId\x12\x15\n\x06rev_id\x18\x02\x20\x01(\
+    \x03R\x05revId\x12\x15\n\x06doc_id\x18\x03\x20\x01(\tR\x05docId\"#\n\nDo\
+    cumentId\x12\x15\n\x06doc_id\x18\x01\x20\x01(\tR\x05docIdJ\xaf\x07\n\x06\
+    \x12\x04\0\0\x1c\x01\n\x08\n\x01\x0c\x12\x03\0\0\x12\n\t\n\x02\x03\0\x12\
+    \x03\x01\0\x18\n\n\n\x02\x04\0\x12\x04\x03\0\x06\x01\n\n\n\x03\x04\0\x01\
+    \x12\x03\x03\x08\x17\n\x0b\n\x04\x04\0\x02\0\x12\x03\x04\x04\x12\n\x0c\n\
+    \x05\x04\0\x02\0\x05\x12\x03\x04\x04\n\n\x0c\n\x05\x04\0\x02\0\x01\x12\
+    \x03\x04\x0b\r\n\x0c\n\x05\x04\0\x02\0\x03\x12\x03\x04\x10\x11\n\x0b\n\
+    \x04\x04\0\x02\x01\x12\x03\x05\x04#\n\x0c\n\x05\x04\0\x02\x01\x06\x12\
+    \x03\x05\x04\x14\n\x0c\n\x05\x04\0\x02\x01\x01\x12\x03\x05\x15\x1e\n\x0c\
+    \n\x05\x04\0\x02\x01\x03\x12\x03\x05!\"\n\n\n\x02\x04\x01\x12\x04\x07\0\
     \x0c\x01\n\n\n\x03\x04\x01\x01\x12\x03\x07\x08\x14\n\x0b\n\x04\x04\x01\
     \x02\0\x12\x03\x08\x04\x16\n\x0c\n\x05\x04\x01\x02\0\x05\x12\x03\x08\x04\
     \n\n\x0c\n\x05\x04\x01\x02\0\x01\x12\x03\x08\x0b\x11\n\x0c\n\x05\x04\x01\

+ 327 - 0
shared-lib/flowy-collaboration/src/protobuf/model/folder_info.rs

@@ -0,0 +1,327 @@
+// This file is generated by rust-protobuf 2.22.1. Do not edit
+// @generated
+
+// https://github.com/rust-lang/rust-clippy/issues/702
+#![allow(unknown_lints)]
+#![allow(clippy::all)]
+
+#![allow(unused_attributes)]
+#![cfg_attr(rustfmt, rustfmt::skip)]
+
+#![allow(box_pointers)]
+#![allow(dead_code)]
+#![allow(missing_docs)]
+#![allow(non_camel_case_types)]
+#![allow(non_snake_case)]
+#![allow(non_upper_case_globals)]
+#![allow(trivial_casts)]
+#![allow(unused_imports)]
+#![allow(unused_results)]
+//! Generated file from `folder_info.proto`
+
+/// Generated files are compatible only with the same version
+/// of protobuf runtime.
+// const _PROTOBUF_VERSION_CHECK: () = ::protobuf::VERSION_2_22_1;
+
+#[derive(PartialEq,Clone,Default)]
+pub struct FolderInfo {
+    // message fields
+    pub folder_id: ::std::string::String,
+    pub text: ::std::string::String,
+    pub rev_id: i64,
+    pub base_rev_id: i64,
+    // special fields
+    pub unknown_fields: ::protobuf::UnknownFields,
+    pub cached_size: ::protobuf::CachedSize,
+}
+
+impl<'a> ::std::default::Default for &'a FolderInfo {
+    fn default() -> &'a FolderInfo {
+        <FolderInfo as ::protobuf::Message>::default_instance()
+    }
+}
+
+impl FolderInfo {
+    pub fn new() -> FolderInfo {
+        ::std::default::Default::default()
+    }
+
+    // string folder_id = 1;
+
+
+    pub fn get_folder_id(&self) -> &str {
+        &self.folder_id
+    }
+    pub fn clear_folder_id(&mut self) {
+        self.folder_id.clear();
+    }
+
+    // Param is passed by value, moved
+    pub fn set_folder_id(&mut self, v: ::std::string::String) {
+        self.folder_id = v;
+    }
+
+    // Mutable pointer to the field.
+    // If field is not initialized, it is initialized with default value first.
+    pub fn mut_folder_id(&mut self) -> &mut ::std::string::String {
+        &mut self.folder_id
+    }
+
+    // Take field
+    pub fn take_folder_id(&mut self) -> ::std::string::String {
+        ::std::mem::replace(&mut self.folder_id, ::std::string::String::new())
+    }
+
+    // string text = 2;
+
+
+    pub fn get_text(&self) -> &str {
+        &self.text
+    }
+    pub fn clear_text(&mut self) {
+        self.text.clear();
+    }
+
+    // Param is passed by value, moved
+    pub fn set_text(&mut self, v: ::std::string::String) {
+        self.text = v;
+    }
+
+    // Mutable pointer to the field.
+    // If field is not initialized, it is initialized with default value first.
+    pub fn mut_text(&mut self) -> &mut ::std::string::String {
+        &mut self.text
+    }
+
+    // Take field
+    pub fn take_text(&mut self) -> ::std::string::String {
+        ::std::mem::replace(&mut self.text, ::std::string::String::new())
+    }
+
+    // int64 rev_id = 3;
+
+
+    pub fn get_rev_id(&self) -> i64 {
+        self.rev_id
+    }
+    pub fn clear_rev_id(&mut self) {
+        self.rev_id = 0;
+    }
+
+    // Param is passed by value, moved
+    pub fn set_rev_id(&mut self, v: i64) {
+        self.rev_id = v;
+    }
+
+    // int64 base_rev_id = 4;
+
+
+    pub fn get_base_rev_id(&self) -> i64 {
+        self.base_rev_id
+    }
+    pub fn clear_base_rev_id(&mut self) {
+        self.base_rev_id = 0;
+    }
+
+    // Param is passed by value, moved
+    pub fn set_base_rev_id(&mut self, v: i64) {
+        self.base_rev_id = v;
+    }
+}
+
+impl ::protobuf::Message for FolderInfo {
+    fn is_initialized(&self) -> bool {
+        true
+    }
+
+    fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
+        while !is.eof()? {
+            let (field_number, wire_type) = is.read_tag_unpack()?;
+            match field_number {
+                1 => {
+                    ::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.folder_id)?;
+                },
+                2 => {
+                    ::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.text)?;
+                },
+                3 => {
+                    if wire_type != ::protobuf::wire_format::WireTypeVarint {
+                        return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
+                    }
+                    let tmp = is.read_int64()?;
+                    self.rev_id = tmp;
+                },
+                4 => {
+                    if wire_type != ::protobuf::wire_format::WireTypeVarint {
+                        return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
+                    }
+                    let tmp = is.read_int64()?;
+                    self.base_rev_id = tmp;
+                },
+                _ => {
+                    ::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
+                },
+            };
+        }
+        ::std::result::Result::Ok(())
+    }
+
+    // Compute sizes of nested messages
+    #[allow(unused_variables)]
+    fn compute_size(&self) -> u32 {
+        let mut my_size = 0;
+        if !self.folder_id.is_empty() {
+            my_size += ::protobuf::rt::string_size(1, &self.folder_id);
+        }
+        if !self.text.is_empty() {
+            my_size += ::protobuf::rt::string_size(2, &self.text);
+        }
+        if self.rev_id != 0 {
+            my_size += ::protobuf::rt::value_size(3, self.rev_id, ::protobuf::wire_format::WireTypeVarint);
+        }
+        if self.base_rev_id != 0 {
+            my_size += ::protobuf::rt::value_size(4, self.base_rev_id, ::protobuf::wire_format::WireTypeVarint);
+        }
+        my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
+        self.cached_size.set(my_size);
+        my_size
+    }
+
+    fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
+        if !self.folder_id.is_empty() {
+            os.write_string(1, &self.folder_id)?;
+        }
+        if !self.text.is_empty() {
+            os.write_string(2, &self.text)?;
+        }
+        if self.rev_id != 0 {
+            os.write_int64(3, self.rev_id)?;
+        }
+        if self.base_rev_id != 0 {
+            os.write_int64(4, self.base_rev_id)?;
+        }
+        os.write_unknown_fields(self.get_unknown_fields())?;
+        ::std::result::Result::Ok(())
+    }
+
+    fn get_cached_size(&self) -> u32 {
+        self.cached_size.get()
+    }
+
+    fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
+        &self.unknown_fields
+    }
+
+    fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
+        &mut self.unknown_fields
+    }
+
+    fn as_any(&self) -> &dyn (::std::any::Any) {
+        self as &dyn (::std::any::Any)
+    }
+    fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
+        self as &mut dyn (::std::any::Any)
+    }
+    fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
+        self
+    }
+
+    fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
+        Self::descriptor_static()
+    }
+
+    fn new() -> FolderInfo {
+        FolderInfo::new()
+    }
+
+    fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
+        static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
+        descriptor.get(|| {
+            let mut fields = ::std::vec::Vec::new();
+            fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
+                "folder_id",
+                |m: &FolderInfo| { &m.folder_id },
+                |m: &mut FolderInfo| { &mut m.folder_id },
+            ));
+            fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
+                "text",
+                |m: &FolderInfo| { &m.text },
+                |m: &mut FolderInfo| { &mut m.text },
+            ));
+            fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeInt64>(
+                "rev_id",
+                |m: &FolderInfo| { &m.rev_id },
+                |m: &mut FolderInfo| { &mut m.rev_id },
+            ));
+            fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeInt64>(
+                "base_rev_id",
+                |m: &FolderInfo| { &m.base_rev_id },
+                |m: &mut FolderInfo| { &mut m.base_rev_id },
+            ));
+            ::protobuf::reflect::MessageDescriptor::new_pb_name::<FolderInfo>(
+                "FolderInfo",
+                fields,
+                file_descriptor_proto()
+            )
+        })
+    }
+
+    fn default_instance() -> &'static FolderInfo {
+        static instance: ::protobuf::rt::LazyV2<FolderInfo> = ::protobuf::rt::LazyV2::INIT;
+        instance.get(FolderInfo::new)
+    }
+}
+
+impl ::protobuf::Clear for FolderInfo {
+    fn clear(&mut self) {
+        self.folder_id.clear();
+        self.text.clear();
+        self.rev_id = 0;
+        self.base_rev_id = 0;
+        self.unknown_fields.clear();
+    }
+}
+
+impl ::std::fmt::Debug for FolderInfo {
+    fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
+        ::protobuf::text_format::fmt(self, f)
+    }
+}
+
+impl ::protobuf::reflect::ProtobufValue for FolderInfo {
+    fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
+        ::protobuf::reflect::ReflectValueRef::Message(self)
+    }
+}
+
+static file_descriptor_proto_data: &'static [u8] = b"\
+    \n\x11folder_info.proto\"t\n\nFolderInfo\x12\x1b\n\tfolder_id\x18\x01\
+    \x20\x01(\tR\x08folderId\x12\x12\n\x04text\x18\x02\x20\x01(\tR\x04text\
+    \x12\x15\n\x06rev_id\x18\x03\x20\x01(\x03R\x05revId\x12\x1e\n\x0bbase_re\
+    v_id\x18\x04\x20\x01(\x03R\tbaseRevIdJ\x86\x02\n\x06\x12\x04\0\0\x07\x01\
+    \n\x08\n\x01\x0c\x12\x03\0\0\x12\n\n\n\x02\x04\0\x12\x04\x02\0\x07\x01\n\
+    \n\n\x03\x04\0\x01\x12\x03\x02\x08\x12\n\x0b\n\x04\x04\0\x02\0\x12\x03\
+    \x03\x04\x19\n\x0c\n\x05\x04\0\x02\0\x05\x12\x03\x03\x04\n\n\x0c\n\x05\
+    \x04\0\x02\0\x01\x12\x03\x03\x0b\x14\n\x0c\n\x05\x04\0\x02\0\x03\x12\x03\
+    \x03\x17\x18\n\x0b\n\x04\x04\0\x02\x01\x12\x03\x04\x04\x14\n\x0c\n\x05\
+    \x04\0\x02\x01\x05\x12\x03\x04\x04\n\n\x0c\n\x05\x04\0\x02\x01\x01\x12\
+    \x03\x04\x0b\x0f\n\x0c\n\x05\x04\0\x02\x01\x03\x12\x03\x04\x12\x13\n\x0b\
+    \n\x04\x04\0\x02\x02\x12\x03\x05\x04\x15\n\x0c\n\x05\x04\0\x02\x02\x05\
+    \x12\x03\x05\x04\t\n\x0c\n\x05\x04\0\x02\x02\x01\x12\x03\x05\n\x10\n\x0c\
+    \n\x05\x04\0\x02\x02\x03\x12\x03\x05\x13\x14\n\x0b\n\x04\x04\0\x02\x03\
+    \x12\x03\x06\x04\x1a\n\x0c\n\x05\x04\0\x02\x03\x05\x12\x03\x06\x04\t\n\
+    \x0c\n\x05\x04\0\x02\x03\x01\x12\x03\x06\n\x15\n\x0c\n\x05\x04\0\x02\x03\
+    \x03\x12\x03\x06\x18\x19b\x06proto3\
+";
+
+static file_descriptor_proto_lazy: ::protobuf::rt::LazyV2<::protobuf::descriptor::FileDescriptorProto> = ::protobuf::rt::LazyV2::INIT;
+
+fn parse_descriptor_proto() -> ::protobuf::descriptor::FileDescriptorProto {
+    ::protobuf::Message::parse_from_bytes(file_descriptor_proto_data).unwrap()
+}
+
+pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {
+    file_descriptor_proto_lazy.get(|| {
+        parse_descriptor_proto()
+    })
+}
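
For reference, the generated FolderInfo above follows the usual rust-protobuf 2.x accessor conventions, so it can be round-tripped with the same Message API that parse_descriptor_proto relies on. A minimal sketch, assuming the generated FolderInfo type and the protobuf 2.x runtime are in scope; the field values are illustrative only:

    use protobuf::Message;

    fn folder_info_roundtrip() -> protobuf::ProtobufResult<()> {
        // Build a FolderInfo the way make_folder_pb_from_revisions_pb does later in this
        // diff, then round-trip it through the generated serializer.
        let mut info = FolderInfo::new();
        info.set_folder_id("folder-1".to_owned()); // illustrative id
        info.set_text("[]".to_owned());            // stands in for a serialized folder delta
        info.set_rev_id(1);
        info.set_base_rev_id(0);

        let bytes = info.write_to_bytes()?;
        let decoded = FolderInfo::parse_from_bytes(&bytes)?;
        assert_eq!(decoded.get_folder_id(), "folder-1");
        assert_eq!(decoded.get_rev_id(), 1);
        Ok(())
    }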

+ 7 - 4
shared-lib/flowy-collaboration/src/protobuf/model/mod.rs

@@ -1,11 +1,14 @@
 #![cfg_attr(rustfmt, rustfmt::skip)]
 // Auto-generated, do not edit
 
-mod ws;
-pub use ws::*;
+mod folder_info;
+pub use folder_info::*;
+
+mod ws_data;
+pub use ws_data::*;
 
 mod revision;
 pub use revision::*;
 
-mod doc;
-pub use doc::*;
+mod document_info;
+pub use document_info::*;

+ 28 - 28
shared-lib/flowy-collaboration/src/protobuf/model/ws.rs → shared-lib/flowy-collaboration/src/protobuf/model/ws_data.rs

@@ -17,7 +17,7 @@
 #![allow(trivial_casts)]
 #![allow(unused_imports)]
 #![allow(unused_results)]
-//! Generated file from `ws.proto`
+//! Generated file from `ws_data.proto`
 
 /// Generated files are compatible only with the same version
 /// of protobuf runtime.
@@ -894,34 +894,34 @@ impl ::protobuf::reflect::ProtobufValue for ServerRevisionWSDataType {
 }
 
 static file_descriptor_proto_data: &'static [u8] = b"\
-    \n\x08ws.proto\x1a\x0erevision.proto\"\xa8\x01\n\x14ClientRevisionWSData\
+    \n\rws_data.proto\x1a\x0erevision.proto\"\xa8\x01\n\x14ClientRevisionWSD\
+    ata\x12\x1b\n\tobject_id\x18\x01\x20\x01(\tR\x08objectId\x12)\n\x02ty\
+    \x18\x02\x20\x01(\x0e2\x19.ClientRevisionWSDataTypeR\x02ty\x12/\n\trevis\
+    ions\x18\x03\x20\x01(\x0b2\x11.RepeatedRevisionR\trevisions\x12\x17\n\
+    \x07data_id\x18\x04\x20\x01(\tR\x06dataId\"r\n\x14ServerRevisionWSData\
     \x12\x1b\n\tobject_id\x18\x01\x20\x01(\tR\x08objectId\x12)\n\x02ty\x18\
-    \x02\x20\x01(\x0e2\x19.ClientRevisionWSDataTypeR\x02ty\x12/\n\trevisions\
-    \x18\x03\x20\x01(\x0b2\x11.RepeatedRevisionR\trevisions\x12\x17\n\x07dat\
-    a_id\x18\x04\x20\x01(\tR\x06dataId\"r\n\x14ServerRevisionWSData\x12\x1b\
-    \n\tobject_id\x18\x01\x20\x01(\tR\x08objectId\x12)\n\x02ty\x18\x02\x20\
-    \x01(\x0e2\x19.ServerRevisionWSDataTypeR\x02ty\x12\x12\n\x04data\x18\x03\
-    \x20\x01(\x0cR\x04data\"f\n\x0fNewDocumentUser\x12\x17\n\x07user_id\x18\
-    \x01\x20\x01(\tR\x06userId\x12\x15\n\x06doc_id\x18\x02\x20\x01(\tR\x05do\
-    cId\x12#\n\rrevision_data\x18\x03\x20\x01(\x0cR\x0crevisionData*=\n\x18C\
-    lientRevisionWSDataType\x12\x11\n\rClientPushRev\x10\0\x12\x0e\n\nClient\
-    Ping\x10\x01*`\n\x18ServerRevisionWSDataType\x12\r\n\tServerAck\x10\0\
-    \x12\x11\n\rServerPushRev\x10\x01\x12\x11\n\rServerPullRev\x10\x02\x12\
-    \x0f\n\x0bUserConnect\x10\x03J\xb1\x07\n\x06\x12\x04\0\0\x1c\x01\n\x08\n\
-    \x01\x0c\x12\x03\0\0\x12\n\t\n\x02\x03\0\x12\x03\x01\0\x18\n\n\n\x02\x04\
-    \0\x12\x04\x03\0\x08\x01\n\n\n\x03\x04\0\x01\x12\x03\x03\x08\x1c\n\x0b\n\
-    \x04\x04\0\x02\0\x12\x03\x04\x04\x19\n\x0c\n\x05\x04\0\x02\0\x05\x12\x03\
-    \x04\x04\n\n\x0c\n\x05\x04\0\x02\0\x01\x12\x03\x04\x0b\x14\n\x0c\n\x05\
-    \x04\0\x02\0\x03\x12\x03\x04\x17\x18\n\x0b\n\x04\x04\0\x02\x01\x12\x03\
-    \x05\x04$\n\x0c\n\x05\x04\0\x02\x01\x06\x12\x03\x05\x04\x1c\n\x0c\n\x05\
-    \x04\0\x02\x01\x01\x12\x03\x05\x1d\x1f\n\x0c\n\x05\x04\0\x02\x01\x03\x12\
-    \x03\x05\"#\n\x0b\n\x04\x04\0\x02\x02\x12\x03\x06\x04#\n\x0c\n\x05\x04\0\
-    \x02\x02\x06\x12\x03\x06\x04\x14\n\x0c\n\x05\x04\0\x02\x02\x01\x12\x03\
-    \x06\x15\x1e\n\x0c\n\x05\x04\0\x02\x02\x03\x12\x03\x06!\"\n\x0b\n\x04\
-    \x04\0\x02\x03\x12\x03\x07\x04\x17\n\x0c\n\x05\x04\0\x02\x03\x05\x12\x03\
-    \x07\x04\n\n\x0c\n\x05\x04\0\x02\x03\x01\x12\x03\x07\x0b\x12\n\x0c\n\x05\
-    \x04\0\x02\x03\x03\x12\x03\x07\x15\x16\n\n\n\x02\x04\x01\x12\x04\t\0\r\
-    \x01\n\n\n\x03\x04\x01\x01\x12\x03\t\x08\x1c\n\x0b\n\x04\x04\x01\x02\0\
+    \x02\x20\x01(\x0e2\x19.ServerRevisionWSDataTypeR\x02ty\x12\x12\n\x04data\
+    \x18\x03\x20\x01(\x0cR\x04data\"f\n\x0fNewDocumentUser\x12\x17\n\x07user\
+    _id\x18\x01\x20\x01(\tR\x06userId\x12\x15\n\x06doc_id\x18\x02\x20\x01(\t\
+    R\x05docId\x12#\n\rrevision_data\x18\x03\x20\x01(\x0cR\x0crevisionData*=\
+    \n\x18ClientRevisionWSDataType\x12\x11\n\rClientPushRev\x10\0\x12\x0e\n\
+    \nClientPing\x10\x01*`\n\x18ServerRevisionWSDataType\x12\r\n\tServerAck\
+    \x10\0\x12\x11\n\rServerPushRev\x10\x01\x12\x11\n\rServerPullRev\x10\x02\
+    \x12\x0f\n\x0bUserConnect\x10\x03J\xb1\x07\n\x06\x12\x04\0\0\x1c\x01\n\
+    \x08\n\x01\x0c\x12\x03\0\0\x12\n\t\n\x02\x03\0\x12\x03\x01\0\x18\n\n\n\
+    \x02\x04\0\x12\x04\x03\0\x08\x01\n\n\n\x03\x04\0\x01\x12\x03\x03\x08\x1c\
+    \n\x0b\n\x04\x04\0\x02\0\x12\x03\x04\x04\x19\n\x0c\n\x05\x04\0\x02\0\x05\
+    \x12\x03\x04\x04\n\n\x0c\n\x05\x04\0\x02\0\x01\x12\x03\x04\x0b\x14\n\x0c\
+    \n\x05\x04\0\x02\0\x03\x12\x03\x04\x17\x18\n\x0b\n\x04\x04\0\x02\x01\x12\
+    \x03\x05\x04$\n\x0c\n\x05\x04\0\x02\x01\x06\x12\x03\x05\x04\x1c\n\x0c\n\
+    \x05\x04\0\x02\x01\x01\x12\x03\x05\x1d\x1f\n\x0c\n\x05\x04\0\x02\x01\x03\
+    \x12\x03\x05\"#\n\x0b\n\x04\x04\0\x02\x02\x12\x03\x06\x04#\n\x0c\n\x05\
+    \x04\0\x02\x02\x06\x12\x03\x06\x04\x14\n\x0c\n\x05\x04\0\x02\x02\x01\x12\
+    \x03\x06\x15\x1e\n\x0c\n\x05\x04\0\x02\x02\x03\x12\x03\x06!\"\n\x0b\n\
+    \x04\x04\0\x02\x03\x12\x03\x07\x04\x17\n\x0c\n\x05\x04\0\x02\x03\x05\x12\
+    \x03\x07\x04\n\n\x0c\n\x05\x04\0\x02\x03\x01\x12\x03\x07\x0b\x12\n\x0c\n\
+    \x05\x04\0\x02\x03\x03\x12\x03\x07\x15\x16\n\n\n\x02\x04\x01\x12\x04\t\0\
+    \r\x01\n\n\n\x03\x04\x01\x01\x12\x03\t\x08\x1c\n\x0b\n\x04\x04\x01\x02\0\
     \x12\x03\n\x04\x19\n\x0c\n\x05\x04\x01\x02\0\x05\x12\x03\n\x04\n\n\x0c\n\
     \x05\x04\x01\x02\0\x01\x12\x03\n\x0b\x14\n\x0c\n\x05\x04\x01\x02\0\x03\
     \x12\x03\n\x17\x18\n\x0b\n\x04\x04\x01\x02\x01\x12\x03\x0b\x04$\n\x0c\n\

+ 0 - 0
shared-lib/flowy-collaboration/src/protobuf/proto/doc.proto → shared-lib/flowy-collaboration/src/protobuf/proto/document_info.proto


+ 8 - 0
shared-lib/flowy-collaboration/src/protobuf/proto/folder_info.proto

@@ -0,0 +1,8 @@
+syntax = "proto3";
+
+message FolderInfo {
+    string folder_id = 1;
+    string text = 2;
+    int64 rev_id = 3;
+    int64 base_rev_id = 4;
+}

+ 0 - 0
shared-lib/flowy-collaboration/src/protobuf/proto/ws.proto → shared-lib/flowy-collaboration/src/protobuf/proto/ws_data.proto


+ 71 - 69
shared-lib/flowy-collaboration/src/server_document/document_manager.rs

@@ -1,9 +1,10 @@
 use crate::{
-    entities::{doc::DocumentInfo, ws_data::ServerRevisionWSDataBuilder},
+    entities::{document_info::DocumentInfo, ws_data::ServerRevisionWSDataBuilder},
     errors::{internal_error, CollaborateError, CollaborateResult},
     protobuf::{ClientRevisionWSData, RepeatedRevision as RepeatedRevisionPB, Revision as RevisionPB},
     server_document::document_pad::ServerDocument,
     synchronizer::{RevisionSyncPersistence, RevisionSyncResponse, RevisionSynchronizer, RevisionUser},
+    util::rev_id_from_str,
 };
 use async_stream::stream;
 use dashmap::DashMap;
@@ -16,8 +17,6 @@ use tokio::{
     task::spawn_blocking,
 };
 
-type RichTextRevisionSynchronizer = RevisionSynchronizer<RichTextAttributes>;
-
 pub trait DocumentCloudPersistence: Send + Sync + Debug {
     fn read_document(&self, doc_id: &str) -> BoxResultFuture<DocumentInfo, CollaborateError>;
 
@@ -25,15 +24,15 @@ pub trait DocumentCloudPersistence: Send + Sync + Debug {
         &self,
         doc_id: &str,
         repeated_revision: RepeatedRevisionPB,
-    ) -> BoxResultFuture<DocumentInfo, CollaborateError>;
+    ) -> BoxResultFuture<Option<DocumentInfo>, CollaborateError>;
 
-    fn read_revisions(
+    fn read_document_revisions(
         &self,
         doc_id: &str,
         rev_ids: Option<Vec<i64>>,
     ) -> BoxResultFuture<Vec<RevisionPB>, CollaborateError>;
 
-    fn save_revisions(&self, repeated_revision: RepeatedRevisionPB) -> BoxResultFuture<(), CollaborateError>;
+    fn save_document_revisions(&self, repeated_revision: RepeatedRevisionPB) -> BoxResultFuture<(), CollaborateError>;
 
     fn reset_document(
         &self,
@@ -42,15 +41,37 @@ pub trait DocumentCloudPersistence: Send + Sync + Debug {
     ) -> BoxResultFuture<(), CollaborateError>;
 }
 
+impl RevisionSyncPersistence for Arc<dyn DocumentCloudPersistence> {
+    fn read_revisions(
+        &self,
+        object_id: &str,
+        rev_ids: Option<Vec<i64>>,
+    ) -> BoxResultFuture<Vec<RevisionPB>, CollaborateError> {
+        (**self).read_document_revisions(object_id, rev_ids)
+    }
+
+    fn save_revisions(&self, repeated_revision: RepeatedRevisionPB) -> BoxResultFuture<(), CollaborateError> {
+        (**self).save_document_revisions(repeated_revision)
+    }
+
+    fn reset_object(
+        &self,
+        object_id: &str,
+        repeated_revision: RepeatedRevisionPB,
+    ) -> BoxResultFuture<(), CollaborateError> {
+        (**self).reset_document(object_id, repeated_revision)
+    }
+}
+
 pub struct ServerDocumentManager {
-    open_doc_map: Arc<RwLock<HashMap<String, Arc<OpenDocHandle>>>>,
+    document_handlers: Arc<RwLock<HashMap<String, Arc<OpenDocumentHandler>>>>,
     persistence: Arc<dyn DocumentCloudPersistence>,
 }
 
 impl ServerDocumentManager {
     pub fn new(persistence: Arc<dyn DocumentCloudPersistence>) -> Self {
         Self {
-            open_doc_map: Arc::new(RwLock::new(HashMap::new())),
+            document_handlers: Arc::new(RwLock::new(HashMap::new())),
             persistence,
         }
     }
@@ -68,7 +89,7 @@ impl ServerDocumentManager {
         let result = match self.get_document_handler(&object_id).await {
             None => {
                 let _ = self.create_document(&object_id, repeated_revision).await.map_err(|e| {
-                    CollaborateError::internal().context(format!("Server crate document failed: {}", e))
+                    CollaborateError::internal().context(format!("Server create document failed: {}", e))
                 })?;
                 Ok(())
             },
@@ -123,12 +144,12 @@ impl ServerDocumentManager {
         }
     }
 
-    async fn get_document_handler(&self, doc_id: &str) -> Option<Arc<OpenDocHandle>> {
-        if let Some(handler) = self.open_doc_map.read().await.get(doc_id).cloned() {
+    async fn get_document_handler(&self, doc_id: &str) -> Option<Arc<OpenDocumentHandler>> {
+        if let Some(handler) = self.document_handlers.read().await.get(doc_id).cloned() {
             return Some(handler);
         }
 
-        let mut write_guard = self.open_doc_map.write().await;
+        let mut write_guard = self.document_handlers.write().await;
         match self.persistence.read_document(doc_id).await {
             Ok(doc) => {
                 let handler = self.create_document_handler(doc).await.map_err(internal_error).unwrap();
@@ -145,21 +166,25 @@ impl ServerDocumentManager {
         &self,
         doc_id: &str,
         repeated_revision: RepeatedRevisionPB,
-    ) -> Result<Arc<OpenDocHandle>, CollaborateError> {
-        let doc = self.persistence.create_document(doc_id, repeated_revision).await?;
-        let handler = self.create_document_handler(doc).await?;
-        self.open_doc_map
-            .write()
-            .await
-            .insert(doc_id.to_owned(), handler.clone());
-        Ok(handler)
+    ) -> Result<Arc<OpenDocumentHandler>, CollaborateError> {
+        match self.persistence.create_document(doc_id, repeated_revision).await? {
+            None => Err(CollaborateError::internal().context("Create document info from revisions failed")),
+            Some(doc) => {
+                let handler = self.create_document_handler(doc).await?;
+                self.document_handlers
+                    .write()
+                    .await
+                    .insert(doc_id.to_owned(), handler.clone());
+                Ok(handler)
+            },
+        }
     }
     }
 
-    async fn create_document_handler(&self, doc: DocumentInfo) -> Result<Arc<OpenDocHandle>, CollaborateError> {
+    async fn create_document_handler(&self, doc: DocumentInfo) -> Result<Arc<OpenDocumentHandler>, CollaborateError> {
         let persistence = self.persistence.clone();
-        let handle = spawn_blocking(|| OpenDocHandle::new(doc, persistence))
+        let handle = spawn_blocking(|| OpenDocumentHandler::new(doc, persistence))
             .await
-            .map_err(|e| CollaborateError::internal().context(format!("Create open doc handler failed: {}", e)))?;
+            .map_err(|e| CollaborateError::internal().context(format!("Create document handler failed: {}", e)))?;
         Ok(Arc::new(handle?))
     }
 }
@@ -170,28 +195,35 @@ impl std::ops::Drop for ServerDocumentManager {
     }
 }
 
-struct OpenDocHandle {
+type DocumentRevisionSynchronizer = RevisionSynchronizer<RichTextAttributes>;
+
+struct OpenDocumentHandler {
     doc_id: String,
     sender: mpsc::Sender<DocumentCommand>,
     users: DashMap<String, Arc<dyn RevisionUser>>,
 }
 
-impl OpenDocHandle {
+impl OpenDocumentHandler {
     fn new(doc: DocumentInfo, persistence: Arc<dyn DocumentCloudPersistence>) -> Result<Self, CollaborateError> {
         let doc_id = doc.doc_id.clone();
-        let (sender, receiver) = mpsc::channel(100);
+        let (sender, receiver) = mpsc::channel(1000);
         let users = DashMap::new();
 
         let delta = RichTextDelta::from_bytes(&doc.text)?;
         let sync_object = ServerDocument::from_delta(&doc_id, delta);
-        let synchronizer = Arc::new(RichTextRevisionSynchronizer::new(doc.rev_id, sync_object, persistence));
+        let synchronizer = Arc::new(DocumentRevisionSynchronizer::new(doc.rev_id, sync_object, persistence));
 
-        let queue = DocumentCommandQueue::new(&doc.doc_id, receiver, synchronizer)?;
+        let queue = DocumentCommandRunner::new(&doc.doc_id, receiver, synchronizer);
         tokio::task::spawn(queue.run());
         Ok(Self { doc_id, sender, users })
     }
 
-    #[tracing::instrument(level = "debug", skip(self, user, repeated_revision), err)]
+    #[tracing::instrument(
+        name = "server_document_apply_revision",
+        level = "trace",
+        skip(self, user, repeated_revision),
+        err
+    )]
     async fn apply_revisions(
     async fn apply_revisions(
         &self,
         user: Arc<dyn RevisionUser>,
     }
     }
 }
 
-impl std::ops::Drop for OpenDocHandle {
+impl std::ops::Drop for OpenDocumentHandler {
     fn drop(&mut self) {
         tracing::trace!("{} OpenDocHandle was dropped", self.doc_id);
     }
 }
 
-impl RevisionSyncPersistence for Arc<dyn DocumentCloudPersistence> {
-    fn read_revisions(
-        &self,
-        object_id: &str,
-        rev_ids: Option<Vec<i64>>,
-    ) -> BoxResultFuture<Vec<RevisionPB>, CollaborateError> {
-        (**self).read_revisions(object_id, rev_ids)
-    }
-
-    fn save_revisions(&self, repeated_revision: RepeatedRevisionPB) -> BoxResultFuture<(), CollaborateError> {
-        (**self).save_revisions(repeated_revision)
-    }
-
-    fn reset_object(
-        &self,
-        object_id: &str,
-        repeated_revision: RepeatedRevisionPB,
-    ) -> BoxResultFuture<(), CollaborateError> {
-        (**self).reset_document(object_id, repeated_revision)
-    }
-}
-
 // #[derive(Debug)]
 enum DocumentCommand {
     ApplyRevisions {
@@ -281,30 +291,30 @@ enum DocumentCommand {
     },
 }
 
-struct DocumentCommandQueue {
+struct DocumentCommandRunner {
     pub doc_id: String,
     receiver: Option<mpsc::Receiver<DocumentCommand>>,
-    synchronizer: Arc<RichTextRevisionSynchronizer>,
+    synchronizer: Arc<DocumentRevisionSynchronizer>,
 }
 
-impl DocumentCommandQueue {
+impl DocumentCommandRunner {
     fn new(
         doc_id: &str,
         receiver: mpsc::Receiver<DocumentCommand>,
-        synchronizer: Arc<RichTextRevisionSynchronizer>,
-    ) -> Result<Self, CollaborateError> {
-        Ok(Self {
+        synchronizer: Arc<DocumentRevisionSynchronizer>,
+    ) -> Self {
+        Self {
             doc_id: doc_id.to_owned(),
             receiver: Some(receiver),
             synchronizer,
-        })
+        }
     }
 
     async fn run(mut self) {
         let mut receiver = self
             .receiver
             .take()
-            .expect("DocActor's receiver should only take one time");
+            .expect("DocumentCommandRunner's receiver should only take one time");
 
         let stream = stream! {
             loop {
@@ -343,16 +353,8 @@ impl DocumentCommandQueue {
     }
 }
 
-impl std::ops::Drop for DocumentCommandQueue {
+impl std::ops::Drop for DocumentCommandRunner {
     fn drop(&mut self) {
         tracing::trace!("{} DocumentCommandQueue was dropped", self.doc_id);
     }
 }
-
-fn rev_id_from_str(s: &str) -> Result<i64, CollaborateError> {
-    let rev_id = s
-        .to_owned()
-        .parse::<i64>()
-        .map_err(|e| CollaborateError::internal().context(format!("Parse rev_id from {} failed. {}", s, e)))?;
-    Ok(rev_id)
-}

+ 332 - 0
shared-lib/flowy-collaboration/src/server_folder/folder_manager.rs

@@ -0,0 +1,332 @@
+use crate::{
+    entities::{
+        folder_info::{FolderDelta, FolderInfo},
+        ws_data::ServerRevisionWSDataBuilder,
+    },
+    errors::{internal_error, CollaborateError, CollaborateResult},
+    protobuf::{ClientRevisionWSData, RepeatedRevision as RepeatedRevisionPB, Revision as RevisionPB},
+    server_folder::folder_pad::ServerFolder,
+    synchronizer::{RevisionSyncPersistence, RevisionSyncResponse, RevisionSynchronizer, RevisionUser},
+    util::rev_id_from_str,
+};
+use async_stream::stream;
+use futures::stream::StreamExt;
+use lib_infra::future::BoxResultFuture;
+use lib_ot::core::PlainTextAttributes;
+use std::{collections::HashMap, fmt::Debug, sync::Arc};
+use tokio::{
+    sync::{mpsc, oneshot, RwLock},
+    task::spawn_blocking,
+};
+
+pub trait FolderCloudPersistence: Send + Sync + Debug {
+    fn read_folder(&self, user_id: &str, folder_id: &str) -> BoxResultFuture<FolderInfo, CollaborateError>;
+
+    fn create_folder(
+        &self,
+        user_id: &str,
+        folder_id: &str,
+        repeated_revision: RepeatedRevisionPB,
+    ) -> BoxResultFuture<Option<FolderInfo>, CollaborateError>;
+
+    fn save_folder_revisions(&self, repeated_revision: RepeatedRevisionPB) -> BoxResultFuture<(), CollaborateError>;
+
+    fn read_folder_revisions(
+        &self,
+        folder_id: &str,
+        rev_ids: Option<Vec<i64>>,
+    ) -> BoxResultFuture<Vec<RevisionPB>, CollaborateError>;
+
+    fn reset_folder(
+        &self,
+        folder_id: &str,
+        repeated_revision: RepeatedRevisionPB,
+    ) -> BoxResultFuture<(), CollaborateError>;
+}
+
+impl RevisionSyncPersistence for Arc<dyn FolderCloudPersistence> {
+    fn read_revisions(
+        &self,
+        object_id: &str,
+        rev_ids: Option<Vec<i64>>,
+    ) -> BoxResultFuture<Vec<RevisionPB>, CollaborateError> {
+        (**self).read_folder_revisions(object_id, rev_ids)
+    }
+
+    fn save_revisions(&self, repeated_revision: RepeatedRevisionPB) -> BoxResultFuture<(), CollaborateError> {
+        (**self).save_folder_revisions(repeated_revision)
+    }
+
+    fn reset_object(
+        &self,
+        object_id: &str,
+        repeated_revision: RepeatedRevisionPB,
+    ) -> BoxResultFuture<(), CollaborateError> {
+        (**self).reset_folder(object_id, repeated_revision)
+    }
+}
+
+pub struct ServerFolderManager {
+    folder_handlers: Arc<RwLock<HashMap<String, Arc<OpenFolderHandler>>>>,
+    persistence: Arc<dyn FolderCloudPersistence>,
+}
+
+impl ServerFolderManager {
+    pub fn new(persistence: Arc<dyn FolderCloudPersistence>) -> Self {
+        Self {
+            folder_handlers: Arc::new(RwLock::new(HashMap::new())),
+            persistence,
+        }
+    }
+
+    pub async fn handle_client_revisions(
+        &self,
+        user: Arc<dyn RevisionUser>,
+        mut client_data: ClientRevisionWSData,
+    ) -> Result<(), CollaborateError> {
+        let repeated_revision = client_data.take_revisions();
+        let cloned_user = user.clone();
+        let ack_id = rev_id_from_str(&client_data.data_id)?;
+        let folder_id = client_data.object_id;
+        let user_id = user.user_id();
+
+        let result = match self.get_folder_handler(&user_id, &folder_id).await {
+            None => {
+                let _ = self
+                    .create_folder(&user_id, &folder_id, repeated_revision)
+                    .await
+                    .map_err(|e| CollaborateError::internal().context(format!("Server create folder failed: {}", e)))?;
+                Ok(())
+            },
+            Some(handler) => {
+                let _ = handler.apply_revisions(user, repeated_revision).await?;
+                Ok(())
+            },
+        };
+
+        if result.is_ok() {
+            cloned_user.receive(RevisionSyncResponse::Ack(
+                ServerRevisionWSDataBuilder::build_ack_message(&folder_id, ack_id),
+            ));
+        }
+        result
+    }
+
+    pub async fn handle_client_ping(
+        &self,
+        user: Arc<dyn RevisionUser>,
+        client_data: ClientRevisionWSData,
+    ) -> Result<(), CollaborateError> {
+        let user_id = user.user_id();
+        let rev_id = rev_id_from_str(&client_data.data_id)?;
+        let folder_id = client_data.object_id.clone();
+        match self.get_folder_handler(&user_id, &folder_id).await {
+            None => {
+                tracing::trace!("Folder:{} doesn't exist, ignore client ping", folder_id);
+                Ok(())
+            },
+            Some(handler) => {
+                let _ = handler.apply_ping(rev_id, user).await?;
+                Ok(())
+            },
+        }
+    }
+
+    async fn get_folder_handler(&self, user_id: &str, folder_id: &str) -> Option<Arc<OpenFolderHandler>> {
+        let folder_id = folder_id.to_owned();
+        if let Some(handler) = self.folder_handlers.read().await.get(&folder_id).cloned() {
+            return Some(handler);
+        }
+
+        let mut write_guard = self.folder_handlers.write().await;
+        match self.persistence.read_folder(user_id, &folder_id).await {
+            Ok(folder_info) => {
+                let handler = self
+                    .create_folder_handler(folder_info)
+                    .await
+                    .map_err(internal_error)
+                    .unwrap();
+                write_guard.insert(folder_id, handler.clone());
+                drop(write_guard);
+                Some(handler)
+            },
+            Err(_) => None,
+        }
+    }
+
+    async fn create_folder_handler(&self, folder_info: FolderInfo) -> Result<Arc<OpenFolderHandler>, CollaborateError> {
+        let persistence = self.persistence.clone();
+        let handle = spawn_blocking(|| OpenFolderHandler::new(folder_info, persistence))
+            .await
+            .map_err(|e| CollaborateError::internal().context(format!("Create folder handler failed: {}", e)))?;
+        Ok(Arc::new(handle?))
+    }
+
+    #[tracing::instrument(level = "debug", skip(self, repeated_revision), err)]
+    async fn create_folder(
+        &self,
+        user_id: &str,
+        folder_id: &str,
+        repeated_revision: RepeatedRevisionPB,
+    ) -> Result<Arc<OpenFolderHandler>, CollaborateError> {
+        match self
+            .persistence
+            .create_folder(user_id, folder_id, repeated_revision)
+            .await?
+        {
+            Some(folder_info) => {
+                let handler = self.create_folder_handler(folder_info).await?;
+                self.folder_handlers
+                    .write()
+                    .await
+                    .insert(folder_id.to_owned(), handler.clone());
+                Ok(handler)
+            },
+            None => Err(CollaborateError::internal().context("Create folder info from revisions failed")),
+        }
+    }
+}
+
+type FolderRevisionSynchronizer = RevisionSynchronizer<PlainTextAttributes>;
+
+struct OpenFolderHandler {
+    folder_id: String,
+    sender: mpsc::Sender<FolderCommand>,
+}
+
+impl OpenFolderHandler {
+    fn new(folder_info: FolderInfo, persistence: Arc<dyn FolderCloudPersistence>) -> CollaborateResult<Self> {
+        let (sender, receiver) = mpsc::channel(1000);
+        let folder_id = folder_info.folder_id.clone();
+        let delta = FolderDelta::from_bytes(&folder_info.text)?;
+        let sync_object = ServerFolder::from_delta(&folder_id, delta);
+        let synchronizer = Arc::new(FolderRevisionSynchronizer::new(
+            folder_info.rev_id,
+            sync_object,
+            persistence,
+        ));
+
+        let queue = FolderCommandRunner::new(&folder_id, receiver, synchronizer);
+        tokio::task::spawn(queue.run());
+
+        Ok(Self { folder_id, sender })
+    }
+
+    #[tracing::instrument(
+        name = "server_folder_apply_revision",
+        level = "trace",
+        skip(self, user, repeated_revision),
+        err
+    )]
+    async fn apply_revisions(
+        &self,
+        user: Arc<dyn RevisionUser>,
+        repeated_revision: RepeatedRevisionPB,
+    ) -> CollaborateResult<()> {
+        let (ret, rx) = oneshot::channel();
+        let msg = FolderCommand::ApplyRevisions {
+            user,
+            repeated_revision,
+            ret,
+        };
+
+        self.send(msg, rx).await?
+    }
+
+    async fn apply_ping(&self, rev_id: i64, user: Arc<dyn RevisionUser>) -> Result<(), CollaborateError> {
+        let (ret, rx) = oneshot::channel();
+        let msg = FolderCommand::Ping { user, rev_id, ret };
+        self.send(msg, rx).await?
+    }
+
+    async fn send<T>(&self, msg: FolderCommand, rx: oneshot::Receiver<T>) -> CollaborateResult<T> {
+        let _ = self
+            .sender
+            .send(msg)
+            .await
+            .map_err(|e| CollaborateError::internal().context(format!("Send folder command failed: {}", e)))?;
+        Ok(rx.await.map_err(internal_error)?)
+    }
+}
+
+impl std::ops::Drop for OpenFolderHandler {
+    fn drop(&mut self) {
+        tracing::trace!("{} OpenFolderHandler was dropped", self.folder_id);
+    }
+}
+
+enum FolderCommand {
+    ApplyRevisions {
+        user: Arc<dyn RevisionUser>,
+        repeated_revision: RepeatedRevisionPB,
+        ret: oneshot::Sender<CollaborateResult<()>>,
+    },
+    Ping {
+        user: Arc<dyn RevisionUser>,
+        rev_id: i64,
+        ret: oneshot::Sender<CollaborateResult<()>>,
+    },
+}
+
+struct FolderCommandRunner {
+    folder_id: String,
+    receiver: Option<mpsc::Receiver<FolderCommand>>,
+    synchronizer: Arc<FolderRevisionSynchronizer>,
+}
+impl FolderCommandRunner {
+    fn new(
+        folder_id: &str,
+        receiver: mpsc::Receiver<FolderCommand>,
+        synchronizer: Arc<FolderRevisionSynchronizer>,
+    ) -> Self {
+        Self {
+            folder_id: folder_id.to_owned(),
+            receiver: Some(receiver),
+            synchronizer,
+        }
+    }
+
+    async fn run(mut self) {
+        let mut receiver = self
+            .receiver
+            .take()
+            .expect("FolderCommandRunner's receiver should only take one time");
+
+        let stream = stream! {
+            loop {
+                match receiver.recv().await {
+                    Some(msg) => yield msg,
+                    None => break,
+                }
+            }
+        };
+        stream.for_each(|msg| self.handle_message(msg)).await;
+    }
+
+    async fn handle_message(&self, msg: FolderCommand) {
+        match msg {
+            FolderCommand::ApplyRevisions {
+                user,
+                repeated_revision,
+                ret,
+            } => {
+                let result = self
+                    .synchronizer
+                    .sync_revisions(user, repeated_revision)
+                    .await
+                    .map_err(internal_error);
+                let _ = ret.send(result);
+            },
+            FolderCommand::Ping { user, rev_id, ret } => {
+                let result = self.synchronizer.pong(user, rev_id).await.map_err(internal_error);
+                let _ = ret.send(result);
+            },
+        }
+    }
+}
+
+impl std::ops::Drop for FolderCommandRunner {
+    fn drop(&mut self) {
+        tracing::trace!("{} FolderCommandRunner was dropped", self.folder_id);
+    }
+}
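
The OpenFolderHandler/FolderCommandRunner pair above follows the same actor-style layout as the document manager: callers hold only an mpsc sender, every command carries a oneshot channel for its result, and a dedicated task owns the mutable state. A stripped-down, self-contained sketch of that pattern (toy command and state, illustrative names; requires tokio with the sync, rt-multi-thread, and macros features):

    use tokio::sync::{mpsc, oneshot};

    enum EchoCommand {
        Apply { value: i64, ret: oneshot::Sender<i64> },
    }

    struct EchoHandler {
        sender: mpsc::Sender<EchoCommand>,
    }

    impl EchoHandler {
        fn new() -> Self {
            let (sender, mut receiver) = mpsc::channel::<EchoCommand>(1000);
            // The runner task owns the mutable state; callers only hold the sender.
            tokio::spawn(async move {
                let mut total: i64 = 0;
                while let Some(cmd) = receiver.recv().await {
                    match cmd {
                        EchoCommand::Apply { value, ret } => {
                            total += value;
                            let _ = ret.send(total);
                        }
                    }
                }
            });
            Self { sender }
        }

        async fn apply(&self, value: i64) -> Option<i64> {
            // Same shape as OpenFolderHandler::send: enqueue the command, then await
            // the oneshot receiver for the result.
            let (ret, rx) = oneshot::channel();
            self.sender.send(EchoCommand::Apply { value, ret }).await.ok()?;
            rx.await.ok()
        }
    }

    #[tokio::main]
    async fn main() {
        let handler = EchoHandler::new();
        assert_eq!(handler.apply(2).await, Some(2));
        assert_eq!(handler.apply(3).await, Some(5));
    }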

+ 38 - 0
shared-lib/flowy-collaboration/src/server_folder/folder_pad.rs

@@ -0,0 +1,38 @@
+use crate::{entities::folder_info::FolderDelta, errors::CollaborateError, synchronizer::RevisionSyncObject};
+use lib_ot::core::{Delta, OperationTransformable, PlainTextAttributes};
+
+pub struct ServerFolder {
+    folder_id: String,
+    delta: FolderDelta,
+}
+
+impl ServerFolder {
+    pub fn from_delta(folder_id: &str, delta: FolderDelta) -> Self {
+        Self {
+            folder_id: folder_id.to_owned(),
+            delta,
+        }
+    }
+}
+
+impl RevisionSyncObject<PlainTextAttributes> for ServerFolder {
+    fn id(&self) -> &str { &self.folder_id }
+
+    fn compose(&mut self, other: &Delta<PlainTextAttributes>) -> Result<(), CollaborateError> {
+        let new_delta = self.delta.compose(other)?;
+        self.delta = new_delta;
+        Ok(())
+    }
+
+    fn transform(
+        &self,
+        other: &Delta<PlainTextAttributes>,
+    ) -> Result<(Delta<PlainTextAttributes>, Delta<PlainTextAttributes>), CollaborateError> {
+        let value = self.delta.transform(other)?;
+        Ok(value)
+    }
+
+    fn to_json(&self) -> String { self.delta.to_json() }
+
+    fn set_delta(&mut self, new_delta: Delta<PlainTextAttributes>) { self.delta = new_delta; }
+}
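
ServerFolder is the folder-side counterpart of ServerDocument: it owns the current delta, `compose` folds an incoming delta into it, and `to_json` exposes the serialized state for persistence. A toy, self-contained illustration of that contract, with `String` standing in for Delta<PlainTextAttributes> (the real compose/transform come from OperationTransformable and can fail):

    trait ToySyncObject {
        fn id(&self) -> &str;
        fn compose(&mut self, other: &str);
        fn to_json(&self) -> String;
    }

    struct ToyFolder {
        folder_id: String,
        delta: String,
    }

    impl ToySyncObject for ToyFolder {
        fn id(&self) -> &str {
            &self.folder_id
        }

        fn compose(&mut self, other: &str) {
            // Appending is only a stand-in for OT composition.
            self.delta.push_str(other);
        }

        fn to_json(&self) -> String {
            self.delta.clone()
        }
    }

    fn main() {
        let mut folder = ToyFolder { folder_id: "folder-1".to_owned(), delta: String::new() };
        folder.compose("insert: view A\n");
        folder.compose("insert: view B\n");
        assert_eq!(folder.to_json().lines().count(), 2);
        println!("{} -> {}", folder.id(), folder.to_json());
    }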

+ 4 - 0
shared-lib/flowy-collaboration/src/server_folder/mod.rs

@@ -0,0 +1,4 @@
+mod folder_manager;
+mod folder_pad;
+
+pub use folder_manager::*;

+ 31 - 31
shared-lib/flowy-collaboration/src/synchronizer.rs

@@ -89,12 +89,12 @@ where
         user: Arc<dyn RevisionUser>,
         repeated_revision: RepeatedRevisionPB,
     ) -> Result<(), CollaborateError> {
-        let doc_id = self.object_id.clone();
+        let object_id = self.object_id.clone();
         if repeated_revision.get_items().is_empty() {
             // Return all the revisions to client
-            let revisions = self.persistence.read_revisions(&doc_id, None).await?;
+            let revisions = self.persistence.read_revisions(&object_id, None).await?;
             let repeated_revision = repeated_revision_from_revision_pbs(revisions)?;
-            let data = ServerRevisionWSDataBuilder::build_push_message(&doc_id, repeated_revision);
+            let data = ServerRevisionWSDataBuilder::build_push_message(&object_id, repeated_revision);
             user.receive(RevisionSyncResponse::Push(data));
             return Ok(());
         }
@@ -116,7 +116,7 @@ where
                     }
                     let _ = self.persistence.save_revisions(repeated_revision).await?;
                 } else {
-                    // The server document is outdated, pull the missing revision from the client.
+                    // The server delta is outdated, pull the missing revision from the client.
                     let range = RevisionRange {
                         object_id: self.object_id.clone(),
                         start: server_rev_id,
@@ -128,10 +128,10 @@ where
             },
             Ordering::Equal => {
                 // Do nothing
-                tracing::warn!("Applied revision rev_id is the same as cur_rev_id");
+                tracing::warn!("Applied {} revision rev_id is the same as cur_rev_id", self.object_id);
             },
             Ordering::Greater => {
-                // The client document is outdated. Transform the client revision delta and then
+                // The client delta is outdated. Transform the client revision delta and then
                 // send the prime delta to the client. Client should compose the this prime
                 // delta.
                 let from_rev_id = first_revision.rev_id;
@@ -144,7 +144,7 @@ where
 
 
     #[tracing::instrument(level = "trace", skip(self, user), fields(server_rev_id), err)]
     pub async fn pong(&self, user: Arc<dyn RevisionUser>, client_rev_id: i64) -> Result<(), CollaborateError> {
-        let doc_id = self.object_id.clone();
+        let object_id = self.object_id.clone();
         let server_rev_id = self.rev_id();
         tracing::Span::current().record("server_rev_id", &server_rev_id);
 
@@ -152,9 +152,9 @@ where
             Ordering::Less => {
                 tracing::error!("Client should not send ping and the server should pull the revisions from the client")
             },
-            Ordering::Equal => tracing::trace!("{} is up to date.", doc_id),
+            Ordering::Equal => tracing::trace!("{} is up to date.", object_id),
             Ordering::Greater => {
-                // The client document is outdated. Transform the client revision delta and then
+                // The client delta is outdated. Transform the client revision delta and then
                 // send the prime delta to the client. Client should compose the this prime
                 // delta.
                 let from_rev_id = client_rev_id;
@@ -166,14 +166,14 @@ where
         Ok(())
     }
 
-    #[tracing::instrument(level = "debug", skip(self, repeated_revision), fields(doc_id), err)]
+    #[tracing::instrument(level = "debug", skip(self, repeated_revision), fields(object_id), err)]
     pub async fn reset(&self, repeated_revision: RepeatedRevisionPB) -> Result<(), CollaborateError> {
-        let doc_id = self.object_id.clone();
-        tracing::Span::current().record("doc_id", &doc_id.as_str());
+        let object_id = self.object_id.clone();
+        tracing::Span::current().record("object_id", &object_id.as_str());
         let revisions: Vec<RevisionPB> = repeated_revision.get_items().to_vec();
         let (_, rev_id) = pair_rev_id_from_revision_pbs(&revisions);
         let delta = make_delta_from_revision_pb(revisions)?;
-        let _ = self.persistence.reset_object(&doc_id, repeated_revision).await?;
+        let _ = self.persistence.reset_object(&object_id, repeated_revision).await?;
         self.object.write().set_delta(delta);
         let _ = self.rev_id.fetch_update(SeqCst, SeqCst, |_e| Some(rev_id));
         Ok(())
@@ -201,7 +201,7 @@ where
         }
 
         match self.object.try_write_for(Duration::from_millis(300)) {
-            None => log::error!("Failed to acquire write lock of document"),
+            None => log::error!("Failed to acquire write lock of object"),
             Some(mut write_guard) => {
                 let _ = write_guard.compose(&delta)?;
             },
@@ -230,29 +230,29 @@ where
 
 
     async fn push_revisions_to_user(&self, user: Arc<dyn RevisionUser>, from: i64, to: i64) {
         let rev_ids: Vec<i64> = (from..=to).collect();
-        let revisions = match self.persistence.read_revisions(&self.object_id, Some(rev_ids)).await {
+        tracing::debug!("Push revision: {} -> {} to client", from, to);
+        match self
+            .persistence
+            .read_revisions(&self.object_id, Some(rev_ids.clone()))
+            .await
+        {
             Ok(revisions) => {
-                assert_eq!(
-                    revisions.is_empty(),
-                    false,
-                    "revisions should not be empty if the doc exists"
-                );
-                revisions
+                if !rev_ids.is_empty() && revisions.is_empty() {
+                    tracing::trace!("{}: can not read the revisions in range {:?}", self.object_id, rev_ids);
+                    // assert_eq!(revisions.is_empty(), rev_ids.is_empty(),);
+                }
+                match repeated_revision_from_revision_pbs(revisions) {
+                    Ok(repeated_revision) => {
+                        let data = ServerRevisionWSDataBuilder::build_push_message(&self.object_id, repeated_revision);
+                        user.receive(RevisionSyncResponse::Push(data));
+                    },
+                    Err(e) => tracing::error!("{}", e),
+                }
             },
             Err(e) => {
                 tracing::error!("{}", e);
-                vec![]
             },
         };
-
-        tracing::debug!("Push revision: {} -> {} to client", from, to);
-        match repeated_revision_from_revision_pbs(revisions) {
-            Ok(repeated_revision) => {
-                let data = ServerRevisionWSDataBuilder::build_push_message(&self.object_id, repeated_revision);
-                user.receive(RevisionSyncResponse::Push(data));
-            },
-            Err(e) => tracing::error!("{}", e),
-        }
     }
     }
 }
 
+ 86 - 7
shared-lib/flowy-collaboration/src/util.rs

@@ -1,11 +1,19 @@
 use crate::{
 use crate::{
-    entities::revision::{RepeatedRevision, Revision},
+    entities::{
+        document_info::DocumentInfo,
+        folder_info::{FolderDelta, FolderInfo},
+        revision::{RepeatedRevision, Revision},
+    },
     errors::{CollaborateError, CollaborateResult},
-    protobuf::{DocumentInfo as DocumentInfoPB, RepeatedRevision as RepeatedRevisionPB, Revision as RevisionPB},
+    protobuf::{
+        DocumentInfo as DocumentInfoPB,
+        FolderInfo as FolderInfoPB,
+        RepeatedRevision as RepeatedRevisionPB,
+        Revision as RevisionPB,
+    },
 };
 use lib_ot::{
     core::{Attributes, Delta, OperationTransformable, NEW_LINE, WHITESPACE},
-    errors::OTError,
     rich_text::RichTextDelta,
 };
 use serde::de::DeserializeOwned;
 }
 }
 
 #[inline]
+pub fn make_folder_from_revisions_pb(
+    folder_id: &str,
+    revisions: RepeatedRevisionPB,
+) -> Result<Option<FolderInfo>, CollaborateError> {
+    match make_folder_pb_from_revisions_pb(folder_id, revisions)? {
+        None => Ok(None),
+        Some(mut pb) => {
+            let folder_info: FolderInfo = (&mut pb)
+                .try_into()
+                .map_err(|e| CollaborateError::internal().context(e))?;
+            Ok(Some(folder_info))
+        },
+    }
+}
+
+#[inline]
+pub fn make_folder_pb_from_revisions_pb(
+    folder_id: &str,
+    mut revisions: RepeatedRevisionPB,
+) -> Result<Option<FolderInfoPB>, CollaborateError> {
+    let revisions = revisions.take_items();
+    if revisions.is_empty() {
+        return Ok(None);
+    }
+
+    let mut folder_delta = FolderDelta::new();
+    let mut base_rev_id = 0;
+    let mut rev_id = 0;
+    for revision in revisions {
+        base_rev_id = revision.base_rev_id;
+        rev_id = revision.rev_id;
+        if revision.delta_data.is_empty() {
+            tracing::warn!("revision delta_data is empty");
+        }
+        let delta = FolderDelta::from_bytes(revision.delta_data)?;
+        folder_delta = folder_delta.compose(&delta)?;
+    }
+
+    let text = folder_delta.to_json();
+    let mut folder_info = FolderInfoPB::new();
+    folder_info.set_folder_id(folder_id.to_owned());
+    folder_info.set_text(text);
+    folder_info.set_base_rev_id(base_rev_id);
+    folder_info.set_rev_id(rev_id);
+    Ok(Some(folder_info))
+}
+
+#[inline]
+pub fn make_document_info_from_revisions_pb(
+    doc_id: &str,
+    revisions: RepeatedRevisionPB,
+) -> Result<Option<DocumentInfo>, CollaborateError> {
+    match make_document_info_pb_from_revisions_pb(doc_id, revisions)? {
+        None => Ok(None),
+        Some(mut pb) => {
+            let document_info: DocumentInfo = (&mut pb).try_into().map_err(|e| {
+                CollaborateError::internal().context(format!("Deserialize document info from pb failed: {}", e))
+            })?;
+            Ok(Some(document_info))
+        },
+    }
+}
+
+#[inline]
+pub fn make_document_info_pb_from_revisions_pb(
     doc_id: &str,
     mut revisions: RepeatedRevisionPB,
-) -> Result<Option<DocumentInfoPB>, OTError> {
+) -> Result<Option<DocumentInfoPB>, CollaborateError> {
     let revisions = revisions.take_items();
     if revisions.is_empty() {
-        // return Err(CollaborateError::record_not_found().context(format!("{} not
-        // exist", doc_id)));
         return Ok(None);
     }
 
@@ -160,3 +230,12 @@ pub fn make_doc_from_revisions(
     document_info.set_rev_id(rev_id);
     Ok(Some(document_info))
 }
+
+#[inline]
+pub fn rev_id_from_str(s: &str) -> Result<i64, CollaborateError> {
+    let rev_id = s
+        .to_owned()
+        .parse::<i64>()
+        .map_err(|e| CollaborateError::internal().context(format!("Parse rev_id from {} failed. {}", s, e)))?;
+    Ok(rev_id)
+}
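
The two make_*_from_revisions_pb helpers above share one shape: every revision's delta is composed onto the accumulated state, and base_rev_id/rev_id end up reflecting the last revision in the list. A self-contained sketch of that fold; ToyRevision and string concatenation are illustrative stand-ins for the protobuf Revision type and OT delta composition:

    struct ToyRevision {
        base_rev_id: i64,
        rev_id: i64,
        delta_data: String,
    }

    fn fold_revisions(revisions: Vec<ToyRevision>) -> Option<(String, i64, i64)> {
        if revisions.is_empty() {
            return None; // mirrors the Ok(None) early return above
        }
        let mut text = String::new();
        let mut base_rev_id = 0;
        let mut rev_id = 0;
        for revision in revisions {
            base_rev_id = revision.base_rev_id;
            rev_id = revision.rev_id;
            text.push_str(&revision.delta_data); // the real code composes deltas instead
        }
        Some((text, base_rev_id, rev_id))
    }

    fn main() {
        let revisions = vec![
            ToyRevision { base_rev_id: 0, rev_id: 1, delta_data: "a".to_owned() },
            ToyRevision { base_rev_id: 1, rev_id: 2, delta_data: "b".to_owned() },
        ];
        assert_eq!(fold_revisions(revisions), Some(("ab".to_owned(), 1, 2)));
    }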

File diff suppressed because it is too large
+ 338 - 344
shared-lib/flowy-core-data-model/src/protobuf/model/trash.rs


+ 8 - 8
shared-lib/flowy-core-data-model/src/protobuf/proto/trash.proto

@@ -1,13 +1,5 @@
 syntax = "proto3";
 syntax = "proto3";
 
 
-message RepeatedTrashId {
-    repeated TrashId items = 1;
-    bool delete_all = 2;
-}
-message TrashId {
-    string id = 1;
-    TrashType ty = 2;
-}
 message Trash {
     string id = 1;
     string name = 2;
@@ -18,6 +10,14 @@ message Trash {
 message RepeatedTrash {
     repeated Trash items = 1;
 }
+message RepeatedTrashId {
+    repeated TrashId items = 1;
+    bool delete_all = 2;
+}
+message TrashId {
+    string id = 1;
+    TrashType ty = 2;
+}
 enum TrashType {
     Unknown = 0;
     View = 1;

+ 12 - 11
shared-lib/flowy-derive/src/derive_cache/derive_cache.rs

@@ -31,19 +31,20 @@ pub fn category_from_str(type_str: &str) -> TypeCategory {
         | "UserProfile"
         | "UserProfile"
         | "UpdateUserRequest"
         | "UpdateUserRequest"
         | "UpdateUserParams"
         | "UpdateUserParams"
+        | "ClientRevisionWSData"
+        | "ServerRevisionWSData"
+        | "NewDocumentUser"
+        | "FolderInfo"
+        | "Revision"
+        | "RepeatedRevision"
+        | "RevId"
+        | "RevisionRange"
         | "CreateDocParams"
         | "CreateDocParams"
         | "DocumentInfo"
         | "DocumentInfo"
         | "ResetDocumentParams"
         | "ResetDocumentParams"
         | "DocumentDelta"
         | "DocumentDelta"
         | "NewDocUser"
         | "NewDocUser"
         | "DocumentId"
         | "DocumentId"
-        | "Revision"
-        | "RepeatedRevision"
-        | "RevId"
-        | "RevisionRange"
-        | "ClientRevisionWSData"
-        | "ServerRevisionWSData"
-        | "NewDocumentUser"
         | "Workspace"
         | "Workspace"
         | "RepeatedWorkspace"
         | "RepeatedWorkspace"
         | "CreateWorkspaceRequest"
         | "CreateWorkspaceRequest"
@@ -64,10 +65,10 @@ pub fn category_from_str(type_str: &str) -> TypeCategory {
         | "AppId"
         | "AppId"
         | "UpdateAppRequest"
         | "UpdateAppRequest"
         | "UpdateAppParams"
         | "UpdateAppParams"
-        | "RepeatedTrashId"
-        | "TrashId"
         | "Trash"
         | "Trash"
         | "RepeatedTrash"
         | "RepeatedTrash"
+        | "RepeatedTrashId"
+        | "TrashId"
         | "View"
         | "View"
         | "RepeatedView"
         | "RepeatedView"
         | "CreateViewRequest"
         | "CreateViewRequest"
@@ -87,10 +88,10 @@ pub fn category_from_str(type_str: &str) -> TypeCategory {
         | "NetworkType"
         | "NetworkType"
         | "UserEvent"
         | "UserEvent"
         | "UserNotification"
         | "UserNotification"
-        | "RevisionState"
-        | "RevType"
         | "ClientRevisionWSDataType"
         | "ClientRevisionWSDataType"
         | "ServerRevisionWSDataType"
         | "ServerRevisionWSDataType"
+        | "RevisionState"
+        | "RevType"
         | "ExportType"
         | "ExportType"
         | "TrashType"
         | "TrashType"
         | "ViewType"
         | "ViewType"

Some files were not shown because too many files changed in this diff