
Merge pull request #1528 from AppFlowy-IO/feat/folder_node

Feat/folder node
Nathan.fooo, 2 years ago
parent
commit 806a924fff
100 changed files with 1852 additions and 2343 deletions
  1. frontend/rust-lib/Cargo.lock (+36 -22)
  2. frontend/rust-lib/dart-ffi/Cargo.toml (+3 -2)
  3. frontend/rust-lib/dart-ffi/build.rs (+1 -3)
  4. frontend/rust-lib/dart-notify/Cargo.toml (+2 -2)
  5. frontend/rust-lib/dart-notify/build.rs (+1 -3)
  6. frontend/rust-lib/flowy-document/Cargo.toml (+3 -2)
  7. frontend/rust-lib/flowy-document/build.rs (+2 -4)
  8. frontend/rust-lib/flowy-error/Cargo.toml (+2 -2)
  9. frontend/rust-lib/flowy-error/build.rs (+1 -3)
  10. frontend/rust-lib/flowy-folder/Cargo.toml (+3 -2)
  11. frontend/rust-lib/flowy-folder/build.rs (+2 -4)
  12. frontend/rust-lib/flowy-grid/Cargo.toml (+3 -2)
  13. frontend/rust-lib/flowy-grid/build.rs (+2 -4)
  14. frontend/rust-lib/flowy-grid/tests/grid/filter_test/checklist_filter_test.rs (+1 -1)
  15. frontend/rust-lib/flowy-net/Cargo.toml (+2 -2)
  16. frontend/rust-lib/flowy-net/build.rs (+2 -4)
  17. frontend/rust-lib/flowy-user/Cargo.toml (+2 -2)
  18. frontend/rust-lib/flowy-user/build.rs (+2 -4)
  19. frontend/scripts/flowy-tool/.gitignore (+0 -10)
  20. frontend/scripts/flowy-tool/Cargo.toml (+0 -27)
  21. frontend/scripts/flowy-tool/src/config/mod.rs (+0 -15)
  22. frontend/scripts/flowy-tool/src/dart_event/dart_event.rs (+0 -136)
  23. frontend/scripts/flowy-tool/src/dart_event/event_template.rs (+0 -72)
  24. frontend/scripts/flowy-tool/src/dart_event/event_template.tera (+0 -49)
  25. frontend/scripts/flowy-tool/src/main.rs (+0 -95)
  26. frontend/scripts/flowy-tool/src/proto/ast.rs (+0 -189)
  27. frontend/scripts/flowy-tool/src/proto/builder.rs (+0 -41)
  28. frontend/scripts/flowy-tool/src/proto/mod.rs (+0 -8)
  29. frontend/scripts/flowy-tool/src/proto/proto_gen.rs (+0 -170)
  30. frontend/scripts/flowy-tool/src/proto/proto_info.rs (+0 -137)
  31. frontend/scripts/flowy-tool/src/proto/template/derive_meta/derive_meta.rs (+0 -84)
  32. frontend/scripts/flowy-tool/src/proto/template/proto_file/enum_template.rs (+0 -40)
  33. frontend/scripts/flowy-tool/src/proto/template/proto_file/struct_template.rs (+0 -112)
  34. frontend/scripts/flowy-tool/src/util/crate_config.rs (+0 -38)
  35. frontend/scripts/flowy-tool/src/util/file.rs (+0 -171)
  36. frontend/scripts/flowy-tool/src/util/mod.rs (+0 -5)
  37. frontend/scripts/makefile/protobuf.toml (+0 -73)
  38. frontend/scripts/makefile/tool.toml (+1 -1)
  39. shared-lib/Cargo.lock (+28 -19)
  40. shared-lib/Cargo.toml (+1 -0)
  41. shared-lib/flowy-ast/src/ast.rs (+59 -23)
  42. shared-lib/flowy-ast/src/attr.rs (+0 -508)
  43. shared-lib/flowy-ast/src/ctxt.rs (+4 -4)
  44. shared-lib/flowy-ast/src/event_attrs.rs (+145 -0)
  45. shared-lib/flowy-ast/src/lib.rs (+7 -7)
  46. shared-lib/flowy-ast/src/node_attrs.rs (+99 -0)
  47. shared-lib/flowy-ast/src/pb_attrs.rs (+444 -0)
  48. shared-lib/flowy-ast/src/symbol.rs (+36 -9)
  49. shared-lib/flowy-ast/src/ty_ext.rs (+16 -14)
  50. shared-lib/flowy-codegen/Cargo.toml (+49 -0)
  51. shared-lib/flowy-codegen/src/dart_event/ast.rs (+9 -8)
  52. shared-lib/flowy-codegen/src/dart_event/dart_event.rs (+13 -8)
  53. shared-lib/flowy-codegen/src/dart_event/event_template.rs (+1 -1)
  54. shared-lib/flowy-codegen/src/dart_event/event_template.tera (+0 -0)
  55. shared-lib/flowy-codegen/src/dart_event/mod.rs (+1 -0)
  56. shared-lib/flowy-codegen/src/flowy_toml.rs (+0 -0)
  57. shared-lib/flowy-codegen/src/lib.rs (+0 -0)
  58. shared-lib/flowy-codegen/src/protobuf_file/ast.rs (+11 -11)
  59. shared-lib/flowy-codegen/src/protobuf_file/mod.rs (+1 -1)
  60. shared-lib/flowy-codegen/src/protobuf_file/proto_gen.rs (+5 -5)
  61. shared-lib/flowy-codegen/src/protobuf_file/proto_info.rs (+2 -2)
  62. shared-lib/flowy-codegen/src/protobuf_file/template/derive_meta/derive_meta.rs (+1 -1)
  63. shared-lib/flowy-codegen/src/protobuf_file/template/derive_meta/derive_meta.tera (+0 -0)
  64. shared-lib/flowy-codegen/src/protobuf_file/template/derive_meta/mod.rs (+0 -0)
  65. shared-lib/flowy-codegen/src/protobuf_file/template/mod.rs (+0 -0)
  66. shared-lib/flowy-codegen/src/protobuf_file/template/proto_file/enum.tera (+0 -0)
  67. shared-lib/flowy-codegen/src/protobuf_file/template/proto_file/enum_template.rs (+2 -2)
  68. shared-lib/flowy-codegen/src/protobuf_file/template/proto_file/mod.rs (+0 -0)
  69. shared-lib/flowy-codegen/src/protobuf_file/template/proto_file/struct.tera (+0 -0)
  70. shared-lib/flowy-codegen/src/protobuf_file/template/proto_file/struct_template.rs (+2 -2)
  71. shared-lib/flowy-codegen/src/util.rs (+1 -1)
  72. shared-lib/flowy-derive/Cargo.toml (+1 -1)
  73. shared-lib/flowy-derive/src/lib.rs (+7 -0)
  74. shared-lib/flowy-derive/src/node/mod.rs (+228 -0)
  75. shared-lib/flowy-derive/src/proto_buf/deserialize.rs (+26 -21)
  76. shared-lib/flowy-derive/src/proto_buf/enum_serde.rs (+2 -2)
  77. shared-lib/flowy-derive/src/proto_buf/mod.rs (+11 -11)
  78. shared-lib/flowy-derive/src/proto_buf/serialize.rs (+26 -21)
  79. shared-lib/flowy-derive/src/proto_buf/util.rs (+7 -7)
  80. shared-lib/flowy-error-code/Cargo.toml (+3 -2)
  81. shared-lib/flowy-error-code/build.rs (+1 -3)
  82. shared-lib/flowy-http-model/Cargo.toml (+2 -1)
  83. shared-lib/flowy-http-model/build.rs (+1 -3)
  84. shared-lib/flowy-sync/src/client_folder/folder_node.rs (+142 -0)
  85. shared-lib/flowy-sync/src/client_folder/mod.rs (+7 -0)
  86. shared-lib/flowy-sync/src/client_folder/trash_node.rs (+20 -0)
  87. shared-lib/flowy-sync/src/client_folder/util.rs (+54 -0)
  88. shared-lib/flowy-sync/src/client_folder/workspace_node.rs (+62 -0)
  89. shared-lib/flowy-sync/src/errors.rs (+4 -2)
  90. shared-lib/flowy-sync/tests/client_folder/folder_test.rs (+58 -0)
  91. shared-lib/flowy-sync/tests/client_folder/mod.rs (+3 -0)
  92. shared-lib/flowy-sync/tests/client_folder/script.rs (+89 -0)
  93. shared-lib/flowy-sync/tests/client_folder/workspace_test.rs (+86 -0)
  94. shared-lib/flowy-sync/tests/main.rs (+1 -0)
  95. shared-lib/grid-rev-model/src/grid_setting_rev.rs (+3 -3)
  96. shared-lib/lib-infra/Cargo.toml (+0 -40)
  97. shared-lib/lib-infra/src/code_gen/dart_event/mod.rs (+0 -5)
  98. shared-lib/lib-infra/src/code_gen/protobuf_file/template/derive_meta/derive_meta.tera (+0 -45)
  99. shared-lib/lib-infra/src/code_gen/protobuf_file/template/derive_meta/mod.rs (+0 -4)
  100. shared-lib/lib-infra/src/code_gen/protobuf_file/template/mod.rs (+0 -5)
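
Most of the per-crate changes below follow one pattern: the code generation entry points move out of lib-infra (and the standalone flowy-tool CLI, deleted further down) into the new flowy-codegen crate, so each crate's [build-dependencies] section and build.rs are updated accordingly. A representative build.rs after this change looks like the sketch below, assembled from the flowy-document, flowy-folder, and flowy-grid diffs that follow; crates without Dart events simply omit the feature-gated call.

    fn main() {
        let crate_name = env!("CARGO_PKG_NAME");
        // Generate the .proto files and protobuf model code for this crate.
        flowy_codegen::protobuf_file::gen(crate_name);

        // Only crates that expose events to the Dart side enable this feature.
        #[cfg(feature = "dart")]
        flowy_codegen::dart_event::gen(crate_name);
    }

On the Cargo.toml side, the old build dependency on lib-infra with its "proto_gen" feature becomes a plain dependency on flowy-codegen, and the per-crate "dart" feature now forwards to flowy-codegen/dart instead of lib-infra/dart.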

+ 36 - 22
frontend/rust-lib/Cargo.lock

@@ -570,10 +570,10 @@ dependencies = [
  "crossbeam-utils",
  "dart-notify",
  "ffi-support",
+ "flowy-codegen",
  "flowy-derive",
  "flowy-sdk",
  "lib-dispatch",
- "lib-infra",
  "log",
  "once_cell",
  "protobuf",
@@ -588,10 +588,10 @@ version = "0.1.0"
 dependencies = [
  "allo-isolate",
  "bytes",
+ "flowy-codegen",
  "flowy-derive",
  "lazy_static",
  "lib-dispatch",
- "lib-infra",
  "log",
  "protobuf",
 ]
@@ -816,6 +816,30 @@ dependencies = [
  "syn",
 ]
 
+[[package]]
+name = "flowy-codegen"
+version = "0.1.0"
+dependencies = [
+ "cmd_lib",
+ "console",
+ "fancy-regex",
+ "flowy-ast",
+ "itertools",
+ "lazy_static",
+ "log",
+ "phf 0.8.0",
+ "protoc-bin-vendored",
+ "protoc-rust",
+ "quote",
+ "serde",
+ "serde_json",
+ "similar",
+ "syn",
+ "tera",
+ "toml",
+ "walkdir",
+]
+
 [[package]]
 name = "flowy-database"
 version = "0.1.0"
@@ -834,8 +858,8 @@ version = "0.1.0"
 dependencies = [
  "dashmap",
  "flowy-ast",
+ "flowy-codegen",
  "lazy_static",
- "lib-infra",
  "proc-macro2",
  "quote",
  "serde_json",
@@ -857,6 +881,7 @@ dependencies = [
  "derive_more",
  "diesel",
  "diesel_derives",
+ "flowy-codegen",
  "flowy-database",
  "flowy-derive",
  "flowy-document",
@@ -891,13 +916,13 @@ name = "flowy-error"
 version = "0.1.0"
 dependencies = [
  "bytes",
+ "flowy-codegen",
  "flowy-database",
  "flowy-derive",
  "flowy-error-code",
  "flowy-sync",
  "http-flowy",
  "lib-dispatch",
- "lib-infra",
  "lib-ot",
  "lib-sqlite",
  "protobuf",
@@ -910,8 +935,8 @@ name = "flowy-error-code"
 version = "0.1.0"
 dependencies = [
  "derive_more",
+ "flowy-codegen",
  "flowy-derive",
- "lib-infra",
  "protobuf",
 ]
 
@@ -923,6 +948,7 @@ dependencies = [
  "dart-notify",
  "diesel",
  "diesel_derives",
+ "flowy-codegen",
  "flowy-database",
  "flowy-derive",
  "flowy-document",
@@ -965,6 +991,7 @@ dependencies = [
  "dashmap",
  "diesel",
  "fancy-regex",
+ "flowy-codegen",
  "flowy-database",
  "flowy-derive",
  "flowy-error",
@@ -1002,6 +1029,7 @@ name = "flowy-http-model"
 version = "0.1.0"
 dependencies = [
  "bytes",
+ "flowy-codegen",
  "flowy-derive",
  "lib-infra",
  "md5",
@@ -1017,6 +1045,7 @@ dependencies = [
  "bytes",
  "config",
  "dashmap",
+ "flowy-codegen",
  "flowy-derive",
  "flowy-document",
  "flowy-error",
@@ -1178,6 +1207,7 @@ dependencies = [
  "diesel_derives",
  "fake",
  "fancy-regex",
+ "flowy-codegen",
  "flowy-database",
  "flowy-derive",
  "flowy-error",
@@ -1740,27 +1770,10 @@ version = "0.1.0"
 dependencies = [
  "bytes",
  "chrono",
- "cmd_lib",
- "console",
- "fancy-regex",
- "flowy-ast",
  "futures-core",
- "itertools",
- "lazy_static",
- "log",
- "phf 0.8.0",
  "pin-project",
- "protoc-bin-vendored",
- "protoc-rust",
  "rand 0.8.5",
- "serde",
- "serde_json",
- "similar",
- "syn",
- "tera",
  "tokio",
- "toml",
- "walkdir",
 ]
 
 [[package]]
@@ -1827,6 +1840,7 @@ version = "0.1.0"
 dependencies = [
  "bytes",
  "dashmap",
+ "flowy-codegen",
  "flowy-derive",
  "futures",
  "futures-channel",

+ 3 - 2
frontend/rust-lib/dart-ffi/Cargo.toml

@@ -37,6 +37,7 @@ http_sync = ["flowy-sdk/http_sync", "flowy-sdk/use_bunyan"]
 openssl_vendored = ["flowy-sdk/openssl_vendored"]
 
 [build-dependencies]
-lib-infra = { path = "../../../shared-lib/lib-infra", features = [
+flowy-codegen= { path = "../../../shared-lib/flowy-codegen", features = [
     "dart",
-] }
+]}
+

+ 1 - 3
frontend/rust-lib/dart-ffi/build.rs

@@ -1,5 +1,3 @@
-use lib_infra::code_gen;
-
 fn main() {
-    code_gen::protobuf_file::gen(env!("CARGO_PKG_NAME"));
+    flowy_codegen::protobuf_file::gen(env!("CARGO_PKG_NAME"));
 }

+ 2 - 2
frontend/rust-lib/dart-notify/Cargo.toml

@@ -16,7 +16,7 @@ flowy-derive = {path = "../../../shared-lib/flowy-derive" }
 lib-dispatch = {path = "../lib-dispatch" }
 
 [features]
-dart = ["lib-infra/dart"]
+dart = ["flowy-codegen/dart"]
 
 [build-dependencies]
-lib-infra = { path = "../../../shared-lib/lib-infra", features = ["proto_gen"] }
+flowy-codegen= { path = "../../../shared-lib/flowy-codegen"}

+ 1 - 3
frontend/rust-lib/dart-notify/build.rs

@@ -1,5 +1,3 @@
-use lib_infra::code_gen;
-
 fn main() {
-    code_gen::protobuf_file::gen(env!("CARGO_PKG_NAME"));
+    flowy_codegen::protobuf_file::gen(env!("CARGO_PKG_NAME"));
 }

+ 3 - 2
frontend/rust-lib/flowy-document/Cargo.toml

@@ -52,10 +52,11 @@ criterion = "0.3"
 rand = "0.8.5"
 
 [build-dependencies]
-lib-infra = { path = "../../../shared-lib/lib-infra", features = [ "proto_gen"] }
+flowy-codegen= { path = "../../../shared-lib/flowy-codegen"}
+
 
 [features]
 sync = []
 cloud_sync = ["sync"]
 flowy_unit_test = ["lib-ot/flowy_unit_test", "flowy-revision/flowy_unit_test"]
-dart = ["lib-infra/dart"]
+dart = ["flowy-codegen/dart"]

+ 2 - 4
frontend/rust-lib/flowy-document/build.rs

@@ -1,9 +1,7 @@
-use lib_infra::code_gen;
-
 fn main() {
     let crate_name = env!("CARGO_PKG_NAME");
-    code_gen::protobuf_file::gen(crate_name);
+    flowy_codegen::protobuf_file::gen(crate_name);
 
     #[cfg(feature = "dart")]
-    code_gen::dart_event::gen(crate_name);
+    flowy_codegen::dart_event::gen(crate_name);
 }

+ 2 - 2
frontend/rust-lib/flowy-error/Cargo.toml

@@ -27,7 +27,7 @@ ot = ["lib-ot"]
 serde = ["serde_json"]
 http_server = ["http-flowy"]
 db = ["flowy-database", "lib-sqlite", "r2d2"]
-dart = ["flowy-error-code/dart", "lib-infra/dart"]
+dart = ["flowy-error-code/dart", "flowy-codegen/dart"]
 
 [build-dependencies]
-lib-infra = { path = "../../../shared-lib/lib-infra", features = ["proto_gen"] }
+flowy-codegen= { path = "../../../shared-lib/flowy-codegen"}

+ 1 - 3
frontend/rust-lib/flowy-error/build.rs

@@ -1,5 +1,3 @@
-use lib_infra::code_gen;
-
 fn main() {
-    code_gen::protobuf_file::gen("flowy-error");
+    flowy_codegen::protobuf_file::gen("flowy-error");
 }

+ 3 - 2
frontend/rust-lib/flowy-folder/Cargo.toml

@@ -42,11 +42,12 @@ flowy-folder = { path = "../flowy-folder", features = ["flowy_unit_test"]}
 flowy-test = { path = "../flowy-test" }
 
 [build-dependencies]
-lib-infra = { path = "../../../shared-lib/lib-infra", features = [ "proto_gen"] }
+flowy-codegen= { path = "../../../shared-lib/flowy-codegen"}
+
 
 [features]
 default = []
 sync = []
 cloud_sync = ["sync"]
 flowy_unit_test = ["lib-ot/flowy_unit_test", "flowy-revision/flowy_unit_test"]
-dart = ["lib-infra/dart"]
+dart = ["flowy-codegen/dart"]

+ 2 - 4
frontend/rust-lib/flowy-folder/build.rs

@@ -1,9 +1,7 @@
-use lib_infra::code_gen;
-
 fn main() {
     let crate_name = env!("CARGO_PKG_NAME");
-    code_gen::protobuf_file::gen(crate_name);
+    flowy_codegen::protobuf_file::gen(crate_name);
 
     #[cfg(feature = "dart")]
-    code_gen::dart_event::gen(crate_name);
+    flowy_codegen::dart_event::gen(crate_name);
 }

+ 3 - 2
frontend/rust-lib/flowy-grid/Cargo.toml

@@ -51,10 +51,11 @@ flowy-test = { path = "../flowy-test" }
 flowy-grid = { path = "../flowy-grid", features = ["flowy_unit_test"]}
 
 [build-dependencies]
-lib-infra = { path = "../../../shared-lib/lib-infra", features = ["proto_gen"] }
+flowy-codegen= { path = "../../../shared-lib/flowy-codegen"}
+
 
 
 [features]
 default = []
-dart = ["lib-infra/dart"]
+dart = ["flowy-codegen/dart"]
 flowy_unit_test = ["flowy-revision/flowy_unit_test"]

+ 2 - 4
frontend/rust-lib/flowy-grid/build.rs

@@ -1,9 +1,7 @@
-use lib_infra::code_gen;
-
 fn main() {
     let crate_name = env!("CARGO_PKG_NAME");
-    code_gen::protobuf_file::gen(crate_name);
+    flowy_codegen::protobuf_file::gen(crate_name);
 
     #[cfg(feature = "dart")]
-    code_gen::dart_event::gen(crate_name);
+    flowy_codegen::dart_event::gen(crate_name);
 }

+ 1 - 1
frontend/rust-lib/flowy-grid/tests/grid/filter_test/checklist_filter_test.rs

@@ -1,6 +1,6 @@
 use crate::grid::filter_test::script::FilterScript::*;
 use crate::grid::filter_test::script::GridFilterTest;
-use flowy_grid::entities::{ChecklistFilterCondition, SelectOptionCondition};
+use flowy_grid::entities::ChecklistFilterCondition;
 
 #[tokio::test]
 async fn grid_filter_checklist_is_incomplete_test() {

+ 2 - 2
frontend/rust-lib/flowy-net/Cargo.toml

@@ -42,10 +42,10 @@ nanoid = "0.4.0"
 [features]
 http_server = []
 dart = [
-    "lib-infra/dart",
+    "flowy-codegen/dart",
     "flowy-user/dart",
     "flowy-error/dart",
 ]
 
 [build-dependencies]
-lib-infra = { path = "../../../shared-lib/lib-infra", features = ["proto_gen"] }
+flowy-codegen = { path = "../../../shared-lib/flowy-codegen"}

+ 2 - 4
frontend/rust-lib/flowy-net/build.rs

@@ -1,9 +1,7 @@
-use lib_infra::code_gen;
-
 fn main() {
     let crate_name = env!("CARGO_PKG_NAME");
-    code_gen::protobuf_file::gen(crate_name);
+    flowy_codegen::protobuf_file::gen(crate_name);
 
     #[cfg(feature = "dart")]
-    code_gen::dart_event::gen(crate_name);
+    flowy_codegen::dart_event::gen(crate_name);
 }

+ 2 - 2
frontend/rust-lib/flowy-user/Cargo.toml

@@ -45,7 +45,7 @@ rand_core = "0.6.3"
 rand = "0.8.5"
 
 [features]
-dart = ["lib-infra/dart"]
+dart = ["flowy-codegen/dart"]
 
 [build-dependencies]
-lib-infra = { path = "../../../shared-lib/lib-infra", features = ["proto_gen"] }
+flowy-codegen= { path = "../../../shared-lib/flowy-codegen"}

+ 2 - 4
frontend/rust-lib/flowy-user/build.rs

@@ -1,9 +1,7 @@
-use lib_infra::code_gen;
-
 fn main() {
     let crate_name = env!("CARGO_PKG_NAME");
-    code_gen::protobuf_file::gen(crate_name);
+    flowy_codegen::protobuf_file::gen(crate_name);
 
     #[cfg(feature = "dart")]
-    code_gen::dart_event::gen(crate_name);
+    flowy_codegen::dart_event::gen(crate_name);
 }

+ 0 - 10
frontend/scripts/flowy-tool/.gitignore

@@ -1,10 +0,0 @@
-# Generated by Cargo
-# will have compiled files and executables
-/target/
-
-# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
-# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
-Cargo.lock
-
-# These are backup files generated by rustfmt
-**/*.rs.bk

+ 0 - 27
frontend/scripts/flowy-tool/Cargo.toml

@@ -1,27 +0,0 @@
-[package]
-name = "flowy-tool"
-version = "0.1.0"
-edition = "2018"
-
-# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
-
-[dependencies]
-clap = "2.33.3"
-walkdir = "2.3.1"
-syn = { version = "1.0.60", features = ["extra-traits", "parsing", "derive", "full"]}
-tera = { version = "1.5.0" }
-log = "0.4.11"
-env_logger = "0.8.2"
-#shell = { git="https://github.com/google/rust-shell.git"}
-cmd_lib = "1.1"
-flowy-ast = { path = "../../../shared-lib/flowy-ast" }
-console = "0.14.0"
-fancy-regex = "0.10.0"
-lazy_static = "1.4.0"
-phf = { version = "0.8.0", features = ["macros"] }
-similar = "1.2.2"
-dialoguer = "0.8.0"
-toml = "0.5.8"
-serde = { version = "1.0", features = ["derive"] }
-pathdiff = "0.2.0"
-itertools = "0.10"

+ 0 - 15
frontend/scripts/flowy-tool/src/config/mod.rs

@@ -1,15 +0,0 @@
-use std::fs;
-
-#[derive(serde::Deserialize)]
-pub struct FlowyConfig {
-    pub proto_crates: Vec<String>,
-    pub event_files: Vec<String>,
-}
-
-impl FlowyConfig {
-    pub fn from_toml_file(path: &str) -> Self {
-        let content = fs::read_to_string(path).unwrap();
-        let config: FlowyConfig = toml::from_str(content.as_ref()).unwrap();
-        config
-    }
-}
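
For context, the deleted FlowyConfig above is the reader for each crate's Flowy.toml, which lists the sub-paths containing protobuf-annotated types and the files declaring Dart events. A minimal sketch of how it was consumed is shown below; the key names mirror the struct's fields, but the values and the surrounding test are illustrative, not taken from the repository.

    #[test]
    fn parse_flowy_config() {
        // Assumes the (deleted) FlowyConfig type above is in scope.
        // from_toml_file() does the same thing after reading the file from disk.
        let content = r#"
        proto_crates = ["src/entities", "src/event_map.rs"]
        event_files = ["src/event_map.rs"]
        "#;
        let config: FlowyConfig = toml::from_str(content).unwrap();
        assert_eq!(config.proto_crates.len(), 2);
        assert_eq!(config.event_files.len(), 1);
    }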

+ 0 - 136
frontend/scripts/flowy-tool/src/dart_event/dart_event.rs

@@ -1,136 +0,0 @@
-use super::event_template::*;
-
-use crate::util::*;
-use flowy_ast::{event_ast::*, *};
-use syn::Item;
-use walkdir::WalkDir;
-
-pub struct DartEventCodeGen {
-    pub rust_sources: Vec<String>,
-    pub output_dir: String,
-}
-
-impl DartEventCodeGen {
-    pub fn gen(&self) {
-        let event_crates = parse_dart_event_files(self.rust_sources.clone());
-        let event_ast = event_crates
-            .iter()
-            .map(parse_event_crate)
-            .flatten()
-            .collect::<Vec<_>>();
-
-        let event_render_ctx = ast_to_event_render_ctx(event_ast.as_ref());
-
-        let mut render_result = String::new();
-        for (index, render_ctx) in event_render_ctx.into_iter().enumerate() {
-            let mut event_template = EventTemplate::new();
-
-            if let Some(content) = event_template.render(render_ctx, index) {
-                render_result.push_str(content.as_ref())
-            }
-        }
-
-        save_content_to_file_with_diff_prompt(
-            render_result.as_ref(),
-            self.output_dir.as_str(),
-            true,
-        );
-    }
-}
-
-pub struct DartEventCrate {
-    crate_path: String,
-    #[allow(dead_code)]
-    crate_name: String,
-    event_files: Vec<String>,
-}
-
-impl DartEventCrate {
-    pub fn from_config(config: &CrateConfig) -> Self {
-        DartEventCrate {
-            crate_path: config.crate_path.clone(),
-            crate_name: config.folder_name.clone(),
-            event_files: config.flowy_config.event_files.clone(),
-        }
-    }
-}
-
-pub fn parse_dart_event_files(roots: Vec<String>) -> Vec<DartEventCrate> {
-    let mut dart_event_crates: Vec<DartEventCrate> = vec![];
-    roots.iter().for_each(|root| {
-        let crates = WalkDir::new(root)
-            .into_iter()
-            .filter_entry(|e| !is_hidden(e))
-            .filter_map(|e| e.ok())
-            .filter(is_crate_dir)
-            .flat_map(|e| parse_crate_config_from(&e))
-            .map(|crate_config| DartEventCrate::from_config(&crate_config))
-            .collect::<Vec<DartEventCrate>>();
-        dart_event_crates.extend(crates);
-    });
-    dart_event_crates
-}
-
-pub fn parse_event_crate(event_crate: &DartEventCrate) -> Vec<EventASTContext> {
-    event_crate
-        .event_files
-        .iter()
-        .map(|event_file| {
-            let file_path = format!("{}/{}", event_crate.crate_path, event_file);
-            let file_content = read_file(file_path.as_ref()).unwrap();
-            let ast = syn::parse_file(file_content.as_ref()).expect("Unable to parse file");
-            ast.items
-                .iter()
-                .map(|item| match item {
-                    Item::Enum(item_enum) => {
-                        let ctxt = Ctxt::new();
-                        let attrs = flowy_ast::enum_from_ast(
-                            &ctxt,
-                            &item_enum.ident,
-                            &item_enum.variants,
-                            &item_enum.attrs,
-                        );
-                        ctxt.check().unwrap();
-                        attrs
-                            .iter()
-                            .filter(|attr| !attr.attrs.event_attrs.ignore)
-                            .enumerate()
-                            .map(|(_index, attr)| EventASTContext::from(&attr.attrs))
-                            .collect::<Vec<_>>()
-                    }
-                    _ => vec![],
-                })
-                .flatten()
-                .collect::<Vec<_>>()
-        })
-        .flatten()
-        .collect::<Vec<EventASTContext>>()
-}
-
-pub fn ast_to_event_render_ctx(ast: &[EventASTContext]) -> Vec<EventRenderContext> {
-    ast.iter()
-        .map(|event_ast| {
-            let input_deserializer = event_ast
-                .event_input
-                .as_ref()
-                .map(|event_input| event_input.get_ident().unwrap().to_string());
-
-            let output_deserializer = event_ast
-                .event_output
-                .as_ref()
-                .map(|event_output| event_output.get_ident().unwrap().to_string());
-            // eprintln!(
-            //     "😁 {:?} / {:?}",
-            //     event_ast.event_input, event_ast.event_output
-            // );
-
-            EventRenderContext {
-                input_deserializer,
-                output_deserializer,
-                error_deserializer: event_ast.event_error.clone(),
-                event: event_ast.event.to_string(),
-                event_ty: event_ast.event_ty.to_string(),
-            }
-        })
-        .collect::<Vec<EventRenderContext>>()
-}
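
The walker above collects annotated enums from each crate's configured event files and turns every non-ignored variant into an EventASTContext. As a rough illustration of the kind of declaration it scans for (the derive and attribute names below are assumptions; the real ones live in flowy-ast's event attributes, which are not part of this diff):

    // Hypothetical event enum: each variant declares its input/output payload
    // types, and the enum-level attribute names the shared error type.
    #[derive(Flowy_Event)]
    #[event_err = "FlowyError"]
    pub enum FolderEvent {
        #[event(input = "CreateWorkspacePayload", output = "Workspace")]
        CreateWorkspace = 0,

        #[event(output = "Workspace")]
        ReadCurrentWorkspace = 1,
    }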

+ 0 - 72
frontend/scripts/flowy-tool/src/dart_event/event_template.rs

@@ -1,72 +0,0 @@
-use crate::util::get_tera;
-use tera::Context;
-
-pub struct EventTemplate {
-    tera_context: Context,
-}
-
-pub const DART_IMPORTED: &str = r#"
-/// Auto gen code from rust ast, do not edit
-part of 'dispatch.dart';
-"#;
-
-pub struct EventRenderContext {
-    pub input_deserializer: Option<String>,
-    pub output_deserializer: Option<String>,
-    pub error_deserializer: String,
-    pub event: String,
-    pub event_ty: String,
-}
-
-#[allow(dead_code)]
-impl EventTemplate {
-    pub fn new() -> Self {
-        EventTemplate {
-            tera_context: Context::new(),
-        }
-    }
-
-    pub fn render(&mut self, ctx: EventRenderContext, index: usize) -> Option<String> {
-        if index == 0 {
-            self.tera_context
-                .insert("imported_dart_files", DART_IMPORTED)
-        }
-        self.tera_context.insert("index", &index);
-        let dart_class_name = format!("{}{}", ctx.event_ty, ctx.event);
-        let event = format!("{}.{}", ctx.event_ty, ctx.event);
-        self.tera_context.insert("event_class", &dart_class_name);
-        self.tera_context.insert("event", &event);
-
-        self.tera_context
-            .insert("has_input", &ctx.input_deserializer.is_some());
-        match ctx.input_deserializer {
-            None => self.tera_context.insert("input_deserializer", "Unit"),
-            Some(ref input) => self.tera_context.insert("input_deserializer", input),
-        }
-
-        // eprintln!(
-        //     "😁 {:?} / {:?}",
-        //     &ctx.input_deserializer, &ctx.output_deserializer
-        // );
-
-        let has_output = ctx.output_deserializer.is_some();
-        self.tera_context.insert("has_output", &has_output);
-
-        match ctx.output_deserializer {
-            None => self.tera_context.insert("output_deserializer", "Unit"),
-            Some(ref output) => self.tera_context.insert("output_deserializer", output),
-        }
-
-        self.tera_context
-            .insert("error_deserializer", &ctx.error_deserializer);
-
-        let tera = get_tera("dart_event");
-        match tera.render("event_template.tera", &self.tera_context) {
-            Ok(r) => Some(r),
-            Err(e) => {
-                log::error!("{:?}", e);
-                None
-            }
-        }
-    }
-}

+ 0 - 49
frontend/scripts/flowy-tool/src/dart_event/event_template.tera

@@ -1,49 +0,0 @@
-{%- if index == 0 %}
-{{ imported_dart_files }}
-{%- endif -%}
-
-class {{ event_class }} {
-{%- if has_input  %}
-     {{ input_deserializer }} request;
-     {{ event_class }}(this.request);
-{%- else %}
-    {{ event_class }}();
-{%- endif %}
-
-    Future<Either<{{ output_deserializer }}, {{ error_deserializer }}>> send() {
-
-{%- if has_input  %}
-    final request = FFIRequest.create()
-          ..event = {{ event }}.toString()
-          ..payload = requestToBytes(this.request);
-
-    return Dispatch.asyncRequest(request)
-        .then((bytesResult) => bytesResult.fold(
-
-        {%- if has_output  %}
-           (okBytes) => left({{ output_deserializer }}.fromBuffer(okBytes)),
-        {%- else %}
-           (bytes) => left(unit),
-        {%- endif %}
-           (errBytes) => right({{ error_deserializer }}.fromBuffer(errBytes)),
-        ));
-
-{%- else %}
-     final request = FFIRequest.create()
-        ..event = {{ event }}.toString();
-        {%- if has_input  %}
-        ..payload = bytes;
-        {%- endif %}
-
-     return Dispatch.asyncRequest(request).then((bytesResult) => bytesResult.fold(
-     {%- if has_output  %}
-        (okBytes) => left({{ output_deserializer }}.fromBuffer(okBytes)),
-     {%- else %}
-        (bytes) => left(unit),
-     {%- endif %}
-        (errBytes) => right({{ error_deserializer }}.fromBuffer(errBytes)),
-      ));
-{%- endif %}
-    }
-}
-

+ 0 - 95
frontend/scripts/flowy-tool/src/main.rs

@@ -1,95 +0,0 @@
-mod config;
-mod dart_event;
-mod proto;
-mod util;
-use clap::{App, Arg};
-
-fn main() {
-    std::env::set_var("RUST_LOG", "Info");
-    env_logger::init();
-
-    let matches = app().get_matches();
-
-    if let Some(matches) = matches.subcommand_matches("pb-gen") {
-        let rust_sources: Vec<String> = matches
-            .values_of("rust_sources")
-            .unwrap()
-            .map(|value| value.to_owned())
-            .collect();
-        let derive_meta = matches.value_of("derive_meta").unwrap();
-        let flutter_package_lib = matches.value_of("flutter_package_lib").unwrap();
-
-        proto::ProtoGenBuilder::new()
-            .set_rust_source_dirs(rust_sources)
-            .set_derive_meta_dir(derive_meta)
-            .set_flutter_package_lib(flutter_package_lib)
-            .build()
-            .gen();
-    }
-
-    if let Some(matches) = matches.subcommand_matches("dart-event") {
-        let rust_sources: Vec<String> = matches
-            .values_of("rust_sources")
-            .unwrap()
-            .map(|value| value.to_owned())
-            .collect();
-        let output_dir = matches.value_of("output").unwrap().to_string();
-
-        let code_gen = dart_event::DartEventCodeGen {
-            rust_sources,
-            output_dir,
-        };
-        code_gen.gen();
-    }
-}
-
-pub fn app<'a, 'b>() -> App<'a, 'b> {
-    let app = App::new("flowy-tool")
-        .version("0.1")
-        .author("nathan")
-        .about("flowy tool")
-        .subcommand(
-            App::new("pb-gen")
-                .about("Generate proto file from rust code")
-                .arg(
-                    Arg::with_name("rust_sources")
-                        .long("rust_sources")
-                        .multiple(true)
-                        .required(true)
-                        .min_values(1)
-                        .value_name("DIRECTORY")
-                        .help("Directories of the cargo workspace"),
-                )
-                .arg(
-                    Arg::with_name("derive_meta")
-                        .long("derive_meta")
-                        .value_name("PATH")
-                        .help("Caching information used by flowy-derive"),
-                )
-                .arg(
-                    Arg::with_name("flutter_package_lib")
-                        .long("flutter_package_lib")
-                        .value_name("DIRECTORY"),
-                ),
-        )
-        .subcommand(
-            App::new("dart-event")
-                .about("Generate the codes that sending events from rust ast")
-                .arg(
-                    Arg::with_name("rust_sources")
-                        .long("rust_sources")
-                        .multiple(true)
-                        .required(true)
-                        .min_values(1)
-                        .value_name("DIRECTORY")
-                        .help("Directories of the cargo workspace"),
-                )
-                .arg(
-                    Arg::with_name("output")
-                        .long("output")
-                        .value_name("DIRECTORY"),
-                ),
-        );
-
-    app
-}

+ 0 - 189
frontend/scripts/flowy-tool/src/proto/ast.rs

@@ -1,189 +0,0 @@
-use crate::proto::proto_info::*;
-use crate::proto::template::{EnumTemplate, StructTemplate};
-use crate::util::*;
-use fancy_regex::Regex;
-use flowy_ast::*;
-use lazy_static::lazy_static;
-use std::{fs::File, io::Read, path::Path};
-use syn::Item;
-use walkdir::WalkDir;
-
-pub fn parse_crate_protobuf(roots: Vec<String>) -> Vec<CrateProtoInfo> {
-    let crate_infos = parse_crate_info_from_path(roots);
-    crate_infos
-        .into_iter()
-        .map(|crate_info| {
-            let proto_output_dir = crate_info.proto_file_output_dir();
-            let files = crate_info
-                .proto_paths
-                .iter()
-                .map(|proto_crate_path| parse_files_protobuf(proto_crate_path, &proto_output_dir))
-                .flatten()
-                .collect::<Vec<ProtoFile>>();
-
-            CrateProtoInfo::from_crate_info(crate_info, files)
-        })
-        .collect::<Vec<CrateProtoInfo>>()
-}
-
-fn parse_files_protobuf(proto_crate_path: &str, proto_output_dir: &str) -> Vec<ProtoFile> {
-    let mut gen_proto_vec: Vec<ProtoFile> = vec![];
-    // file_stem https://doc.rust-lang.org/std/path/struct.Path.html#method.file_stem
-    for (path, file_name) in WalkDir::new(proto_crate_path)
-        .into_iter()
-        .filter_entry(|e| !is_hidden(e))
-        .filter_map(|e| e.ok())
-        .filter(|e| !e.file_type().is_dir())
-        .map(|e| {
-            let path = e.path().to_str().unwrap().to_string();
-            let file_name = e.path().file_stem().unwrap().to_str().unwrap().to_string();
-            (path, file_name)
-        })
-    {
-        if file_name == "mod" {
-            continue;
-        }
-
-        // https://docs.rs/syn/1.0.54/syn/struct.File.html
-        let ast = syn::parse_file(read_file(&path).unwrap().as_ref())
-            .unwrap_or_else(|_| panic!("Unable to parse file at {}", path));
-        let structs = get_ast_structs(&ast);
-        let proto_file_path = format!("{}/{}.proto", &proto_output_dir, &file_name);
-        let mut proto_file_content = parse_or_init_proto_file(proto_file_path.as_ref());
-
-        structs.iter().for_each(|s| {
-            let mut struct_template = StructTemplate::new();
-            struct_template.set_message_struct_name(&s.name);
-
-            s.fields
-                .iter()
-                .filter(|f| f.attrs.pb_index().is_some())
-                .for_each(|f| {
-                    struct_template.set_field(f);
-                });
-
-            let s = struct_template.render().unwrap();
-            proto_file_content.push_str(s.as_ref());
-            proto_file_content.push('\n');
-        });
-
-        let enums = get_ast_enums(&ast);
-        enums.iter().for_each(|e| {
-            let mut enum_template = EnumTemplate::new();
-            enum_template.set_message_enum(e);
-            let s = enum_template.render().unwrap();
-            proto_file_content.push_str(s.as_ref());
-            proto_file_content.push('\n');
-        });
-
-        if !enums.is_empty() || !structs.is_empty() {
-            let info = ProtoFile {
-                file_path: path.clone(),
-                file_name: file_name.clone(),
-                structs: structs.iter().map(|s| s.name.clone()).collect(),
-                enums: enums.iter().map(|e| e.name.clone()).collect(),
-                generated_content: proto_file_content.clone(),
-            };
-            gen_proto_vec.push(info);
-        }
-    }
-
-    gen_proto_vec
-}
-
-pub fn parse_or_init_proto_file(path: &str) -> String {
-    let mut proto_file_content = String::new();
-    let imported_content = find_proto_file_import(path);
-    proto_file_content.push_str(imported_content.as_ref());
-    proto_file_content.push('\n');
-    proto_file_content
-}
-
-pub fn get_ast_structs(ast: &syn::File) -> Vec<Struct> {
-    // let mut content = format!("{:#?}", &ast);
-    // let mut file = File::create("./foo.txt").unwrap();
-    // file.write_all(content.as_bytes()).unwrap();
-    let ctxt = Ctxt::new();
-    let mut proto_structs: Vec<Struct> = vec![];
-    ast.items.iter().for_each(|item| {
-        if let Item::Struct(item_struct) = item {
-            let (_, fields) = struct_from_ast(&ctxt, &item_struct.fields);
-
-            if fields
-                .iter()
-                .filter(|f| f.attrs.pb_index().is_some())
-                .count()
-                > 0
-            {
-                proto_structs.push(Struct {
-                    name: item_struct.ident.to_string(),
-                    fields,
-                });
-            }
-        }
-    });
-    ctxt.check().unwrap();
-    proto_structs
-}
-
-pub fn get_ast_enums(ast: &syn::File) -> Vec<FlowyEnum> {
-    let mut flowy_enums: Vec<FlowyEnum> = vec![];
-    let ctxt = Ctxt::new();
-
-    ast.items.iter().for_each(|item| {
-        // https://docs.rs/syn/1.0.54/syn/enum.Item.html
-        if let Item::Enum(item_enum) = item {
-            let attrs =
-                flowy_ast::enum_from_ast(&ctxt, &item_enum.ident, &item_enum.variants, &ast.attrs);
-            flowy_enums.push(FlowyEnum {
-                name: item_enum.ident.to_string(),
-                attrs,
-            });
-        }
-    });
-    ctxt.check().unwrap();
-    flowy_enums
-}
-
-pub struct FlowyEnum<'a> {
-    pub name: String,
-    pub attrs: Vec<ASTEnumVariant<'a>>,
-}
-
-pub struct Struct<'a> {
-    pub name: String,
-    pub fields: Vec<ASTField<'a>>,
-}
-
-lazy_static! {
-    static ref SYNTAX_REGEX: Regex = Regex::new("syntax.*;").unwrap();
-    static ref IMPORT_REGEX: Regex = Regex::new("(import\\s).*;").unwrap();
-}
-
-fn find_proto_file_import(path: &str) -> String {
-    let mut result = String::new();
-    if !Path::new(path).exists() {
-        // log::error!("{} not exist", path);
-        result = String::from("syntax = \"proto3\";");
-        return result;
-    }
-
-    let mut file = File::open(path).unwrap();
-    let mut content = String::new();
-    file.read_to_string(&mut content).unwrap();
-
-    content.lines().for_each(|line| {
-        ////Result<Option<Match<'t>>>
-        if let Ok(Some(m)) = SYNTAX_REGEX.find(line) {
-            result.push_str(m.as_str());
-            result.push('\n');
-        }
-
-        if let Ok(Some(m)) = IMPORT_REGEX.find(line) {
-            result.push_str(m.as_str());
-            result.push('\n');
-        }
-    });
-
-    result
-}

+ 0 - 41
frontend/scripts/flowy-tool/src/proto/builder.rs

@@ -1,41 +0,0 @@
-use crate::proto::ProtoGen;
-
-#[allow(dead_code)]
-pub struct ProtoGenBuilder {
-    rust_source_dirs: Option<Vec<String>>,
-    flutter_package_lib: Option<String>,
-    derive_meta_dir: Option<String>,
-}
-
-impl ProtoGenBuilder {
-    pub fn new() -> Self {
-        ProtoGenBuilder {
-            rust_source_dirs: None,
-            flutter_package_lib: None,
-            derive_meta_dir: None,
-        }
-    }
-
-    pub fn set_rust_source_dirs(mut self, dirs: Vec<String>) -> Self {
-        self.rust_source_dirs = Some(dirs);
-        self
-    }
-
-    pub fn set_flutter_package_lib(mut self, dir: &str) -> Self {
-        self.flutter_package_lib = Some(dir.to_string());
-        self
-    }
-
-    pub fn set_derive_meta_dir(mut self, dir: &str) -> Self {
-        self.derive_meta_dir = Some(dir.to_string());
-        self
-    }
-
-    pub fn build(self) -> ProtoGen {
-        ProtoGen {
-            rust_source_dirs: self.rust_source_dirs.unwrap(),
-            flutter_package_lib: self.flutter_package_lib.unwrap(),
-            derive_meta_dir: self.derive_meta_dir.unwrap(),
-        }
-    }
-}

+ 0 - 8
frontend/scripts/flowy-tool/src/proto/mod.rs

@@ -1,8 +0,0 @@
-mod ast;
-mod builder;
-mod proto_gen;
-mod proto_info;
-mod template;
-
-pub use builder::*;
-pub use proto_gen::*;

+ 0 - 170
frontend/scripts/flowy-tool/src/proto/proto_gen.rs

@@ -1,170 +0,0 @@
-#![allow(clippy::all)]
-#![allow(unused_attributes)]
-#![allow(dead_code)]
-#![allow(unused_imports)]
-#![allow(unused_results)]
-use crate::proto::ast::*;
-use crate::proto::proto_info::*;
-use crate::{proto::template::*, util::*};
-use std::path::Path;
-use std::{fs::OpenOptions, io::Write};
-
-pub struct ProtoGen {
-    pub(crate) rust_source_dirs: Vec<String>,
-    pub(crate) flutter_package_lib: String,
-    pub(crate) derive_meta_dir: String,
-}
-
-impl ProtoGen {
-    pub fn gen(&self) {
-        let crate_proto_infos = parse_crate_protobuf(self.rust_source_dirs.clone());
-        write_proto_files(&crate_proto_infos);
-        run_rust_protoc(&crate_proto_infos);
-
-        // write_rust_crate_mod_file(&crate_proto_infos);
-        write_derive_meta(&crate_proto_infos, self.derive_meta_dir.as_ref());
-
-        // let flutter_package = FlutterProtobufInfo::new(self.flutter_package_lib.as_ref());
-        // run_flutter_protoc(&crate_proto_infos, &flutter_package);
-        // write_flutter_protobuf_package_mod_file(&crate_proto_infos, &flutter_package);
-    }
-}
-
-fn write_proto_files(crate_infos: &[CrateProtoInfo]) {
-    for crate_info in crate_infos {
-        let dir = crate_info.inner.proto_file_output_dir();
-        crate_info.files.iter().for_each(|info| {
-            let proto_file_path = format!("{}/{}.proto", dir, &info.file_name);
-            save_content_to_file_with_diff_prompt(
-                &info.generated_content,
-                proto_file_path.as_ref(),
-                false,
-            );
-        });
-    }
-}
-
-fn write_rust_crate_mod_file(crate_infos: &[CrateProtoInfo]) {
-    for crate_info in crate_infos {
-        let mod_path = crate_info.inner.proto_model_mod_file();
-        match OpenOptions::new()
-            .create(true)
-            .write(true)
-            .append(false)
-            .truncate(true)
-            .open(&mod_path)
-        {
-            Ok(ref mut file) => {
-                let mut mod_file_content = String::new();
-
-                mod_file_content.push_str("#![cfg_attr(rustfmt, rustfmt::skip)]\n");
-                mod_file_content.push_str("// Auto-generated, do not edit\n");
-                walk_dir(
-                    crate_info.inner.proto_file_output_dir().as_ref(),
-                    |e| !e.file_type().is_dir(),
-                    |_, name| {
-                        let c = format!("\nmod {};\npub use {}::*;\n", &name, &name);
-                        mod_file_content.push_str(c.as_ref());
-                    },
-                );
-                file.write_all(mod_file_content.as_bytes()).unwrap();
-            }
-            Err(err) => {
-                panic!("Failed to open file: {}", err);
-            }
-        }
-    }
-}
-
-fn write_flutter_protobuf_package_mod_file(
-    crate_infos: &[CrateProtoInfo],
-    package_info: &FlutterProtobufInfo,
-) {
-    let model_dir = package_info.model_dir();
-    for crate_info in crate_infos {
-        let mod_path = crate_info.flutter_mod_file(model_dir.as_str());
-        match OpenOptions::new()
-            .create(true)
-            .write(true)
-            .append(false)
-            .truncate(true)
-            .open(&mod_path)
-        {
-            Ok(ref mut file) => {
-                let mut mod_file_content = String::new();
-                mod_file_content.push_str("// Auto-generated, do not edit \n");
-
-                walk_dir(
-                    crate_info.inner.proto_file_output_dir().as_ref(),
-                    |e| !e.file_type().is_dir(),
-                    |_, name| {
-                        let c = format!("export './{}.pb.dart';\n", &name);
-                        mod_file_content.push_str(c.as_ref());
-                    },
-                );
-
-                file.write_all(mod_file_content.as_bytes()).unwrap();
-                file.flush().unwrap();
-            }
-            Err(err) => {
-                panic!("Failed to open file: {}", err);
-            }
-        }
-    }
-}
-
-fn run_rust_protoc(crate_infos: &[CrateProtoInfo]) {
-    for crate_info in crate_infos {
-        let rust_out = crate_info.inner.proto_struct_output_dir();
-        let proto_path = crate_info.inner.proto_file_output_dir();
-        walk_dir(
-            proto_path.as_ref(),
-            |e| is_proto_file(e),
-            |proto_file, _| {
-                if cmd_lib::run_cmd! {
-                    protoc --rust_out=${rust_out} --proto_path=${proto_path} ${proto_file}
-                }
-                .is_err()
-                {
-                    panic!("Run flutter protoc fail")
-                };
-            },
-        );
-
-        crate_info.create_crate_mod_file();
-    }
-}
-
-fn run_flutter_protoc(crate_infos: &[CrateProtoInfo], package_info: &FlutterProtobufInfo) {
-    let model_dir = package_info.model_dir();
-    if !Path::new(&model_dir).exists() {
-        std::fs::create_dir_all(&model_dir).unwrap();
-    }
-
-    for crate_info in crate_infos {
-        let proto_path = crate_info.inner.proto_file_output_dir();
-        let crate_module_dir = crate_info.flutter_mod_dir(model_dir.as_str());
-        remove_everything_in_dir(crate_module_dir.as_str());
-
-        walk_dir(
-            proto_path.as_ref(),
-            |e| is_proto_file(e),
-            |proto_file, _| {
-                if cmd_lib::run_cmd! {
-                    protoc --dart_out=${crate_module_dir} --proto_path=${proto_path} ${proto_file}
-                }
-                .is_err()
-                {
-                    panic!("Run flutter protoc fail")
-                };
-            },
-        );
-    }
-}
-
-fn remove_everything_in_dir(dir: &str) {
-    if Path::new(dir).exists() && std::fs::remove_dir_all(dir).is_err() {
-        panic!("Reset protobuf directory failed")
-    }
-    std::fs::create_dir_all(dir).unwrap();
-}

+ 0 - 137
frontend/scripts/flowy-tool/src/proto/proto_info.rs

@@ -1,137 +0,0 @@
-#![allow(clippy::all)]
-#![allow(dead_code)]
-#![allow(unused_imports)]
-use crate::util::*;
-use std::fs::OpenOptions;
-use std::io::Write;
-use walkdir::WalkDir;
-
-pub struct CrateProtoInfo {
-    pub files: Vec<ProtoFile>,
-    pub inner: ProtobufCrate,
-}
-
-impl CrateProtoInfo {
-    pub fn from_crate_info(inner: ProtobufCrate, files: Vec<ProtoFile>) -> Self {
-        Self { files, inner }
-    }
-
-    pub fn create_crate_mod_file(&self) {
-        // mod model;
-        // pub use model::*;
-        let mod_file_path = format!("{}/mod.rs", self.inner.protobuf_crate_name());
-        let mut content = "#![cfg_attr(rustfmt, rustfmt::skip)]\n".to_owned();
-        content.push_str("// Auto-generated, do not edit\n");
-        content.push_str("mod model;\npub use model::*;");
-        match OpenOptions::new()
-            .create(true)
-            .write(true)
-            .append(false)
-            .truncate(true)
-            .open(&mod_file_path)
-        {
-            Ok(ref mut file) => {
-                file.write_all(content.as_bytes()).unwrap();
-            }
-            Err(err) => {
-                panic!("Failed to open protobuf mod file: {}", err);
-            }
-        }
-    }
-
-    pub fn flutter_mod_dir(&self, root: &str) -> String {
-        let crate_module_dir = format!("{}/{}", root, self.inner.folder_name);
-        crate_module_dir
-    }
-
-    pub fn flutter_mod_file(&self, root: &str) -> String {
-        let crate_module_dir = format!("{}/{}/protobuf.dart", root, self.inner.folder_name);
-        crate_module_dir
-    }
-}
-
-#[derive(Clone, Debug)]
-pub struct ProtobufCrate {
-    pub folder_name: String,
-    pub proto_paths: Vec<String>,
-    pub crate_path: String,
-}
-
-impl ProtobufCrate {
-    pub fn from_config(config: CrateConfig) -> Self {
-        let proto_paths = config.proto_paths();
-        ProtobufCrate {
-            folder_name: config.folder_name,
-            proto_paths,
-            crate_path: config.crate_path,
-        }
-    }
-
-    fn protobuf_crate_name(&self) -> String {
-        format!("{}/src/protobuf", self.crate_path)
-    }
-
-    pub fn proto_file_output_dir(&self) -> String {
-        let dir = format!("{}/proto", self.protobuf_crate_name());
-        create_dir_if_not_exist(dir.as_ref());
-        dir
-    }
-
-    pub fn proto_struct_output_dir(&self) -> String {
-        let dir = format!("{}/model", self.protobuf_crate_name());
-        create_dir_if_not_exist(dir.as_ref());
-        dir
-    }
-
-    pub fn proto_model_mod_file(&self) -> String {
-        format!("{}/mod.rs", self.proto_struct_output_dir())
-    }
-}
-
-#[derive(Debug)]
-pub struct ProtoFile {
-    pub file_path: String,
-    pub file_name: String,
-    pub structs: Vec<String>,
-    pub enums: Vec<String>,
-    pub generated_content: String,
-}
-
-pub fn parse_crate_info_from_path(roots: Vec<String>) -> Vec<ProtobufCrate> {
-    let mut protobuf_crates: Vec<ProtobufCrate> = vec![];
-    roots.iter().for_each(|root| {
-        let crates = WalkDir::new(root)
-            .into_iter()
-            .filter_entry(|e| !is_hidden(e))
-            .filter_map(|e| e.ok())
-            .filter(|e| is_crate_dir(e))
-            .flat_map(|e| parse_crate_config_from(&e))
-            .map(ProtobufCrate::from_config)
-            .collect::<Vec<ProtobufCrate>>();
-        protobuf_crates.extend(crates);
-    });
-    protobuf_crates
-}
-
-pub struct FlutterProtobufInfo {
-    package_path: String,
-}
-impl FlutterProtobufInfo {
-    pub fn new(root: &str) -> Self {
-        FlutterProtobufInfo {
-            package_path: root.to_owned(),
-        }
-    }
-
-    pub fn model_dir(&self) -> String {
-        let model_dir = format!("{}/protobuf", self.package_path);
-        create_dir_if_not_exist(model_dir.as_ref());
-        model_dir
-    }
-
-    #[allow(dead_code)]
-    pub fn mod_file_path(&self) -> String {
-        let mod_file_path = format!("{}/protobuf.dart", self.package_path);
-        mod_file_path
-    }
-}

+ 0 - 84
frontend/scripts/flowy-tool/src/proto/template/derive_meta/derive_meta.rs

@@ -1,84 +0,0 @@
-#![allow(clippy::all)]
-#![allow(unused_attributes)]
-#![allow(dead_code)]
-#![allow(unused_imports)]
-#![allow(unused_results)]
-use crate::proto::proto_info::{CrateProtoInfo, ProtoFile};
-use crate::util::{get_tera, read_file};
-use itertools::Itertools;
-use std::fs::OpenOptions;
-use std::io::Write;
-use tera::Context;
-
-pub struct ProtobufDeriveMeta {
-    context: Context,
-    structs: Vec<String>,
-    enums: Vec<String>,
-}
-
-#[allow(dead_code)]
-impl ProtobufDeriveMeta {
-    pub fn new(structs: Vec<String>, enums: Vec<String>) -> Self {
-        let enums: Vec<_> = enums.into_iter().unique().collect();
-        ProtobufDeriveMeta {
-            context: Context::new(),
-            structs,
-            enums,
-        }
-    }
-
-    pub fn render(&mut self) -> Option<String> {
-        self.context.insert("names", &self.structs);
-        self.context.insert("enums", &self.enums);
-
-        let tera = get_tera("proto/template/derive_meta");
-        match tera.render("derive_meta.tera", &self.context) {
-            Ok(r) => Some(r),
-            Err(e) => {
-                log::error!("{:?}", e);
-                None
-            }
-        }
-    }
-}
-
-pub fn write_derive_meta(crate_infos: &[CrateProtoInfo], derive_meta_dir: &str) {
-    let file_proto_infos = crate_infos
-        .iter()
-        .map(|ref crate_info| &crate_info.files)
-        .flatten()
-        .collect::<Vec<&ProtoFile>>();
-
-    let structs: Vec<String> = file_proto_infos
-        .iter()
-        .map(|info| info.structs.clone())
-        .flatten()
-        .collect();
-    let enums: Vec<String> = file_proto_infos
-        .iter()
-        .map(|info| info.enums.clone())
-        .flatten()
-        .collect();
-
-    let mut derive_template = ProtobufDeriveMeta::new(structs, enums);
-    let new_content = derive_template.render().unwrap();
-    let old_content = read_file(derive_meta_dir).unwrap();
-    if new_content == old_content {
-        return;
-    }
-    // println!("{}", diff_lines(&old_content, &new_content));
-    match OpenOptions::new()
-        .create(true)
-        .write(true)
-        .append(false)
-        .truncate(true)
-        .open(derive_meta_dir)
-    {
-        Ok(ref mut file) => {
-            file.write_all(new_content.as_bytes()).unwrap();
-        }
-        Err(err) => {
-            panic!("Failed to open log file: {}", err);
-        }
-    }
-}

+ 0 - 40
frontend/scripts/flowy-tool/src/proto/template/proto_file/enum_template.rs

@@ -1,40 +0,0 @@
-use crate::proto::ast::FlowyEnum;
-use crate::util::get_tera;
-use tera::Context;
-
-pub struct EnumTemplate {
-    context: Context,
-    items: Vec<String>,
-}
-
-#[allow(dead_code)]
-impl EnumTemplate {
-    pub fn new() -> Self {
-        EnumTemplate {
-            context: Context::new(),
-            items: vec![],
-        }
-    }
-
-    pub fn set_message_enum(&mut self, flowy_enum: &FlowyEnum) {
-        self.context.insert("enum_name", &flowy_enum.name);
-        flowy_enum.attrs.iter().for_each(|item| {
-            self.items.push(format!(
-                "{} = {};",
-                item.attrs.enum_item_name, item.attrs.value
-            ))
-        })
-    }
-
-    pub fn render(&mut self) -> Option<String> {
-        self.context.insert("items", &self.items);
-        let tera = get_tera("proto/template/proto_file");
-        match tera.render("enum.tera", &self.context) {
-            Ok(r) => Some(r),
-            Err(e) => {
-                log::error!("{:?}", e);
-                None
-            }
-        }
-    }
-}

+ 0 - 112
frontend/scripts/flowy-tool/src/proto/template/proto_file/struct_template.rs

@@ -1,112 +0,0 @@
-use crate::util::get_tera;
-use flowy_ast::*;
-use phf::phf_map;
-
-use tera::Context;
-
-// Protobuf data type : https://developers.google.com/protocol-buffers/docs/proto3
-static RUST_TYPE_MAP: phf::Map<&'static str, &'static str> = phf_map! {
-    "String" => "string",
-    "i64" => "int64",
-    "i32" => "int32",
-    "u64" => "uint64",
-    "u32" => "uint32",
-    "u8" => "uint8",
-    "Vec" => "repeated",
-    "f64" => "double",
-    "HashMap" => "map",
-};
-
-pub struct StructTemplate {
-    context: Context,
-    fields: Vec<String>,
-}
-
-#[allow(dead_code)]
-impl StructTemplate {
-    pub fn new() -> Self {
-        StructTemplate {
-            context: Context::new(),
-            fields: vec![],
-        }
-    }
-
-    pub fn set_message_struct_name(&mut self, name: &str) {
-        self.context.insert("struct_name", name);
-    }
-
-    pub fn set_field(&mut self, field: &ASTField) {
-        // {{ field_type }} {{ field_name }} = {{index}};
-        let name = field.name().unwrap().to_string();
-        let index = field.attrs.pb_index().unwrap();
-
-        let ty: &str = &field.ty_as_str();
-        let mut mapped_ty: &str = ty;
-
-        if RUST_TYPE_MAP.contains_key(ty) {
-            mapped_ty = RUST_TYPE_MAP[ty];
-        }
-
-        if let Some(ref category) = field.bracket_category {
-            match category {
-                BracketCategory::Opt => match &field.bracket_inner_ty {
-                    None => {}
-                    Some(inner_ty) => match inner_ty.to_string().as_str() {
-                        //TODO: support hashmap or something else wrapped by Option
-                        "Vec" => {
-                            self.fields.push(format!(
-                                "oneof one_of_{} {{ bytes {} = {}; }};",
-                                name, name, index
-                            ));
-                        }
-                        _ => {
-                            self.fields.push(format!(
-                                "oneof one_of_{} {{ {} {} = {}; }};",
-                                name, mapped_ty, name, index
-                            ));
-                        }
-                    },
-                },
-                BracketCategory::Map((k, v)) => {
-                    let key: &str = k;
-                    let value: &str = v;
-                    self.fields.push(format!(
-                        // map<string, string> attrs = 1;
-                        "map<{}, {}> {} = {};",
-                        RUST_TYPE_MAP.get(key).unwrap_or(&key),
-                        RUST_TYPE_MAP.get(value).unwrap_or(&value),
-                        name,
-                        index
-                    ));
-                }
-                BracketCategory::Vec => {
-                    let bracket_ty: &str = &field.bracket_ty.as_ref().unwrap().to_string();
-                    // Vec<u8>
-                    if mapped_ty == "u8" && bracket_ty == "Vec" {
-                        self.fields.push(format!("bytes {} = {};", name, index))
-                    } else {
-                        self.fields.push(format!(
-                            "{} {} {} = {};",
-                            RUST_TYPE_MAP[bracket_ty], mapped_ty, name, index
-                        ))
-                    }
-                }
-                BracketCategory::Other => self
-                    .fields
-                    .push(format!("{} {} = {};", mapped_ty, name, index)),
-            }
-        }
-    }
-
-    pub fn render(&mut self) -> Option<String> {
-        self.context.insert("fields", &self.fields);
-        let tera = get_tera("proto/template/proto_file");
-        match tera.render("struct.tera", &self.context) {
-            Ok(r) => Some(r),
-            Err(e) => {
-                log::error!("{:?}", e);
-                None
-            }
-        }
-    }
-}
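
As a quick reference for the mapping rules the removed StructTemplate encoded: a small standalone sketch of the same Rust-type-to-protobuf-type table, with the Vec<u8> special case called out. Illustrative only; the field names in the comments are made up.

use std::collections::HashMap;

fn main() {
    // Same table as RUST_TYPE_MAP above (phf replaced by HashMap for brevity).
    let rust_type_map: HashMap<&str, &str> = [
        ("String", "string"),
        ("i64", "int64"),
        ("i32", "int32"),
        ("u64", "uint64"),
        ("u32", "uint32"),
        ("u8", "uint8"),
        ("Vec", "repeated"),
        ("f64", "double"),
        ("HashMap", "map"),
    ]
    .into_iter()
    .collect();

    // `#[pb(index = 1)] name: String` would render as: string name = 1;
    println!("{} name = 1;", rust_type_map["String"]);
    // `#[pb(index = 2)] items: Vec<i64>` would render as: repeated int64 items = 2;
    println!("{} {} items = 2;", rust_type_map["Vec"], rust_type_map["i64"]);
    // `Vec<u8>` was special-cased to: bytes payload = 3;
    println!("bytes payload = 3;");
}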

+ 0 - 38
frontend/scripts/flowy-tool/src/util/crate_config.rs

@@ -1,38 +0,0 @@
-use crate::config::FlowyConfig;
-
-pub struct CrateConfig {
-    pub(crate) crate_path: String,
-    pub(crate) folder_name: String,
-    pub(crate) flowy_config: FlowyConfig,
-}
-
-impl CrateConfig {
-    pub fn proto_paths(&self) -> Vec<String> {
-        let proto_paths = self
-            .flowy_config
-            .proto_crates
-            .iter()
-            .map(|name| format!("{}/{}", self.crate_path, name))
-            .collect::<Vec<String>>();
-        proto_paths
-    }
-}
-
-pub fn parse_crate_config_from(entry: &walkdir::DirEntry) -> Option<CrateConfig> {
-    let path = entry.path().parent().unwrap();
-    let crate_path = path.to_str().unwrap().to_string();
-    let folder_name = path.file_stem().unwrap().to_str().unwrap().to_string();
-    let config_path = format!("{}/Flowy.toml", crate_path);
-
-    if !std::path::Path::new(&config_path).exists() {
-        return None;
-    }
-
-    let flowy_config = FlowyConfig::from_toml_file(config_path.as_ref());
-
-    Some(CrateConfig {
-        crate_path,
-        folder_name,
-        flowy_config,
-    })
-}

+ 0 - 171
frontend/scripts/flowy-tool/src/util/file.rs

@@ -1,171 +0,0 @@
-use console::Style;
-
-use similar::{ChangeTag, TextDiff};
-use std::{
-    fs::{File, OpenOptions},
-    io::{Read, Write},
-    path::Path,
-};
-use tera::Tera;
-use walkdir::WalkDir;
-
-pub fn read_file(path: &str) -> Option<String> {
-    let mut file = File::open(path).unwrap_or_else(|_| panic!("Unable to open file at {}", path));
-    let mut content = String::new();
-    match file.read_to_string(&mut content) {
-        Ok(_) => Some(content),
-        Err(e) => {
-            log::error!("{}, with error: {:?}", path, e);
-            Some("".to_string())
-        }
-    }
-}
-
-pub fn save_content_to_file_with_diff_prompt(content: &str, output_file: &str, _force_write: bool) {
-    if Path::new(output_file).exists() {
-        let old_content = read_file(output_file).unwrap();
-        let new_content = content.to_owned();
-        let write_to_file = || match OpenOptions::new()
-            .create(true)
-            .write(true)
-            .append(false)
-            .truncate(true)
-            .open(output_file)
-        {
-            Ok(ref mut file) => {
-                file.write_all(new_content.as_bytes()).unwrap();
-            }
-            Err(err) => {
-                panic!("Failed to open log file: {}", err);
-            }
-        };
-        if new_content != old_content {
-            print_diff(old_content, new_content.clone());
-            write_to_file()
-            // if force_write {
-            //     write_to_file()
-            // } else {
-            //     if Confirm::new().with_prompt("Override?").interact().unwrap() {
-            //         write_to_file()
-            //     } else {
-            //         tracing::info!("never mind then :(");
-            //     }
-            // }
-        }
-    } else {
-        match OpenOptions::new()
-            .create(true)
-            .write(true)
-            .open(output_file)
-        {
-            Ok(ref mut file) => file.write_all(content.as_bytes()).unwrap(),
-            Err(err) => panic!("Open or create to {} fail: {}", output_file, err),
-        }
-    }
-}
-
-pub fn print_diff(old_content: String, new_content: String) {
-    let diff = TextDiff::from_lines(&old_content, &new_content);
-    for op in diff.ops() {
-        for change in diff.iter_changes(op) {
-            let (sign, style) = match change.tag() {
-                ChangeTag::Delete => ("-", Style::new().red()),
-                ChangeTag::Insert => ("+", Style::new().green()),
-                ChangeTag::Equal => (" ", Style::new()),
-            };
-
-            match change.tag() {
-                ChangeTag::Delete => {
-                    print!("{}{}", style.apply_to(sign).bold(), style.apply_to(change));
-                }
-                ChangeTag::Insert => {
-                    print!("{}{}", style.apply_to(sign).bold(), style.apply_to(change));
-                }
-                ChangeTag::Equal => {}
-            };
-        }
-        println!("---------------------------------------------------");
-    }
-}
-
-pub fn get_tera(directory: &str) -> Tera {
-    let mut root = "./scripts/flowy-tool/src/".to_owned();
-    root.push_str(directory);
-
-    let root_absolute_path = std::fs::canonicalize(root)
-        .unwrap()
-        .as_path()
-        .display()
-        .to_string();
-    let mut template_path = format!("{}/**/*.tera", root_absolute_path);
-    if cfg!(windows) {
-        // remove "\\?\" prefix on windows
-        template_path = format!("{}/**/*.tera", &root_absolute_path[4..]);
-    }
-
-    match Tera::new(template_path.as_ref()) {
-        Ok(t) => t,
-        Err(e) => {
-            log::error!("Parsing error(s): {}", e);
-            ::std::process::exit(1);
-        }
-    }
-}
-
-pub fn is_crate_dir(e: &walkdir::DirEntry) -> bool {
-    let cargo = e.path().file_stem().unwrap().to_str().unwrap().to_string();
-    cargo == *"Cargo"
-}
-
-pub fn is_proto_file(e: &walkdir::DirEntry) -> bool {
-    if e.path().extension().is_none() {
-        return false;
-    }
-    let ext = e.path().extension().unwrap().to_str().unwrap().to_string();
-    ext == *"proto"
-}
-
-pub fn is_hidden(entry: &walkdir::DirEntry) -> bool {
-    entry
-        .file_name()
-        .to_str()
-        .map(|s| s.starts_with('.'))
-        .unwrap_or(false)
-}
-
-pub fn create_dir_if_not_exist(dir: &str) {
-    if !std::path::Path::new(&dir).exists() {
-        std::fs::create_dir_all(&dir).unwrap();
-    }
-}
-
-pub(crate) fn walk_dir<F1, F2>(dir: &str, filter: F2, mut path_and_name: F1)
-where
-    F1: FnMut(String, String),
-    F2: Fn(&walkdir::DirEntry) -> bool,
-{
-    for (path, name) in WalkDir::new(dir)
-        .into_iter()
-        .filter_map(|e| e.ok())
-        .filter(|e| filter(e))
-        .map(|e| {
-            (
-                e.path().to_str().unwrap().to_string(),
-                e.path().file_stem().unwrap().to_str().unwrap().to_string(),
-            )
-        })
-    {
-        path_and_name(path, name);
-    }
-}
-
-#[allow(dead_code)]
-pub fn suffix_relative_to_path(path: &str, base: &str) -> String {
-    let base = Path::new(base);
-    let path = Path::new(path);
-    path.strip_prefix(base)
-        .unwrap()
-        .to_str()
-        .unwrap()
-        .to_owned()
-}
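
The walk_dir/is_proto_file/is_hidden helpers above boil down to a filtered walkdir traversal. A minimal standalone sketch of that pattern follows (the ./shared-lib root is just an example path):

use walkdir::WalkDir;

fn is_hidden(entry: &walkdir::DirEntry) -> bool {
    entry
        .file_name()
        .to_str()
        .map(|s| s.starts_with('.'))
        .unwrap_or(false)
}

fn main() {
    // Walk the tree, skip hidden entries, and keep only *.proto files.
    for entry in WalkDir::new("./shared-lib")
        .into_iter()
        .filter_map(|e| e.ok())
        .filter(|e| !is_hidden(e))
        .filter(|e| e.path().extension().map_or(false, |ext| ext == "proto"))
    {
        println!("{} -> {:?}", entry.path().display(), entry.path().file_stem());
    }
}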

+ 0 - 5
frontend/scripts/flowy-tool/src/util/mod.rs

@@ -1,5 +0,0 @@
-mod crate_config;
-mod file;
-
-pub use crate_config::*;
-pub use file::*;

+ 0 - 73
frontend/scripts/makefile/protobuf.toml

@@ -55,79 +55,6 @@ dart pub global activate protoc_plugin
 script_runner = "@shell"


-[tasks.gen_pb_file]
-script = [
-    """
-    flowy_tool=${CARGO_MAKE_WORKSPACE_WORKING_DIRECTORY}/scripts/flowy-tool/Cargo.toml
-    rust_lib=${CARGO_MAKE_WORKSPACE_WORKING_DIRECTORY}/rust-lib/
-    shared_lib=${CARGO_MAKE_WORKSPACE_WORKING_DIRECTORY}/../shared-lib
-    flutter_lib=${CARGO_MAKE_WORKSPACE_WORKING_DIRECTORY}/app_flowy/packages
-
-    derive_meta=${shared_lib}/flowy-derive/src/derive_cache/derive_cache.rs
-    flutter_package_lib=${flutter_lib}/flowy_sdk/lib
-
-    cargo run \
-     --manifest-path ${flowy_tool} pb-gen \
-     --rust_sources ${rust_lib} ${shared_lib} \
-     --derive_meta=${derive_meta} \
-     --flutter_package_lib=${flutter_package_lib}
-    """,
-]
-script_runner = "@shell"
-
-
-[tasks.gen_pb_file.windows]
-script = [
-    """
-    flowy_tool=set ${CARGO_MAKE_WORKSPACE_WORKING_DIRECTORY}/scripts/flowy-tool/Cargo.toml
-    rust_source=set ${CARGO_MAKE_WORKSPACE_WORKING_DIRECTORY}/rust-lib/
-    # rust_lib=set ${CARGO_MAKE_WORKSPACE_WORKING_DIRECTORY}/rust-lib
-    shared_lib=set ${CARGO_MAKE_WORKSPACE_WORKING_DIRECTORY}/../shared-lib
-    flutter_lib=set ${CARGO_MAKE_WORKSPACE_WORKING_DIRECTORY}/app_flowy/packages
-
-    derive_meta=set ${shared_lib}/flowy-derive/src/derive_cache/derive_cache.rs
-    flutter_package_lib=set ${flutter_lib}/flowy_sdk/lib
-
-    exec cmd /c cargo run \
-     --manifest-path ${flowy_tool} pb-gen \
-     --rust_source=${rust_source} \
-     --derive_meta=${derive_meta} \
-     --flutter_package_lib=${flutter_package_lib}
-    """,
-]
-script_runner = "@duckscript"
-
-
-[tasks.gen_dart_event]
-script = [
-    """
-    flowy_tool=${CARGO_MAKE_WORKSPACE_WORKING_DIRECTORY}/scripts/flowy-tool/Cargo.toml
-    flutter_lib=${CARGO_MAKE_WORKSPACE_WORKING_DIRECTORY}/app_flowy/packages

-    rust_source=${CARGO_MAKE_WORKSPACE_WORKING_DIRECTORY}/rust-lib/
-    output=${flutter_lib}/flowy_sdk/lib/dispatch/dart_event.dart

-    cargo run \
-     --manifest-path ${flowy_tool} dart-event \
-     --rust_sources=${rust_source} \
-     --output=${output}
-    """,
-]
-script_runner = "@shell"

-[tasks.gen_dart_event.windows]
-script = [
-    """
-    flowy_tool=set ${CARGO_MAKE_WORKSPACE_WORKING_DIRECTORY}/scripts/flowy-tool/Cargo.toml
-    flutter_lib=set ${CARGO_MAKE_WORKSPACE_WORKING_DIRECTORY}/app_flowy/packages
-
-    rust_source=set ${CARGO_MAKE_WORKSPACE_WORKING_DIRECTORY}/rust-lib/
-    output=set ${flutter_lib}/flowy_sdk/lib/dispatch/dart_event.dart
-
-    exec cmd.exe /c cargo run \
-     --manifest-path ${flowy_tool} dart-event \
-     --rust_source=${rust_source} \
-     --output=${output}
-    """,
-]
-script_runner = "@duckscript"

+ 1 - 1
frontend/scripts/makefile/tool.toml

@@ -17,7 +17,7 @@ script_runner = "@shell"
 [tasks.rm_macro_build_cache]
 script = [
   """
-    path = canonicalize ../shared-lib/lib-infra/.cache
+    path = canonicalize ../shared-lib/flowy-codegen/.cache
     if is_path_exists ${path}
       rm -rf ${path}
     end

+ 28 - 19
shared-lib/Cargo.lock

@@ -375,14 +375,38 @@ dependencies = [
  "syn",
  "syn",
 ]
 ]
 
 
+[[package]]
+name = "flowy-codegen"
+version = "0.1.0"
+dependencies = [
+ "cmd_lib",
+ "console",
+ "fancy-regex",
+ "flowy-ast",
+ "itertools",
+ "lazy_static",
+ "log",
+ "phf 0.8.0",
+ "protoc-bin-vendored",
+ "protoc-rust",
+ "quote",
+ "serde",
+ "serde_json",
+ "similar",
+ "syn",
+ "tera",
+ "toml",
+ "walkdir",
+]
+
 [[package]]
 [[package]]
 name = "flowy-derive"
 name = "flowy-derive"
 version = "0.1.0"
 version = "0.1.0"
 dependencies = [
 dependencies = [
  "dashmap",
  "dashmap",
  "flowy-ast",
  "flowy-ast",
+ "flowy-codegen",
  "lazy_static",
  "lazy_static",
- "lib-infra",
  "log",
  "log",
  "proc-macro2",
  "proc-macro2",
  "quote",
  "quote",
@@ -398,8 +422,8 @@ name = "flowy-error-code"
 version = "0.1.0"
 version = "0.1.0"
 dependencies = [
 dependencies = [
  "derive_more",
  "derive_more",
+ "flowy-codegen",
  "flowy-derive",
  "flowy-derive",
- "lib-infra",
  "protobuf",
  "protobuf",
 ]
 ]
 
 
@@ -408,6 +432,7 @@ name = "flowy-http-model"
 version = "0.1.0"
 version = "0.1.0"
 dependencies = [
 dependencies = [
  "bytes",
  "bytes",
+ "flowy-codegen",
  "flowy-derive",
  "flowy-derive",
  "lib-infra",
  "lib-infra",
  "md5",
  "md5",
@@ -785,27 +810,10 @@ version = "0.1.0"
 dependencies = [
  "bytes",
  "chrono",
- "cmd_lib",
- "console",
- "fancy-regex",
- "flowy-ast",
  "futures-core",
- "itertools",
- "lazy_static",
- "log",
- "phf 0.8.0",
  "pin-project",
- "protoc-bin-vendored",
- "protoc-rust",
  "rand 0.8.5",
- "serde",
- "serde_json",
- "similar",
- "syn",
- "tera",
  "tokio",
- "toml",
- "walkdir",
 ]

 [[package]]
@@ -839,6 +847,7 @@ dependencies = [
  "bytes",
  "bytes",
  "dashmap",
  "dashmap",
  "env_logger",
  "env_logger",
+ "flowy-codegen",
  "flowy-derive",
  "flowy-derive",
  "futures",
  "futures",
  "futures-channel",
  "futures-channel",

+ 1 - 0
shared-lib/Cargo.toml

@@ -8,6 +8,7 @@ members = [
   "lib-infra",
   "lib-infra",
   "flowy-derive",
   "flowy-derive",
   "flowy-ast",
   "flowy-ast",
+  "flowy-codegen",
   "flowy-error-code",
   "flowy-error-code",
   "grid-rev-model",
   "grid-rev-model",
 ]
 ]

+ 59 - 23
shared-lib/flowy-ast/src/ast.rs

@@ -1,21 +1,28 @@
 #![allow(clippy::all)]
 #![allow(unused_attributes)]
 #![allow(unused_assignments)]
-use crate::{attr, ty_ext::*, AttrsContainer, Ctxt};
+
+use crate::event_attrs::EventEnumAttrs;
+use crate::node_attrs::NodeStructAttrs;
+use crate::{is_recognizable_field, ty_ext::*, ASTResult, PBAttrsContainer, PBStructAttrs, NODE_TYPE};
+use proc_macro2::Ident;
+use syn::Meta::NameValue;
 use syn::{self, punctuated::Punctuated};

 pub struct ASTContainer<'a> {
     /// The struct or enum name (without generics).
     pub ident: syn::Ident,
+
+    pub node_type: Option<String>,
     /// Attributes on the structure.
-    pub attrs: AttrsContainer,
+    pub pb_attrs: PBAttrsContainer,
     /// The contents of the struct or enum.
     pub data: ASTData<'a>,
 }

 impl<'a> ASTContainer<'a> {
-    pub fn from_ast(cx: &Ctxt, ast: &'a syn::DeriveInput) -> Option<ASTContainer<'a>> {
-        let attrs = AttrsContainer::from_ast(cx, ast);
+    pub fn from_ast(ast_result: &ASTResult, ast: &'a syn::DeriveInput) -> Option<ASTContainer<'a>> {
+        let attrs = PBAttrsContainer::from_ast(ast_result, ast);
         // syn::DeriveInput
         //  1. syn::DataUnion
         //  2. syn::DataStruct
@@ -23,21 +30,27 @@ impl<'a> ASTContainer<'a> {
         let data = match &ast.data {
             syn::Data::Struct(data) => {
                 // https://docs.rs/syn/1.0.48/syn/struct.DataStruct.html
-                let (style, fields) = struct_from_ast(cx, &data.fields);
+                let (style, fields) = struct_from_ast(ast_result, &data.fields);
                 ASTData::Struct(style, fields)
             }
             syn::Data::Union(_) => {
-                cx.error_spanned_by(ast, "Does not support derive for unions");
+                ast_result.error_spanned_by(ast, "Does not support derive for unions");
                 return None;
             }
             syn::Data::Enum(data) => {
                 // https://docs.rs/syn/1.0.48/syn/struct.DataEnum.html
-                ASTData::Enum(enum_from_ast(cx, &ast.ident, &data.variants, &ast.attrs))
+                ASTData::Enum(enum_from_ast(ast_result, &ast.ident, &data.variants, &ast.attrs))
             }
         };

         let ident = ast.ident.clone();
-        let item = ASTContainer { ident, attrs, data };
+        let node_type = get_node_type(ast_result, &ident, &ast.attrs);
+        let item = ASTContainer {
+            ident,
+            pb_attrs: attrs,
+            node_type,
+            data,
+        };
         Some(item)
     }
 }
@@ -55,7 +68,7 @@ impl<'a> ASTData<'a> {
         }
     }

-    pub fn all_variants(&'a self) -> Box<dyn Iterator<Item = &'a attr::ASTEnumAttrVariant> + 'a> {
+    pub fn all_variants(&'a self) -> Box<dyn Iterator<Item = &'a EventEnumAttrs> + 'a> {
         match self {
             ASTData::Enum(variants) => {
                 let iter = variants.iter().map(|variant| &variant.attrs);
@@ -85,7 +98,7 @@ impl<'a> ASTData<'a> {
 /// A variant of an enum.
 pub struct ASTEnumVariant<'a> {
     pub ident: syn::Ident,
-    pub attrs: attr::ASTEnumAttrVariant,
+    pub attrs: EventEnumAttrs,
     pub style: ASTStyle,
     pub fields: Vec<ASTField<'a>>,
     pub original: &'a syn::Variant,
@@ -106,16 +119,19 @@ pub enum BracketCategory {

 pub struct ASTField<'a> {
     pub member: syn::Member,
-    pub attrs: attr::ASTAttrField,
+    pub pb_attrs: PBStructAttrs,
+    pub node_attrs: NodeStructAttrs,
     pub ty: &'a syn::Type,
     pub original: &'a syn::Field,
+    // If the field is Vec<String>, then the bracket_ty will be Vec
     pub bracket_ty: Option<syn::Ident>,
+    // If the field is Vec<String>, then the bracket_inner_ty will be String
     pub bracket_inner_ty: Option<syn::Ident>,
     pub bracket_category: Option<BracketCategory>,
 }

 impl<'a> ASTField<'a> {
-    pub fn new(cx: &Ctxt, field: &'a syn::Field, index: usize) -> Result<Self, String> {
+    pub fn new(cx: &ASTResult, field: &'a syn::Field, index: usize) -> Result<Self, String> {
         let mut bracket_inner_ty = None;
         let mut bracket_ty = None;
         let mut bracket_category = Some(BracketCategory::Other);
@@ -161,7 +177,8 @@ impl<'a> ASTField<'a> {
                 Some(ident) => syn::Member::Named(ident.clone()),
                 None => syn::Member::Unnamed(index.into()),
             },
-            attrs: attr::ASTAttrField::from_ast(cx, index, field),
+            pb_attrs: PBStructAttrs::from_ast(cx, index, field),
+            node_attrs: NodeStructAttrs::from_ast(cx, index, field),
             ty: &field.ty,
             original: field,
             bracket_ty,
@@ -177,7 +194,6 @@ impl<'a> ASTField<'a> {
         }
     }

-    #[allow(dead_code)]
     pub fn name(&self) -> Option<syn::Ident> {
         if let syn::Member::Named(ident) = &self.member {
             Some(ident.clone())
@@ -185,10 +201,6 @@ impl<'a> ASTField<'a> {
             None
         }
     }
-
-    pub fn is_option(&self) -> bool {
-        attr::is_option(self.ty)
-    }
 }

 #[derive(Copy, Clone)]
@@ -202,7 +214,7 @@ pub enum ASTStyle {
     Unit,
 }

-pub fn struct_from_ast<'a>(cx: &Ctxt, fields: &'a syn::Fields) -> (ASTStyle, Vec<ASTField<'a>>) {
+pub fn struct_from_ast<'a>(cx: &ASTResult, fields: &'a syn::Fields) -> (ASTStyle, Vec<ASTField<'a>>) {
     match fields {
         syn::Fields::Named(fields) => (ASTStyle::Struct, fields_from_ast(cx, &fields.named)),
         syn::Fields::Unnamed(fields) if fields.unnamed.len() == 1 => {
@@ -214,7 +226,7 @@ pub fn struct_from_ast<'a>(cx: &Ctxt, fields: &'a syn::Fields) -> (ASTStyle, Vec
 }

 pub fn enum_from_ast<'a>(
-    cx: &Ctxt,
+    cx: &ASTResult,
     ident: &syn::Ident,
     variants: &'a Punctuated<syn::Variant, Token![,]>,
     enum_attrs: &[syn::Attribute],
@@ -222,7 +234,7 @@ pub fn enum_from_ast<'a>(
     variants
         .iter()
         .flat_map(|variant| {
-            let attrs = attr::ASTEnumAttrVariant::from_ast(cx, ident, variant, enum_attrs);
+            let attrs = EventEnumAttrs::from_ast(cx, ident, variant, enum_attrs);
             let (style, fields) = struct_from_ast(cx, &variant.fields);
             Some(ASTEnumVariant {
                 ident: variant.ident.clone(),
@@ -235,10 +247,34 @@ pub fn enum_from_ast<'a>(
         .collect()
 }

-fn fields_from_ast<'a>(cx: &Ctxt, fields: &'a Punctuated<syn::Field, Token![,]>) -> Vec<ASTField<'a>> {
+fn fields_from_ast<'a>(cx: &ASTResult, fields: &'a Punctuated<syn::Field, Token![,]>) -> Vec<ASTField<'a>> {
     fields
         .iter()
         .enumerate()
-        .flat_map(|(index, field)| ASTField::new(cx, field, index).ok())
+        .flat_map(|(index, field)| {
+            if is_recognizable_field(field) {
+                ASTField::new(cx, field, index).ok()
+            } else {
+                None
+            }
+        })
         .collect()
 }
+
+fn get_node_type(ast_result: &ASTResult, struct_name: &Ident, attrs: &[syn::Attribute]) -> Option<String> {
+    let mut node_type = None;
+    attrs
+        .iter()
+        .filter(|attr| attr.path.segments.iter().any(|s| s.ident == NODE_TYPE))
+        .for_each(|attr| {
+            if let Ok(NameValue(named_value)) = attr.parse_meta() {
+                if node_type.is_some() {
+                    ast_result.error_spanned_by(struct_name, "Duplicate node type definition");
+                }
+                if let syn::Lit::Str(s) = named_value.lit {
+                    node_type = Some(s.value());
+                }
+            }
+        });
+    node_type
+}
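
For orientation, a standalone sketch of how the new get_node_type hook is driven, assuming the NODE_TYPE symbol resolves to the literal node_type (the symbol table lives in symbol.rs, not shown in this hunk); the TextNode example is invented.

use syn::Meta::NameValue;

// Mirrors the NameValue parsing in get_node_type above, outside of the derive macro.
fn node_type_of(input: &syn::DeriveInput) -> Option<String> {
    let mut node_type = None;
    for attr in input
        .attrs
        .iter()
        .filter(|attr| attr.path.segments.iter().any(|s| s.ident == "node_type"))
    {
        if let Ok(NameValue(named_value)) = attr.parse_meta() {
            if let syn::Lit::Str(s) = named_value.lit {
                node_type = Some(s.value());
            }
        }
    }
    node_type
}

fn main() {
    let ast: syn::DeriveInput = syn::parse_str(
        r#"
        #[node_type = "text"]
        struct TextNode { delta: String }
        "#,
    )
    .unwrap();
    assert_eq!(node_type_of(&ast), Some("text".to_string()));
}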

+ 0 - 508
shared-lib/flowy-ast/src/attr.rs

@@ -1,508 +0,0 @@
-#![allow(clippy::all)]
-use crate::{symbol::*, Ctxt};
-use quote::ToTokens;
-use syn::{
-    self,
-    parse::{self, Parse},
-    Meta::{List, NameValue, Path},
-    NestedMeta::{Lit, Meta},
-};
-
-use proc_macro2::{Group, Span, TokenStream, TokenTree};
-
-#[allow(dead_code)]
-pub struct AttrsContainer {
-    name: String,
-    pb_struct_type: Option<syn::Type>,
-    pb_enum_type: Option<syn::Type>,
-}
-
-impl AttrsContainer {
-    /// Extract out the `#[pb(...)]` attributes from an item.
-    pub fn from_ast(cx: &Ctxt, item: &syn::DeriveInput) -> Self {
-        let mut pb_struct_type = ASTAttr::none(cx, PB_STRUCT);
-        let mut pb_enum_type = ASTAttr::none(cx, PB_ENUM);
-        for meta_item in item.attrs.iter().flat_map(|attr| get_meta_items(cx, attr)).flatten() {
-            match &meta_item {
-                // Parse `#[pb(struct = "Type")]
-                Meta(NameValue(m)) if m.path == PB_STRUCT => {
-                    if let Ok(into_ty) = parse_lit_into_ty(cx, PB_STRUCT, &m.lit) {
-                        pb_struct_type.set_opt(&m.path, Some(into_ty));
-                    }
-                }
-
-                // Parse `#[pb(enum = "Type")]
-                Meta(NameValue(m)) if m.path == PB_ENUM => {
-                    if let Ok(into_ty) = parse_lit_into_ty(cx, PB_ENUM, &m.lit) {
-                        pb_enum_type.set_opt(&m.path, Some(into_ty));
-                    }
-                }
-
-                Meta(meta_item) => {
-                    let path = meta_item.path().into_token_stream().to_string().replace(' ', "");
-                    cx.error_spanned_by(meta_item.path(), format!("unknown pb container attribute `{}`", path));
-                }
-
-                Lit(lit) => {
-                    cx.error_spanned_by(lit, "unexpected literal in pb container attribute");
-                }
-            }
-        }
-        match &item.data {
-            syn::Data::Struct(_) => {
-                pb_struct_type.set_if_none(default_pb_type(&cx, &item.ident));
-            }
-            syn::Data::Enum(_) => {
-                pb_enum_type.set_if_none(default_pb_type(&cx, &item.ident));
-            }
-            _ => {}
-        }
-
-        AttrsContainer {
-            name: item.ident.to_string(),
-            pb_struct_type: pb_struct_type.get(),
-            pb_enum_type: pb_enum_type.get(),
-        }
-    }
-
-    pub fn pb_struct_type(&self) -> Option<&syn::Type> {
-        self.pb_struct_type.as_ref()
-    }
-
-    pub fn pb_enum_type(&self) -> Option<&syn::Type> {
-        self.pb_enum_type.as_ref()
-    }
-}
-
-struct ASTAttr<'c, T> {
-    cx: &'c Ctxt,
-    name: Symbol,
-    tokens: TokenStream,
-    value: Option<T>,
-}
-
-impl<'c, T> ASTAttr<'c, T> {
-    fn none(cx: &'c Ctxt, name: Symbol) -> Self {
-        ASTAttr {
-            cx,
-            name,
-            tokens: TokenStream::new(),
-            value: None,
-        }
-    }
-
-    fn set<A: ToTokens>(&mut self, obj: A, value: T) {
-        let tokens = obj.into_token_stream();
-
-        if self.value.is_some() {
-            self.cx
-                .error_spanned_by(tokens, format!("duplicate attribute `{}`", self.name));
-        } else {
-            self.tokens = tokens;
-            self.value = Some(value);
-        }
-    }
-
-    fn set_opt<A: ToTokens>(&mut self, obj: A, value: Option<T>) {
-        if let Some(value) = value {
-            self.set(obj, value);
-        }
-    }
-
-    fn set_if_none(&mut self, value: T) {
-        if self.value.is_none() {
-            self.value = Some(value);
-        }
-    }
-
-    fn get(self) -> Option<T> {
-        self.value
-    }
-
-    #[allow(dead_code)]
-    fn get_with_tokens(self) -> Option<(TokenStream, T)> {
-        match self.value {
-            Some(v) => Some((self.tokens, v)),
-            None => None,
-        }
-    }
-}
-
-pub struct ASTAttrField {
-    #[allow(dead_code)]
-    name: String,
-    pb_index: Option<syn::LitInt>,
-    pb_one_of: bool,
-    skip_serializing: bool,
-    skip_deserializing: bool,
-    serialize_with: Option<syn::ExprPath>,
-    deserialize_with: Option<syn::ExprPath>,
-}
-
-impl ASTAttrField {
-    /// Extract out the `#[pb(...)]` attributes from a struct field.
-    pub fn from_ast(cx: &Ctxt, index: usize, field: &syn::Field) -> Self {
-        let mut pb_index = ASTAttr::none(cx, PB_INDEX);
-        let mut pb_one_of = BoolAttr::none(cx, PB_ONE_OF);
-        let mut serialize_with = ASTAttr::none(cx, SERIALIZE_WITH);
-        let mut skip_serializing = BoolAttr::none(cx, SKIP_SERIALIZING);
-        let mut deserialize_with = ASTAttr::none(cx, DESERIALIZE_WITH);
-        let mut skip_deserializing = BoolAttr::none(cx, SKIP_DESERIALIZING);
-
-        let ident = match &field.ident {
-            Some(ident) => ident.to_string(),
-            None => index.to_string(),
-        };
-
-        for meta_item in field.attrs.iter().flat_map(|attr| get_meta_items(cx, attr)).flatten() {
-            match &meta_item {
-                // Parse `#[pb(skip)]`
-                Meta(Path(word)) if word == SKIP => {
-                    skip_serializing.set_true(word);
-                    skip_deserializing.set_true(word);
-                }
-
-                // Parse '#[pb(index = x)]'
-                Meta(NameValue(m)) if m.path == PB_INDEX => {
-                    if let syn::Lit::Int(lit) = &m.lit {
-                        pb_index.set(&m.path, lit.clone());
-                    }
-                }
-
-                // Parse `#[pb(one_of)]`
-                Meta(Path(path)) if path == PB_ONE_OF => {
-                    pb_one_of.set_true(path);
-                }
-
-                // Parse `#[pb(serialize_with = "...")]`
-                Meta(NameValue(m)) if m.path == SERIALIZE_WITH => {
-                    if let Ok(path) = parse_lit_into_expr_path(cx, SERIALIZE_WITH, &m.lit) {
-                        serialize_with.set(&m.path, path);
-                    }
-                }
-
-                // Parse `#[pb(deserialize_with = "...")]`
-                Meta(NameValue(m)) if m.path == DESERIALIZE_WITH => {
-                    if let Ok(path) = parse_lit_into_expr_path(cx, DESERIALIZE_WITH, &m.lit) {
-                        deserialize_with.set(&m.path, path);
-                    }
-                }
-
-                Meta(meta_item) => {
-                    let path = meta_item.path().into_token_stream().to_string().replace(' ', "");
-                    cx.error_spanned_by(meta_item.path(), format!("unknown field attribute `{}`", path));
-                }
-
-                Lit(lit) => {
-                    cx.error_spanned_by(lit, "unexpected literal in pb field attribute");
-                }
-            }
-        }
-
-        ASTAttrField {
-            name: ident,
-            pb_index: pb_index.get(),
-            pb_one_of: pb_one_of.get(),
-            skip_serializing: skip_serializing.get(),
-            skip_deserializing: skip_deserializing.get(),
-            serialize_with: serialize_with.get(),
-            deserialize_with: deserialize_with.get(),
-        }
-    }
-
-    #[allow(dead_code)]
-    pub fn pb_index(&self) -> Option<String> {
-        self.pb_index.as_ref().map(|lit| lit.base10_digits().to_string())
-    }
-
-    pub fn is_one_of(&self) -> bool {
-        self.pb_one_of
-    }
-
-    pub fn serialize_with(&self) -> Option<&syn::ExprPath> {
-        self.serialize_with.as_ref()
-    }
-
-    pub fn deserialize_with(&self) -> Option<&syn::ExprPath> {
-        self.deserialize_with.as_ref()
-    }
-
-    pub fn skip_serializing(&self) -> bool {
-        self.skip_serializing
-    }
-
-    pub fn skip_deserializing(&self) -> bool {
-        self.skip_deserializing
-    }
-}
-
-pub enum Default {
-    /// Field must always be specified because it does not have a default.
-    None,
-    /// The default is given by `std::default::Default::default()`.
-    Default,
-    /// The default is given by this function.
-    Path(syn::ExprPath),
-}
-
-#[derive(Debug, Clone)]
-pub struct EventAttrs {
-    input: Option<syn::Path>,
-    output: Option<syn::Path>,
-    error_ty: Option<String>,
-    pub ignore: bool,
-}
-
-#[derive(Debug, Clone)]
-pub struct ASTEnumAttrVariant {
-    pub enum_name: String,
-    pub enum_item_name: String,
-    pub value: String,
-    pub event_attrs: EventAttrs,
-}
-
-impl ASTEnumAttrVariant {
-    pub fn from_ast(ctxt: &Ctxt, ident: &syn::Ident, variant: &syn::Variant, enum_attrs: &[syn::Attribute]) -> Self {
-        let enum_item_name = variant.ident.to_string();
-        let enum_name = ident.to_string();
-        let mut value = String::new();
-        if variant.discriminant.is_some() {
-            if let syn::Expr::Lit(ref expr_list) = variant.discriminant.as_ref().unwrap().1 {
-                let lit_int = if let syn::Lit::Int(ref int_value) = expr_list.lit {
-                    int_value
-                } else {
-                    unimplemented!()
-                };
-                value = lit_int.base10_digits().to_string();
-            }
-        }
-        let event_attrs = get_event_attrs_from(ctxt, &variant.attrs, enum_attrs);
-        ASTEnumAttrVariant {
-            enum_name,
-            enum_item_name,
-            value,
-            event_attrs,
-        }
-    }
-
-    pub fn event_input(&self) -> Option<syn::Path> {
-        self.event_attrs.input.clone()
-    }
-
-    pub fn event_output(&self) -> Option<syn::Path> {
-        self.event_attrs.output.clone()
-    }
-
-    pub fn event_error(&self) -> String {
-        self.event_attrs.error_ty.as_ref().unwrap().clone()
-    }
-}
-
-fn get_event_attrs_from(ctxt: &Ctxt, variant_attrs: &[syn::Attribute], enum_attrs: &[syn::Attribute]) -> EventAttrs {
-    let mut event_attrs = EventAttrs {
-        input: None,
-        output: None,
-        error_ty: None,
-        ignore: false,
-    };
-
-    enum_attrs
-        .iter()
-        .filter(|attr| attr.path.segments.iter().any(|s| s.ident == EVENT_ERR))
-        .for_each(|attr| {
-            if let Ok(NameValue(named_value)) = attr.parse_meta() {
-                if let syn::Lit::Str(s) = named_value.lit {
-                    event_attrs.error_ty = Some(s.value());
-                } else {
-                    eprintln!("❌ {} should not be empty", EVENT_ERR);
-                }
-            } else {
-                eprintln!("❌ Can not find any {} on attr: {:#?}", EVENT_ERR, attr);
-            }
-        });
-
-    let mut extract_event_attr = |attr: &syn::Attribute, meta_item: &syn::NestedMeta| match &meta_item {
-        Meta(NameValue(name_value)) => {
-            if name_value.path == EVENT_INPUT {
-                if let syn::Lit::Str(s) = &name_value.lit {
-                    let input_type = parse_lit_str(s)
-                        .map_err(|_| {
-                            ctxt.error_spanned_by(s, format!("failed to parse request deserializer {:?}", s.value()))
-                        })
-                        .unwrap();
-                    event_attrs.input = Some(input_type);
-                }
-            }
-
-            if name_value.path == EVENT_OUTPUT {
-                if let syn::Lit::Str(s) = &name_value.lit {
-                    let output_type = parse_lit_str(s)
-                        .map_err(|_| {
-                            ctxt.error_spanned_by(s, format!("failed to parse response deserializer {:?}", s.value()))
-                        })
-                        .unwrap();
-                    event_attrs.output = Some(output_type);
-                }
-            }
-        }
-        Meta(Path(word)) => {
-            if word == EVENT_IGNORE && attr.path == EVENT {
-                event_attrs.ignore = true;
-            }
-        }
-        Lit(s) => ctxt.error_spanned_by(s, "unexpected attribute"),
-        _ => ctxt.error_spanned_by(meta_item, "unexpected attribute"),
-    };
-
-    let attr_meta_items_info = variant_attrs
-        .iter()
-        .flat_map(|attr| match get_meta_items(ctxt, attr) {
-            Ok(items) => Some((attr, items)),
-            Err(_) => None,
-        })
-        .collect::<Vec<(&syn::Attribute, Vec<syn::NestedMeta>)>>();
-
-    for (attr, nested_metas) in attr_meta_items_info {
-        nested_metas
-            .iter()
-            .for_each(|meta_item| extract_event_attr(attr, meta_item))
-    }
-
-    // eprintln!("😁{:#?}", event_attrs);
-    event_attrs
-}
-
-pub fn get_meta_items(cx: &Ctxt, attr: &syn::Attribute) -> Result<Vec<syn::NestedMeta>, ()> {
-    if attr.path != PB_ATTRS && attr.path != EVENT {
-        return Ok(vec![]);
-    }
-
-    // http://strymon.systems.ethz.ch/typename/syn/enum.Meta.html
-    match attr.parse_meta() {
-        Ok(List(meta)) => Ok(meta.nested.into_iter().collect()),
-        Ok(other) => {
-            cx.error_spanned_by(other, "expected #[pb(...)] or or #[event(...)]");
-            Err(())
-        }
-        Err(err) => {
-            cx.error_spanned_by(attr, "attribute must be str, e.g. #[pb(xx = \"xxx\")]");
-            cx.syn_error(err);
-            Err(())
-        }
-    }
-}
-
-fn parse_lit_into_expr_path(cx: &Ctxt, attr_name: Symbol, lit: &syn::Lit) -> Result<syn::ExprPath, ()> {
-    let string = get_lit_str(cx, attr_name, lit)?;
-    parse_lit_str(string).map_err(|_| cx.error_spanned_by(lit, format!("failed to parse path: {:?}", string.value())))
-}
-
-fn get_lit_str<'a>(cx: &Ctxt, attr_name: Symbol, lit: &'a syn::Lit) -> Result<&'a syn::LitStr, ()> {
-    if let syn::Lit::Str(lit) = lit {
-        Ok(lit)
-    } else {
-        cx.error_spanned_by(
-            lit,
-            format!(
-                "expected pb {} attribute to be a string: `{} = \"...\"`",
-                attr_name, attr_name
-            ),
-        );
-        Err(())
-    }
-}
-
-fn parse_lit_into_ty(cx: &Ctxt, attr_name: Symbol, lit: &syn::Lit) -> Result<syn::Type, ()> {
-    let string = get_lit_str(cx, attr_name, lit)?;
-
-    parse_lit_str(string).map_err(|_| {
-        cx.error_spanned_by(
-            lit,
-            format!("failed to parse type: {} = {:?}", attr_name, string.value()),
-        )
-    })
-}
-
-pub fn parse_lit_str<T>(s: &syn::LitStr) -> parse::Result<T>
-where
-    T: Parse,
-{
-    let tokens = spanned_tokens(s)?;
-    syn::parse2(tokens)
-}
-
-fn spanned_tokens(s: &syn::LitStr) -> parse::Result<TokenStream> {
-    let stream = syn::parse_str(&s.value())?;
-    Ok(respan_token_stream(stream, s.span()))
-}
-
-fn respan_token_stream(stream: TokenStream, span: Span) -> TokenStream {
-    stream.into_iter().map(|token| respan_token_tree(token, span)).collect()
-}
-
-fn respan_token_tree(mut token: TokenTree, span: Span) -> TokenTree {
-    if let TokenTree::Group(g) = &mut token {
-        *g = Group::new(g.delimiter(), respan_token_stream(g.stream(), span));
-    }
-    token.set_span(span);
-    token
-}
-
-fn default_pb_type(ctxt: &Ctxt, ident: &syn::Ident) -> syn::Type {
-    let take_ident = ident.to_string();
-    let lit_str = syn::LitStr::new(&take_ident, ident.span());
-    if let Ok(tokens) = spanned_tokens(&lit_str) {
-        if let Ok(pb_struct_ty) = syn::parse2(tokens) {
-            return pb_struct_ty;
-        }
-    }
-    ctxt.error_spanned_by(ident, format!("❌ Can't find {} protobuf struct", take_ident));
-    panic!()
-}
-
-#[allow(dead_code)]
-pub fn is_option(ty: &syn::Type) -> bool {
-    let path = match ungroup(ty) {
-        syn::Type::Path(ty) => &ty.path,
-        _ => {
-            return false;
-        }
-    };
-    let seg = match path.segments.last() {
-        Some(seg) => seg,
-        None => {
-            return false;
-        }
-    };
-    let args = match &seg.arguments {
-        syn::PathArguments::AngleBracketed(bracketed) => &bracketed.args,
-        _ => {
-            return false;
-        }
-    };
-    seg.ident == "Option" && args.len() == 1
-}
-
-#[allow(dead_code)]
-pub fn ungroup(mut ty: &syn::Type) -> &syn::Type {
-    while let syn::Type::Group(group) = ty {
-        ty = &group.elem;
-    }
-    ty
-}
-
-struct BoolAttr<'c>(ASTAttr<'c, ()>);
-
-impl<'c> BoolAttr<'c> {
-    fn none(cx: &'c Ctxt, name: Symbol) -> Self {
-        BoolAttr(ASTAttr::none(cx, name))
-    }
-
-    fn set_true<A: ToTokens>(&mut self, obj: A) {
-        self.0.set(obj, ());
-    }
-
-    fn get(&self) -> bool {
-        self.0.value.is_some()
-    }
-}

+ 4 - 4
shared-lib/flowy-ast/src/ctxt.rs

@@ -2,13 +2,13 @@ use quote::ToTokens;
 use std::{cell::RefCell, fmt::Display, thread};

 #[derive(Default)]
-pub struct Ctxt {
+pub struct ASTResult {
     errors: RefCell<Option<Vec<syn::Error>>>,
 }

-impl Ctxt {
+impl ASTResult {
     pub fn new() -> Self {
-        Ctxt {
+        ASTResult {
             errors: RefCell::new(Some(Vec::new())),
         }
     }
@@ -34,7 +34,7 @@ impl Ctxt {
     }
 }

-impl Drop for Ctxt {
+impl Drop for ASTResult {
     fn drop(&mut self) {
         if !thread::panicking() && self.errors.borrow().is_some() {
             panic!("forgot to check for errors");

+ 145 - 0
shared-lib/flowy-ast/src/event_attrs.rs

@@ -0,0 +1,145 @@
+use crate::{get_event_meta_items, parse_lit_str, symbol::*, ASTResult};
+
+use syn::{
+    self,
+    Meta::{NameValue, Path},
+    NestedMeta::{Lit, Meta},
+};
+
+#[derive(Debug, Clone)]
+pub struct EventAttrs {
+    input: Option<syn::Path>,
+    output: Option<syn::Path>,
+    error_ty: Option<String>,
+    pub ignore: bool,
+}
+
+#[derive(Debug, Clone)]
+pub struct EventEnumAttrs {
+    pub enum_name: String,
+    pub enum_item_name: String,
+    pub value: String,
+    pub event_attrs: EventAttrs,
+}
+
+impl EventEnumAttrs {
+    pub fn from_ast(
+        ast_result: &ASTResult,
+        ident: &syn::Ident,
+        variant: &syn::Variant,
+        enum_attrs: &[syn::Attribute],
+    ) -> Self {
+        let enum_item_name = variant.ident.to_string();
+        let enum_name = ident.to_string();
+        let mut value = String::new();
+        if variant.discriminant.is_some() {
+            if let syn::Expr::Lit(ref expr_list) = variant.discriminant.as_ref().unwrap().1 {
+                let lit_int = if let syn::Lit::Int(ref int_value) = expr_list.lit {
+                    int_value
+                } else {
+                    unimplemented!()
+                };
+                value = lit_int.base10_digits().to_string();
+            }
+        }
+        let event_attrs = get_event_attrs_from(ast_result, &variant.attrs, enum_attrs);
+        EventEnumAttrs {
+            enum_name,
+            enum_item_name,
+            value,
+            event_attrs,
+        }
+    }
+
+    pub fn event_input(&self) -> Option<syn::Path> {
+        self.event_attrs.input.clone()
+    }
+
+    pub fn event_output(&self) -> Option<syn::Path> {
+        self.event_attrs.output.clone()
+    }
+
+    pub fn event_error(&self) -> String {
+        self.event_attrs.error_ty.as_ref().unwrap().clone()
+    }
+}
+
+fn get_event_attrs_from(
+    ast_result: &ASTResult,
+    variant_attrs: &[syn::Attribute],
+    enum_attrs: &[syn::Attribute],
+) -> EventAttrs {
+    let mut event_attrs = EventAttrs {
+        input: None,
+        output: None,
+        error_ty: None,
+        ignore: false,
+    };
+
+    enum_attrs
+        .iter()
+        .filter(|attr| attr.path.segments.iter().any(|s| s.ident == EVENT_ERR))
+        .for_each(|attr| {
+            if let Ok(NameValue(named_value)) = attr.parse_meta() {
+                if let syn::Lit::Str(s) = named_value.lit {
+                    event_attrs.error_ty = Some(s.value());
+                } else {
+                    eprintln!("❌ {} should not be empty", EVENT_ERR);
+                }
+            } else {
+                eprintln!("❌ Can not find any {} on attr: {:#?}", EVENT_ERR, attr);
+            }
+        });
+
+    let mut extract_event_attr = |attr: &syn::Attribute, meta_item: &syn::NestedMeta| match &meta_item {
+        Meta(NameValue(name_value)) => {
+            if name_value.path == EVENT_INPUT {
+                if let syn::Lit::Str(s) = &name_value.lit {
+                    let input_type = parse_lit_str(s)
+                        .map_err(|_| {
+                            ast_result
+                                .error_spanned_by(s, format!("failed to parse request deserializer {:?}", s.value()))
+                        })
+                        .unwrap();
+                    event_attrs.input = Some(input_type);
+                }
+            }
+
+            if name_value.path == EVENT_OUTPUT {
+                if let syn::Lit::Str(s) = &name_value.lit {
+                    let output_type = parse_lit_str(s)
+                        .map_err(|_| {
+                            ast_result
+                                .error_spanned_by(s, format!("failed to parse response deserializer {:?}", s.value()))
+                        })
+                        .unwrap();
+                    event_attrs.output = Some(output_type);
+                }
+            }
+        }
+        Meta(Path(word)) => {
+            if word == EVENT_IGNORE && attr.path == EVENT {
+                event_attrs.ignore = true;
+            }
+        }
+        Lit(s) => ast_result.error_spanned_by(s, "unexpected attribute"),
+        _ => ast_result.error_spanned_by(meta_item, "unexpected attribute"),
+    };
+
+    let attr_meta_items_info = variant_attrs
+        .iter()
+        .flat_map(|attr| match get_event_meta_items(ast_result, attr) {
+            Ok(items) => Some((attr, items)),
+            Err(_) => None,
+        })
+        .collect::<Vec<(&syn::Attribute, Vec<syn::NestedMeta>)>>();
+
+    for (attr, nested_metas) in attr_meta_items_info {
+        nested_metas
+            .iter()
+            .for_each(|meta_item| extract_event_attr(attr, meta_item))
+    }
+
+    // eprintln!("😁{:#?}", event_attrs);
+    event_attrs
+}
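
For context, a standalone sketch of the #[event(...)] shape this parser consumes, assuming the EVENT/EVENT_INPUT/EVENT_OUTPUT symbols resolve to the literals event, input and output; the FolderEvent enum and payload names are purely illustrative.

use syn::{Meta, NestedMeta};

// Pull the input/output strings from a variant's #[event(...)] attribute.
fn event_io(variant: &syn::Variant) -> (Option<String>, Option<String>) {
    let mut input = None;
    let mut output = None;
    for attr in variant.attrs.iter().filter(|a| a.path.is_ident("event")) {
        if let Ok(Meta::List(list)) = attr.parse_meta() {
            for nested in list.nested {
                if let NestedMeta::Meta(Meta::NameValue(nv)) = nested {
                    if let syn::Lit::Str(s) = nv.lit {
                        if nv.path.is_ident("input") {
                            input = Some(s.value());
                        } else if nv.path.is_ident("output") {
                            output = Some(s.value());
                        }
                    }
                }
            }
        }
    }
    (input, output)
}

fn main() {
    let ast: syn::DeriveInput = syn::parse_str(
        r#"
        enum FolderEvent {
            #[event(input = "CreateViewPayloadPB", output = "ViewPB")]
            CreateView = 0,
        }
        "#,
    )
    .unwrap();
    if let syn::Data::Enum(data) = ast.data {
        for variant in data.variants {
            println!("{:?}", event_io(&variant));
        }
    }
}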

+ 7 - 7
shared-lib/flowy-ast/src/lib.rs

@@ -1,17 +1,17 @@
 #[macro_use]
 extern crate syn;

-#[macro_use]
-extern crate quote;
-
 mod ast;
-mod attr;
 mod ctxt;
+mod pb_attrs;

-pub mod event_ast;
+mod event_attrs;
+mod node_attrs;
 pub mod symbol;
 pub mod ty_ext;
+
 pub use self::{symbol::*, ty_ext::*};
 pub use ast::*;
-pub use attr::*;
-pub use ctxt::Ctxt;
+pub use ctxt::ASTResult;
+pub use event_attrs::*;
+pub use pb_attrs::*;

+ 99 - 0
shared-lib/flowy-ast/src/node_attrs.rs

@@ -0,0 +1,99 @@
+use crate::{get_node_meta_items, parse_lit_into_expr_path, symbol::*, ASTAttr, ASTResult};
+use quote::ToTokens;
+use syn::{
+    self, LitStr,
+    Meta::NameValue,
+    NestedMeta::{Lit, Meta},
+};
+
+pub struct NodeStructAttrs {
+    pub rename: Option<LitStr>,
+    pub has_child: bool,
+    pub child_name: Option<LitStr>,
+    pub child_index: Option<syn::LitInt>,
+    pub get_node_value_with: Option<syn::ExprPath>,
+    pub set_node_value_with: Option<syn::ExprPath>,
+    pub with_children: Option<syn::ExprPath>,
+}
+
+impl NodeStructAttrs {
+    /// Extract out the `#[node(...)]` attributes from a struct field.
+    pub fn from_ast(ast_result: &ASTResult, _index: usize, field: &syn::Field) -> Self {
+        let mut rename = ASTAttr::none(ast_result, RENAME_NODE);
+        let mut child_name = ASTAttr::none(ast_result, CHILD_NODE_NAME);
+        let mut child_index = ASTAttr::none(ast_result, CHILD_NODE_INDEX);
+        let mut get_node_value_with = ASTAttr::none(ast_result, GET_NODE_VALUE_WITH);
+        let mut set_node_value_with = ASTAttr::none(ast_result, SET_NODE_VALUE_WITH);
+        let mut with_children = ASTAttr::none(ast_result, WITH_CHILDREN);
+
+        for meta_item in field
+            .attrs
+            .iter()
+            .flat_map(|attr| get_node_meta_items(ast_result, attr))
+            .flatten()
+        {
+            match &meta_item {
+                // Parse '#[node(rename = x)]'
+                Meta(NameValue(m)) if m.path == RENAME_NODE => {
+                    if let syn::Lit::Str(lit) = &m.lit {
+                        rename.set(&m.path, lit.clone());
+                    }
+                }
+
+                // Parse '#[node(child_name = x)]'
+                Meta(NameValue(m)) if m.path == CHILD_NODE_NAME => {
+                    if let syn::Lit::Str(lit) = &m.lit {
+                        child_name.set(&m.path, lit.clone());
+                    }
+                }
+
+                // Parse '#[node(child_index = x)]'
+                Meta(NameValue(m)) if m.path == CHILD_NODE_INDEX => {
+                    if let syn::Lit::Int(lit) = &m.lit {
+                        child_index.set(&m.path, lit.clone());
+                    }
+                }
+
+                // Parse `#[node(get_node_value_with = "...")]`
+                Meta(NameValue(m)) if m.path == GET_NODE_VALUE_WITH => {
+                    if let Ok(path) = parse_lit_into_expr_path(ast_result, GET_NODE_VALUE_WITH, &m.lit) {
+                        get_node_value_with.set(&m.path, path);
+                    }
+                }
+
+                // Parse `#[node(set_node_value_with= "...")]`
+                Meta(NameValue(m)) if m.path == SET_NODE_VALUE_WITH => {
+                    if let Ok(path) = parse_lit_into_expr_path(ast_result, SET_NODE_VALUE_WITH, &m.lit) {
+                        set_node_value_with.set(&m.path, path);
+                    }
+                }
+
+                // Parse `#[node(with_children= "...")]`
+                Meta(NameValue(m)) if m.path == WITH_CHILDREN => {
+                    if let Ok(path) = parse_lit_into_expr_path(ast_result, WITH_CHILDREN, &m.lit) {
+                        with_children.set(&m.path, path);
+                    }
+                }
+
+                Meta(meta_item) => {
+                    let path = meta_item.path().into_token_stream().to_string().replace(' ', "");
+                    ast_result.error_spanned_by(meta_item.path(), format!("unknown node field attribute `{}`", path));
+                }
+
+                Lit(lit) => {
+                    ast_result.error_spanned_by(lit, "unexpected literal in field attribute");
+                }
+            }
+        }
+        let child_name = child_name.get();
+        NodeStructAttrs {
+            rename: rename.get(),
+            child_index: child_index.get(),
+            has_child: child_name.is_some(),
+            child_name,
+            get_node_value_with: get_node_value_with.get(),
+            set_node_value_with: set_node_value_with.get(),
+            with_children: with_children.get(),
+        }
+    }
+}

+ 444 - 0
shared-lib/flowy-ast/src/pb_attrs.rs

@@ -0,0 +1,444 @@
+#![allow(clippy::all)]
+
+use crate::{symbol::*, ASTResult};
+use proc_macro2::{Group, Span, TokenStream, TokenTree};
+use quote::ToTokens;
+use syn::{
+    self,
+    parse::{self, Parse},
+    Meta::{List, NameValue, Path},
+    NestedMeta::{Lit, Meta},
+};
+
+#[allow(dead_code)]
+pub struct PBAttrsContainer {
+    name: String,
+    pb_struct_type: Option<syn::Type>,
+    pb_enum_type: Option<syn::Type>,
+}
+
+impl PBAttrsContainer {
+    /// Extract out the `#[pb(...)]` attributes from an item.
+    pub fn from_ast(ast_result: &ASTResult, item: &syn::DeriveInput) -> Self {
+        let mut pb_struct_type = ASTAttr::none(ast_result, PB_STRUCT);
+        let mut pb_enum_type = ASTAttr::none(ast_result, PB_ENUM);
+        for meta_item in item
+            .attrs
+            .iter()
+            .flat_map(|attr| get_pb_meta_items(ast_result, attr))
+            .flatten()
+        {
+            match &meta_item {
+                // Parse `#[pb(struct = "Type")]
+                Meta(NameValue(m)) if m.path == PB_STRUCT => {
+                    if let Ok(into_ty) = parse_lit_into_ty(ast_result, PB_STRUCT, &m.lit) {
+                        pb_struct_type.set_opt(&m.path, Some(into_ty));
+                    }
+                }
+
+                // Parse `#[pb(enum = "Type")]
+                Meta(NameValue(m)) if m.path == PB_ENUM => {
+                    if let Ok(into_ty) = parse_lit_into_ty(ast_result, PB_ENUM, &m.lit) {
+                        pb_enum_type.set_opt(&m.path, Some(into_ty));
+                    }
+                }
+
+                Meta(meta_item) => {
+                    let path = meta_item.path().into_token_stream().to_string().replace(' ', "");
+                    ast_result.error_spanned_by(meta_item.path(), format!("unknown container attribute `{}`", path));
+                }
+
+                Lit(lit) => {
+                    ast_result.error_spanned_by(lit, "unexpected literal in container attribute");
+                }
+            }
+        }
+        match &item.data {
+            syn::Data::Struct(_) => {
+                pb_struct_type.set_if_none(default_pb_type(&ast_result, &item.ident));
+            }
+            syn::Data::Enum(_) => {
+                pb_enum_type.set_if_none(default_pb_type(&ast_result, &item.ident));
+            }
+            _ => {}
+        }
+
+        PBAttrsContainer {
+            name: item.ident.to_string(),
+            pb_struct_type: pb_struct_type.get(),
+            pb_enum_type: pb_enum_type.get(),
+        }
+    }
+
+    pub fn pb_struct_type(&self) -> Option<&syn::Type> {
+        self.pb_struct_type.as_ref()
+    }
+
+    pub fn pb_enum_type(&self) -> Option<&syn::Type> {
+        self.pb_enum_type.as_ref()
+    }
+}
+
+pub struct ASTAttr<'c, T> {
+    ast_result: &'c ASTResult,
+    name: Symbol,
+    tokens: TokenStream,
+    value: Option<T>,
+}
+
+impl<'c, T> ASTAttr<'c, T> {
+    pub(crate) fn none(ast_result: &'c ASTResult, name: Symbol) -> Self {
+        ASTAttr {
+            ast_result,
+            name,
+            tokens: TokenStream::new(),
+            value: None,
+        }
+    }
+
+    pub(crate) fn set<A: ToTokens>(&mut self, obj: A, value: T) {
+        let tokens = obj.into_token_stream();
+
+        if self.value.is_some() {
+            self.ast_result
+                .error_spanned_by(tokens, format!("duplicate attribute `{}`", self.name));
+        } else {
+            self.tokens = tokens;
+            self.value = Some(value);
+        }
+    }
+
+    fn set_opt<A: ToTokens>(&mut self, obj: A, value: Option<T>) {
+        if let Some(value) = value {
+            self.set(obj, value);
+        }
+    }
+
+    pub(crate) fn set_if_none(&mut self, value: T) {
+        if self.value.is_none() {
+            self.value = Some(value);
+        }
+    }
+
+    pub(crate) fn get(self) -> Option<T> {
+        self.value
+    }
+
+    #[allow(dead_code)]
+    fn get_with_tokens(self) -> Option<(TokenStream, T)> {
+        match self.value {
+            Some(v) => Some((self.tokens, v)),
+            None => None,
+        }
+    }
+}
+
+pub struct PBStructAttrs {
+    #[allow(dead_code)]
+    name: String,
+    pb_index: Option<syn::LitInt>,
+    pb_one_of: bool,
+    skip_pb_serializing: bool,
+    skip_pb_deserializing: bool,
+    serialize_pb_with: Option<syn::ExprPath>,
+    deserialize_pb_with: Option<syn::ExprPath>,
+}
+
+pub fn is_recognizable_field(field: &syn::Field) -> bool {
+    field.attrs.iter().any(|attr| is_recognizable_attribute(attr))
+}
+
+impl PBStructAttrs {
+    /// Extract out the `#[pb(...)]` attributes from a struct field.
+    pub fn from_ast(ast_result: &ASTResult, index: usize, field: &syn::Field) -> Self {
+        let mut pb_index = ASTAttr::none(ast_result, PB_INDEX);
+        let mut pb_one_of = BoolAttr::none(ast_result, PB_ONE_OF);
+        let mut serialize_pb_with = ASTAttr::none(ast_result, SERIALIZE_PB_WITH);
+        let mut skip_pb_serializing = BoolAttr::none(ast_result, SKIP_PB_SERIALIZING);
+        let mut deserialize_pb_with = ASTAttr::none(ast_result, DESERIALIZE_PB_WITH);
+        let mut skip_pb_deserializing = BoolAttr::none(ast_result, SKIP_PB_DESERIALIZING);
+
+        let ident = match &field.ident {
+            Some(ident) => ident.to_string(),
+            None => index.to_string(),
+        };
+
+        for meta_item in field
+            .attrs
+            .iter()
+            .flat_map(|attr| get_pb_meta_items(ast_result, attr))
+            .flatten()
+        {
+            match &meta_item {
+                // Parse `#[pb(skip)]`
+                Meta(Path(word)) if word == SKIP => {
+                    skip_pb_serializing.set_true(word);
+                    skip_pb_deserializing.set_true(word);
+                }
+
+                // Parse '#[pb(index = x)]'
+                Meta(NameValue(m)) if m.path == PB_INDEX => {
+                    if let syn::Lit::Int(lit) = &m.lit {
+                        pb_index.set(&m.path, lit.clone());
+                    }
+                }
+
+                // Parse `#[pb(one_of)]`
+                Meta(Path(path)) if path == PB_ONE_OF => {
+                    pb_one_of.set_true(path);
+                }
+
+                // Parse `#[pb(serialize_pb_with = "...")]`
+                Meta(NameValue(m)) if m.path == SERIALIZE_PB_WITH => {
+                    if let Ok(path) = parse_lit_into_expr_path(ast_result, SERIALIZE_PB_WITH, &m.lit) {
+                        serialize_pb_with.set(&m.path, path);
+                    }
+                }
+
+                // Parse `#[pb(deserialize_pb_with = "...")]`
+                Meta(NameValue(m)) if m.path == DESERIALIZE_PB_WITH => {
+                    if let Ok(path) = parse_lit_into_expr_path(ast_result, DESERIALIZE_PB_WITH, &m.lit) {
+                        deserialize_pb_with.set(&m.path, path);
+                    }
+                }
+
+                Meta(meta_item) => {
+                    let path = meta_item.path().into_token_stream().to_string().replace(' ', "");
+                    ast_result.error_spanned_by(meta_item.path(), format!("unknown pb field attribute `{}`", path));
+                }
+
+                Lit(lit) => {
+                    ast_result.error_spanned_by(lit, "unexpected literal in field attribute");
+                }
+            }
+        }
+
+        PBStructAttrs {
+            name: ident,
+            pb_index: pb_index.get(),
+            pb_one_of: pb_one_of.get(),
+            skip_pb_serializing: skip_pb_serializing.get(),
+            skip_pb_deserializing: skip_pb_deserializing.get(),
+            serialize_pb_with: serialize_pb_with.get(),
+            deserialize_pb_with: deserialize_pb_with.get(),
+        }
+    }
+
+    #[allow(dead_code)]
+    pub fn pb_index(&self) -> Option<String> {
+        self.pb_index.as_ref().map(|lit| lit.base10_digits().to_string())
+    }
+
+    pub fn is_one_of(&self) -> bool {
+        self.pb_one_of
+    }
+
+    pub fn serialize_pb_with(&self) -> Option<&syn::ExprPath> {
+        self.serialize_pb_with.as_ref()
+    }
+
+    pub fn deserialize_pb_with(&self) -> Option<&syn::ExprPath> {
+        self.deserialize_pb_with.as_ref()
+    }
+
+    pub fn skip_pb_serializing(&self) -> bool {
+        self.skip_pb_serializing
+    }
+
+    pub fn skip_pb_deserializing(&self) -> bool {
+        self.skip_pb_deserializing
+    }
+}
+
+pub enum Default {
+    /// Field must always be specified because it does not have a default.
+    None,
+    /// The default is given by `std::default::Default::default()`.
+    Default,
+    /// The default is given by this function.
+    Path(syn::ExprPath),
+}
+
+pub fn is_recognizable_attribute(attr: &syn::Attribute) -> bool {
+    attr.path == PB_ATTRS || attr.path == EVENT || attr.path == NODE_ATTRS || attr.path == NODES_ATTRS
+}
+
+pub fn get_pb_meta_items(cx: &ASTResult, attr: &syn::Attribute) -> Result<Vec<syn::NestedMeta>, ()> {
+    // Only handle the attribute that we have defined
+    if attr.path != PB_ATTRS {
+        return Ok(vec![]);
+    }
+
+    // http://strymon.systems.ethz.ch/typename/syn/enum.Meta.html
+    match attr.parse_meta() {
+        Ok(List(meta)) => Ok(meta.nested.into_iter().collect()),
+        Ok(other) => {
+            cx.error_spanned_by(other, "expected #[pb(...)]");
+            Err(())
+        }
+        Err(err) => {
+            cx.error_spanned_by(attr, "attribute must be str, e.g. #[pb(xx = \"xxx\")]");
+            cx.syn_error(err);
+            Err(())
+        }
+    }
+}
+
+pub fn get_node_meta_items(cx: &ASTResult, attr: &syn::Attribute) -> Result<Vec<syn::NestedMeta>, ()> {
+    // Only handle the attribute that we have defined
+    if attr.path != NODE_ATTRS && attr.path != NODES_ATTRS {
+        return Ok(vec![]);
+    }
+
+    // http://strymon.systems.ethz.ch/typename/syn/enum.Meta.html
+    match attr.parse_meta() {
+        Ok(List(meta)) => Ok(meta.nested.into_iter().collect()),
+        Ok(_) => Ok(vec![]),
+        Err(err) => {
+            cx.error_spanned_by(attr, "attribute must be str, e.g. #[node(xx = \"xxx\")]");
+            cx.syn_error(err);
+            Err(())
+        }
+    }
+}
+
+pub fn get_event_meta_items(cx: &ASTResult, attr: &syn::Attribute) -> Result<Vec<syn::NestedMeta>, ()> {
+    // Only handle the attribute that we have defined
+    if attr.path != EVENT {
+        return Ok(vec![]);
+    }
+
+    // http://strymon.systems.ethz.ch/typename/syn/enum.Meta.html
+    match attr.parse_meta() {
+        Ok(List(meta)) => Ok(meta.nested.into_iter().collect()),
+        Ok(other) => {
+            cx.error_spanned_by(other, "expected #[event(...)]");
+            Err(())
+        }
+        Err(err) => {
+            cx.error_spanned_by(attr, "attribute must be str, e.g. #[event(xx = \"xxx\")]");
+            cx.syn_error(err);
+            Err(())
+        }
+    }
+}
+
+pub fn parse_lit_into_expr_path(
+    ast_result: &ASTResult,
+    attr_name: Symbol,
+    lit: &syn::Lit,
+) -> Result<syn::ExprPath, ()> {
+    let string = get_lit_str(ast_result, attr_name, lit)?;
+    parse_lit_str(string)
+        .map_err(|_| ast_result.error_spanned_by(lit, format!("failed to parse path: {:?}", string.value())))
+}
+
+fn get_lit_str<'a>(ast_result: &ASTResult, attr_name: Symbol, lit: &'a syn::Lit) -> Result<&'a syn::LitStr, ()> {
+    if let syn::Lit::Str(lit) = lit {
+        Ok(lit)
+    } else {
+        ast_result.error_spanned_by(
+            lit,
+            format!(
+                "expected pb {} attribute to be a string: `{} = \"...\"`",
+                attr_name, attr_name
+            ),
+        );
+        Err(())
+    }
+}
+
+fn parse_lit_into_ty(ast_result: &ASTResult, attr_name: Symbol, lit: &syn::Lit) -> Result<syn::Type, ()> {
+    let string = get_lit_str(ast_result, attr_name, lit)?;
+
+    parse_lit_str(string).map_err(|_| {
+        ast_result.error_spanned_by(
+            lit,
+            format!("failed to parse type: {} = {:?}", attr_name, string.value()),
+        )
+    })
+}
+
+pub fn parse_lit_str<T>(s: &syn::LitStr) -> parse::Result<T>
+where
+    T: Parse,
+{
+    let tokens = spanned_tokens(s)?;
+    syn::parse2(tokens)
+}
+
+fn spanned_tokens(s: &syn::LitStr) -> parse::Result<TokenStream> {
+    let stream = syn::parse_str(&s.value())?;
+    Ok(respan_token_stream(stream, s.span()))
+}
+
+fn respan_token_stream(stream: TokenStream, span: Span) -> TokenStream {
+    stream.into_iter().map(|token| respan_token_tree(token, span)).collect()
+}
+
+fn respan_token_tree(mut token: TokenTree, span: Span) -> TokenTree {
+    if let TokenTree::Group(g) = &mut token {
+        *g = Group::new(g.delimiter(), respan_token_stream(g.stream(), span));
+    }
+    token.set_span(span);
+    token
+}
+
+fn default_pb_type(ast_result: &ASTResult, ident: &syn::Ident) -> syn::Type {
+    let take_ident = ident.to_string();
+    let lit_str = syn::LitStr::new(&take_ident, ident.span());
+    if let Ok(tokens) = spanned_tokens(&lit_str) {
+        if let Ok(pb_struct_ty) = syn::parse2(tokens) {
+            return pb_struct_ty;
+        }
+    }
+    ast_result.error_spanned_by(ident, format!("❌ Can't find {} protobuf struct", take_ident));
+    panic!()
+}
+
+#[allow(dead_code)]
+pub fn is_option(ty: &syn::Type) -> bool {
+    let path = match ungroup(ty) {
+        syn::Type::Path(ty) => &ty.path,
+        _ => {
+            return false;
+        }
+    };
+    let seg = match path.segments.last() {
+        Some(seg) => seg,
+        None => {
+            return false;
+        }
+    };
+    let args = match &seg.arguments {
+        syn::PathArguments::AngleBracketed(bracketed) => &bracketed.args,
+        _ => {
+            return false;
+        }
+    };
+    seg.ident == "Option" && args.len() == 1
+}
+
+#[allow(dead_code)]
+pub fn ungroup(mut ty: &syn::Type) -> &syn::Type {
+    while let syn::Type::Group(group) = ty {
+        ty = &group.elem;
+    }
+    ty
+}
+
+struct BoolAttr<'c>(ASTAttr<'c, ()>);
+
+impl<'c> BoolAttr<'c> {
+    fn none(ast_result: &'c ASTResult, name: Symbol) -> Self {
+        BoolAttr(ASTAttr::none(ast_result, name))
+    }
+
+    fn set_true<A: ToTokens>(&mut self, obj: A) {
+        self.0.set(obj, ());
+    }
+
+    fn get(&self) -> bool {
+        self.0.value.is_some()
+    }
+}

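For orientation, here is a hedged sketch of how the field-level `#[pb(...)]` attributes parsed by `PBStructAttrs` above are typically written. The struct, field, and helper names are illustrative and not part of this diff, and the `ProtoBuf` derive name is an assumption based on the rest of flowy-derive.

```rust
// Illustrative only: assumes `ProtoBuf` is the derive exported by flowy-derive
// and that a matching message exists in crate::protobuf.
#[derive(ProtoBuf, Default)]
pub struct CreateWorkspacePayload {
    // `index` maps the field to a protobuf field number.
    #[pb(index = 1)]
    pub name: String,

    // `one_of` marks an optional (oneof-wrapped) protobuf field.
    #[pb(index = 2, one_of)]
    pub desc: Option<String>,

    // Custom helpers named by the renamed *_pb_with attributes.
    #[pb(index = 3, serialize_pb_with = "serialize_members", deserialize_pb_with = "deserialize_members")]
    pub members: Vec<String>,

    // Skipped entirely for protobuf serialization and deserialization.
    #[pb(skip)]
    pub local_cache: bool,
}
```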
+ 36 - 9
shared-lib/flowy-ast/src/symbol.rs

@@ -3,23 +3,50 @@ use syn::{Ident, Path};
 
 #[derive(Copy, Clone)]
 pub struct Symbol(&'static str);
+
+// Protobuf
 pub const PB_ATTRS: Symbol = Symbol("pb");
-pub const SKIP: Symbol = Symbol("skip"); //#[pb(skip)]
-pub const PB_INDEX: Symbol = Symbol("index"); //#[pb(index = "1")]
-pub const PB_ONE_OF: Symbol = Symbol("one_of"); //#[pb(one_of)]
-pub const DESERIALIZE_WITH: Symbol = Symbol("deserialize_with");
-pub const SKIP_DESERIALIZING: Symbol = Symbol("skip_deserializing");
-pub const SERIALIZE_WITH: Symbol = Symbol("serialize_with"); //#[pb(serialize_with = "...")]
-pub const SKIP_SERIALIZING: Symbol = Symbol("skip_serializing"); //#[pb(skip_serializing)]
-pub const PB_STRUCT: Symbol = Symbol("struct"); //#[pb(struct="some struct")]
-pub const PB_ENUM: Symbol = Symbol("enum"); //#[pb(enum="some enum")]
+//#[pb(skip)]
+pub const SKIP: Symbol = Symbol("skip");
+//#[pb(index = "1")]
+pub const PB_INDEX: Symbol = Symbol("index");
+//#[pb(one_of)]
+pub const PB_ONE_OF: Symbol = Symbol("one_of");
+//#[pb(skip_pb_deserializing = "...")]
+pub const SKIP_PB_DESERIALIZING: Symbol = Symbol("skip_pb_deserializing");
+//#[pb(skip_pb_serializing)]
+pub const SKIP_PB_SERIALIZING: Symbol = Symbol("skip_pb_serializing");
+//#[pb(serialize_pb_with = "...")]
+pub const SERIALIZE_PB_WITH: Symbol = Symbol("serialize_pb_with");
+//#[pb(deserialize_pb_with = "...")]
+pub const DESERIALIZE_PB_WITH: Symbol = Symbol("deserialize_pb_with");
+//#[pb(struct="some struct")]
+pub const PB_STRUCT: Symbol = Symbol("struct");
+//#[pb(enum="some enum")]
+pub const PB_ENUM: Symbol = Symbol("enum");
 
+// Event
 pub const EVENT_INPUT: Symbol = Symbol("input");
 pub const EVENT_OUTPUT: Symbol = Symbol("output");
 pub const EVENT_IGNORE: Symbol = Symbol("ignore");
 pub const EVENT: Symbol = Symbol("event");
 pub const EVENT_ERR: Symbol = Symbol("event_err");
 
+// Node
+pub const NODE_ATTRS: Symbol = Symbol("node");
+pub const NODES_ATTRS: Symbol = Symbol("nodes");
+pub const NODE_TYPE: Symbol = Symbol("node_type");
+pub const NODE_INDEX: Symbol = Symbol("index");
+pub const RENAME_NODE: Symbol = Symbol("rename");
+pub const CHILD_NODE_NAME: Symbol = Symbol("child_name");
+pub const CHILD_NODE_INDEX: Symbol = Symbol("child_index");
+pub const SKIP_NODE_ATTRS: Symbol = Symbol("skip_node_attribute");
+pub const GET_NODE_VALUE_WITH: Symbol = Symbol("get_value_with");
+pub const SET_NODE_VALUE_WITH: Symbol = Symbol("set_value_with");
+pub const GET_VEC_ELEMENT_WITH: Symbol = Symbol("get_element_with");
+pub const GET_MUT_VEC_ELEMENT_WITH: Symbol = Symbol("get_mut_element_with");
+pub const WITH_CHILDREN: Symbol = Symbol("with_children");
+
 impl PartialEq<Symbol> for Ident {
 impl PartialEq<Symbol> for Ident {
     fn eq(&self, word: &Symbol) -> bool {
         self == word.0

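A hedged illustration of where the container-level `struct` / `enum` symbols above land in user code; the type names are made up, and the `ProtoBuf` / `ProtoBuf_Enum` derive names are assumptions drawn from the rest of the crate.

```rust
// When the Rust item name differs from the generated protobuf type,
// #[pb(struct = "...")] / #[pb(enum = "...")] override the mapping;
// otherwise default_pb_type in pb_attrs.rs falls back to the item's own identifier.
#[derive(ProtoBuf, Default)]
#[pb(struct = "Workspace")]
pub struct WorkspaceRevision {
    #[pb(index = 1)]
    pub id: String,
}

#[derive(ProtoBuf_Enum)]
#[pb(enum = "ViewType")]
pub enum ViewTypeRevision {
    Grid = 0,
}
```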
+ 16 - 14
shared-lib/flowy-ast/src/ty_ext.rs

@@ -1,4 +1,4 @@
-use crate::Ctxt;
+use crate::ASTResult;
 use syn::{self, AngleBracketedGenericArguments, PathSegment};
 
 #[derive(Eq, PartialEq, Debug)]
@@ -41,7 +41,7 @@ impl<'a> TyInfo<'a> {
     }
 }
 
-pub fn parse_ty<'a>(ctxt: &Ctxt, ty: &'a syn::Type) -> Result<Option<TyInfo<'a>>, String> {
+pub fn parse_ty<'a>(ast_result: &ASTResult, ty: &'a syn::Type) -> Result<Option<TyInfo<'a>>, String> {
     // Type -> TypePath -> Path -> PathSegment -> PathArguments ->
     // AngleBracketedGenericArguments -> GenericArgument -> Type.
     if let syn::Type::Path(ref p) = ty {
@@ -58,11 +58,13 @@ pub fn parse_ty<'a>(ctxt: &Ctxt, ty: &'a syn::Type) -> Result<Option<TyInfo<'a>>
 
         return if let syn::PathArguments::AngleBracketed(ref bracketed) = seg.arguments {
             match seg.ident.to_string().as_ref() {
-                "HashMap" => generate_hashmap_ty_info(ctxt, ty, seg, bracketed),
-                "Vec" => generate_vec_ty_info(ctxt, seg, bracketed),
-                "Option" => generate_option_ty_info(ctxt, ty, seg, bracketed),
+                "HashMap" => generate_hashmap_ty_info(ast_result, ty, seg, bracketed),
+                "Vec" => generate_vec_ty_info(ast_result, seg, bracketed),
+                "Option" => generate_option_ty_info(ast_result, ty, seg, bracketed),
                 _ => {
-                    return Err(format!("Unsupported ty {}", seg.ident));
+                    let msg = format!("Unsupported type: {}", seg.ident);
+                    ast_result.error_spanned_by(&seg.ident, &msg);
+                    return Err(msg);
                 }
             }
         } else {
@@ -92,7 +94,7 @@ fn parse_bracketed(bracketed: &AngleBracketedGenericArguments) -> Vec<&syn::Type
 }
 
 pub fn generate_hashmap_ty_info<'a>(
-    ctxt: &Ctxt,
+    ast_result: &ASTResult,
     ty: &'a syn::Type,
     path_segment: &'a PathSegment,
     bracketed: &'a AngleBracketedGenericArguments,
@@ -102,9 +104,9 @@ pub fn generate_hashmap_ty_info<'a>(
         return Ok(None);
     }
     let types = parse_bracketed(bracketed);
-    let key = parse_ty(ctxt, types[0])?.unwrap().ident.to_string();
-    let value = parse_ty(ctxt, types[1])?.unwrap().ident.to_string();
-    let bracket_ty_info = Box::new(parse_ty(ctxt, types[1])?);
+    let key = parse_ty(ast_result, types[0])?.unwrap().ident.to_string();
+    let value = parse_ty(ast_result, types[1])?.unwrap().ident.to_string();
+    let bracket_ty_info = Box::new(parse_ty(ast_result, types[1])?);
     Ok(Some(TyInfo {
         ident: &path_segment.ident,
         ty,
@@ -114,14 +116,14 @@ pub fn generate_hashmap_ty_info<'a>(
 }
 
 fn generate_option_ty_info<'a>(
-    ctxt: &Ctxt,
+    ast_result: &ASTResult,
     ty: &'a syn::Type,
     path_segment: &'a PathSegment,
     bracketed: &'a AngleBracketedGenericArguments,
 ) -> Result<Option<TyInfo<'a>>, String> {
     assert_eq!(path_segment.ident.to_string(), "Option".to_string());
     let types = parse_bracketed(bracketed);
-    let bracket_ty_info = Box::new(parse_ty(ctxt, types[0])?);
+    let bracket_ty_info = Box::new(parse_ty(ast_result, types[0])?);
     Ok(Some(TyInfo {
         ident: &path_segment.ident,
         ty,
@@ -131,7 +133,7 @@ fn generate_option_ty_info<'a>(
 }
 
 fn generate_vec_ty_info<'a>(
-    ctxt: &Ctxt,
+    ast_result: &ASTResult,
     path_segment: &'a PathSegment,
     bracketed: &'a AngleBracketedGenericArguments,
 ) -> Result<Option<TyInfo<'a>>, String> {
@@ -139,7 +141,7 @@ fn generate_vec_ty_info<'a>(
         return Ok(None);
     }
     if let syn::GenericArgument::Type(ref bracketed_type) = bracketed.args.first().unwrap() {
-        let bracketed_ty_info = Box::new(parse_ty(ctxt, bracketed_type)?);
+        let bracketed_ty_info = Box::new(parse_ty(ast_result, bracketed_type)?);
         return Ok(Some(TyInfo {
             ident: &path_segment.ident,
             ty: bracketed_type,

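A rough usage sketch of the renamed `parse_ty` entry point, assuming `TyInfo` keeps the `ident` / `bracket_ty_info` shape shown in this hunk and is re-exported from `flowy_ast` the same way the other call sites in this PR use it.

```rust
#[cfg(test)]
mod ty_ext_sketch {
    use flowy_ast::*;

    #[test]
    fn vec_inner_type_is_unwrapped() {
        let ast_result = ASTResult::new();
        let ty: syn::Type = syn::parse_str("Vec<String>").unwrap();

        // "Vec<String>" should decompose into ident "Vec" plus a bracketed inner type.
        let info = parse_ty(&ast_result, &ty).unwrap().unwrap();
        assert_eq!(info.ident.to_string(), "Vec");

        if let Some(inner) = &*info.bracket_ty_info {
            assert_eq!(inner.ident.to_string(), "String");
        }
    }
}
```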
+ 49 - 0
shared-lib/flowy-codegen/Cargo.toml

@@ -0,0 +1,49 @@
+[package]
+name = "flowy-codegen"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+log = "0.4.14"
+serde = { version = "1.0", features = ["derive"]}
+serde_json = "1.0"
+flowy-ast = { path = "../flowy-ast"}
+quote = "1.0"
+
+cmd_lib = { version = "1", optional = true }
+protoc-rust = { version = "2", optional = true }
+walkdir = { version = "2", optional = true }
+similar = { version = "1.2.2", optional = true }
+syn = { version = "1.0.60", features = ["extra-traits", "parsing", "derive", "full"], optional = true }
+fancy-regex = { version = "0.10.0", optional = true }
+lazy_static = { version = "1.4.0", optional = true }
+tera = { version = "1.5.0", optional = true}
+itertools = { version = "0.10", optional = true }
+phf = { version = "0.8.0", features = ["macros"], optional = true }
+console = {version = "0.14.0", optional = true}
+protoc-bin-vendored = { version = "3.0", optional = true }
+toml = {version = "0.5.8", optional = true}
+
+
+
+[features]
+proto_gen = [
+    "similar",
+    "syn",
+    "fancy-regex",
+    "lazy_static",
+    "tera",
+    "itertools",
+    "phf",
+    "walkdir",
+    "console",
+    "toml",
+    "cmd_lib",
+    "protoc-rust",
+    "walkdir",
+    "protoc-bin-vendored",
+]
+dart_event = ["walkdir", "tera", "syn"]
+dart = ["proto_gen", "dart_event"]

+ 9 - 8
shared-lib/flowy-ast/src/event_ast.rs → shared-lib/flowy-codegen/src/dart_event/ast.rs

@@ -1,4 +1,5 @@
-use crate::ASTEnumAttrVariant;
+use flowy_ast::EventEnumAttrs;
+use quote::format_ident;
 
 pub struct EventASTContext {
     pub event: syn::Ident,
@@ -10,21 +11,21 @@ pub struct EventASTContext {
 }
 
 impl EventASTContext {
-    pub fn from(variant: &ASTEnumAttrVariant) -> EventASTContext {
-        let command_name = variant.enum_item_name.clone();
+    pub fn from(enum_attrs: &EventEnumAttrs) -> EventASTContext {
+        let command_name = enum_attrs.enum_item_name.clone();
         if command_name.is_empty() {
-            panic!("Invalid command name: {}", variant.enum_item_name);
+            panic!("Invalid command name: {}", enum_attrs.enum_item_name);
         }
 
         let event = format_ident!("{}", &command_name);
         let splits = command_name.split('_').collect::<Vec<&str>>();
 
-        let event_ty = format_ident!("{}", variant.enum_name);
+        let event_ty = format_ident!("{}", enum_attrs.enum_name);
         let event_request_struct = format_ident!("{}Event", &splits.join(""));
 
-        let event_input = variant.event_input();
-        let event_output = variant.event_output();
-        let event_error = variant.event_error();
+        let event_input = enum_attrs.event_input();
+        let event_output = enum_attrs.event_output();
+        let event_error = enum_attrs.event_error();
 
         EventASTContext {
             event,

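For reference, the sort of annotated enum `EventASTContext::from` now consumes through `EventEnumAttrs`. The derive names and payload types below are assumptions drawn from how `event`, `input`, `output`, and `event_err` are used elsewhere in the codebase; nothing here is added by this file.

```rust
// Hypothetical event enum feeding the Dart event codegen.
#[derive(Clone, Copy, ProtoBuf_Enum, Flowy_Event)]
#[event_err = "FlowyError"]
pub enum FolderEvent {
    // input/output name the request and response protobuf structs.
    #[event(input = "CreateWorkspacePayload", output = "Workspace")]
    CreateWorkspace = 0,

    // A variant with no payload in either direction.
    #[event()]
    ReadCurrentWorkspace = 1,
}
```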
+ 13 - 8
shared-lib/lib-infra/src/code_gen/dart_event/dart_event.rs → shared-lib/flowy-codegen/src/dart_event/dart_event.rs

@@ -1,7 +1,8 @@
 use super::event_template::*;
-use crate::code_gen::flowy_toml::{parse_crate_config_from, CrateConfig};
-use crate::code_gen::util::{is_crate_dir, is_hidden, path_string_with_component, read_file};
-use flowy_ast::{event_ast::*, *};
+use crate::dart_event::ast::EventASTContext;
+use crate::flowy_toml::{parse_crate_config_from, CrateConfig};
+use crate::util::{is_crate_dir, is_hidden, path_string_with_component, read_file};
+use flowy_ast::ASTResult;
 use std::fs::File;
 use std::io::Write;
 use std::path::PathBuf;
@@ -117,15 +118,19 @@ pub fn parse_event_crate(event_crate: &DartEventCrate) -> Vec<EventASTContext> {
                 .iter()
                 .map(|item| match item {
                     Item::Enum(item_enum) => {
-                        let ctxt = Ctxt::new();
-                        let attrs =
-                            flowy_ast::enum_from_ast(&ctxt, &item_enum.ident, &item_enum.variants, &item_enum.attrs);
-                        ctxt.check().unwrap();
+                        let ast_result = ASTResult::new();
+                        let attrs = flowy_ast::enum_from_ast(
+                            &ast_result,
+                            &item_enum.ident,
+                            &item_enum.variants,
+                            &item_enum.attrs,
+                        );
+                        ast_result.check().unwrap();
                         attrs
                             .iter()
                             .filter(|attr| !attr.attrs.event_attrs.ignore)
                             .enumerate()
-                            .map(|(_index, attr)| EventASTContext::from(&attr.attrs))
+                            .map(|(_index, variant)| EventASTContext::from(&variant.attrs))
                             .collect::<Vec<_>>()
                     }
                     _ => vec![],

+ 1 - 1
shared-lib/lib-infra/src/code_gen/dart_event/event_template.rs → shared-lib/flowy-codegen/src/dart_event/event_template.rs

@@ -1,4 +1,4 @@
-use crate::code_gen::util::get_tera;
+use crate::util::get_tera;
 use tera::Context;
 
 pub struct EventTemplate {

+ 0 - 0
shared-lib/lib-infra/src/code_gen/dart_event/event_template.tera → shared-lib/flowy-codegen/src/dart_event/event_template.tera


+ 1 - 0
frontend/scripts/flowy-tool/src/dart_event/mod.rs → shared-lib/flowy-codegen/src/dart_event/mod.rs

@@ -1,4 +1,5 @@
 #![allow(clippy::module_inception)]
+mod ast;
 mod dart_event;
 mod event_template;
 

+ 0 - 0
shared-lib/lib-infra/src/code_gen/flowy_toml.rs → shared-lib/flowy-codegen/src/flowy_toml.rs


+ 0 - 0
shared-lib/lib-infra/src/code_gen/mod.rs → shared-lib/flowy-codegen/src/lib.rs


+ 11 - 11
shared-lib/lib-infra/src/code_gen/protobuf_file/ast.rs → shared-lib/flowy-codegen/src/protobuf_file/ast.rs

@@ -2,9 +2,9 @@
 #![allow(dead_code)]
 #![allow(unused_imports)]
 #![allow(unused_results)]
-use crate::code_gen::protobuf_file::template::{EnumTemplate, StructTemplate, RUST_TYPE_MAP};
-use crate::code_gen::protobuf_file::{parse_crate_info_from_path, ProtoFile, ProtobufCrateContext};
-use crate::code_gen::util::*;
+use crate::protobuf_file::template::{EnumTemplate, StructTemplate, RUST_TYPE_MAP};
+use crate::protobuf_file::{parse_crate_info_from_path, ProtoFile, ProtobufCrateContext};
+use crate::util::*;
 use fancy_regex::Regex;
 use flowy_ast::*;
 use lazy_static::lazy_static;
@@ -66,7 +66,7 @@ fn parse_files_protobuf(proto_crate_path: &Path, proto_output_path: &Path) -> Ve
 
             s.fields
                 .iter()
-                .filter(|field| field.attrs.pb_index().is_some())
+                .filter(|field| field.pb_attrs.pb_index().is_some())
                 .for_each(|field| {
                     ref_types.push(field.ty_as_str());
                     struct_template.set_field(field);
@@ -115,13 +115,13 @@ pub fn get_ast_structs(ast: &syn::File) -> Vec<Struct> {
     // let mut content = format!("{:#?}", &ast);
     // let mut file = File::create("./foo.txt").unwrap();
     // file.write_all(content.as_bytes()).unwrap();
-    let ctxt = Ctxt::new();
+    let ast_result = ASTResult::new();
     let mut proto_structs: Vec<Struct> = vec![];
     ast.items.iter().for_each(|item| {
         if let Item::Struct(item_struct) = item {
-            let (_, fields) = struct_from_ast(&ctxt, &item_struct.fields);
+            let (_, fields) = struct_from_ast(&ast_result, &item_struct.fields);
 
-            if fields.iter().filter(|f| f.attrs.pb_index().is_some()).count() > 0 {
+            if fields.iter().filter(|f| f.pb_attrs.pb_index().is_some()).count() > 0 {
                 proto_structs.push(Struct {
                     name: item_struct.ident.to_string(),
                     fields,
@@ -129,25 +129,25 @@ pub fn get_ast_structs(ast: &syn::File) -> Vec<Struct> {
             }
         }
     });
-    ctxt.check().unwrap();
+    ast_result.check().unwrap();
     proto_structs
 }
 
 pub fn get_ast_enums(ast: &syn::File) -> Vec<FlowyEnum> {
     let mut flowy_enums: Vec<FlowyEnum> = vec![];
-    let ctxt = Ctxt::new();
+    let ast_result = ASTResult::new();
 
     ast.items.iter().for_each(|item| {
         // https://docs.rs/syn/1.0.54/syn/enum.Item.html
         if let Item::Enum(item_enum) = item {
-            let attrs = flowy_ast::enum_from_ast(&ctxt, &item_enum.ident, &item_enum.variants, &ast.attrs);
+            let attrs = flowy_ast::enum_from_ast(&ast_result, &item_enum.ident, &item_enum.variants, &ast.attrs);
             flowy_enums.push(FlowyEnum {
                 name: item_enum.ident.to_string(),
                 attrs,
             });
         }
     });
-    ctxt.check().unwrap();
+    ast_result.check().unwrap();
     flowy_enums
 }
 

+ 1 - 1
shared-lib/lib-infra/src/code_gen/protobuf_file/mod.rs → shared-lib/flowy-codegen/src/protobuf_file/mod.rs

@@ -6,7 +6,7 @@ mod proto_gen;
 mod proto_info;
 mod template;
 
-use crate::code_gen::util::path_string_with_component;
+use crate::util::path_string_with_component;
 use itertools::Itertools;
 use log::info;
 pub use proto_gen::*;

+ 5 - 5
shared-lib/lib-infra/src/code_gen/protobuf_file/proto_gen.rs → shared-lib/flowy-codegen/src/protobuf_file/proto_gen.rs

@@ -2,11 +2,11 @@
 #![allow(dead_code)]
 #![allow(unused_imports)]
 #![allow(unused_results)]
-use crate::code_gen::protobuf_file::ast::parse_protobuf_context_from;
-use crate::code_gen::protobuf_file::proto_info::ProtobufCrateContext;
-use crate::code_gen::protobuf_file::ProtoFile;
-use crate::code_gen::util::*;
-use crate::code_gen::ProtoCache;
+use crate::protobuf_file::ast::parse_protobuf_context_from;
+use crate::protobuf_file::proto_info::ProtobufCrateContext;
+use crate::protobuf_file::ProtoFile;
+use crate::util::*;
+use crate::ProtoCache;
 use std::collections::HashMap;
 use std::fs::File;
 use std::path::Path;

+ 2 - 2
shared-lib/lib-infra/src/code_gen/protobuf_file/proto_info.rs → shared-lib/flowy-codegen/src/protobuf_file/proto_info.rs

@@ -1,6 +1,6 @@
 #![allow(dead_code)]
-use crate::code_gen::flowy_toml::{parse_crate_config_from, CrateConfig, FlowyConfig};
-use crate::code_gen::util::*;
+use crate::flowy_toml::{parse_crate_config_from, CrateConfig, FlowyConfig};
+use crate::util::*;
 use std::fs::OpenOptions;
 use std::io::Write;
 use std::path::PathBuf;

+ 1 - 1
shared-lib/lib-infra/src/code_gen/protobuf_file/template/derive_meta/derive_meta.rs → shared-lib/flowy-codegen/src/protobuf_file/template/derive_meta/derive_meta.rs

@@ -1,4 +1,4 @@
-use crate::code_gen::util::get_tera;
+use crate::util::get_tera;
 use itertools::Itertools;
 use tera::Context;
 

+ 0 - 0
frontend/scripts/flowy-tool/src/proto/template/derive_meta/derive_meta.tera → shared-lib/flowy-codegen/src/protobuf_file/template/derive_meta/derive_meta.tera


+ 0 - 0
frontend/scripts/flowy-tool/src/proto/template/derive_meta/mod.rs → shared-lib/flowy-codegen/src/protobuf_file/template/derive_meta/mod.rs


+ 0 - 0
frontend/scripts/flowy-tool/src/proto/template/mod.rs → shared-lib/flowy-codegen/src/protobuf_file/template/mod.rs


+ 0 - 0
frontend/scripts/flowy-tool/src/proto/template/proto_file/enum.tera → shared-lib/flowy-codegen/src/protobuf_file/template/proto_file/enum.tera


+ 2 - 2
shared-lib/lib-infra/src/code_gen/protobuf_file/template/proto_file/enum_template.rs → shared-lib/flowy-codegen/src/protobuf_file/template/proto_file/enum_template.rs

@@ -1,5 +1,5 @@
-use crate::code_gen::protobuf_file::ast::FlowyEnum;
-use crate::code_gen::util::get_tera;
+use crate::protobuf_file::ast::FlowyEnum;
+use crate::util::get_tera;
 use tera::Context;
 
 pub struct EnumTemplate {

+ 0 - 0
frontend/scripts/flowy-tool/src/proto/template/proto_file/mod.rs → shared-lib/flowy-codegen/src/protobuf_file/template/proto_file/mod.rs


+ 0 - 0
frontend/scripts/flowy-tool/src/proto/template/proto_file/struct.tera → shared-lib/flowy-codegen/src/protobuf_file/template/proto_file/struct.tera


+ 2 - 2
shared-lib/lib-infra/src/code_gen/protobuf_file/template/proto_file/struct_template.rs → shared-lib/flowy-codegen/src/protobuf_file/template/proto_file/struct_template.rs

@@ -1,4 +1,4 @@
-use crate::code_gen::util::get_tera;
+use crate::util::get_tera;
 use flowy_ast::*;
 use phf::phf_map;
 use tera::Context;
@@ -36,7 +36,7 @@ impl StructTemplate {
     pub fn set_field(&mut self, field: &ASTField) {
         // {{ field_type }} {{ field_name }} = {{index}};
         let name = field.name().unwrap().to_string();
-        let index = field.attrs.pb_index().unwrap();
+        let index = field.pb_attrs.pb_index().unwrap();
 
         let ty: &str = &field.ty_as_str();
         let mut mapped_ty: &str = ty;

+ 1 - 1
shared-lib/lib-infra/src/code_gen/util.rs → shared-lib/flowy-codegen/src/util.rs

@@ -142,7 +142,7 @@ pub fn suffix_relative_to_path(path: &str, base: &str) -> String {
 }
 
 pub fn get_tera(directory: &str) -> Tera {
-    let mut root = format!("{}/src/code_gen/", env!("CARGO_MANIFEST_DIR"));
+    let mut root = format!("{}/src/", env!("CARGO_MANIFEST_DIR"));
     root.push_str(directory);
 
     let root_absolute_path = match std::fs::canonicalize(&root) {

+ 1 - 1
shared-lib/flowy-derive/Cargo.toml

@@ -20,7 +20,7 @@ proc-macro2 = "1.0"
 flowy-ast = { path = "../flowy-ast" }
 lazy_static = {version = "1.4.0"}
 dashmap = "5"
-lib-infra = { path = "../lib-infra", features = ["proto_gen"]}
+flowy-codegen= { path = "../flowy-codegen"}
 serde_json = "1.0"
 walkdir = "2.3.1"
 

+ 7 - 0
shared-lib/flowy-derive/src/lib.rs

@@ -9,6 +9,7 @@ use syn::{parse_macro_input, DeriveInput};
 extern crate quote;
 
 mod dart_event;
+mod node;
 mod proto_buf;
 
 // Inspired by https://serde.rs/attributes.html
@@ -36,6 +37,12 @@ pub fn derive_dart_event(input: TokenStream) -> TokenStream {
         .into()
 }
 
+#[proc_macro_derive(Node, attributes(node, nodes, node_type))]
+pub fn derive_node(input: TokenStream) -> TokenStream {
+    let input = parse_macro_input!(input as DeriveInput);
+    node::expand_derive(&input).unwrap_or_else(to_compile_errors).into()
+}
+
 fn to_compile_errors(errors: Vec<syn::Error>) -> proc_macro2::TokenStream {
     let compile_errors = errors.iter().map(syn::Error::to_compile_error);
     quote!(#(#compile_errors)*)

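A hedged sketch of the input the new `Node` derive expects. The struct, field, and helper names, and the `Arc<RwLock<NodeTree>>` / `NodeId` types, are illustrative assumptions; the real folder node types live elsewhere in this PR.

```rust
use std::sync::Arc;
use parking_lot::RwLock; // assumption: the tree exposes parking_lot-style read()/write()

#[derive(Clone, Node)]
#[node_type = "workspace"]
pub struct WorkspaceNode {
    pub tree: Arc<RwLock<NodeTree>>,
    pub node_id: Option<NodeId>,

    // Plain attributes read/written through user-provided helpers.
    #[node(get_value_with = "get_attributes_str_value")]
    #[node(set_value_with = "set_attributes_str_value")]
    pub name: String,

    // At most one child collection; `child_name` names the generated
    // add_app/get_app/get_mut_app/remove_app helpers.
    #[node(child_name = "app")]
    pub apps: Vec<AppNode>,
}
```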
+ 228 - 0
shared-lib/flowy-derive/src/node/mod.rs

@@ -0,0 +1,228 @@
+use flowy_ast::{ASTContainer, ASTField, ASTResult};
+use proc_macro2::TokenStream;
+
+pub fn expand_derive(input: &syn::DeriveInput) -> Result<TokenStream, Vec<syn::Error>> {
+    let ast_result = ASTResult::new();
+    let cont = match ASTContainer::from_ast(&ast_result, input) {
+        Some(cont) => cont,
+        None => return Err(ast_result.check().unwrap_err()),
+    };
+
+    let mut token_stream: TokenStream = TokenStream::default();
+    token_stream.extend(make_helper_funcs_token_stream(&cont));
+    token_stream.extend(make_to_node_data_token_stream(&cont));
+
+    if let Some(get_value_token_stream) = make_get_set_value_token_steam(&cont) {
+        token_stream.extend(get_value_token_stream);
+    }
+
+    token_stream.extend(make_alter_children_token_stream(&ast_result, &cont));
+    ast_result.check()?;
+    Ok(token_stream)
+}
+
+pub fn make_helper_funcs_token_stream(ast: &ASTContainer) -> TokenStream {
+    let mut token_streams = TokenStream::default();
+    let struct_ident = &ast.ident;
+    token_streams.extend(quote! {
+      impl #struct_ident {
+            pub fn get_path(&self) -> Option<Path> {
+                let node_id = &self.node_id?;
+               Some(self.tree.read().path_from_node_id(node_id.clone()))
+            }
+        }
+    });
+    token_streams
+}
+
+pub fn make_alter_children_token_stream(ast_result: &ASTResult, ast: &ASTContainer) -> TokenStream {
+    let mut token_streams = TokenStream::default();
+    let children_fields = ast
+        .data
+        .all_fields()
+        .filter(|field| field.node_attrs.has_child)
+        .collect::<Vec<&ASTField>>();
+
+    if !children_fields.is_empty() {
+        let struct_ident = &ast.ident;
+        if children_fields.len() > 1 {
+            ast_result.error_spanned_by(struct_ident, "Only one children property");
+            return token_streams;
+        }
+        let children_field = children_fields.first().unwrap();
+        let field_name = children_field.name().unwrap();
+        let child_name = children_field.node_attrs.child_name.as_ref().unwrap();
+        let get_func_name = format_ident!("get_{}", child_name.value());
+        let get_mut_func_name = format_ident!("get_mut_{}", child_name.value());
+        let add_func_name = format_ident!("add_{}", child_name.value());
+        let remove_func_name = format_ident!("remove_{}", child_name.value());
+        let ty = children_field.bracket_inner_ty.as_ref().unwrap().clone();
+
+        token_streams.extend(quote! {
+             impl #struct_ident {
+                pub fn #get_func_name<T: AsRef<str>>(&self, id: T) -> Option<&#ty> {
+                    let id = id.as_ref();
+                    self.#field_name.iter().find(|element| element.id == id)
+                }
+
+                pub fn #get_mut_func_name<T: AsRef<str>>(&mut self, id: T) -> Option<&mut #ty> {
+                    let id = id.as_ref();
+                    self.#field_name.iter_mut().find(|element| element.id == id)
+                }
+
+                pub fn #remove_func_name<T: AsRef<str>>(&mut self, id: T) {
+                    let id = id.as_ref();
+                     if let Some(index) = self.#field_name.iter().position(|element| element.id == id && element.node_id.is_some()) {
+                        let element = self.#field_name.remove(index);
+                        let element_path = element.get_path().unwrap();
+
+                        let mut write_guard = self.tree.write();
+                        let mut nodes = vec![];
+
+                        if let Some(node_data) = element.node_id.and_then(|node_id| write_guard.get_node_data(node_id.clone())) {
+                            nodes.push(node_data);
+                        }
+                        let _ = write_guard.apply_op(NodeOperation::Delete {
+                            path: element_path,
+                            nodes,
+                        });
+                    }
+                }
+
+                pub fn #add_func_name(&mut self, mut value: #ty) -> Result<(), String> {
+                    if self.node_id.is_none() {
+                        return Err("The node id is empty".to_owned());
+                    }
+
+                    let mut transaction = Transaction::new();
+                    let parent_path = self.get_path().unwrap();
+
+                    let path = parent_path.clone_with(self.#field_name.len());
+                    let node_data = value.to_node_data();
+                    transaction.push_operation(NodeOperation::Insert {
+                        path: path.clone(),
+                        nodes: vec![node_data],
+                     });
+
+                    let _ = self.tree.write().apply_transaction(transaction);
+                    let child_node_id = self.tree.read().node_id_at_path(path).unwrap();
+                    value.node_id = Some(child_node_id);
+                    self.#field_name.push(value);
+                    Ok(())
+                }
+             }
+        });
+    }
+
+    token_streams
+}
+
+pub fn make_to_node_data_token_stream(ast: &ASTContainer) -> TokenStream {
+    let struct_ident = &ast.ident;
+    let mut token_streams = TokenStream::default();
+    let node_type = ast
+        .node_type
+        .as_ref()
+        .expect("Define the type of the node by using #[node_type = \"xx\"] in the struct");
+    let set_key_values = ast
+        .data
+        .all_fields()
+        .filter(|field| !field.node_attrs.has_child)
+        .flat_map(|field| {
+            let mut field_name = field.name().expect("the name of the field should not be empty");
+            let original_field_name = field.name().expect("the name of the field should not be empty");
+            if let Some(rename) = &field.node_attrs.rename {
+                field_name = format_ident!("{}", rename.value());
+            }
+            let field_name_str = field_name.to_string();
+            quote! {
+               .insert_attribute(#field_name_str, self.#original_field_name.clone())
+            }
+        });
+
+    let children_fields = ast
+        .data
+        .all_fields()
+        .filter(|field| field.node_attrs.has_child)
+        .collect::<Vec<&ASTField>>();
+
+    let childrens_token_streams = match children_fields.is_empty() {
+        true => {
+            quote! {
+                let children = vec![];
+            }
+        }
+        false => {
+            let children_field = children_fields.first().unwrap();
+            let original_field_name = children_field
+                .name()
+                .expect("the name of the field should not be empty");
+            quote! {
+                let children = self.#original_field_name.iter().map(|value| value.to_node_data()).collect::<Vec<NodeData>>();
+            }
+        }
+    };
+
+    token_streams.extend(quote! {
+      impl ToNodeData for #struct_ident {
+            fn to_node_data(&self) -> NodeData {
+                #childrens_token_streams
+
+                let builder = NodeDataBuilder::new(#node_type)
+                #(#set_key_values)*
+                .extend_node_data(children);
+
+                builder.build()
+            }
+        }
+    });
+
+    token_streams
+}
+
+pub fn make_get_set_value_token_steam(ast: &ASTContainer) -> Option<TokenStream> {
+    let struct_ident = &ast.ident;
+    let mut token_streams = TokenStream::default();
+
+    let tree = format_ident!("tree");
+    for field in ast.data.all_fields() {
+        if field.node_attrs.has_child {
+            continue;
+        }
+
+        let mut field_name = field.name().expect("the name of the field should not be empty");
+        if let Some(rename) = &field.node_attrs.rename {
+            field_name = format_ident!("{}", rename.value());
+        }
+
+        let field_name_str = field_name.to_string();
+        let get_func_name = format_ident!("get_{}", field_name);
+        let set_func_name = format_ident!("set_{}", field_name);
+        let get_value_return_ty = field.ty;
+        let set_value_input_ty = field.ty;
+
+        if let Some(get_value_with_fn) = &field.node_attrs.get_node_value_with {
+            token_streams.extend(quote! {
+              impl #struct_ident {
+                    pub fn #get_func_name(&self) -> Option<#get_value_return_ty> {
+                        let node_id = self.node_id.as_ref()?;
+                        #get_value_with_fn(self.#tree.clone(), node_id, #field_name_str)
+                    }
+                }
+            });
+        }
+
+        if let Some(set_value_with_fn) = &field.node_attrs.set_node_value_with {
+            token_streams.extend(quote! {
+              impl #struct_ident {
+                    pub fn #set_func_name(&self, value: #set_value_input_ty) {
+                        if let Some(node_id) = self.node_id.as_ref() {
+                            let _ = #set_value_with_fn(self.#tree.clone(), node_id, #field_name_str, value);
+                        }
+                    }
+                }
+            });
+        }
+    }
+}

+ 26 - 21
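Continuing the hypothetical `WorkspaceNode` sketch above, the helpers emitted by `make_helper_funcs_token_stream`, `make_alter_children_token_stream`, and `make_get_set_value_token_steam` would be used roughly like this; all names remain illustrative.

```rust
fn add_and_rename(workspace: &mut WorkspaceNode, app: AppNode) -> Result<(), String> {
    // From make_alter_children_token_stream: insert the child into the node
    // tree and keep the in-memory Vec in sync.
    workspace.add_app(app)?;

    // From make_get_set_value_token_steam: read/write a single attribute
    // through the get_value_with / set_value_with functions.
    workspace.set_name("My workspace".to_string());
    let _name = workspace.get_name();

    // From make_helper_funcs_token_stream: resolve this node's path in the tree.
    let _path = workspace.get_path();
    Ok(())
}
```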
shared-lib/flowy-derive/src/proto_buf/deserialize.rs

@@ -2,22 +2,22 @@ use crate::proto_buf::util::*;
 use flowy_ast::*;
 use proc_macro2::{Span, TokenStream};
 
-pub fn make_de_token_steam(ctxt: &Ctxt, ast: &ASTContainer) -> Option<TokenStream> {
-    let pb_ty = ast.attrs.pb_struct_type()?;
+pub fn make_de_token_steam(ast_result: &ASTResult, ast: &ASTContainer) -> Option<TokenStream> {
+    let pb_ty = ast.pb_attrs.pb_struct_type()?;
     let struct_ident = &ast.ident;
 
     let build_take_fields = ast
         .data
         .all_fields()
-        .filter(|f| !f.attrs.skip_deserializing())
+        .filter(|f| !f.pb_attrs.skip_pb_deserializing())
         .flat_map(|field| {
-            if let Some(func) = field.attrs.deserialize_with() {
+            if let Some(func) = field.pb_attrs.deserialize_pb_with() {
                 let member = &field.member;
                 Some(quote! { o.#member=#struct_ident::#func(pb); })
-            } else if field.attrs.is_one_of() {
-                token_stream_for_one_of(ctxt, field)
+            } else if field.pb_attrs.is_one_of() {
+                token_stream_for_one_of(ast_result, field)
             } else {
-                token_stream_for_field(ctxt, &field.member, field.ty, false)
+                token_stream_for_field(ast_result, &field.member, field.ty, false)
             }
         });
 
@@ -58,10 +58,10 @@ pub fn make_de_token_steam(ctxt: &Ctxt, ast: &ASTContainer) -> Option<TokenStrea
     // None
 }
 
-fn token_stream_for_one_of(ctxt: &Ctxt, field: &ASTField) -> Option<TokenStream> {
+fn token_stream_for_one_of(ast_result: &ASTResult, field: &ASTField) -> Option<TokenStream> {
     let member = &field.member;
-    let ident = get_member_ident(ctxt, member)?;
-    let ty_info = match parse_ty(ctxt, field.ty) {
+    let ident = get_member_ident(ast_result, member)?;
+    let ty_info = match parse_ty(ast_result, field.ty) {
         Ok(ty_info) => ty_info,
         Err(e) => {
             eprintln!("token_stream_for_one_of failed: {:?} with error: {}", member, e);
@@ -118,9 +118,14 @@ fn token_stream_for_one_of(ctxt: &Ctxt, field: &ASTField) -> Option<TokenStream>
     }
 }
 
-fn token_stream_for_field(ctxt: &Ctxt, member: &syn::Member, ty: &syn::Type, is_option: bool) -> Option<TokenStream> {
-    let ident = get_member_ident(ctxt, member)?;
-    let ty_info = match parse_ty(ctxt, ty) {
+fn token_stream_for_field(
+    ast_result: &ASTResult,
+    member: &syn::Member,
+    ty: &syn::Type,
+    is_option: bool,
+) -> Option<TokenStream> {
+    let ident = get_member_ident(ast_result, member)?;
+    let ty_info = match parse_ty(ast_result, ty) {
         Ok(ty_info) => ty_info,
         Err(e) => {
             eprintln!("token_stream_for_field: {:?} with error: {}", member, e);
@@ -129,12 +134,12 @@ fn token_stream_for_field(ctxt: &Ctxt, member: &syn::Member, ty: &syn::Type, is_
     }?;
     match ident_category(ty_info.ident) {
         TypeCategory::Array => {
-            assert_bracket_ty_is_some(ctxt, &ty_info);
-            token_stream_for_vec(ctxt, member, &ty_info.bracket_ty_info.unwrap())
+            assert_bracket_ty_is_some(ast_result, &ty_info);
+            token_stream_for_vec(ast_result, member, &ty_info.bracket_ty_info.unwrap())
         }
         TypeCategory::Map => {
-            assert_bracket_ty_is_some(ctxt, &ty_info);
-            token_stream_for_map(ctxt, member, &ty_info.bracket_ty_info.unwrap())
+            assert_bracket_ty_is_some(ast_result, &ty_info);
+            token_stream_for_map(ast_result, member, &ty_info.bracket_ty_info.unwrap())
         }
         TypeCategory::Protobuf => {
             // if the type wrapped by SingularPtrField, should call take first
@@ -174,7 +179,7 @@ fn token_stream_for_field(ctxt: &Ctxt, member: &syn::Member, ty: &syn::Type, is_
                 })
             }
         }
-        TypeCategory::Opt => token_stream_for_field(ctxt, member, ty_info.bracket_ty_info.unwrap().ty, true),
+        TypeCategory::Opt => token_stream_for_field(ast_result, member, ty_info.bracket_ty_info.unwrap().ty, true),
         TypeCategory::Primitive | TypeCategory::Bytes => {
             // eprintln!("😄 #{:?}", &field.name().unwrap());
             if is_option {
@@ -186,7 +191,7 @@ fn token_stream_for_field(ctxt: &Ctxt, member: &syn::Member, ty: &syn::Type, is_
     }
 }
 
-fn token_stream_for_vec(ctxt: &Ctxt, member: &syn::Member, bracketed_type: &TyInfo) -> Option<TokenStream> {
+fn token_stream_for_vec(ctxt: &ASTResult, member: &syn::Member, bracketed_type: &TyInfo) -> Option<TokenStream> {
     let ident = get_member_ident(ctxt, member)?;
 
     match ident_category(bracketed_type.ident) {
@@ -218,8 +223,8 @@ fn token_stream_for_vec(ctxt: &Ctxt, member: &syn::Member, bracketed_type: &TyIn
     }
 }
 
-fn token_stream_for_map(ctxt: &Ctxt, member: &syn::Member, ty_info: &TyInfo) -> Option<TokenStream> {
-    let ident = get_member_ident(ctxt, member)?;
+fn token_stream_for_map(ast_result: &ASTResult, member: &syn::Member, ty_info: &TyInfo) -> Option<TokenStream> {
+    let ident = get_member_ident(ast_result, member)?;
     let take_ident = format_ident!("take_{}", ident.to_string());
     let ty = ty_info.ty;
 

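Based on the call sites generated above (`pb.#member = o.#func();` when serializing and `o.#member = #struct_ident::#func(pb);` when deserializing), the helpers named by the renamed `serialize_pb_with` / `deserialize_pb_with` attributes are expected to look roughly like this; the type and field names below are illustrative only.

```rust
impl DocumentData {
    // Referenced as #[pb(serialize_pb_with = "serialize_ops")].
    fn serialize_ops(&self) -> String {
        serde_json::to_string(&self.ops).unwrap_or_default()
    }

    // Referenced as #[pb(deserialize_pb_with = "deserialize_ops")].
    fn deserialize_ops(pb: crate::protobuf::DocumentData) -> Vec<Operation> {
        serde_json::from_str(&pb.ops).unwrap_or_default()
    }
}
```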
+ 2 - 2
shared-lib/flowy-derive/src/proto_buf/enum_serde.rs

@@ -2,9 +2,9 @@ use flowy_ast::*;
 use proc_macro2::TokenStream;
 
 #[allow(dead_code)]
-pub fn make_enum_token_stream(_ctxt: &Ctxt, cont: &ASTContainer) -> Option<TokenStream> {
+pub fn make_enum_token_stream(_ast_result: &ASTResult, cont: &ASTContainer) -> Option<TokenStream> {
     let enum_ident = &cont.ident;
-    let pb_enum = cont.attrs.pb_enum_type()?;
+    let pb_enum = cont.pb_attrs.pb_enum_type()?;
     let build_to_pb_enum = cont.data.all_idents().map(|i| {
         let token_stream: TokenStream = quote! {
             #enum_ident::#i => crate::protobuf::#pb_enum::#i,

+ 11 - 11
shared-lib/flowy-derive/src/proto_buf/mod.rs

@@ -11,40 +11,40 @@ use proc_macro2::TokenStream;
 use std::default::Default;
 
 pub fn expand_derive(input: &syn::DeriveInput) -> Result<TokenStream, Vec<syn::Error>> {
-    let ctxt = Ctxt::new();
-    let cont = match ASTContainer::from_ast(&ctxt, input) {
+    let ast_result = ASTResult::new();
+    let cont = match ASTContainer::from_ast(&ast_result, input) {
         Some(cont) => cont,
-        None => return Err(ctxt.check().unwrap_err()),
+        None => return Err(ast_result.check().unwrap_err()),
     };
 
     let mut token_stream: TokenStream = TokenStream::default();
 
-    if let Some(de_token_stream) = make_de_token_steam(&ctxt, &cont) {
+    if let Some(de_token_stream) = make_de_token_steam(&ast_result, &cont) {
         token_stream.extend(de_token_stream);
     }
 
-    if let Some(se_token_stream) = make_se_token_stream(&ctxt, &cont) {
+    if let Some(se_token_stream) = make_se_token_stream(&ast_result, &cont) {
         token_stream.extend(se_token_stream);
     }
 
-    ctxt.check()?;
+    ast_result.check()?;
     Ok(token_stream)
 }
 
 pub fn expand_enum_derive(input: &syn::DeriveInput) -> Result<TokenStream, Vec<syn::Error>> {
-    let ctxt = Ctxt::new();
-    let cont = match ASTContainer::from_ast(&ctxt, input) {
+    let ast_result = ASTResult::new();
+    let cont = match ASTContainer::from_ast(&ast_result, input) {
         Some(cont) => cont,
-        None => return Err(ctxt.check().unwrap_err()),
+        None => return Err(ast_result.check().unwrap_err()),
     };
 
     let mut token_stream: TokenStream = TokenStream::default();
 
-    if let Some(enum_token_stream) = make_enum_token_stream(&ctxt, &cont) {
+    if let Some(enum_token_stream) = make_enum_token_stream(&ast_result, &cont) {
         token_stream.extend(enum_token_stream);
     }
 
-    ctxt.check()?;
+    ast_result.check()?;
     Ok(token_stream)
 }
 // #[macro_use]

+ 26 - 21
shared-lib/flowy-derive/src/proto_buf/serialize.rs

@@ -3,15 +3,15 @@ use crate::proto_buf::util::{get_member_ident, ident_category, TypeCategory};
 use flowy_ast::*;
 use proc_macro2::TokenStream;
 
-pub fn make_se_token_stream(ctxt: &Ctxt, ast: &ASTContainer) -> Option<TokenStream> {
-    let pb_ty = ast.attrs.pb_struct_type()?;
+pub fn make_se_token_stream(ast_result: &ASTResult, ast: &ASTContainer) -> Option<TokenStream> {
+    let pb_ty = ast.pb_attrs.pb_struct_type()?;
     let struct_ident = &ast.ident;
 
     let build_set_pb_fields = ast
         .data
         .all_fields()
-        .filter(|f| !f.attrs.skip_serializing())
-        .flat_map(|field| se_token_stream_for_field(ctxt, field, false));
+        .filter(|f| !f.pb_attrs.skip_pb_serializing())
+        .flat_map(|field| se_token_stream_for_field(ast_result, field, false));
 
     let se_token_stream: TokenStream = quote! {
 
@@ -37,21 +37,21 @@ pub fn make_se_token_stream(ctxt: &Ctxt, ast: &ASTContainer) -> Option<TokenStre
     Some(se_token_stream)
 }
 
-fn se_token_stream_for_field(ctxt: &Ctxt, field: &ASTField, _take: bool) -> Option<TokenStream> {
-    if let Some(func) = &field.attrs.serialize_with() {
+fn se_token_stream_for_field(ast_result: &ASTResult, field: &ASTField, _take: bool) -> Option<TokenStream> {
+    if let Some(func) = &field.pb_attrs.serialize_pb_with() {
         let member = &field.member;
         let member = &field.member;
         Some(quote! { pb.#member=o.#func(); })
         Some(quote! { pb.#member=o.#func(); })
-    } else if field.attrs.is_one_of() {
-        token_stream_for_one_of(ctxt, field)
+    } else if field.pb_attrs.is_one_of() {
+        token_stream_for_one_of(ast_result, field)
     } else {
     } else {
-        gen_token_stream(ctxt, &field.member, field.ty, false)
+        gen_token_stream(ast_result, &field.member, field.ty, false)
     }
     }
 }
 }
 
 
-fn token_stream_for_one_of(ctxt: &Ctxt, field: &ASTField) -> Option<TokenStream> {
+fn token_stream_for_one_of(ast_result: &ASTResult, field: &ASTField) -> Option<TokenStream> {
     let member = &field.member;
     let member = &field.member;
-    let ident = get_member_ident(ctxt, member)?;
-    let ty_info = match parse_ty(ctxt, field.ty) {
+    let ident = get_member_ident(ast_result, member)?;
+    let ty_info = match parse_ty(ast_result, field.ty) {
         Ok(ty_info) => ty_info,
         Ok(ty_info) => ty_info,
         Err(e) => {
         Err(e) => {
             eprintln!("token_stream_for_one_of failed: {:?} with error: {}", member, e);
             eprintln!("token_stream_for_one_of failed: {:?} with error: {}", member, e);
@@ -85,8 +85,13 @@ fn token_stream_for_one_of(ctxt: &Ctxt, field: &ASTField) -> Option<TokenStream>
     }
     }
 }
 }
 
 
-fn gen_token_stream(ctxt: &Ctxt, member: &syn::Member, ty: &syn::Type, is_option: bool) -> Option<TokenStream> {
-    let ty_info = match parse_ty(ctxt, ty) {
+fn gen_token_stream(
+    ast_result: &ASTResult,
+    member: &syn::Member,
+    ty: &syn::Type,
+    is_option: bool,
+) -> Option<TokenStream> {
+    let ty_info = match parse_ty(ast_result, ty) {
         Ok(ty_info) => ty_info,
         Ok(ty_info) => ty_info,
         Err(e) => {
         Err(e) => {
             eprintln!("gen_token_stream failed: {:?} with error: {}", member, e);
             eprintln!("gen_token_stream failed: {:?} with error: {}", member, e);
@@ -94,8 +99,8 @@ fn gen_token_stream(ctxt: &Ctxt, member: &syn::Member, ty: &syn::Type, is_option
         }
         }
     }?;
     }?;
     match ident_category(ty_info.ident) {
     match ident_category(ty_info.ident) {
-        TypeCategory::Array => token_stream_for_vec(ctxt, member, ty_info.bracket_ty_info.unwrap().ty),
-        TypeCategory::Map => token_stream_for_map(ctxt, member, ty_info.bracket_ty_info.unwrap().ty),
+        TypeCategory::Array => token_stream_for_vec(ast_result, member, ty_info.bracket_ty_info.unwrap().ty),
+        TypeCategory::Map => token_stream_for_map(ast_result, member, ty_info.bracket_ty_info.unwrap().ty),
         TypeCategory::Str => {
         TypeCategory::Str => {
             if is_option {
             if is_option {
                 Some(quote! {
                 Some(quote! {
@@ -109,7 +114,7 @@ fn gen_token_stream(ctxt: &Ctxt, member: &syn::Member, ty: &syn::Type, is_option
             }
             }
         }
         }
         TypeCategory::Protobuf => Some(quote! { pb.#member =  ::protobuf::SingularPtrField::some(o.#member.into()); }),
         TypeCategory::Protobuf => Some(quote! { pb.#member =  ::protobuf::SingularPtrField::some(o.#member.into()); }),
-        TypeCategory::Opt => gen_token_stream(ctxt, member, ty_info.bracket_ty_info.unwrap().ty, true),
+        TypeCategory::Opt => gen_token_stream(ast_result, member, ty_info.bracket_ty_info.unwrap().ty, true),
         TypeCategory::Enum => {
         TypeCategory::Enum => {
             // let pb_enum_ident = format_ident!("{}", ty_info.ident.to_string());
             // let pb_enum_ident = format_ident!("{}", ty_info.ident.to_string());
             // Some(quote! {
             // Some(quote! {
@@ -124,8 +129,8 @@ fn gen_token_stream(ctxt: &Ctxt, member: &syn::Member, ty: &syn::Type, is_option
 }
 }
 
 
 // e.g. pub cells: Vec<CellData>, the member will be cells, ty would be Vec
 // e.g. pub cells: Vec<CellData>, the member will be cells, ty would be Vec
-fn token_stream_for_vec(ctxt: &Ctxt, member: &syn::Member, ty: &syn::Type) -> Option<TokenStream> {
-    let ty_info = match parse_ty(ctxt, ty) {
+fn token_stream_for_vec(ast_result: &ASTResult, member: &syn::Member, ty: &syn::Type) -> Option<TokenStream> {
+    let ty_info = match parse_ty(ast_result, ty) {
         Ok(ty_info) => ty_info,
         Ok(ty_info) => ty_info,
         Err(e) => {
         Err(e) => {
             eprintln!("token_stream_for_vec failed: {:?} with error: {}", member, e);
             eprintln!("token_stream_for_vec failed: {:?} with error: {}", member, e);
@@ -150,9 +155,9 @@ fn token_stream_for_vec(ctxt: &Ctxt, member: &syn::Member, ty: &syn::Type) -> Op
 }
 }
 
 
 // e.g. pub cells: HashMap<xx, xx>
 // e.g. pub cells: HashMap<xx, xx>
-fn token_stream_for_map(ctxt: &Ctxt, member: &syn::Member, ty: &syn::Type) -> Option<TokenStream> {
+fn token_stream_for_map(ast_result: &ASTResult, member: &syn::Member, ty: &syn::Type) -> Option<TokenStream> {
     // The key of the hashmap must be string
     // The key of the hashmap must be string
-    let ty_info = match parse_ty(ctxt, ty) {
+    let ty_info = match parse_ty(ast_result, ty) {
         Ok(ty_info) => ty_info,
         Ok(ty_info) => ty_info,
         Err(e) => {
         Err(e) => {
             eprintln!("token_stream_for_map failed: {:?} with error: {}", member, e);
             eprintln!("token_stream_for_map failed: {:?} with error: {}", member, e);

+ 7 - 7
shared-lib/flowy-derive/src/proto_buf/util.rs

@@ -1,7 +1,7 @@
 use dashmap::{DashMap, DashSet};
-use flowy_ast::{Ctxt, TyInfo};
+use flowy_ast::{ASTResult, TyInfo};
+use flowy_codegen::ProtoCache;
 use lazy_static::lazy_static;
-use lib_infra::code_gen::ProtoCache;
 use std::fs::File;
 use std::io::Read;
 use std::sync::atomic::{AtomicBool, Ordering};
@@ -12,18 +12,18 @@ pub fn ident_category(ident: &syn::Ident) -> TypeCategory {
     category_from_str(ident_str)
 }
 
-pub(crate) fn get_member_ident<'a>(ctxt: &Ctxt, member: &'a syn::Member) -> Option<&'a syn::Ident> {
+pub(crate) fn get_member_ident<'a>(ast_result: &ASTResult, member: &'a syn::Member) -> Option<&'a syn::Ident> {
     if let syn::Member::Named(ref ident) = member {
         Some(ident)
     } else {
-        ctxt.error_spanned_by(member, "Unsupported member, shouldn't be self.0".to_string());
+        ast_result.error_spanned_by(member, "Unsupported member, shouldn't be self.0".to_string());
         None
     }
 }
 
-pub fn assert_bracket_ty_is_some(ctxt: &Ctxt, ty_info: &TyInfo) {
+pub fn assert_bracket_ty_is_some(ast_result: &ASTResult, ty_info: &TyInfo) {
     if ty_info.bracket_ty_info.is_none() {
-        ctxt.error_spanned_by(ty_info.ty, "Invalid bracketed type when gen de token steam".to_string());
+        ast_result.error_spanned_by(ty_info.ty, "Invalid bracketed type when gen de token steam".to_string());
     }
 }
 
@@ -50,7 +50,7 @@ pub fn category_from_str(type_str: String) -> TypeCategory {
         IS_LOAD.store(true, Ordering::SeqCst);
         // Dependents on another crate file is not good, just leave it here.
         // Maybe find another way to read the .cache in the future.
-        let cache_dir = format!("{}/../lib-infra/.cache", env!("CARGO_MANIFEST_DIR"));
+        let cache_dir = format!("{}/../flowy-codegen/.cache", env!("CARGO_MANIFEST_DIR"));
         for path in WalkDir::new(cache_dir)
             .into_iter()
             .filter_map(|e| e.ok())

+ 3 - 2
shared-lib/flowy-error-code/Cargo.toml

@@ -11,7 +11,8 @@ protobuf = {version = "2.18.0"}
 derive_more = {version = "0.99", features = ["display"]}
 
 [build-dependencies]
-lib-infra = { path = "../lib-infra", features = ["proto_gen"] }
+flowy-codegen= { path = "../flowy-codegen", features = ["proto_gen"]}
+
 
 [features]
-dart = ["lib-infra/dart"]
+dart = ["flowy-codegen/dart"]

+ 1 - 3
shared-lib/flowy-error-code/build.rs

@@ -1,5 +1,3 @@
-use lib_infra::code_gen;
-
 fn main() {
-    code_gen::protobuf_file::gen(env!("CARGO_PKG_NAME"));
+    flowy_codegen::protobuf_file::gen(env!("CARGO_PKG_NAME"));
 }

+ 2 - 1
shared-lib/flowy-http-model/Cargo.toml

@@ -13,5 +13,6 @@ protobuf = {version = "2.18.0"}
 md5 = "0.7.0"
 
 [build-dependencies]
-lib-infra = { path = "../lib-infra", features = ["proto_gen"] }
+flowy-codegen= { path = "../flowy-codegen", features = ["proto_gen"]}
+
 

+ 1 - 3
shared-lib/flowy-http-model/build.rs

@@ -1,5 +1,3 @@
-use lib_infra::code_gen;
-
 fn main() {
-    code_gen::protobuf_file::gen(env!("CARGO_PKG_NAME"));
+    flowy_codegen::protobuf_file::gen(env!("CARGO_PKG_NAME"));
 }

+ 142 - 0
shared-lib/flowy-sync/src/client_folder/folder_node.rs

@@ -0,0 +1,142 @@
+use crate::client_folder::trash_node::TrashNode;
+use crate::client_folder::workspace_node::WorkspaceNode;
+use crate::errors::{CollaborateError, CollaborateResult};
+use flowy_derive::Node;
+use lib_ot::core::NodeTree;
+use lib_ot::core::*;
+use parking_lot::RwLock;
+use std::sync::Arc;
+
+pub type AtomicNodeTree = RwLock<NodeTree>;
+
+pub struct FolderNodePad {
+    pub tree: Arc<AtomicNodeTree>,
+    pub node_id: NodeId,
+    pub workspaces: WorkspaceList,
+    pub trash: TrashList,
+}
+
+#[derive(Clone, Node)]
+#[node_type = "workspaces"]
+pub struct WorkspaceList {
+    pub tree: Arc<AtomicNodeTree>,
+    pub node_id: Option<NodeId>,
+
+    #[node(child_name = "workspace")]
+    inner: Vec<WorkspaceNode>,
+}
+
+impl std::ops::Deref for WorkspaceList {
+    type Target = Vec<WorkspaceNode>;
+
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+
+impl std::ops::DerefMut for WorkspaceList {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+
+#[derive(Clone, Node)]
+#[node_type = "trash"]
+pub struct TrashList {
+    pub tree: Arc<AtomicNodeTree>,
+    pub node_id: Option<NodeId>,
+
+    #[node(child_name = "trash")]
+    inner: Vec<TrashNode>,
+}
+
+impl FolderNodePad {
+    pub fn new() -> Self {
+        Self::default()
+    }
+
+    pub fn get_workspace(&self, workspace_id: &str) -> Option<&WorkspaceNode> {
+        self.workspaces.iter().find(|workspace| workspace.id == workspace_id)
+    }
+
+    pub fn get_mut_workspace(&mut self, workspace_id: &str) -> Option<&mut WorkspaceNode> {
+        self.workspaces
+            .iter_mut()
+            .find(|workspace| workspace.id == workspace_id)
+    }
+
+    pub fn add_workspace(&mut self, mut workspace: WorkspaceNode) {
+        let path = workspaces_path().clone_with(self.workspaces.len());
+        let op = NodeOperation::Insert {
+            path: path.clone(),
+            nodes: vec![workspace.to_node_data()],
+        };
+        self.tree.write().apply_op(op).unwrap();
+
+        let node_id = self.tree.read().node_id_at_path(path).unwrap();
+        workspace.node_id = Some(node_id);
+        self.workspaces.push(workspace);
+    }
+
+    pub fn to_json(&self, pretty: bool) -> CollaborateResult<String> {
+        self.tree
+            .read()
+            .to_json(pretty)
+            .map_err(|e| CollaborateError::serde().context(e))
+    }
+}
+
+impl std::default::Default for FolderNodePad {
+    fn default() -> Self {
+        let tree = Arc::new(RwLock::new(NodeTree::default()));
+
+        // Workspace
+        let mut workspaces = WorkspaceList {
+            tree: tree.clone(),
+            node_id: None,
+            inner: vec![],
+        };
+        let workspace_node = workspaces.to_node_data();
+
+        // Trash
+        let mut trash = TrashList {
+            tree: tree.clone(),
+            node_id: None,
+            inner: vec![],
+        };
+        let trash_node = trash.to_node_data();
+
+        let folder_node = NodeDataBuilder::new("folder")
+            .add_node_data(workspace_node)
+            .add_node_data(trash_node)
+            .build();
+
+        let operation = NodeOperation::Insert {
+            path: folder_path(),
+            nodes: vec![folder_node],
+        };
+        let _ = tree.write().apply_op(operation).unwrap();
+        let node_id = tree.read().node_id_at_path(folder_path()).unwrap();
+        workspaces.node_id = Some(tree.read().node_id_at_path(workspaces_path()).unwrap());
+        trash.node_id = Some(tree.read().node_id_at_path(trash_path()).unwrap());
+
+        Self {
+            tree,
+            node_id,
+            workspaces,
+            trash,
+        }
+    }
+}
+
+fn folder_path() -> Path {
+    vec![0].into()
+}
+
+fn workspaces_path() -> Path {
+    folder_path().clone_with(0)
+}
+
+fn trash_path() -> Path {
+    folder_path().clone_with(1)
+}
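`FolderNodePad` keeps typed wrappers (`WorkspaceList`, `TrashList`) over one shared `NodeTree`: the default tree is a root `folder` node with `workspaces` and `trash` children, and every mutation is applied to that tree as a `NodeOperation`. A minimal usage sketch, consistent with the tests added later in this PR and using only methods that appear in this diff:

```rust
use flowy_sync::client_folder::{FolderNodePad, WorkspaceNode};

fn main() {
    // Builds the default tree: {"type":"folder","children":[{"type":"workspaces"},{"type":"trash"}]}
    let mut folder_pad = FolderNodePad::new();

    // Insert a workspace node under "workspaces"; the insert operation is applied to the
    // shared tree and the workspace's node_id is filled in afterwards.
    let workspace = WorkspaceNode::new(folder_pad.tree.clone(), "1".to_string(), "my workspace".to_string());
    folder_pad.add_workspace(workspace);

    assert!(folder_pad.get_workspace("1").is_some());
    println!("{}", folder_pad.to_json(true).unwrap());
}
```

Because the tree is an `Arc<RwLock<NodeTree>>`, every node wrapper holds a clone of the same handle, so edits made through any wrapper are visible in the serialized JSON of the whole folder.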

+ 7 - 0
shared-lib/flowy-sync/src/client_folder/mod.rs

@@ -1,4 +1,11 @@
 mod builder;
+mod folder_node;
 mod folder_pad;
+mod trash_node;
+mod util;
+mod workspace_node;
 
+pub use folder_node::*;
+pub use folder_node::*;
 pub use folder_pad::*;
+pub use workspace_node::*;

+ 20 - 0
shared-lib/flowy-sync/src/client_folder/trash_node.rs

@@ -0,0 +1,20 @@
+use crate::client_folder::util::*;
+use crate::client_folder::AtomicNodeTree;
+use flowy_derive::Node;
+use lib_ot::core::*;
+use std::sync::Arc;
+
+#[derive(Clone, Node)]
+#[node_type = "trash"]
+pub struct TrashNode {
+    pub tree: Arc<AtomicNodeTree>,
+    pub node_id: Option<NodeId>,
+
+    #[node(get_value_with = "get_attributes_str_value")]
+    #[node(set_value_with = "set_attributes_str_value")]
+    pub id: String,
+
+    #[node(get_value_with = "get_attributes_str_value")]
+    #[node(set_value_with = "set_attributes_str_value")]
+    pub name: String,
+}

+ 54 - 0
shared-lib/flowy-sync/src/client_folder/util.rs

@@ -0,0 +1,54 @@
+use crate::client_folder::AtomicNodeTree;
+use crate::errors::CollaborateResult;
+use lib_ot::core::{AttributeHashMap, AttributeValue, Changeset, NodeId, NodeOperation};
+use std::sync::Arc;
+
+pub fn get_attributes_str_value(tree: Arc<AtomicNodeTree>, node_id: &NodeId, key: &str) -> Option<String> {
+    tree.read()
+        .get_node(*node_id)
+        .and_then(|node| node.attributes.get(key).cloned())
+        .and_then(|value| value.str_value())
+}
+
+pub fn set_attributes_str_value(
+    tree: Arc<AtomicNodeTree>,
+    node_id: &NodeId,
+    key: &str,
+    value: String,
+) -> CollaborateResult<()> {
+    let old_attributes = match get_attributes(tree.clone(), node_id) {
+        None => AttributeHashMap::new(),
+        Some(attributes) => attributes,
+    };
+    let mut new_attributes = old_attributes.clone();
+    new_attributes.insert(key, value);
+    let path = tree.read().path_from_node_id(*node_id);
+    let update_operation = NodeOperation::Update {
+        path,
+        changeset: Changeset::Attributes {
+            new: new_attributes,
+            old: old_attributes,
+        },
+    };
+    let _ = tree.write().apply_op(update_operation)?;
+    Ok(())
+}
+
+#[allow(dead_code)]
+pub fn get_attributes_int_value(tree: Arc<AtomicNodeTree>, node_id: &NodeId, key: &str) -> Option<i64> {
+    tree.read()
+        .get_node(*node_id)
+        .and_then(|node| node.attributes.get(key).cloned())
+        .and_then(|value| value.int_value())
+}
+
+pub fn get_attributes(tree: Arc<AtomicNodeTree>, node_id: &NodeId) -> Option<AttributeHashMap> {
+    tree.read().get_node(*node_id).map(|node| node.attributes.clone())
+}
+
+#[allow(dead_code)]
+pub fn get_attributes_value(tree: Arc<AtomicNodeTree>, node_id: &NodeId, key: &str) -> Option<AttributeValue> {
+    tree.read()
+        .get_node(*node_id)
+        .and_then(|node| node.attributes.get(key).cloned())
+}
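These helpers are the targets of the `#[node(get_value_with = ...)]` / `#[node(set_value_with = ...)]` attributes on `TrashNode`, `WorkspaceNode`, and `AppNode`: the derived accessors read and write node attributes through the shared tree. A small sketch of reading a workspace's `name` attribute with the helper above (assuming the call sits inside `client_folder`, since the `util` module is private):

```rust
use crate::client_folder::util::get_attributes_str_value;
use crate::client_folder::FolderNodePad;

// Read the "name" attribute of workspace "1" straight from the node tree.
fn read_workspace_name(folder_pad: &FolderNodePad) -> Option<String> {
    let workspace = folder_pad.get_workspace("1")?;
    let node_id = workspace.node_id.as_ref()?;
    get_attributes_str_value(folder_pad.tree.clone(), node_id, "name")
}
```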

+ 62 - 0
shared-lib/flowy-sync/src/client_folder/workspace_node.rs

@@ -0,0 +1,62 @@
+use crate::client_folder::util::*;
+use crate::client_folder::AtomicNodeTree;
+
+use flowy_derive::Node;
+use lib_ot::core::*;
+use std::sync::Arc;
+
+#[derive(Clone, Node)]
+#[node_type = "workspace"]
+pub struct WorkspaceNode {
+    pub tree: Arc<AtomicNodeTree>,
+    pub node_id: Option<NodeId>,
+
+    #[node(get_value_with = "get_attributes_str_value")]
+    #[node(set_value_with = "set_attributes_str_value")]
+    pub id: String,
+
+    #[node(get_value_with = "get_attributes_str_value")]
+    #[node(set_value_with = "set_attributes_str_value")]
+    pub name: String,
+
+    #[node(child_name = "app")]
+    pub apps: Vec<AppNode>,
+}
+
+impl WorkspaceNode {
+    pub fn new(tree: Arc<AtomicNodeTree>, id: String, name: String) -> Self {
+        Self {
+            tree,
+            node_id: None,
+            id,
+            name,
+            apps: vec![],
+        }
+    }
+}
+
+#[derive(Clone, Node)]
+#[node_type = "app"]
+pub struct AppNode {
+    pub tree: Arc<AtomicNodeTree>,
+    pub node_id: Option<NodeId>,
+
+    #[node(get_value_with = "get_attributes_str_value")]
+    #[node(set_value_with = "set_attributes_str_value")]
+    pub id: String,
+
+    #[node(get_value_with = "get_attributes_str_value")]
+    #[node(set_value_with = "set_attributes_str_value")]
+    pub name: String,
+}
+
+impl AppNode {
+    pub fn new(tree: Arc<AtomicNodeTree>, id: String, name: String) -> Self {
+        Self {
+            tree,
+            node_id: None,
+            id,
+            name,
+        }
+    }
+}
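The `#[node(child_name = "app")]` attribute is what gives `WorkspaceNode` its child accessors; judging from the test scripts added later in this PR, the `Node` derive generates methods such as `add_app`, `get_app`, `get_mut_app`, and `remove_app` (their exact signatures are assumptions here). A rough sketch of how the workspace/app hierarchy is manipulated:

```rust
use flowy_sync::client_folder::{AppNode, FolderNodePad};

fn add_and_rename_app(folder_pad: &mut FolderNodePad) {
    let tree = folder_pad.tree.clone();
    let workspace = folder_pad.get_mut_workspace("1").unwrap();

    // Accessors presumably generated from #[node(child_name = "app")].
    let app = AppNode::new(tree, "app-1".to_string(), "my first app".to_string());
    workspace.add_app(app).unwrap();
    workspace.get_mut_app("app-1").unwrap().set_name("renamed".to_string());
    workspace.remove_app("app-1");
}
```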

+ 4 - 2
shared-lib/flowy-sync/src/errors.rs

@@ -34,6 +34,7 @@ impl CollaborateError {
         self
     }
 
+    static_error!(serde, ErrorCode::SerdeError);
     static_error!(internal, ErrorCode::InternalError);
     static_error!(undo, ErrorCode::UndoFail);
     static_error!(redo, ErrorCode::RedoFail);
@@ -51,14 +52,15 @@ impl fmt::Display for CollaborateError {
 
 #[derive(Debug, Clone, Display, PartialEq, Eq)]
 pub enum ErrorCode {
-    DocIdInvalid = 0,
-    DocNotfound = 1,
+    DocumentIdInvalid = 0,
+    DocumentNotfound = 1,
     UndoFail = 200,
     RedoFail = 201,
     OutOfBound = 202,
     RevisionConflict = 203,
     RecordNotFound = 300,
     CannotDeleteThePrimaryField = 301,
+    SerdeError = 999,
     InternalError = 1000,
 }
 

+ 58 - 0
shared-lib/flowy-sync/tests/client_folder/folder_test.rs

@@ -0,0 +1,58 @@
+use flowy_sync::client_folder::{FolderNodePad, WorkspaceNode};
+
+#[test]
+fn client_folder_create_default_folder_test() {
+    let folder_pad = FolderNodePad::new();
+    let json = folder_pad.to_json(false).unwrap();
+    assert_eq!(
+        json,
+        r#"{"type":"folder","children":[{"type":"workspaces"},{"type":"trash"}]}"#
+    );
+}
+
+#[test]
+fn client_folder_create_default_folder_with_workspace_test() {
+    let mut folder_pad = FolderNodePad::new();
+    let workspace = WorkspaceNode::new(folder_pad.tree.clone(), "1".to_string(), "workspace name".to_string());
+    folder_pad.workspaces.add_workspace(workspace).unwrap();
+    let json = folder_pad.to_json(false).unwrap();
+    assert_eq!(
+        json,
+        r#"{"type":"folder","children":[{"type":"workspaces","children":[{"type":"workspace","attributes":{"id":"1","name":"workspace name"}}]},{"type":"trash"}]}"#
+    );
+
+    assert_eq!(
+        folder_pad.get_workspace("1").unwrap().get_name().unwrap(),
+        "workspace name"
+    );
+}
+
+#[test]
+fn client_folder_delete_workspace_test() {
+    let mut folder_pad = FolderNodePad::new();
+    let workspace = WorkspaceNode::new(folder_pad.tree.clone(), "1".to_string(), "workspace name".to_string());
+    folder_pad.workspaces.add_workspace(workspace).unwrap();
+    folder_pad.workspaces.remove_workspace("1");
+    let json = folder_pad.to_json(false).unwrap();
+    assert_eq!(
+        json,
+        r#"{"type":"folder","children":[{"type":"workspaces"},{"type":"trash"}]}"#
+    );
+}
+
+#[test]
+fn client_folder_update_workspace_name_test() {
+    let mut folder_pad = FolderNodePad::new();
+    let workspace = WorkspaceNode::new(folder_pad.tree.clone(), "1".to_string(), "workspace name".to_string());
+    folder_pad.workspaces.add_workspace(workspace).unwrap();
+    folder_pad
+        .workspaces
+        .get_mut_workspace("1")
+        .unwrap()
+        .set_name("my first workspace".to_string());
+
+    assert_eq!(
+        folder_pad.workspaces.get_workspace("1").unwrap().get_name().unwrap(),
+        "my first workspace"
+    );
+}

+ 3 - 0
shared-lib/flowy-sync/tests/client_folder/mod.rs

@@ -0,0 +1,3 @@
+mod folder_test;
+mod script;
+mod workspace_test;

+ 89 - 0
shared-lib/flowy-sync/tests/client_folder/script.rs

@@ -0,0 +1,89 @@
+use flowy_sync::client_folder::{AppNode, FolderNodePad, WorkspaceNode};
+use folder_rev_model::AppRevision;
+use lib_ot::core::Path;
+
+pub enum FolderNodePadScript {
+    CreateWorkspace { id: String, name: String },
+    DeleteWorkspace { id: String },
+    AssertPathOfWorkspace { id: String, expected_path: Path },
+    AssertNumberOfWorkspace { expected: usize },
+    CreateApp { id: String, name: String },
+    DeleteApp { id: String },
+    UpdateApp { id: String, name: String },
+    AssertApp { id: String, expected: Option<AppRevision> },
+    AssertAppContent { id: String, name: String },
+    // AssertNumberOfApps { expected: usize },
+}
+
+pub struct FolderNodePadTest {
+    folder_pad: FolderNodePad,
+}
+
+impl FolderNodePadTest {
+    pub fn new() -> FolderNodePadTest {
+        let mut folder_pad = FolderNodePad::default();
+        let workspace = WorkspaceNode::new(folder_pad.tree.clone(), "1".to_string(), "workspace name".to_string());
+        let _ = folder_pad.workspaces.add_workspace(workspace).unwrap();
+        Self { folder_pad }
+    }
+
+    pub fn run_scripts(&mut self, scripts: Vec<FolderNodePadScript>) {
+        for script in scripts {
+            self.run_script(script);
+        }
+    }
+
+    pub fn run_script(&mut self, script: FolderNodePadScript) {
+        match script {
+            FolderNodePadScript::CreateWorkspace { id, name } => {
+                let workspace = WorkspaceNode::new(self.folder_pad.tree.clone(), id, name);
+                self.folder_pad.workspaces.add_workspace(workspace).unwrap();
+            }
+            FolderNodePadScript::DeleteWorkspace { id } => {
+                self.folder_pad.workspaces.remove_workspace(id);
+            }
+            FolderNodePadScript::AssertPathOfWorkspace { id, expected_path } => {
+                let workspace_node: &WorkspaceNode = self.folder_pad.workspaces.get_workspace(id).unwrap();
+                let node_id = workspace_node.node_id.unwrap();
+                let path = self.folder_pad.tree.read().path_from_node_id(node_id);
+                assert_eq!(path, expected_path);
+            }
+            FolderNodePadScript::AssertNumberOfWorkspace { expected } => {
+                assert_eq!(self.folder_pad.workspaces.len(), expected);
+            }
+            FolderNodePadScript::CreateApp { id, name } => {
+                let app_node = AppNode::new(self.folder_pad.tree.clone(), id, name);
+                let workspace_node = self.folder_pad.get_mut_workspace("1").unwrap();
+                let _ = workspace_node.add_app(app_node).unwrap();
+            }
+            FolderNodePadScript::DeleteApp { id } => {
+                let workspace_node = self.folder_pad.get_mut_workspace("1").unwrap();
+                workspace_node.remove_app(&id);
+            }
+            FolderNodePadScript::UpdateApp { id, name } => {
+                let workspace_node = self.folder_pad.get_mut_workspace("1").unwrap();
+                workspace_node.get_mut_app(&id).unwrap().set_name(name);
+            }
+            FolderNodePadScript::AssertApp { id, expected } => {
+                let workspace_node = self.folder_pad.get_workspace("1").unwrap();
+                let app = workspace_node.get_app(&id);
+                match expected {
+                    None => assert!(app.is_none()),
+                    Some(expected_app) => {
+                        let app_node = app.unwrap();
+                        assert_eq!(expected_app.name, app_node.get_name().unwrap());
+                        assert_eq!(expected_app.id, app_node.get_id().unwrap());
+                    }
+                }
+            }
+            FolderNodePadScript::AssertAppContent { id, name } => {
+                let workspace_node = self.folder_pad.get_workspace("1").unwrap();
+                let app = workspace_node.get_app(&id).unwrap();
+                assert_eq!(app.get_name().unwrap(), name)
+            } // FolderNodePadScript::AssertNumberOfApps { expected } => {
+              //     let workspace_node = self.folder_pad.get_workspace("1").unwrap();
+              //     assert_eq!(workspace_node.apps.len(), expected);
+              // }
+        }
+    }
+}

+ 86 - 0
shared-lib/flowy-sync/tests/client_folder/workspace_test.rs

@@ -0,0 +1,86 @@
+use crate::client_folder::script::FolderNodePadScript::*;
+use crate::client_folder::script::FolderNodePadTest;
+
+#[test]
+fn client_folder_create_multi_workspaces_test() {
+    let mut test = FolderNodePadTest::new();
+    test.run_scripts(vec![
+        AssertPathOfWorkspace {
+            id: "1".to_string(),
+            expected_path: vec![0, 0, 0].into(),
+        },
+        CreateWorkspace {
+            id: "a".to_string(),
+            name: "workspace a".to_string(),
+        },
+        AssertPathOfWorkspace {
+            id: "a".to_string(),
+            expected_path: vec![0, 0, 1].into(),
+        },
+        CreateWorkspace {
+            id: "b".to_string(),
+            name: "workspace b".to_string(),
+        },
+        AssertPathOfWorkspace {
+            id: "b".to_string(),
+            expected_path: vec![0, 0, 2].into(),
+        },
+        AssertNumberOfWorkspace { expected: 3 },
+        // The path of the workspace 'b' will be changed after deleting the 'a' workspace.
+        DeleteWorkspace { id: "a".to_string() },
+        AssertPathOfWorkspace {
+            id: "b".to_string(),
+            expected_path: vec![0, 0, 1].into(),
+        },
+    ]);
+}
+
+#[test]
+fn client_folder_create_app_test() {
+    let mut test = FolderNodePadTest::new();
+    test.run_scripts(vec![
+        CreateApp {
+            id: "1".to_string(),
+            name: "my first app".to_string(),
+        },
+        AssertAppContent {
+            id: "1".to_string(),
+            name: "my first app".to_string(),
+        },
+    ]);
+}
+
+#[test]
+fn client_folder_delete_app_test() {
+    let mut test = FolderNodePadTest::new();
+    test.run_scripts(vec![
+        CreateApp {
+            id: "1".to_string(),
+            name: "my first app".to_string(),
+        },
+        DeleteApp { id: "1".to_string() },
+        AssertApp {
+            id: "1".to_string(),
+            expected: None,
+        },
+    ]);
+}
+
+#[test]
+fn client_folder_update_app_test() {
+    let mut test = FolderNodePadTest::new();
+    test.run_scripts(vec![
+        CreateApp {
+            id: "1".to_string(),
+            name: "my first app".to_string(),
+        },
+        UpdateApp {
+            id: "1".to_string(),
+            name: "TODO".to_string(),
+        },
+        AssertAppContent {
+            id: "1".to_string(),
+            name: "TODO".to_string(),
+        },
+    ]);
+}

+ 1 - 0
shared-lib/flowy-sync/tests/main.rs

@@ -0,0 +1 @@
+mod client_folder;

+ 3 - 3
shared-lib/grid-rev-model/src/grid_setting_rev.rs

@@ -61,8 +61,8 @@ where
         predicate: impl Fn(&Arc<T>) -> bool,
     ) -> Option<Arc<T>> {
         let objects = self.get_objects(field_id, field_type)?;
-        let index = objects.iter().position(|object| predicate(object))?;
-        objects.get(index).map(|object| object.clone())
+        let index = objects.iter().position(predicate)?;
+        objects.get(index).cloned()
     }
 
     pub fn get_mut_object(
@@ -72,7 +72,7 @@ where
         predicate: impl Fn(&Arc<T>) -> bool,
     ) -> Option<&mut Arc<T>> {
         let objects = self.get_mut_objects(field_id, field_type)?;
-        let index = objects.iter().position(|object| predicate(object))?;
+        let index = objects.iter().position(predicate)?;
         objects.get_mut(index)
     }
 

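Both hunks in this file are clippy-style cleanups: the predicate is passed straight to `Iterator::position` instead of being wrapped in a redundant closure, and `.map(|object| object.clone())` becomes `.cloned()`. A standalone illustration of the same two rewrites:

```rust
use std::sync::Arc;

fn main() {
    let objects: Vec<Arc<i32>> = vec![Arc::new(1), Arc::new(2), Arc::new(3)];
    let predicate = |object: &Arc<i32>| **object == 2;

    // `.position(predicate)` instead of `.position(|object| predicate(object))`
    let index = objects.iter().position(predicate).unwrap();

    // `.cloned()` instead of `.map(|object| object.clone())`
    let found: Option<Arc<i32>> = objects.get(index).cloned();
    assert_eq!(*found.unwrap(), 2);
}
```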
+ 0 - 40
shared-lib/lib-infra/Cargo.toml

@@ -6,50 +6,10 @@ edition = "2018"
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
-log = "0.4.14"
 chrono = "0.4.19"
 bytes = { version = "1.0" }
 pin-project = "1.0"
 futures-core = { version = "0.3" }
 tokio = { version = "1.0", features = ["time", "rt"] }
 rand = "0.8.5"
-serde = { version = "1.0", features = ["derive"]}
-serde_json = "1.0"
 
-
-cmd_lib = { version = "1", optional = true }
-protoc-rust = { version = "2", optional = true }
-walkdir = { version = "2", optional = true }
-
-flowy-ast = { path = "../flowy-ast", optional = true}
-similar = { version = "1.2.2", optional = true }
-syn = { version = "1.0.60", features = ["extra-traits", "parsing", "derive", "full"], optional = true }
-fancy-regex = { version = "0.10.0", optional = true }
-lazy_static = { version = "1.4.0", optional = true }
-tera = { version = "1.5.0", optional = true}
-itertools = { version = "0.10", optional = true }
-phf = { version = "0.8.0", features = ["macros"], optional = true }
-console = {version = "0.14.0", optional = true}
-protoc-bin-vendored = { version = "3.0", optional = true }
-toml = {version = "0.5.8", optional = true}
-
-[features]
-proto_gen = [
-    "flowy-ast",
-    "similar",
-    "syn",
-    "fancy-regex",
-    "lazy_static",
-    "tera",
-    "itertools",
-    "phf",
-    "walkdir",
-    "console",
-    "toml",
-    "cmd_lib",
-    "protoc-rust",
-    "walkdir",
-    "protoc-bin-vendored",
-]
-dart_event = ["walkdir", "flowy-ast", "tera", "syn"]
-dart = ["proto_gen", "dart_event"]

+ 0 - 5
shared-lib/lib-infra/src/code_gen/dart_event/mod.rs

@@ -1,5 +0,0 @@
-#![allow(clippy::module_inception)]
-mod dart_event;
-mod event_template;
-
-pub use dart_event::*;

+ 0 - 45
shared-lib/lib-infra/src/code_gen/protobuf_file/template/derive_meta/derive_meta.tera

@@ -1,45 +0,0 @@
-#![cfg_attr(rustfmt, rustfmt::skip)]
-pub enum TypeCategory {
-    Array,
-    Map,
-    Str,
-    Protobuf,
-    Bytes,
-    Enum,
-    Opt,
-    Primitive,
-}
-// auto generate, do not edit
-pub fn category_from_str(type_str: &str) -> TypeCategory {
-    match type_str {
-        "Vec" => TypeCategory::Array,
-        "HashMap" => TypeCategory::Map,
-        "u8" => TypeCategory::Bytes,
-        "String" => TypeCategory::Str,
-{%- for name in names -%}
-    {%- if loop.first %}
-        "{{ name }}"
-    {%- else %}
-        | "{{ name }}"
-    {%- endif -%}
-    {%- if loop.last %}
-        => TypeCategory::Protobuf,
-    {%- endif %}
-
-{%- endfor %}
-
-{%- for enum in enums -%}
-    {%- if loop.first %}
-        "{{ enum }}"
-    {%- else %}
-        | "{{ enum }}"
-    {%- endif -%}
-    {%- if loop.last %}
-        => TypeCategory::Enum,
-    {%- endif %}
-{%- endfor %}
-
-        "Option" => TypeCategory::Opt,
-        _ => TypeCategory::Primitive,
-    }
-}

+ 0 - 4
shared-lib/lib-infra/src/code_gen/protobuf_file/template/derive_meta/mod.rs

@@ -1,4 +0,0 @@
-#![allow(clippy::module_inception)]
-mod derive_meta;
-
-pub use derive_meta::*;

+ 0 - 5
shared-lib/lib-infra/src/code_gen/protobuf_file/template/mod.rs

@@ -1,5 +0,0 @@
-mod derive_meta;
-mod proto_file;
-
-pub use derive_meta::*;
-pub use proto_file::*;

Some files were not shown because too many files changed in this diff