feat: integrated OpenAI Rust plugin (#3302)

* feat: create flowy-plugins

* feat: integrated openai service with interface

* feat: integrated openai api call

* fix: response in api

* fix: removed unused imports

* fix: remove parse.rs

* fix: macos version + removed print

* fix: removed debugPrint + changed deployment target

* fix: android project+removed gesture detector

* fix: removed unused imports

* chore: fix compile

* chore: revert changes

* chore: revert changes

* chore: fix clippy warnings

* chore: rename

* chore: fix compile error

* chore: remove dart ai

---------

Co-authored-by: nathan <[email protected]>
Yatendra Kumar, 1 year ago
Parent commit: e3f11ea9c0

+ 2 - 2
frontend/appflowy_flutter/packages/flowy_infra_ui/android/.project

@@ -22,12 +22,12 @@
 	</natures>
 	<filteredResources>
 		<filter>
-			<id>1626576261667</id>
+			<id>1693395487121</id>
 			<name></name>
 			<type>30</type>
 			<matcher>
 				<id>org.eclipse.core.resources.regexFilterMatcher</id>
-				<arguments>node_modules|.git|__CREATED_BY_JAVA_LANGUAGE_SERVER__</arguments>
+				<arguments>node_modules|\.git|__CREATED_BY_JAVA_LANGUAGE_SERVER__</arguments>
 			</matcher>
 		</filter>
 	</filteredResources>

+ 11 - 0
frontend/appflowy_flutter/packages/flowy_infra_ui/android/.settings/org.eclipse.buildship.core.prefs

@@ -1,2 +1,13 @@
+arguments=--init-script /var/folders/th/tfqrqcp12kvgzs3c3z0xqxlc0000gn/T/d146c9752a26f79b52047fb6dc6ed385d064e120494f96f08ca63a317c41f94c.gradle --init-script /var/folders/th/tfqrqcp12kvgzs3c3z0xqxlc0000gn/T/52cde0cfcf3e28b8b7510e992210d9614505e0911af0c190bd590d7158574963.gradle
+auto.sync=false
+build.scans.enabled=false
+connection.gradle.distribution=GRADLE_DISTRIBUTION(VERSION(7.4.2))
 connection.project.dir=
 eclipse.preferences.version=1
+gradle.user.home=
+java.home=/Library/Java/JavaVirtualMachines/temurin-17.jdk/Contents/Home
+jvm.arguments=
+offline.mode=false
+override.workspace.settings=true
+show.console.view=true
+show.executions.view=true

+ 12 - 1
frontend/appflowy_flutter/packages/flowy_infra_ui/example/android/app/.settings/org.eclipse.buildship.core.prefs

@@ -1,2 +1,13 @@
-connection.project.dir=..
+arguments=
+auto.sync=false
+build.scans.enabled=false
+connection.gradle.distribution=GRADLE_DISTRIBUTION(WRAPPER)
+connection.project.dir=
 eclipse.preferences.version=1
+gradle.user.home=
+java.home=/Library/Java/JavaVirtualMachines/jdk11.0.5-zulu.jdk/Contents/Home
+jvm.arguments=
+offline.mode=false
+override.workspace.settings=true
+show.console.view=true
+show.executions.view=true

+ 18 - 0
frontend/appflowy_tauri/src-tauri/Cargo.lock

@@ -1569,6 +1569,23 @@ dependencies = [
  "miniz_oxide 0.7.1",
 ]
 
+[[package]]
+name = "flowy-ai"
+version = "0.1.0"
+dependencies = [
+ "bytes",
+ "flowy-derive",
+ "flowy-error",
+ "flowy-notification",
+ "lib-dispatch",
+ "lib-infra",
+ "protobuf",
+ "reqwest",
+ "serde",
+ "serde_json",
+ "strum_macros 0.21.1",
+]
+
 [[package]]
 name = "flowy-ast"
 version = "0.1.0"
@@ -1627,6 +1644,7 @@ dependencies = [
  "collab-integrate",
  "collab-plugins",
  "diesel",
+ "flowy-ai",
  "flowy-config",
  "flowy-database-deps",
  "flowy-database2",

+ 18 - 0
frontend/rust-lib/Cargo.lock

@@ -1317,6 +1317,23 @@ dependencies = [
  "miniz_oxide",
 ]
 
+[[package]]
+name = "flowy-ai"
+version = "0.1.0"
+dependencies = [
+ "bytes",
+ "flowy-derive",
+ "flowy-error",
+ "flowy-notification",
+ "lib-dispatch",
+ "lib-infra",
+ "protobuf",
+ "reqwest",
+ "serde",
+ "serde_json",
+ "strum_macros 0.21.1",
+]
+
 [[package]]
 name = "flowy-ast"
 version = "0.1.0"
@@ -1376,6 +1393,7 @@ dependencies = [
  "collab-plugins",
  "console-subscriber",
  "diesel",
+ "flowy-ai",
  "flowy-config",
  "flowy-database-deps",
  "flowy-database2",

+ 2 - 0
frontend/rust-lib/Cargo.toml

@@ -23,6 +23,7 @@ members = [
   "flowy-encrypt",
   "flowy-storage",
   "collab-integrate",
+  "flowy-ai",
 ]
 
 [workspace.dependencies]
@@ -48,6 +49,7 @@ flowy-config = { workspace = true, path = "flowy-config" }
 flowy-encrypt = { workspace = true, path = "flowy-encrypt" }
 flowy-storage = { workspace = true, path = "flowy-storage" }
 collab-integrate = { workspace = true, path = "collab-integrate" }
+flowy-ai = { workspace = true, path = "flowy-ai" }
 
 [profile.dev]
 opt-level = 0

+ 20 - 0
frontend/rust-lib/flowy-ai/Cargo.toml

@@ -0,0 +1,20 @@
+[package]
+name = "flowy-ai"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+flowy-derive = { path = "../../../shared-lib/flowy-derive" }
+flowy-notification = { path = "../flowy-notification" }
+flowy-error = { path = "../flowy-error", features = ["impl_from_serde", "impl_from_dispatch_error"] }
+lib-dispatch = { path = "../lib-dispatch" }
+lib-infra = { path = "../../../shared-lib/lib-infra" }
+
+protobuf = {version = "2.28.0"}
+bytes = { version = "1.4" }
+strum_macros = "0.21"
+reqwest = { version = "0.11", features = ["json"] }
+serde = { version = "1.0", features = ["derive"] }
+serde_json = "1.0"

+ 71 - 0
frontend/rust-lib/flowy-ai/src/entities.rs

@@ -0,0 +1,71 @@
+use flowy_error::ErrorCode;
+
+/*
+ model="text-davinci-003",
+ prompt="Write a tagline for an ice cream shop."
+*/
+#[derive(Default)]
+pub struct TextCompletionPayloadPB {
+  pub request_id: String,
+
+  // Model: Either text-davinci-003 or gpt-3.5-turbo
+  pub model: String,
+
+  // Prompt to query gpt
+  pub prompt: String,
+
+  // User open_ai_key for authentication
+  pub open_ai_key: String,
+}
+
+pub struct TextCompletionParams {
+  pub request_id: String,
+  pub model: String,
+  pub prompt: String,
+  pub open_ai_key: String,
+}
+
+impl TryInto<TextCompletionParams> for TextCompletionPayloadPB {
+  type Error = ErrorCode;
+  fn try_into(self) -> Result<TextCompletionParams, Self::Error> {
+    Ok(TextCompletionParams {
+      request_id: self.request_id,
+      model: self.model,
+      prompt: self.prompt,
+      open_ai_key: self.open_ai_key,
+    })
+  }
+}
+
+/*
+{
+  "id": "chatcmpl-123",
+  "object": "chat.completion",
+  "created": 1677652288,
+  "model": "gpt-3.5-turbo-0613",
+  "choices": [{
+    "index": 0,
+    "message": {
+      "role": "assistant",
+      "content": "\n\nHello there, how may I assist you today?",
+    },
+    "finish_reason": "stop"
+  }],
+  "usage": {
+    "prompt_tokens": 9,
+    "completion_tokens": 12,
+    "total_tokens": 21
+  }
+}
+
+*/
+#[derive(Default)]
+pub struct TextCompletionDataPB {
+  pub request_id: String,
+
+  pub model: String,
+
+  pub index: i32,
+
+  pub content: String,
+}
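
A hedged usage sketch of the conversion above, assuming the module declarations in lib.rs are re-enabled so the types are reachable as `flowy_ai::entities` (this commit ships them commented out); the field values are placeholders.

```rust
use flowy_ai::entities::{TextCompletionParams, TextCompletionPayloadPB};
use flowy_error::ErrorCode;

fn main() {
  // Placeholder payload; in the plugin this arrives from the frontend.
  let payload = TextCompletionPayloadPB {
    request_id: "req-1".to_string(),
    model: "gpt-3.5-turbo".to_string(),
    prompt: "Write a tagline for an ice cream shop.".to_string(),
    open_ai_key: "YOUR_OPENAI_KEY".to_string(),
  };

  // The conversion currently just forwards fields, but the TryInto signature
  // leaves room for validation (e.g. rejecting an empty key) to return an ErrorCode.
  let params: Result<TextCompletionParams, ErrorCode> = payload.try_into();
  assert!(params.is_ok());
}
```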

+ 71 - 0
frontend/rust-lib/flowy-ai/src/event_handler.rs

@@ -0,0 +1,71 @@
+use crate::entities::{TextCompletionDataPB, TextCompletionParams, TextCompletionPayloadPB};
+use flowy_error::FlowyError;
+use lib_dispatch::prelude::{data_result_ok, AFPluginData, DataResult};
+use reqwest;
+use serde::{Deserialize, Serialize};
+
+#[derive(Serialize, Deserialize)]
+struct Message {
+  role: String,
+  content: String,
+}
+
+#[derive(Serialize)]
+struct RequestBody {
+  model: String,
+  messages: Vec<Message>,
+}
+
+#[derive(Deserialize)]
+struct ResponseChoice {
+  index: i32,
+  message: Message,
+}
+
+#[derive(Deserialize)]
+struct ApiResponse {
+  choices: Vec<ResponseChoice>,
+}
+
+pub(crate) async fn request_text_completion(
+  data: AFPluginData<TextCompletionPayloadPB>,
+) -> DataResult<TextCompletionDataPB, FlowyError> {
+  // Set up the request body
+  let body = RequestBody {
+    model: "gpt-3.5-turbo".to_string(),
+    messages: vec![
+      Message {
+        role: "system".to_string(),
+        content: "You are a helpful assistant.".to_string(),
+      },
+      Message {
+        role: "user".to_string(),
+        content: data.prompt.to_string(),
+      },
+    ],
+  };
+
+  // Make the API call
+  let client = reqwest::Client::new();
+  let response: ApiResponse = client
+    .post("https://api.openai.com/v1/chat/completions")
+    .header("Content-Type", "application/json")
+    .header("Authorization", format!("Bearer {}", data.open_ai_key))
+    .json(&body)
+    .send()
+    .await?
+    .json()
+    .await?;
+
+  // Extract index and content
+  let _choice = &response.choices[0];
+
+  let params: TextCompletionParams = data.into_inner().try_into()?;
+
+  data_result_ok(TextCompletionDataPB {
+    request_id: params.request_id,
+    model: params.model,
+    index: response.choices[0].index,
+    content: response.choices[0].message.content.to_string(),
+  })
+}
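
For reference, a standalone sketch of the same chat-completions request the handler above issues, assuming `reqwest` with the "json" feature, `serde` with "derive", and `tokio` (macros + runtime) as dependencies; the key and prompt are placeholders. Like the handler, it assumes the response deserializes with at least one choice.

```rust
use serde::{Deserialize, Serialize};

#[derive(Serialize)]
struct Message {
  role: String,
  content: String,
}

#[derive(Serialize)]
struct RequestBody {
  model: String,
  messages: Vec<Message>,
}

#[derive(Deserialize)]
struct ResponseMessage {
  content: String,
}

#[derive(Deserialize)]
struct ResponseChoice {
  index: i32,
  message: ResponseMessage,
}

#[derive(Deserialize)]
struct ApiResponse {
  choices: Vec<ResponseChoice>,
}

#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
  let body = RequestBody {
    model: "gpt-3.5-turbo".to_string(),
    messages: vec![
      Message {
        role: "system".to_string(),
        content: "You are a helpful assistant.".to_string(),
      },
      Message {
        role: "user".to_string(),
        content: "Write a tagline for an ice cream shop.".to_string(),
      },
    ],
  };

  // Same endpoint and headers as the handler above; the key is a placeholder.
  let open_ai_key = "YOUR_OPENAI_KEY";
  let response: ApiResponse = reqwest::Client::new()
    .post("https://api.openai.com/v1/chat/completions")
    .header("Content-Type", "application/json")
    .header("Authorization", format!("Bearer {}", open_ai_key))
    .json(&body)
    .send()
    .await?
    .json()
    .await?;

  // Like the handler, this assumes the response contains at least one choice.
  let choice = &response.choices[0];
  println!("choice {}: {}", choice.index, choice.message.content);
  Ok(())
}
```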

+ 18 - 0
frontend/rust-lib/flowy-ai/src/event_map.rs

@@ -0,0 +1,18 @@
+use flowy_derive::{Flowy_Event, ProtoBuf_Enum};
+use lib_dispatch::prelude::AFPlugin;
+use strum_macros::Display;
+
+use crate::event_handler::request_text_completion;
+
+pub fn init() -> AFPlugin {
+  AFPlugin::new()
+    .name(env!("CARGO_PKG_NAME"))
+    .event(OpenAIEvent::RequestTextCompletion, request_text_completion)
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash, Display, ProtoBuf_Enum, Flowy_Event)]
+#[event_err = "FlowyError"]
+pub enum OpenAIEvent {
+  #[event(input = "TextCompletionPayloadPB", output = "TextCompletionDataPB")]
+  RequestTextCompletion = 0,
+}
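
A hedged sketch of collecting the plugin for registration, again assuming the lib.rs modules are re-enabled; the actual registration call inside flowy-core is not part of this diff.

```rust
use flowy_ai::event_map;
use lib_dispatch::prelude::AFPlugin;

// init() names the plugin after the crate ("flowy-ai", via CARGO_PKG_NAME) and
// maps OpenAIEvent::RequestTextCompletion to the request_text_completion handler.
fn collect_plugins() -> Vec<AFPlugin> {
  vec![event_map::init()]
}

fn main() {
  let plugins = collect_plugins();
  assert_eq!(plugins.len(), 1);
}
```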

+ 4 - 0
frontend/rust-lib/flowy-ai/src/lib.rs

@@ -0,0 +1,4 @@
+// pub mod entities;
+// pub mod event_handler;
+// pub mod event_map;
+// pub mod notification;
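
The module declarations ship commented out in this commit; with them enabled, lib.rs would read as below (the other sketches in this section assume this).

```rust
pub mod entities;
pub mod event_handler;
pub mod event_map;
pub mod notification;
```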

+ 21 - 0
frontend/rust-lib/flowy-ai/src/notification.rs

@@ -0,0 +1,21 @@
+use flowy_derive::ProtoBuf_Enum;
+use flowy_notification::NotificationBuilder;
+
+const OPEN_AI_NOTIFICATION: &str = "OpenAI";
+
+#[derive(ProtoBuf_Enum, Debug, Default)]
+pub(crate) enum OpenAINotification {
+  #[default]
+  Unknown = 0,
+}
+
+impl std::convert::From<OpenAINotification> for i32 {
+  fn from(notification: OpenAINotification) -> Self {
+    notification as i32
+  }
+}
+
+#[allow(dead_code)]
+pub(crate) fn send_notification(id: &str, ty: OpenAINotification) -> NotificationBuilder {
+  NotificationBuilder::new(id, ty, OPEN_AI_NOTIFICATION)
+}
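
`send_notification` is crate-private, so it would only be called from inside flowy-ai; a minimal sketch follows. How the resulting NotificationBuilder is ultimately dispatched depends on flowy-notification's API, which this diff does not show.

```rust
// Inside flowy-ai (send_notification and OpenAINotification are pub(crate)):
// build a notification keyed by a request id. OpenAINotification currently has
// only the Unknown variant.
use crate::notification::{send_notification, OpenAINotification};
use flowy_notification::NotificationBuilder;

fn notify_request(request_id: &str) -> NotificationBuilder {
  send_notification(request_id, OpenAINotification::Unknown)
}
```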

+ 1 - 0
frontend/rust-lib/flowy-core/Cargo.toml

@@ -23,6 +23,7 @@ flowy-server = { workspace = true }
 flowy-server-config = { workspace = true }
 flowy-config = { workspace = true }
 collab-integrate = { workspace = true, features = ["supabase_integrate", "appflowy_cloud_integrate", "snapshot_plugin"] }
+flowy-ai = { workspace = true }
 collab-define = { version = "0.1.0" }
 collab-plugins = { version = "0.1.0", features = ["sync_plugin"] }
 collab = { version = "0.1.0" }

+ 1 - 0
frontend/rust-lib/flowy-user/src/migrations/sync_new_user.rs

@@ -70,6 +70,7 @@ pub async fn sync_user_data_to_cloud(
   Ok(())
 }
 
+#[allow(clippy::too_many_arguments)]
 fn sync_views(
   uid: i64,
   folder: Arc<MutexFolder>,