From eb601932ea5916c5b90e8ea4a7fb8a9e18bfbd46 Mon Sep 17 00:00:00 2001 From: appflowy Date: Tue, 21 Dec 2021 21:06:16 +0800 Subject: [PATCH] add backend kv store --- backend/doc/database_setup.md | 9 +- backend/migrations/20211221061753_kv.sql | 6 + backend/src/context.rs | 6 + backend/src/services/core/app/persistence.rs | 16 +- backend/src/services/core/trash/trash.rs | 12 +- backend/src/services/core/view/persistence.rs | 16 +- .../services/core/workspace/persistence.rs | 12 +- backend/src/services/document/crud.rs | 8 +- backend/src/services/kv_store/kv.rs | 165 ++++++++++++++++++ backend/src/services/kv_store/mod.rs | 21 +++ backend/src/services/mod.rs | 1 + backend/src/util/sqlx_ext/query.rs | 6 +- backend/tests/api/mod.rs | 3 - .../{api/auth.rs => api_test/auth_test.rs} | 0 backend/tests/api_test/kv_test.rs | 51 ++++++ backend/tests/api_test/mod.rs | 3 + .../workspace_test.rs} | 0 .../doc.rs => document_test/crud_test.rs} | 0 .../edit_script.rs | 0 .../{document => document_test}/edit_test.rs | 6 +- .../tests/{document => document_test}/mod.rs | 1 + backend/tests/main.rs | 4 +- backend/tests/util/helper.rs | 2 +- frontend/scripts/flowy-tool/src/proto/ast.rs | 4 +- 24 files changed, 298 insertions(+), 54 deletions(-) create mode 100644 backend/migrations/20211221061753_kv.sql create mode 100644 backend/src/services/kv_store/kv.rs create mode 100644 backend/src/services/kv_store/mod.rs delete mode 100644 backend/tests/api/mod.rs rename backend/tests/{api/auth.rs => api_test/auth_test.rs} (100%) create mode 100644 backend/tests/api_test/kv_test.rs create mode 100644 backend/tests/api_test/mod.rs rename backend/tests/{api/workspace.rs => api_test/workspace_test.rs} (100%) rename backend/tests/{api/doc.rs => document_test/crud_test.rs} (100%) rename backend/tests/{document => document_test}/edit_script.rs (100%) rename backend/tests/{document => document_test}/edit_test.rs (99%) rename backend/tests/{document => document_test}/mod.rs (71%) diff --git 
a/backend/doc/database_setup.md b/backend/doc/database_setup.md index 52e21bbc65..3a89974cc9 100644 --- a/backend/doc/database_setup.md +++ b/backend/doc/database_setup.md @@ -5,14 +5,7 @@ 1. follow the [instructions](https://docs.docker.com/desktop/mac/install/) to install docker. 2. open terminal and run: `docker pull postgres` - -3. run `make init_postgres` if you have not run before. You can find out the running container by run `docker ps` -``` -CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES -bfcdd6369e89 postgres "docker-entrypoint.s…" 19 minutes ago Up 19 minutes 0.0.0.0:5432->5432/tcp, :::5432->5432/tcp brave_bassi -``` - -4. run `make init_database`. It will create the database scheme on remote specified by DATABASE_URL. You can connect you database using +3. run `make init_database`. It will create the database scheme on remote specified by DATABASE_URL. You can connect your database using pgAdmin. ![img_2.png](img_2.png) diff --git a/backend/migrations/20211221061753_kv.sql b/backend/migrations/20211221061753_kv.sql new file mode 100644 index 0000000000..833353c43e --- /dev/null +++ b/backend/migrations/20211221061753_kv.sql @@ -0,0 +1,6 @@ +-- Add migration script here +CREATE TABLE IF NOT EXISTS kv_table( + id TEXT NOT NULL, + PRIMARY KEY (id), + blob bytea +); \ No newline at end of file diff --git a/backend/src/context.rs b/backend/src/context.rs index fd34580106..9a3940a024 100644 --- a/backend/src/context.rs +++ b/backend/src/context.rs @@ -1,5 +1,6 @@ use crate::services::{ document::manager::DocumentManager, + kv_store::{KVStore, PostgresKV}, web_socket::{WSServer, WebSocketReceivers}, }; use actix::Addr; @@ -14,6 +15,7 @@ pub struct AppContext { pub pg_pool: Data, pub ws_receivers: Data, pub document_mng: Data>, + pub kv_store: Data>, } impl AppContext { @@ -24,12 +26,16 @@ impl AppContext { let mut ws_receivers = WebSocketReceivers::new(); let document_mng = Arc::new(DocumentManager::new(pg_pool.clone())); ws_receivers.set(WSModule::Doc, 
document_mng.clone()); + let kv_store = Arc::new(PostgresKV { + pg_pool: pg_pool.clone(), + }); AppContext { ws_server, pg_pool, ws_receivers: Data::new(ws_receivers), document_mng: Data::new(document_mng), + kv_store: Data::new(kv_store), } } } diff --git a/backend/src/services/core/app/persistence.rs b/backend/src/services/core/app/persistence.rs index f5543a2cea..5fcf1980e3 100644 --- a/backend/src/services/core/app/persistence.rs +++ b/backend/src/services/core/app/persistence.rs @@ -80,14 +80,14 @@ impl NewAppSqlBuilder { let app: App = self.table.clone().into(); let (sql, args) = SqlBuilder::create(APP_TABLE) - .add_arg("id", self.table.id) - .add_arg("workspace_id", self.table.workspace_id) - .add_arg("name", self.table.name) - .add_arg("description", self.table.description) - .add_arg("color_style", self.table.color_style) - .add_arg("modified_time", self.table.modified_time) - .add_arg("create_time", self.table.create_time) - .add_arg("user_id", self.table.user_id) + .add_field_with_arg("id", self.table.id) + .add_field_with_arg("workspace_id", self.table.workspace_id) + .add_field_with_arg("name", self.table.name) + .add_field_with_arg("description", self.table.description) + .add_field_with_arg("color_style", self.table.color_style) + .add_field_with_arg("modified_time", self.table.modified_time) + .add_field_with_arg("create_time", self.table.create_time) + .add_field_with_arg("user_id", self.table.user_id) .build()?; Ok((sql, args, app)) diff --git a/backend/src/services/core/trash/trash.rs b/backend/src/services/core/trash/trash.rs index 3f842701a2..64acb2058b 100644 --- a/backend/src/services/core/trash/trash.rs +++ b/backend/src/services/core/trash/trash.rs @@ -21,9 +21,9 @@ pub(crate) async fn create_trash( ) -> Result<(), ServerError> { for (trash_id, ty) in records { let (sql, args) = SqlBuilder::create(TRASH_TABLE) - .add_arg("id", trash_id) - .add_arg("user_id", &user.user_id) - .add_arg("ty", ty) + .add_field_with_arg("id", trash_id) + 
.add_field_with_arg("user_id", &user.user_id) + .add_field_with_arg("ty", ty) .build()?; let _ = sqlx::query_with(&sql, args) @@ -52,7 +52,7 @@ pub(crate) async fn delete_all_trash( .collect::>(); tracing::Span::current().record("delete_rows", &format!("{:?}", rows).as_str()); let affected_row_count = rows.len(); - let _ = delete_trash_targets(transaction as &mut DBTransaction<'_>, rows).await?; + let _ = delete_trash_associate_targets(transaction as &mut DBTransaction<'_>, rows).await?; let (sql, args) = SqlBuilder::delete(TRASH_TABLE) .and_where_eq("user_id", &user.user_id) @@ -84,7 +84,7 @@ pub(crate) async fn delete_trash( .await .map_err(map_sqlx_error)?; - let _ = delete_trash_targets( + let _ = delete_trash_associate_targets( transaction as &mut DBTransaction<'_>, vec![(trash_table.id, trash_table.ty)], ) @@ -101,7 +101,7 @@ pub(crate) async fn delete_trash( } #[tracing::instrument(skip(transaction, targets), err)] -async fn delete_trash_targets( +async fn delete_trash_associate_targets( transaction: &mut DBTransaction<'_>, targets: Vec<(Uuid, i32)>, ) -> Result<(), ServerError> { diff --git a/backend/src/services/core/view/persistence.rs b/backend/src/services/core/view/persistence.rs index f9cbfe22b1..fb7d830542 100644 --- a/backend/src/services/core/view/persistence.rs +++ b/backend/src/services/core/view/persistence.rs @@ -77,14 +77,14 @@ impl NewViewSqlBuilder { let view: View = self.table.clone().into(); let (sql, args) = SqlBuilder::create(VIEW_TABLE) - .add_arg("id", self.table.id) - .add_arg("belong_to_id", self.table.belong_to_id) - .add_arg("name", self.table.name) - .add_arg("description", self.table.description) - .add_arg("modified_time", self.table.modified_time) - .add_arg("create_time", self.table.create_time) - .add_arg("thumbnail", self.table.thumbnail) - .add_arg("view_type", self.table.view_type) + .add_field_with_arg("id", self.table.id) + .add_field_with_arg("belong_to_id", self.table.belong_to_id) + .add_field_with_arg("name", 
self.table.name) + .add_field_with_arg("description", self.table.description) + .add_field_with_arg("modified_time", self.table.modified_time) + .add_field_with_arg("create_time", self.table.create_time) + .add_field_with_arg("thumbnail", self.table.thumbnail) + .add_field_with_arg("view_type", self.table.view_type) .build()?; Ok((sql, args, view)) diff --git a/backend/src/services/core/workspace/persistence.rs b/backend/src/services/core/workspace/persistence.rs index c48a080824..7e3f152a68 100644 --- a/backend/src/services/core/workspace/persistence.rs +++ b/backend/src/services/core/workspace/persistence.rs @@ -56,12 +56,12 @@ impl NewWorkspaceBuilder { let workspace: Workspace = self.table.clone().into(); // TODO: use macro to fetch each field from struct let (sql, args) = SqlBuilder::create(WORKSPACE_TABLE) - .add_arg("id", self.table.id) - .add_arg("name", self.table.name) - .add_arg("description", self.table.description) - .add_arg("modified_time", self.table.modified_time) - .add_arg("create_time", self.table.create_time) - .add_arg("user_id", self.table.user_id) + .add_field_with_arg("id", self.table.id) + .add_field_with_arg("name", self.table.name) + .add_field_with_arg("description", self.table.description) + .add_field_with_arg("modified_time", self.table.modified_time) + .add_field_with_arg("create_time", self.table.create_time) + .add_field_with_arg("user_id", self.table.user_id) .build()?; Ok((sql, args, workspace)) diff --git a/backend/src/services/document/crud.rs b/backend/src/services/document/crud.rs index d96f08d59c..d003adbba2 100644 --- a/backend/src/services/document/crud.rs +++ b/backend/src/services/document/crud.rs @@ -79,7 +79,7 @@ pub async fn update_doc(pool: &PgPool, mut params: UpdateDocParams) -> Result<() let (sql, args) = SqlBuilder::update(DOC_TABLE) .add_some_arg("data", data) - .add_arg("rev_id", params.rev_id) + .add_field_with_arg("rev_id", params.rev_id) .and_where_eq("id", doc_id) .build()?; @@ -128,9 +128,9 @@ impl 
NewDocSqlBuilder { pub fn build(self) -> Result<(String, PgArguments), ServerError> { let (sql, args) = SqlBuilder::create(DOC_TABLE) - .add_arg("id", self.table.id) - .add_arg("data", self.table.data) - .add_arg("rev_id", self.table.rev_id) + .add_field_with_arg("id", self.table.id) + .add_field_with_arg("data", self.table.data) + .add_field_with_arg("rev_id", self.table.rev_id) .build()?; Ok((sql, args)) diff --git a/backend/src/services/kv_store/kv.rs b/backend/src/services/kv_store/kv.rs new file mode 100644 index 0000000000..138aa9cd6b --- /dev/null +++ b/backend/src/services/kv_store/kv.rs @@ -0,0 +1,165 @@ +use crate::{ + services::kv_store::{KVStore, KeyValue}, + util::sqlx_ext::{map_sqlx_error, SqlBuilder}, +}; +use actix_web::web::Data; +use anyhow::Context; +use backend_service::errors::ServerError; +use bytes::Bytes; +use lib_infra::future::FutureResultSend; +use sql_builder::{quote, SqlBuilder as RawSqlBuilder}; +use sqlx::{postgres::PgArguments, Error, PgPool, Postgres, Row}; + +const KV_TABLE: &str = "kv_table"; + +pub(crate) struct PostgresKV { + pub(crate) pg_pool: Data, +} + +impl KVStore for PostgresKV { + fn get(&self, key: &str) -> FutureResultSend, ServerError> { + let pg_pool = self.pg_pool.clone(); + let id = key.to_string(); + FutureResultSend::new(async move { + let mut transaction = pg_pool + .begin() + .await + .context("[KV]:Failed to acquire a Postgres connection")?; + + let (sql, args) = SqlBuilder::select(KV_TABLE) + .add_field("*") + .and_where_eq("id", &id) + .build()?; + + let result = sqlx::query_as_with::(&sql, args) + .fetch_one(&mut transaction) + .await; + + let result = match result { + Ok(val) => Ok(Some(Bytes::from(val.blob))), + Err(error) => match error { + Error::RowNotFound => Ok(None), + _ => Err(map_sqlx_error(error)), + }, + }; + + transaction + .commit() + .await + .context("[KV]:Failed to commit SQL transaction.")?; + + result + }) + } + + fn set(&self, key: &str, bytes: Bytes) -> FutureResultSend<(), ServerError> 
{ + self.batch_set(vec![KeyValue { + key: key.to_string(), + value: bytes, + }]) + } + + fn delete(&self, key: &str) -> FutureResultSend<(), ServerError> { + let pg_pool = self.pg_pool.clone(); + let id = key.to_string(); + + FutureResultSend::new(async move { + let mut transaction = pg_pool + .begin() + .await + .context("[KV]:Failed to acquire a Postgres connection")?; + + let (sql, args) = SqlBuilder::delete(KV_TABLE).and_where_eq("id", &id).build()?; + let _ = sqlx::query_with(&sql, args) + .execute(&mut transaction) + .await + .map_err(map_sqlx_error)?; + + transaction + .commit() + .await + .context("[KV]:Failed to commit SQL transaction.")?; + + Ok(()) + }) + } + + fn batch_set(&self, kvs: Vec) -> FutureResultSend<(), ServerError> { + let pg_pool = self.pg_pool.clone(); + FutureResultSend::new(async move { + let mut transaction = pg_pool + .begin() + .await + .context("[KV]:Failed to acquire a Postgres connection")?; + + let mut builder = RawSqlBuilder::insert_into(KV_TABLE); + let mut m_builder = builder.field("id").field("blob"); + for kv in kvs { + let s = match std::str::from_utf8(&kv.value) { + Ok(v) => v, + Err(e) => { + log::error!("[KV]: {}", e); + "" + }, + }; + m_builder = m_builder.values(&[quote(kv.key), quote(s)]); + } + let sql = m_builder.sql()?; + let _ = sqlx::query(&sql) + .execute(&mut transaction) + .await + .map_err(map_sqlx_error)?; + + transaction + .commit() + .await + .context("[KV]:Failed to commit SQL transaction.")?; + + Ok::<(), ServerError>(()) + }) + } + + fn batch_get(&self, keys: Vec) -> FutureResultSend, ServerError> { + let pg_pool = self.pg_pool.clone(); + FutureResultSend::new(async move { + let mut transaction = pg_pool + .begin() + .await + .context("[KV]:Failed to acquire a Postgres connection")?; + + let sql = RawSqlBuilder::select_from(KV_TABLE) + .field("id") + .field("blob") + .and_where_in_quoted("id", &keys) + .sql()?; + + let rows = sqlx::query(&sql) + .fetch_all(&mut transaction) + .await + 
.map_err(map_sqlx_error)?; + let kvs = rows + .into_iter() + .map(|row| { + let bytes: Vec = row.get("blob"); + KeyValue { + key: row.get("id"), + value: Bytes::from(bytes), + } + }) + .collect::>(); + + transaction + .commit() + .await + .context("[KV]:Failed to commit SQL transaction.")?; + + Ok::, ServerError>(kvs) + }) + } +} + +#[derive(Debug, Clone, sqlx::FromRow)] +struct KVTable { + pub(crate) id: String, + pub(crate) blob: Vec, +} diff --git a/backend/src/services/kv_store/mod.rs b/backend/src/services/kv_store/mod.rs new file mode 100644 index 0000000000..8e218e32ee --- /dev/null +++ b/backend/src/services/kv_store/mod.rs @@ -0,0 +1,21 @@ +mod kv; + +use bytes::Bytes; +pub(crate) use kv::*; + +use backend_service::errors::ServerError; +use lib_infra::future::FutureResultSend; + +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct KeyValue { + pub key: String, + pub value: Bytes, +} + +pub trait KVStore: Send + Sync { + fn get(&self, key: &str) -> FutureResultSend, ServerError>; + fn set(&self, key: &str, value: Bytes) -> FutureResultSend<(), ServerError>; + fn delete(&self, key: &str) -> FutureResultSend<(), ServerError>; + fn batch_set(&self, kvs: Vec) -> FutureResultSend<(), ServerError>; + fn batch_get(&self, keys: Vec) -> FutureResultSend, ServerError>; +} diff --git a/backend/src/services/mod.rs b/backend/src/services/mod.rs index ea70a37330..400c29b088 100644 --- a/backend/src/services/mod.rs +++ b/backend/src/services/mod.rs @@ -1,4 +1,5 @@ pub mod core; pub mod document; +pub mod kv_store; pub mod user; pub mod web_socket; diff --git a/backend/src/util/sqlx_ext/query.rs b/backend/src/util/sqlx_ext/query.rs index d9d7e1a6a3..f7af99ba24 100644 --- a/backend/src/util/sqlx_ext/query.rs +++ b/backend/src/util/sqlx_ext/query.rs @@ -52,7 +52,7 @@ impl SqlBuilder { builder } - pub fn add_arg<'a, T>(mut self, field: &str, arg: T) -> Self + pub fn add_field_with_arg<'a, T>(mut self, field: &str, arg: T) -> Self where T: 'a + Send + Encode<'a, Postgres> + 
Type, { @@ -67,7 +67,7 @@ impl SqlBuilder { T: 'a + Send + Encode<'a, Postgres> + Type, { if add { - self.add_arg(field, arg) + self.add_field_with_arg(field, arg) } else { self } @@ -78,7 +78,7 @@ impl SqlBuilder { T: 'a + Send + Encode<'a, Postgres> + Type, { if let Some(arg) = arg { - self.add_arg(field, arg) + self.add_field_with_arg(field, arg) } else { self } diff --git a/backend/tests/api/mod.rs b/backend/tests/api/mod.rs deleted file mode 100644 index d5903daf2c..0000000000 --- a/backend/tests/api/mod.rs +++ /dev/null @@ -1,3 +0,0 @@ -mod auth; -mod doc; -mod workspace; diff --git a/backend/tests/api/auth.rs b/backend/tests/api_test/auth_test.rs similarity index 100% rename from backend/tests/api/auth.rs rename to backend/tests/api_test/auth_test.rs diff --git a/backend/tests/api_test/kv_test.rs b/backend/tests/api_test/kv_test.rs new file mode 100644 index 0000000000..bab205eabd --- /dev/null +++ b/backend/tests/api_test/kv_test.rs @@ -0,0 +1,51 @@ +use crate::util::helper::spawn_server; +use backend::services::kv_store::KeyValue; +use std::str; + +#[actix_rt::test] +async fn kv_set_test() { + let server = spawn_server().await; + let kv = server.app_ctx.kv_store.clone(); + let s1 = "123".to_string(); + let key = "1"; + + let _ = kv.set(key, s1.clone().into()).await.unwrap(); + let bytes = kv.get(key).await.unwrap().unwrap(); + let s2 = str::from_utf8(&bytes).unwrap(); + assert_eq!(s1, s2); +} + +#[actix_rt::test] +async fn kv_delete_test() { + let server = spawn_server().await; + let kv = server.app_ctx.kv_store.clone(); + let s1 = "123".to_string(); + let key = "1"; + + let _ = kv.set(key, s1.clone().into()).await.unwrap(); + let _ = kv.delete(key).await.unwrap(); + assert_eq!(kv.get(key).await.unwrap(), None); +} + +#[actix_rt::test] +async fn kv_batch_set_test() { + let server = spawn_server().await; + let kv = server.app_ctx.kv_store.clone(); + let kvs = vec![ + KeyValue { + key: "1".to_string(), + value: "a".to_string().into(), + }, + KeyValue { + 
key: "2".to_string(), + value: "b".to_string().into(), + }, + ]; + kv.batch_set(kvs.clone()).await.unwrap(); + let kvs_from_db = kv + .batch_get(kvs.clone().into_iter().map(|value| value.key).collect::>()) + .await + .unwrap(); + + assert_eq!(kvs, kvs_from_db); +} diff --git a/backend/tests/api_test/mod.rs b/backend/tests/api_test/mod.rs new file mode 100644 index 0000000000..6053586a32 --- /dev/null +++ b/backend/tests/api_test/mod.rs @@ -0,0 +1,3 @@ +mod auth_test; +mod kv_test; +mod workspace_test; diff --git a/backend/tests/api/workspace.rs b/backend/tests/api_test/workspace_test.rs similarity index 100% rename from backend/tests/api/workspace.rs rename to backend/tests/api_test/workspace_test.rs diff --git a/backend/tests/api/doc.rs b/backend/tests/document_test/crud_test.rs similarity index 100% rename from backend/tests/api/doc.rs rename to backend/tests/document_test/crud_test.rs diff --git a/backend/tests/document/edit_script.rs b/backend/tests/document_test/edit_script.rs similarity index 100% rename from backend/tests/document/edit_script.rs rename to backend/tests/document_test/edit_script.rs diff --git a/backend/tests/document/edit_test.rs b/backend/tests/document_test/edit_test.rs similarity index 99% rename from backend/tests/document/edit_test.rs rename to backend/tests/document_test/edit_test.rs index 29efd7c912..f7344fe6b3 100644 --- a/backend/tests/document/edit_test.rs +++ b/backend/tests/document_test/edit_test.rs @@ -70,7 +70,7 @@ async fn delta_sync_while_editing_with_attribute() { // │ops: ["123", "456"] rev: 2│ │ │ // └──────────────────────────┘ │ │ // │ │ -// ◀── http request ─┤ Open document +// ◀── http request ─┤ Open document // │ │ // │ │ ┌──────────────────────────┐ // ├──http response──┼─▶│ops: ["123", "456"] rev: 2│ @@ -115,7 +115,7 @@ async fn delta_sync_with_server_push_delta() { // └─────────┘ └─────────┘ // │ │ // │ │ -// ◀── http request ─┤ Open document +// ◀── http request ─┤ Open document // │ │ // │ │ 
┌───────────────┐ // ├──http response──┼─▶│ops: [] rev: 0 │ @@ -165,7 +165,7 @@ async fn delta_sync_while_local_rev_less_than_server_rev() { // ┌───────────────────┐ │ │ // │ops: ["123"] rev: 1│ │ │ // └───────────────────┘ │ │ -// ◀── http request ─┤ Open document +// ◀── http request ─┤ Open document // │ │ // │ │ ┌───────────────┐ // ├──http response──┼──▶│ops: [123] rev:│ diff --git a/backend/tests/document/mod.rs b/backend/tests/document_test/mod.rs similarity index 71% rename from backend/tests/document/mod.rs rename to backend/tests/document_test/mod.rs index 14cb6254f7..3a785e530b 100644 --- a/backend/tests/document/mod.rs +++ b/backend/tests/document_test/mod.rs @@ -1,2 +1,3 @@ // mod edit_script; // mod edit_test; +mod crud_test; diff --git a/backend/tests/main.rs b/backend/tests/main.rs index ad0fc2dcea..eb6c8a9e40 100644 --- a/backend/tests/main.rs +++ b/backend/tests/main.rs @@ -1,3 +1,3 @@ -mod api; -mod document; +mod api_test; +mod document_test; pub mod util; diff --git a/backend/tests/util/helper.rs b/backend/tests/util/helper.rs index 3995bf04b2..c02afea9a8 100644 --- a/backend/tests/util/helper.rs +++ b/backend/tests/util/helper.rs @@ -150,7 +150,7 @@ impl TestUserServer { } pub async fn read_doc(&self, params: DocIdentifier) -> Option { - let url = format!("{}/api/document", self.http_addr()); + let url = format!("{}/api/document", self.http_addr()); let doc = read_doc_request(self.user_token(), params, &url).await.unwrap(); doc } diff --git a/frontend/scripts/flowy-tool/src/proto/ast.rs b/frontend/scripts/flowy-tool/src/proto/ast.rs index 92d94d6f43..e03fd9a3a2 100644 --- a/frontend/scripts/flowy-tool/src/proto/ast.rs +++ b/frontend/scripts/flowy-tool/src/proto/ast.rs @@ -45,8 +45,8 @@ fn parse_files_protobuf(proto_crate_path: &str, proto_output_dir: &str) -> Vec