feat: run rustfmt with custom defined fmt configuration (#1848)

* chore: update rustfmt

* chore: apply rustfmt format
Nathan.fooo 2023-02-13 09:29:49 +08:00, committed by GitHub
parent e2496e734c
commit 6bb1c4e89c
459 changed files with 50554 additions and 46600 deletions
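Note: the diff below does not record the formatting command itself. Assuming the standard cargo tooling, running `cargo fmt --all` from the workspace root would pick up the new `rustfmt.toml` (the first file below) and reformat every crate, which is what the large addition/deletion counts reflect.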

View file

@@ -1,3 +1,3 @@
fn main() {
  tauri_build::build()
}

View file

@@ -0,0 +1,12 @@
# https://rust-lang.github.io/rustfmt/?version=master&search=
max_width = 100
tab_spaces = 2
newline_style = "Auto"
match_block_trailing_comma = true
use_field_init_shorthand = true
use_try_shorthand = true
reorder_imports = true
reorder_modules = true
remove_nested_parens = true
merge_derives = true
edition = "2021"

View file

@@ -1,10 +1,10 @@
use flowy_core::{get_client_server_configuration, AppFlowyCore, AppFlowyCoreConfig};

pub fn init_flowy_core() -> AppFlowyCore {
  let data_path = tauri::api::path::data_dir().unwrap();
  let path = format!("{}/AppFlowy", data_path.to_str().unwrap());
  let server_config = get_client_server_configuration().unwrap();
  let config = AppFlowyCoreConfig::new(&path, "AppFlowy".to_string(), server_config)
    .log_filter("trace", vec!["appflowy_tauri".to_string()]);
  AppFlowyCore::new(config)
}

View file

@@ -1,6 +1,6 @@
#![cfg_attr(
  all(not(debug_assertions), target_os = "windows"),
  windows_subsystem = "windows"
)]

mod init;
@@ -14,28 +14,28 @@ use request::*;
use tauri::Manager;

fn main() {
  let flowy_core = init_flowy_core();
  tauri::Builder::default()
    .invoke_handler(tauri::generate_handler![invoke_request])
    .manage(flowy_core)
    .on_window_event(|_window_event| {})
    .on_menu_event(|_menu| {})
    .on_page_load(|window, _payload| {
      let app_handler = window.app_handle();
      register_notification_sender(TSNotificationSender::new(app_handler.clone()));
      // tauri::async_runtime::spawn(async move {});
      window.listen_global(AF_EVENT, move |event| {
        on_event(app_handler.clone(), event);
      });
    })
    .setup(|app| {
      #[cfg(debug_assertions)]
      {
        let window = app.get_window("main").unwrap();
        window.open_devtools();
      }
      Ok(())
    })
    .run(tauri::generate_context!())
    .expect("error while running tauri application");
}

View file

@@ -12,23 +12,24 @@ pub fn on_event(app_handler: AppHandle<Wry>, event: Event) {}

#[allow(dead_code)]
pub fn send_notification<P: Serialize + Clone>(app_handler: AppHandle<Wry>, payload: P) {
  app_handler.emit_all(AF_NOTIFICATION, payload).unwrap();
}

pub struct TSNotificationSender {
  handler: AppHandle<Wry>,
}

impl TSNotificationSender {
  pub fn new(handler: AppHandle<Wry>) -> Self {
    Self { handler }
  }
}

impl NotificationSender for TSNotificationSender {
  fn send_subject(&self, subject: SubscribeObject) -> Result<(), String> {
    self
      .handler
      .emit_all(AF_NOTIFICATION, subject)
      .map_err(|e| format!("{:?}", e))
  }
}

View file

@@ -1,46 +1,46 @@
use flowy_core::AppFlowyCore;
use lib_dispatch::prelude::{
  AFPluginDispatcher, AFPluginEventResponse, AFPluginRequest, StatusCode,
};
use tauri::{AppHandle, Manager, State, Wry};

#[derive(Clone, Debug, serde::Deserialize)]
pub struct AFTauriRequest {
  ty: String,
  payload: Vec<u8>,
}

impl std::convert::From<AFTauriRequest> for AFPluginRequest {
  fn from(event: AFTauriRequest) -> Self {
    AFPluginRequest::new(event.ty).payload(event.payload)
  }
}

#[derive(Clone, serde::Serialize)]
pub struct AFTauriResponse {
  code: StatusCode,
  payload: Vec<u8>,
}

impl std::convert::From<AFPluginEventResponse> for AFTauriResponse {
  fn from(response: AFPluginEventResponse) -> Self {
    Self {
      code: response.status_code,
      payload: response.payload.to_vec(),
    }
  }
}

// Learn more about Tauri commands at https://tauri.app/v1/guides/features/command
#[tracing::instrument(level = "trace", skip(app_handler))]
#[tauri::command]
pub async fn invoke_request(
  request: AFTauriRequest,
  app_handler: AppHandle<Wry>,
) -> AFTauriResponse {
  let request: AFPluginRequest = request.into();
  let state: State<AppFlowyCore> = app_handler.state();
  let dispatcher = state.inner().dispatcher();
  let response = AFPluginDispatcher::async_send(dispatcher, request).await;
  response.into()
}

View file

@@ -1,3 +1,3 @@
fn main() {
  flowy_codegen::protobuf_file::gen(env!("CARGO_PKG_NAME"));
}

View file

@@ -2,25 +2,25 @@ use byteorder::{BigEndian, ByteOrder};
use std::mem::forget;

pub fn forget_rust(buf: Vec<u8>) -> *const u8 {
  let ptr = buf.as_ptr();
  forget(buf);
  ptr
}

#[allow(unused_attributes)]
#[allow(dead_code)]
pub fn reclaim_rust(ptr: *mut u8, length: u32) {
  unsafe {
    let len: usize = length as usize;
    Vec::from_raw_parts(ptr, len, len);
  }
}

pub fn extend_front_four_bytes_into_bytes(bytes: &[u8]) -> Vec<u8> {
  let mut output = Vec::with_capacity(bytes.len() + 4);
  let mut marker_bytes = [0; 4];
  BigEndian::write_u32(&mut marker_bytes, bytes.len() as u32);
  output.extend_from_slice(&marker_bytes);
  output.extend_from_slice(bytes);
  output
}

View file

@@ -7,8 +7,8 @@ mod util;
use crate::notification::DartNotificationSender;
use crate::{
  c::{extend_front_four_bytes_into_bytes, forget_rust},
  model::{FFIRequest, FFIResponse},
};
use flowy_core::get_client_server_configuration;
use flowy_core::*;
@@ -20,69 +20,74 @@ use parking_lot::RwLock;
use std::{ffi::CStr, os::raw::c_char};

lazy_static! {
  static ref APPFLOWY_CORE: RwLock<Option<AppFlowyCore>> = RwLock::new(None);
}

#[no_mangle]
pub extern "C" fn init_sdk(path: *mut c_char) -> i64 {
  let c_str: &CStr = unsafe { CStr::from_ptr(path) };
  let path: &str = c_str.to_str().unwrap();

  let server_config = get_client_server_configuration().unwrap();
  let log_crates = vec!["flowy-ffi".to_string()];
  let config = AppFlowyCoreConfig::new(path, "appflowy".to_string(), server_config)
    .log_filter("info", log_crates);
  *APPFLOWY_CORE.write() = Some(AppFlowyCore::new(config));

  0
}

#[no_mangle]
pub extern "C" fn async_event(port: i64, input: *const u8, len: usize) {
  let request: AFPluginRequest = FFIRequest::from_u8_pointer(input, len).into();
  log::trace!(
    "[FFI]: {} Async Event: {:?} with {} port",
    &request.id,
    &request.event,
    port
  );

  let dispatcher = match APPFLOWY_CORE.read().as_ref() {
    None => {
      log::error!("sdk not init yet.");
      return;
    },
    Some(e) => e.event_dispatcher.clone(),
  };
  let _ = AFPluginDispatcher::async_send_with_callback(
    dispatcher,
    request,
    move |resp: AFPluginEventResponse| {
      log::trace!("[FFI]: Post data to dart through {} port", port);
      Box::pin(post_to_flutter(resp, port))
    },
  );
}

#[no_mangle]
pub extern "C" fn sync_event(input: *const u8, len: usize) -> *const u8 {
  let request: AFPluginRequest = FFIRequest::from_u8_pointer(input, len).into();
  log::trace!("[FFI]: {} Sync Event: {:?}", &request.id, &request.event,);

  let dispatcher = match APPFLOWY_CORE.read().as_ref() {
    None => {
      log::error!("sdk not init yet.");
      return forget_rust(Vec::default());
    },
    Some(e) => e.event_dispatcher.clone(),
  };
  let _response = AFPluginDispatcher::sync_send(dispatcher, request);

  // FFIResponse { }
  let response_bytes = vec![];
  let result = extend_front_four_bytes_into_bytes(&response_bytes);
  forget_rust(result)
}

#[no_mangle]
pub extern "C" fn set_stream_port(port: i64) -> i32 {
  register_notification_sender(DartNotificationSender::new(port));
  0
}

#[inline(never)]
@@ -91,39 +96,39 @@ pub extern "C" fn link_me_please() {}

#[inline(always)]
async fn post_to_flutter(response: AFPluginEventResponse, port: i64) {
  let isolate = allo_isolate::Isolate::new(port);
  match isolate
    .catch_unwind(async {
      let ffi_resp = FFIResponse::from(response);
      ffi_resp.into_bytes().unwrap().to_vec()
    })
    .await
  {
    Ok(_success) => {
      log::trace!("[FFI]: Post data to dart success");
    },
    Err(e) => {
      if let Some(msg) = e.downcast_ref::<&str>() {
        log::error!("[FFI]: {:?}", msg);
      } else {
        log::error!("[FFI]: allo_isolate post panic");
      }
    },
  }
}

#[no_mangle]
pub extern "C" fn backend_log(level: i64, data: *const c_char) {
  let c_str = unsafe { CStr::from_ptr(data) };
  let log_str = c_str.to_str().unwrap();

  // Don't change the mapping relation between number and level
  match level {
    0 => tracing::info!("{}", log_str),
    1 => tracing::debug!("{}", log_str),
    2 => tracing::trace!("{}", log_str),
    3 => tracing::warn!("{}", log_str),
    4 => tracing::error!("{}", log_str),
    _ => (),
  }
}

View file

@@ -5,24 +5,24 @@ use std::convert::TryFrom;

#[derive(Default, ProtoBuf)]
pub struct FFIRequest {
  #[pb(index = 1)]
  pub(crate) event: String,

  #[pb(index = 2)]
  pub(crate) payload: Vec<u8>,
}

impl FFIRequest {
  pub fn from_u8_pointer(pointer: *const u8, len: usize) -> Self {
    let buffer = unsafe { std::slice::from_raw_parts(pointer, len) }.to_vec();
    let bytes = Bytes::from(buffer);
    let request: FFIRequest = FFIRequest::try_from(bytes).unwrap();
    request
  }
}

impl std::convert::From<FFIRequest> for AFPluginRequest {
  fn from(ffi_request: FFIRequest) -> Self {
    AFPluginRequest::new(ffi_request.event).payload(ffi_request.payload)
  }
}

View file

@@ -3,43 +3,43 @@ use lib_dispatch::prelude::{AFPluginEventResponse, Payload, StatusCode};

#[derive(ProtoBuf_Enum, Clone, Copy)]
pub enum FFIStatusCode {
  Ok = 0,
  Err = 1,
  Internal = 2,
}

impl std::default::Default for FFIStatusCode {
  fn default() -> FFIStatusCode {
    FFIStatusCode::Ok
  }
}

#[derive(ProtoBuf, Default)]
pub struct FFIResponse {
  #[pb(index = 1)]
  payload: Vec<u8>,

  #[pb(index = 2)]
  code: FFIStatusCode,
}

impl std::convert::From<AFPluginEventResponse> for FFIResponse {
  fn from(resp: AFPluginEventResponse) -> Self {
    let payload = match resp.payload {
      Payload::Bytes(bytes) => bytes.to_vec(),
      Payload::None => vec![],
    };

    let code = match resp.status_code {
      StatusCode::Ok => FFIStatusCode::Ok,
      StatusCode::Err => FFIStatusCode::Err,
    };

    // let msg = match resp.error {
    //   None => "".to_owned(),
    //   Some(e) => format!("{:?}", e),
    // };

    FFIResponse { payload, code }
  }
}

View file

@@ -5,21 +5,21 @@ use flowy_notification::NotificationSender;
use std::convert::TryInto;

pub struct DartNotificationSender {
  isolate: Isolate,
}

impl DartNotificationSender {
  pub fn new(port: i64) -> Self {
    Self {
      isolate: Isolate::new(port),
    }
  }
}

impl NotificationSender for DartNotificationSender {
  fn send_subject(&self, subject: SubscribeObject) -> Result<(), String> {
    let bytes: Bytes = subject.try_into().unwrap();
    self.isolate.post(bytes.to_vec());
    Ok(())
  }
}

View file

@@ -4,277 +4,296 @@
use crate::event_attrs::EventEnumAttrs;
use crate::node_attrs::NodeStructAttrs;
use crate::{
  is_recognizable_field, ty_ext::*, ASTResult, PBAttrsContainer, PBStructAttrs, NODE_TYPE,
};
use proc_macro2::Ident;
use syn::Meta::NameValue;
use syn::{self, punctuated::Punctuated};

pub struct ASTContainer<'a> {
  /// The struct or enum name (without generics).
  pub ident: syn::Ident,

  pub node_type: Option<String>,
  /// Attributes on the structure.
  pub pb_attrs: PBAttrsContainer,
  /// The contents of the struct or enum.
  pub data: ASTData<'a>,
}

impl<'a> ASTContainer<'a> {
  pub fn from_ast(ast_result: &ASTResult, ast: &'a syn::DeriveInput) -> Option<ASTContainer<'a>> {
    let attrs = PBAttrsContainer::from_ast(ast_result, ast);
    // syn::DeriveInput
    //  1. syn::DataUnion
    //  2. syn::DataStruct
    //  3. syn::DataEnum
    let data = match &ast.data {
      syn::Data::Struct(data) => {
        // https://docs.rs/syn/1.0.48/syn/struct.DataStruct.html
        let (style, fields) = struct_from_ast(ast_result, &data.fields);
        ASTData::Struct(style, fields)
      },
      syn::Data::Union(_) => {
        ast_result.error_spanned_by(ast, "Does not support derive for unions");
        return None;
      },
      syn::Data::Enum(data) => {
        // https://docs.rs/syn/1.0.48/syn/struct.DataEnum.html
        ASTData::Enum(enum_from_ast(
          ast_result,
          &ast.ident,
          &data.variants,
          &ast.attrs,
        ))
      },
    };

    let ident = ast.ident.clone();
    let node_type = get_node_type(ast_result, &ident, &ast.attrs);
    let item = ASTContainer {
      ident,
      pb_attrs: attrs,
      node_type,
      data,
    };
    Some(item)
  }
}

pub enum ASTData<'a> {
  Struct(ASTStyle, Vec<ASTField<'a>>),
  Enum(Vec<ASTEnumVariant<'a>>),
}

impl<'a> ASTData<'a> {
  pub fn all_fields(&'a self) -> Box<dyn Iterator<Item = &'a ASTField<'a>> + 'a> {
    match self {
      ASTData::Enum(variants) => {
        Box::new(variants.iter().flat_map(|variant| variant.fields.iter()))
      },
      ASTData::Struct(_, fields) => Box::new(fields.iter()),
    }
  }

  pub fn all_variants(&'a self) -> Box<dyn Iterator<Item = &'a EventEnumAttrs> + 'a> {
    match self {
      ASTData::Enum(variants) => {
        let iter = variants.iter().map(|variant| &variant.attrs);
        Box::new(iter)
      },
      ASTData::Struct(_, fields) => {
        let iter = fields.iter().flat_map(|_| None);
        Box::new(iter)
      },
    }
  }

  pub fn all_idents(&'a self) -> Box<dyn Iterator<Item = &'a syn::Ident> + 'a> {
    match self {
      ASTData::Enum(variants) => Box::new(variants.iter().map(|v| &v.ident)),
      ASTData::Struct(_, fields) => {
        let iter = fields.iter().flat_map(|f| match &f.member {
          syn::Member::Named(ident) => Some(ident),
          _ => None,
        });
        Box::new(iter)
      },
    }
  }
}

/// A variant of an enum.
pub struct ASTEnumVariant<'a> {
  pub ident: syn::Ident,
  pub attrs: EventEnumAttrs,
  pub style: ASTStyle,
  pub fields: Vec<ASTField<'a>>,
  pub original: &'a syn::Variant,
}

impl<'a> ASTEnumVariant<'a> {
  pub fn name(&self) -> String {
    self.ident.to_string()
  }
}

pub enum BracketCategory {
  Other,
  Opt,
  Vec,
  Map((String, String)),
}

pub struct ASTField<'a> {
  pub member: syn::Member,
  pub pb_attrs: PBStructAttrs,
  pub node_attrs: NodeStructAttrs,
  pub ty: &'a syn::Type,
  pub original: &'a syn::Field,
  // If the field is Vec<String>, then the bracket_ty will be Vec
  pub bracket_ty: Option<syn::Ident>,
  // If the field is Vec<String>, then the bracket_inner_ty will be String
  pub bracket_inner_ty: Option<syn::Ident>,
  pub bracket_category: Option<BracketCategory>,
}

impl<'a> ASTField<'a> {
  pub fn new(cx: &ASTResult, field: &'a syn::Field, index: usize) -> Result<Self, String> {
    let mut bracket_inner_ty = None;
    let mut bracket_ty = None;
    let mut bracket_category = Some(BracketCategory::Other);
    match parse_ty(cx, &field.ty) {
      Ok(Some(inner)) => {
        match inner.primitive_ty {
          PrimitiveTy::Map(map_info) => {
            bracket_category = Some(BracketCategory::Map((map_info.key.clone(), map_info.value)))
          },
          PrimitiveTy::Vec => {
            bracket_category = Some(BracketCategory::Vec);
          },
          PrimitiveTy::Opt => {
            bracket_category = Some(BracketCategory::Opt);
          },
          PrimitiveTy::Other => {
            bracket_category = Some(BracketCategory::Other);
          },
        }

        match *inner.bracket_ty_info {
          Some(bracketed_inner_ty) => {
            bracket_inner_ty = Some(bracketed_inner_ty.ident.clone());
            bracket_ty = Some(inner.ident.clone());
          },
          None => {
            bracket_ty = Some(inner.ident.clone());
          },
        }
      },
      Ok(None) => {
        let msg = format!("Fail to get the ty inner type: {:?}", field);
        return Err(msg);
      },
      Err(e) => {
        eprintln!("ASTField parser failed: {:?} with error: {}", field, e);
        return Err(e);
      },
    }

    Ok(ASTField {
      member: match &field.ident {
        Some(ident) => syn::Member::Named(ident.clone()),
        None => syn::Member::Unnamed(index.into()),
      },
      pb_attrs: PBStructAttrs::from_ast(cx, index, field),
      node_attrs: NodeStructAttrs::from_ast(cx, index, field),
      ty: &field.ty,
      original: field,
      bracket_ty,
      bracket_inner_ty,
      bracket_category,
    })
  }

  pub fn ty_as_str(&self) -> String {
    match self.bracket_inner_ty {
      Some(ref ty) => ty.to_string(),
      None => self.bracket_ty.as_ref().unwrap().clone().to_string(),
    }
  }

  pub fn name(&self) -> Option<syn::Ident> {
    if let syn::Member::Named(ident) = &self.member {
      Some(ident.clone())
    } else {
      None
    }
  }
}

#[derive(Copy, Clone)]
pub enum ASTStyle {
  Struct,
  /// Many unnamed fields.
  Tuple,
  /// One unnamed field.
  NewType,
  /// No fields.
  Unit,
}

pub fn struct_from_ast<'a>(
  cx: &ASTResult,
  fields: &'a syn::Fields,
) -> (ASTStyle, Vec<ASTField<'a>>) {
  match fields {
    syn::Fields::Named(fields) => (ASTStyle::Struct, fields_from_ast(cx, &fields.named)),
    syn::Fields::Unnamed(fields) if fields.unnamed.len() == 1 => {
      (ASTStyle::NewType, fields_from_ast(cx, &fields.unnamed))
    },
    syn::Fields::Unnamed(fields) => (ASTStyle::Tuple, fields_from_ast(cx, &fields.unnamed)),
    syn::Fields::Unit => (ASTStyle::Unit, Vec::new()),
  }
}

pub fn enum_from_ast<'a>(
  cx: &ASTResult,
  ident: &syn::Ident,
  variants: &'a Punctuated<syn::Variant, Token![,]>,
  enum_attrs: &[syn::Attribute],
) -> Vec<ASTEnumVariant<'a>> {
  variants
    .iter()
    .flat_map(|variant| {
      let attrs = EventEnumAttrs::from_ast(cx, ident, variant, enum_attrs);
      let (style, fields) = struct_from_ast(cx, &variant.fields);
      Some(ASTEnumVariant {
        ident: variant.ident.clone(),
        attrs,
        style,
        fields,
        original: variant,
      })
    })
    .collect()
}

fn fields_from_ast<'a>(
  cx: &ASTResult,
  fields: &'a Punctuated<syn::Field, Token![,]>,
) -> Vec<ASTField<'a>> {
  fields
    .iter()
    .enumerate()
    .flat_map(|(index, field)| {
      if is_recognizable_field(field) {
        ASTField::new(cx, field, index).ok()
      } else {
        None
      }
    })
    .collect()
}

fn get_node_type(
  ast_result: &ASTResult,
  struct_name: &Ident,
  attrs: &[syn::Attribute],
) -> Option<String> {
  let mut node_type = None;
  attrs
    .iter()
    .filter(|attr| attr.path.segments.iter().any(|s| s.ident == NODE_TYPE))
    .for_each(|attr| {
      if let Ok(NameValue(named_value)) = attr.parse_meta() {
        if node_type.is_some() {
          ast_result.error_spanned_by(struct_name, "Duplicate node type definition");
        }
        if let syn::Lit::Str(s) = named_value.lit {
          node_type = Some(s.value());
        }
      }
    });
  node_type
}

View file

@@ -3,41 +3,42 @@ use std::{cell::RefCell, fmt::Display, thread};

#[derive(Default)]
pub struct ASTResult {
  errors: RefCell<Option<Vec<syn::Error>>>,
}

impl ASTResult {
  pub fn new() -> Self {
    ASTResult {
      errors: RefCell::new(Some(Vec::new())),
    }
  }

  pub fn error_spanned_by<A: ToTokens, T: Display>(&self, obj: A, msg: T) {
    self
      .errors
      .borrow_mut()
      .as_mut()
      .unwrap()
      .push(syn::Error::new_spanned(obj.into_token_stream(), msg));
  }

  pub fn syn_error(&self, err: syn::Error) {
    self.errors.borrow_mut().as_mut().unwrap().push(err);
  }

  pub fn check(self) -> Result<(), Vec<syn::Error>> {
    let errors = self.errors.borrow_mut().take().unwrap();
    match errors.len() {
      0 => Ok(()),
      _ => Err(errors),
    }
  }
}

impl Drop for ASTResult {
  fn drop(&mut self) {
    if !thread::panicking() && self.errors.borrow().is_some() {
      panic!("forgot to check for errors");
    }
  }
}

View file

@@ -1,145 +1,150 @@
use crate::{get_event_meta_items, parse_lit_str, symbol::*, ASTResult};
use syn::{
  self,
  Meta::{NameValue, Path},
  NestedMeta::{Lit, Meta},
};

#[derive(Debug, Clone)]
pub struct EventAttrs {
  input: Option<syn::Path>,
  output: Option<syn::Path>,
  error_ty: Option<String>,
  pub ignore: bool,
}

#[derive(Debug, Clone)]
pub struct EventEnumAttrs {
  pub enum_name: String,
  pub enum_item_name: String,
  pub value: String,
  pub event_attrs: EventAttrs,
}

impl EventEnumAttrs {
  pub fn from_ast(
    ast_result: &ASTResult,
    ident: &syn::Ident,
    variant: &syn::Variant,
    enum_attrs: &[syn::Attribute],
  ) -> Self {
    let enum_item_name = variant.ident.to_string();
    let enum_name = ident.to_string();
    let mut value = String::new();
    if variant.discriminant.is_some() {
      if let syn::Expr::Lit(ref expr_list) = variant.discriminant.as_ref().unwrap().1 {
        let lit_int = if let syn::Lit::Int(ref int_value) = expr_list.lit {
          int_value
        } else {
          unimplemented!()
        };
        value = lit_int.base10_digits().to_string();
      }
    }
    let event_attrs = get_event_attrs_from(ast_result, &variant.attrs, enum_attrs);
    EventEnumAttrs {
      enum_name,
      enum_item_name,
      value,
      event_attrs,
    }
  }

  pub fn event_input(&self) -> Option<syn::Path> {
    self.event_attrs.input.clone()
  }

  pub fn event_output(&self) -> Option<syn::Path> {
    self.event_attrs.output.clone()
  }

  pub fn event_error(&self) -> String {
    self.event_attrs.error_ty.as_ref().unwrap().clone()
  }
}

fn get_event_attrs_from(
  ast_result: &ASTResult,
  variant_attrs: &[syn::Attribute],
  enum_attrs: &[syn::Attribute],
) -> EventAttrs {
  let mut event_attrs = EventAttrs {
    input: None,
    output: None,
    error_ty: None,
    ignore: false,
  };

  enum_attrs
    .iter()
    .filter(|attr| attr.path.segments.iter().any(|s| s.ident == EVENT_ERR))
    .for_each(|attr| {
      if let Ok(NameValue(named_value)) = attr.parse_meta() {
        if let syn::Lit::Str(s) = named_value.lit {
          event_attrs.error_ty = Some(s.value());
        } else {
          eprintln!("{} should not be empty", EVENT_ERR);
        }
      } else {
        eprintln!("❌ Can not find any {} on attr: {:#?}", EVENT_ERR, attr);
      }
    });

  let mut extract_event_attr = |attr: &syn::Attribute, meta_item: &syn::NestedMeta| match &meta_item
  {
    Meta(NameValue(name_value)) => {
      if name_value.path == EVENT_INPUT {
        if let syn::Lit::Str(s) = &name_value.lit {
          let input_type = parse_lit_str(s)
            .map_err(|_| {
              ast_result.error_spanned_by(
                s,
                format!("failed to parse request deserializer {:?}", s.value()),
              )
            })
            .unwrap();
          event_attrs.input = Some(input_type);
        }
      }

      if name_value.path == EVENT_OUTPUT {
        if let syn::Lit::Str(s) = &name_value.lit {
          let output_type = parse_lit_str(s)
            .map_err(|_| {
              ast_result.error_spanned_by(
                s,
                format!("failed to parse response deserializer {:?}", s.value()),
              )
            })
            .unwrap();
          event_attrs.output = Some(output_type);
        }
      }
    },
    Meta(Path(word)) => {
      if word == EVENT_IGNORE && attr.path == EVENT {
        event_attrs.ignore = true;
      }
    },
    Lit(s) => ast_result.error_spanned_by(s, "unexpected attribute"),
    _ => ast_result.error_spanned_by(meta_item, "unexpected attribute"),
  };

  let attr_meta_items_info = variant_attrs
    .iter()
    .flat_map(|attr| match get_event_meta_items(ast_result, attr) {
      Ok(items) => Some((attr, items)),
      Err(_) => None,
    })
    .collect::<Vec<(&syn::Attribute, Vec<syn::NestedMeta>)>>();

  for (attr, nested_metas) in attr_meta_items_info {
    nested_metas
      .iter()
      .for_each(|meta_item| extract_event_attr(attr, meta_item))
  }

  // eprintln!("😁{:#?}", event_attrs);
  event_attrs
}

View file

@@ -1,99 +1,106 @@
use crate::{get_node_meta_items, parse_lit_into_expr_path, symbol::*, ASTAttr, ASTResult};
use quote::ToTokens;
use syn::{
  self, LitStr,
  Meta::NameValue,
  NestedMeta::{Lit, Meta},
};

pub struct NodeStructAttrs {
  pub rename: Option<LitStr>,
  pub has_child: bool,
  pub child_name: Option<LitStr>,
  pub child_index: Option<syn::LitInt>,
  pub get_node_value_with: Option<syn::ExprPath>,
  pub set_node_value_with: Option<syn::ExprPath>,
  pub with_children: Option<syn::ExprPath>,
}

impl NodeStructAttrs {
  /// Extract out the `#[node(...)]` attributes from a struct field.
  pub fn from_ast(ast_result: &ASTResult, _index: usize, field: &syn::Field) -> Self {
    let mut rename = ASTAttr::none(ast_result, RENAME_NODE);
    let mut child_name = ASTAttr::none(ast_result, CHILD_NODE_NAME);
    let mut child_index = ASTAttr::none(ast_result, CHILD_NODE_INDEX);
    let mut get_node_value_with = ASTAttr::none(ast_result, GET_NODE_VALUE_WITH);
    let mut set_node_value_with = ASTAttr::none(ast_result, SET_NODE_VALUE_WITH);
    let mut with_children = ASTAttr::none(ast_result, WITH_CHILDREN);

    for meta_item in field
      .attrs
      .iter()
      .flat_map(|attr| get_node_meta_items(ast_result, attr))
      .flatten()
    {
      match &meta_item {
        // Parse '#[node(rename = x)]'
        Meta(NameValue(m)) if m.path == RENAME_NODE => {
          if let syn::Lit::Str(lit) = &m.lit {
            rename.set(&m.path, lit.clone());
          }
        },
        // Parse '#[node(child_name = x)]'
        Meta(NameValue(m)) if m.path == CHILD_NODE_NAME => {
          if let syn::Lit::Str(lit) = &m.lit {
            child_name.set(&m.path, lit.clone());
          }
        },
        // Parse '#[node(child_index = x)]'
        Meta(NameValue(m)) if m.path == CHILD_NODE_INDEX => {
          if let syn::Lit::Int(lit) = &m.lit {
            child_index.set(&m.path, lit.clone());
          }
        },
        // Parse `#[node(get_node_value_with = "...")]`
        Meta(NameValue(m)) if m.path == GET_NODE_VALUE_WITH => {
          if let Ok(path) = parse_lit_into_expr_path(ast_result, GET_NODE_VALUE_WITH, &m.lit) {
            get_node_value_with.set(&m.path, path);
          }
        },
        // Parse `#[node(set_node_value_with = "...")]`
        Meta(NameValue(m)) if m.path == SET_NODE_VALUE_WITH => {
          if let Ok(path) = parse_lit_into_expr_path(ast_result, SET_NODE_VALUE_WITH, &m.lit) {
            set_node_value_with.set(&m.path, path);
          }
        },
        // Parse `#[node(with_children = "...")]`
        Meta(NameValue(m)) if m.path == WITH_CHILDREN => {
          if let Ok(path) = parse_lit_into_expr_path(ast_result, WITH_CHILDREN, &m.lit) {
            with_children.set(&m.path, path);
          }
        },
        Meta(meta_item) => {
          let path = meta_item
            .path()
            .into_token_stream()
            .to_string()
            .replace(' ', "");
          ast_result.error_spanned_by(
            meta_item.path(),
            format!("unknown node field attribute `{}`", path),
          );
        },
        Lit(lit) => {
          ast_result.error_spanned_by(lit, "unexpected literal in field attribute");
        },
      }
    }

    let child_name = child_name.get();
    NodeStructAttrs {
      rename: rename.get(),
      child_index: child_index.get(),
      has_child: child_name.is_some(),
      child_name,
      get_node_value_with: get_node_value_with.get(),
      set_node_value_with: set_node_value_with.get(),
      with_children: with_children.get(),
    }
  }
}

View file

@@ -4,441 +4,486 @@ use crate::{symbol::*, ASTResult};
use proc_macro2::{Group, Span, TokenStream, TokenTree};
use quote::ToTokens;
use syn::{
  self,
  parse::{self, Parse},
  Meta::{List, NameValue, Path},
  NestedMeta::{Lit, Meta},
};

#[allow(dead_code)]
pub struct PBAttrsContainer {
  name: String,
  pb_struct_type: Option<syn::Type>,
  pb_enum_type: Option<syn::Type>,
}

impl PBAttrsContainer {
  /// Extract out the `#[pb(...)]` attributes from an item.
  pub fn from_ast(ast_result: &ASTResult, item: &syn::DeriveInput) -> Self {
    let mut pb_struct_type = ASTAttr::none(ast_result, PB_STRUCT);
    let mut pb_enum_type = ASTAttr::none(ast_result, PB_ENUM);
    for meta_item in item
      .attrs
      .iter()
      .flat_map(|attr| get_pb_meta_items(ast_result, attr))
      .flatten()
    {
      match &meta_item {
        // Parse `#[pb(struct = "Type")]
        Meta(NameValue(m)) if m.path == PB_STRUCT => {
          if let Ok(into_ty) = parse_lit_into_ty(ast_result, PB_STRUCT, &m.lit) {
            pb_struct_type.set_opt(&m.path, Some(into_ty));
          }
        },
        // Parse `#[pb(enum = "Type")]
        Meta(NameValue(m)) if m.path == PB_ENUM => {
          if let Ok(into_ty) = parse_lit_into_ty(ast_result, PB_ENUM, &m.lit) {
            pb_enum_type.set_opt(&m.path, Some(into_ty));
          }
        },
        Meta(meta_item) => {
          let path = meta_item
            .path()
            .into_token_stream()
            .to_string()
            .replace(' ', "");
          ast_result.error_spanned_by(
            meta_item.path(),
            format!("unknown container attribute `{}`", path),
          );
        },
        Lit(lit) => {
          ast_result.error_spanned_by(lit, "unexpected literal in container attribute");
        },
      }
    }

    match &item.data {
      syn::Data::Struct(_) => {
        pb_struct_type.set_if_none(default_pb_type(&ast_result, &item.ident));
      },
      syn::Data::Enum(_) => {
        pb_enum_type.set_if_none(default_pb_type(&ast_result, &item.ident));
      },
      _ => {},
    }

    PBAttrsContainer {
      name: item.ident.to_string(),
      pb_struct_type: pb_struct_type.get(),
      pb_enum_type: pb_enum_type.get(),
    }
  }

  pub fn pb_struct_type(&self) -> Option<&syn::Type> {
    self.pb_struct_type.as_ref()
  }

  pub fn pb_enum_type(&self) -> Option<&syn::Type> {
    self.pb_enum_type.as_ref()
  }
}

pub struct ASTAttr<'c, T> {
  ast_result: &'c ASTResult,
  name: Symbol,
  tokens: TokenStream,
  value: Option<T>,
}

impl<'c, T> ASTAttr<'c, T> {
  pub(crate) fn none(ast_result: &'c ASTResult, name: Symbol) -> Self {
    ASTAttr {
      ast_result,
      name,
      tokens: TokenStream::new(),
      value: None,
    }
  }

  pub(crate) fn set<A: ToTokens>(&mut self, obj: A, value: T) {
    let tokens = obj.into_token_stream();
    if self.value.is_some() {
      self
        .ast_result
        .error_spanned_by(tokens, format!("duplicate attribute `{}`", self.name));
    } else {
      self.tokens = tokens;
      self.value = Some(value);
    }
  }

  fn set_opt<A: ToTokens>(&mut self, obj: A, value: Option<T>) {
    if let Some(value) = value {
      self.set(obj, value);
    }
  }

  pub(crate) fn set_if_none(&mut self, value: T) {
    if self.value.is_none() {
      self.value = Some(value);
    }
  }

  pub(crate) fn get(self) -> Option<T> {
    self.value
  }

  #[allow(dead_code)]
  fn get_with_tokens(self) -> Option<(TokenStream, T)> {
    match self.value {
      Some(v) => Some((self.tokens, v)),
      None => None,
    }
  }
}

pub struct PBStructAttrs {
  #[allow(dead_code)]
  name: String,
  pb_index: Option<syn::LitInt>,
  pb_one_of: bool,
  skip_pb_serializing: bool,
  skip_pb_deserializing: bool,
  serialize_pb_with: Option<syn::ExprPath>,
  deserialize_pb_with: Option<syn::ExprPath>,
}

pub fn is_recognizable_field(field: &syn::Field) -> bool {
  field
    .attrs
    .iter()
    .any(|attr| is_recognizable_attribute(attr))
}

impl PBStructAttrs {
  /// Extract out the `#[pb(...)]` attributes from a struct field.
  pub fn from_ast(ast_result: &ASTResult, index: usize, field: &syn::Field) -> Self {
    let mut pb_index = ASTAttr::none(ast_result, PB_INDEX);
    let mut pb_one_of = BoolAttr::none(ast_result, PB_ONE_OF);
    let mut serialize_pb_with = ASTAttr::none(ast_result, SERIALIZE_PB_WITH);
    let mut skip_pb_serializing = BoolAttr::none(ast_result, SKIP_PB_SERIALIZING);
    let mut deserialize_pb_with = ASTAttr::none(ast_result, DESERIALIZE_PB_WITH);
    let mut skip_pb_deserializing = BoolAttr::none(ast_result, SKIP_PB_DESERIALIZING);

    let ident = match &field.ident {
      Some(ident) => ident.to_string(),
      None => index.to_string(),
    };

    for meta_item in field
      .attrs
      .iter()
      .flat_map(|attr| get_pb_meta_items(ast_result, attr))
      .flatten()
    {
      match &meta_item {
        // Parse `#[pb(skip)]`
        Meta(Path(word)) if word == SKIP => {
          skip_pb_serializing.set_true(word);
          skip_pb_deserializing.set_true(word);
        },
        // Parse '#[pb(index = x)]'
        Meta(NameValue(m)) if m.path == PB_INDEX => {
          if let syn::Lit::Int(lit) = &m.lit {
            pb_index.set(&m.path, lit.clone());
          }
        },
        // Parse `#[pb(one_of)]`
        Meta(Path(path)) if path == PB_ONE_OF => {
          pb_one_of.set_true(path);
        },
        // Parse `#[pb(serialize_pb_with = "...")]`
        Meta(NameValue(m)) if m.path == SERIALIZE_PB_WITH => {
          if let Ok(path) = parse_lit_into_expr_path(ast_result, SERIALIZE_PB_WITH, &m.lit) {
            serialize_pb_with.set(&m.path, path);
          }
        },
        // Parse `#[pb(deserialize_pb_with = "...")]`
        Meta(NameValue(m)) if m.path == DESERIALIZE_PB_WITH => {
          if let Ok(path) = parse_lit_into_expr_path(ast_result, DESERIALIZE_PB_WITH, &m.lit) {
            deserialize_pb_with.set(&m.path, path);
          }
        },
        Meta(meta_item) => {
          let path = meta_item
            .path()
            .into_token_stream()
            .to_string()
            .replace(' ', "");
          ast_result.error_spanned_by(
            meta_item.path(),
            format!("unknown pb field attribute `{}`", path),
          );
        },
        Lit(lit) => {
          ast_result.error_spanned_by(lit, "unexpected literal in field attribute");
        },
      }
    }

    PBStructAttrs {
      name: ident,
      pb_index: pb_index.get(),
      pb_one_of: pb_one_of.get(),
      skip_pb_serializing: skip_pb_serializing.get(),
      skip_pb_deserializing: skip_pb_deserializing.get(),
      serialize_pb_with: serialize_pb_with.get(),
      deserialize_pb_with: deserialize_pb_with.get(),
    }
  }

  #[allow(dead_code)]
  pub fn pb_index(&self) -> Option<String> {
    self
      .pb_index
      .as_ref()
      .map(|lit| lit.base10_digits().to_string())
  }

  pub fn is_one_of(&self) -> bool {
    self.pb_one_of
  }

  pub fn serialize_pb_with(&self) -> Option<&syn::ExprPath> {
    self.serialize_pb_with.as_ref()
  }

  pub fn deserialize_pb_with(&self) -> Option<&syn::ExprPath> {
    self.deserialize_pb_with.as_ref()
  }

  pub fn skip_pb_serializing(&self) -> bool {
    self.skip_pb_serializing
  }

  pub fn skip_pb_deserializing(&self) -> bool {
    self.skip_pb_deserializing
  }
}

pub enum Default {
  /// Field must always be specified because it does not have a default.
  None,
  /// The default is given by `std::default::Default::default()`.
  Default,
  /// The default is given by this function.
  Path(syn::ExprPath),
}

pub fn is_recognizable_attribute(attr: &syn::Attribute) -> bool {
  attr.path == PB_ATTRS || attr.path == EVENT || attr.path == NODE_ATTRS || attr.path == NODES_ATTRS
}

pub fn get_pb_meta_items(
  cx: &ASTResult,
  attr: &syn::Attribute,
) -> Result<Vec<syn::NestedMeta>, ()> {
  // Only handle the attribute that we have defined
  if attr.path != PB_ATTRS {
    return Ok(vec![]);
  }

  // http://strymon.systems.ethz.ch/typename/syn/enum.Meta.html
  match attr.parse_meta() {
    Ok(List(meta)) => Ok(meta.nested.into_iter().collect()),
Ok(other) => { Ok(other) => {
cx.error_spanned_by(other, "expected #[pb(...)]"); cx.error_spanned_by(other, "expected #[pb(...)]");
Err(()) Err(())
} },
Err(err) => { Err(err) => {
cx.error_spanned_by(attr, "attribute must be str, e.g. #[pb(xx = \"xxx\")]"); cx.error_spanned_by(attr, "attribute must be str, e.g. #[pb(xx = \"xxx\")]");
cx.syn_error(err); cx.syn_error(err);
Err(()) Err(())
} },
} }
} }
pub fn get_node_meta_items(cx: &ASTResult, attr: &syn::Attribute) -> Result<Vec<syn::NestedMeta>, ()> { pub fn get_node_meta_items(
// Only handle the attribute that we have defined cx: &ASTResult,
if attr.path != NODE_ATTRS && attr.path != NODES_ATTRS { attr: &syn::Attribute,
return Ok(vec![]); ) -> Result<Vec<syn::NestedMeta>, ()> {
} // Only handle the attribute that we have defined
if attr.path != NODE_ATTRS && attr.path != NODES_ATTRS {
return Ok(vec![]);
}
// http://strymon.systems.ethz.ch/typename/syn/enum.Meta.html // http://strymon.systems.ethz.ch/typename/syn/enum.Meta.html
match attr.parse_meta() { match attr.parse_meta() {
Ok(List(meta)) => Ok(meta.nested.into_iter().collect()), Ok(List(meta)) => Ok(meta.nested.into_iter().collect()),
Ok(_) => Ok(vec![]), Ok(_) => Ok(vec![]),
Err(err) => { Err(err) => {
cx.error_spanned_by(attr, "attribute must be str, e.g. #[node(xx = \"xxx\")]"); cx.error_spanned_by(attr, "attribute must be str, e.g. #[node(xx = \"xxx\")]");
cx.syn_error(err); cx.syn_error(err);
Err(()) Err(())
} },
} }
} }
pub fn get_event_meta_items(cx: &ASTResult, attr: &syn::Attribute) -> Result<Vec<syn::NestedMeta>, ()> { pub fn get_event_meta_items(
// Only handle the attribute that we have defined cx: &ASTResult,
if attr.path != EVENT { attr: &syn::Attribute,
return Ok(vec![]); ) -> Result<Vec<syn::NestedMeta>, ()> {
} // Only handle the attribute that we have defined
if attr.path != EVENT {
return Ok(vec![]);
}
// http://strymon.systems.ethz.ch/typename/syn/enum.Meta.html // http://strymon.systems.ethz.ch/typename/syn/enum.Meta.html
match attr.parse_meta() { match attr.parse_meta() {
Ok(List(meta)) => Ok(meta.nested.into_iter().collect()), Ok(List(meta)) => Ok(meta.nested.into_iter().collect()),
Ok(other) => { Ok(other) => {
cx.error_spanned_by(other, "expected #[event(...)]"); cx.error_spanned_by(other, "expected #[event(...)]");
Err(()) Err(())
} },
Err(err) => { Err(err) => {
cx.error_spanned_by(attr, "attribute must be str, e.g. #[event(xx = \"xxx\")]"); cx.error_spanned_by(attr, "attribute must be str, e.g. #[event(xx = \"xxx\")]");
cx.syn_error(err); cx.syn_error(err);
Err(()) Err(())
} },
} }
} }
pub fn parse_lit_into_expr_path( pub fn parse_lit_into_expr_path(
ast_result: &ASTResult, ast_result: &ASTResult,
attr_name: Symbol, attr_name: Symbol,
lit: &syn::Lit, lit: &syn::Lit,
) -> Result<syn::ExprPath, ()> { ) -> Result<syn::ExprPath, ()> {
let string = get_lit_str(ast_result, attr_name, lit)?; let string = get_lit_str(ast_result, attr_name, lit)?;
parse_lit_str(string) parse_lit_str(string).map_err(|_| {
.map_err(|_| ast_result.error_spanned_by(lit, format!("failed to parse path: {:?}", string.value()))) ast_result.error_spanned_by(lit, format!("failed to parse path: {:?}", string.value()))
})
} }
fn get_lit_str<'a>(ast_result: &ASTResult, attr_name: Symbol, lit: &'a syn::Lit) -> Result<&'a syn::LitStr, ()> { fn get_lit_str<'a>(
if let syn::Lit::Str(lit) = lit { ast_result: &ASTResult,
Ok(lit) attr_name: Symbol,
} else { lit: &'a syn::Lit,
ast_result.error_spanned_by( ) -> Result<&'a syn::LitStr, ()> {
lit, if let syn::Lit::Str(lit) = lit {
format!( Ok(lit)
"expected pb {} attribute to be a string: `{} = \"...\"`", } else {
attr_name, attr_name ast_result.error_spanned_by(
), lit,
); format!(
Err(()) "expected pb {} attribute to be a string: `{} = \"...\"`",
} attr_name, attr_name
),
);
Err(())
}
} }
fn parse_lit_into_ty(ast_result: &ASTResult, attr_name: Symbol, lit: &syn::Lit) -> Result<syn::Type, ()> { fn parse_lit_into_ty(
let string = get_lit_str(ast_result, attr_name, lit)?; ast_result: &ASTResult,
attr_name: Symbol,
lit: &syn::Lit,
) -> Result<syn::Type, ()> {
let string = get_lit_str(ast_result, attr_name, lit)?;
parse_lit_str(string).map_err(|_| { parse_lit_str(string).map_err(|_| {
ast_result.error_spanned_by( ast_result.error_spanned_by(
lit, lit,
format!("failed to parse type: {} = {:?}", attr_name, string.value()), format!("failed to parse type: {} = {:?}", attr_name, string.value()),
) )
}) })
} }
pub fn parse_lit_str<T>(s: &syn::LitStr) -> parse::Result<T> pub fn parse_lit_str<T>(s: &syn::LitStr) -> parse::Result<T>
where where
T: Parse, T: Parse,
{ {
let tokens = spanned_tokens(s)?; let tokens = spanned_tokens(s)?;
syn::parse2(tokens) syn::parse2(tokens)
} }
fn spanned_tokens(s: &syn::LitStr) -> parse::Result<TokenStream> { fn spanned_tokens(s: &syn::LitStr) -> parse::Result<TokenStream> {
let stream = syn::parse_str(&s.value())?; let stream = syn::parse_str(&s.value())?;
Ok(respan_token_stream(stream, s.span())) Ok(respan_token_stream(stream, s.span()))
} }
fn respan_token_stream(stream: TokenStream, span: Span) -> TokenStream { fn respan_token_stream(stream: TokenStream, span: Span) -> TokenStream {
stream.into_iter().map(|token| respan_token_tree(token, span)).collect() stream
.into_iter()
.map(|token| respan_token_tree(token, span))
.collect()
} }
fn respan_token_tree(mut token: TokenTree, span: Span) -> TokenTree { fn respan_token_tree(mut token: TokenTree, span: Span) -> TokenTree {
if let TokenTree::Group(g) = &mut token { if let TokenTree::Group(g) = &mut token {
*g = Group::new(g.delimiter(), respan_token_stream(g.stream(), span)); *g = Group::new(g.delimiter(), respan_token_stream(g.stream(), span));
} }
token.set_span(span); token.set_span(span);
token token
} }
fn default_pb_type(ast_result: &ASTResult, ident: &syn::Ident) -> syn::Type { fn default_pb_type(ast_result: &ASTResult, ident: &syn::Ident) -> syn::Type {
let take_ident = ident.to_string(); let take_ident = ident.to_string();
let lit_str = syn::LitStr::new(&take_ident, ident.span()); let lit_str = syn::LitStr::new(&take_ident, ident.span());
if let Ok(tokens) = spanned_tokens(&lit_str) { if let Ok(tokens) = spanned_tokens(&lit_str) {
if let Ok(pb_struct_ty) = syn::parse2(tokens) { if let Ok(pb_struct_ty) = syn::parse2(tokens) {
return pb_struct_ty; return pb_struct_ty;
}
} }
ast_result.error_spanned_by(ident, format!("❌ Can't find {} protobuf struct", take_ident)); }
panic!() ast_result.error_spanned_by(
ident,
format!("❌ Can't find {} protobuf struct", take_ident),
);
panic!()
} }
#[allow(dead_code)] #[allow(dead_code)]
pub fn is_option(ty: &syn::Type) -> bool { pub fn is_option(ty: &syn::Type) -> bool {
let path = match ungroup(ty) { let path = match ungroup(ty) {
syn::Type::Path(ty) => &ty.path, syn::Type::Path(ty) => &ty.path,
_ => { _ => {
return false; return false;
} },
}; };
let seg = match path.segments.last() { let seg = match path.segments.last() {
Some(seg) => seg, Some(seg) => seg,
None => { None => {
return false; return false;
} },
}; };
let args = match &seg.arguments { let args = match &seg.arguments {
syn::PathArguments::AngleBracketed(bracketed) => &bracketed.args, syn::PathArguments::AngleBracketed(bracketed) => &bracketed.args,
_ => { _ => {
return false; return false;
} },
}; };
seg.ident == "Option" && args.len() == 1 seg.ident == "Option" && args.len() == 1
} }
#[allow(dead_code)] #[allow(dead_code)]
pub fn ungroup(mut ty: &syn::Type) -> &syn::Type { pub fn ungroup(mut ty: &syn::Type) -> &syn::Type {
while let syn::Type::Group(group) = ty { while let syn::Type::Group(group) = ty {
ty = &group.elem; ty = &group.elem;
} }
ty ty
} }
struct BoolAttr<'c>(ASTAttr<'c, ()>); struct BoolAttr<'c>(ASTAttr<'c, ()>);
impl<'c> BoolAttr<'c> { impl<'c> BoolAttr<'c> {
fn none(ast_result: &'c ASTResult, name: Symbol) -> Self { fn none(ast_result: &'c ASTResult, name: Symbol) -> Self {
BoolAttr(ASTAttr::none(ast_result, name)) BoolAttr(ASTAttr::none(ast_result, name))
} }
fn set_true<A: ToTokens>(&mut self, obj: A) { fn set_true<A: ToTokens>(&mut self, obj: A) {
self.0.set(obj, ()); self.0.set(obj, ());
} }
fn get(&self) -> bool { fn get(&self) -> bool {
self.0.value.is_some() self.0.value.is_some()
} }
} }
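
// Usage sketch: the attribute forms that `from_ast` recognizes, applied to a
// hypothetical struct. The `ProtoBuf` derive and the field names here are
// illustrative assumptions, not code from this crate.
#[derive(ProtoBuf)]
pub struct HypotheticalPB {
  #[pb(index = 1)]
  pub id: String,

  // `one_of` marks the field as an optional protobuf `oneof` member.
  #[pb(index = 2, one_of)]
  pub name: Option<String>,

  // Skipped for both pb serialization and deserialization.
  #[pb(skip)]
  pub cache: usize,
}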

View file

@@ -48,31 +48,31 @@ pub const GET_MUT_VEC_ELEMENT_WITH: Symbol = Symbol("get_mut_element_with");
pub const WITH_CHILDREN: Symbol = Symbol("with_children");

impl PartialEq<Symbol> for Ident {
  fn eq(&self, word: &Symbol) -> bool {
    self == word.0
  }
}

impl<'a> PartialEq<Symbol> for &'a Ident {
  fn eq(&self, word: &Symbol) -> bool {
    *self == word.0
  }
}

impl PartialEq<Symbol> for Path {
  fn eq(&self, word: &Symbol) -> bool {
    self.is_ident(word.0)
  }
}

impl<'a> PartialEq<Symbol> for &'a Path {
  fn eq(&self, word: &Symbol) -> bool {
    self.is_ident(word.0)
  }
}

impl Display for Symbol {
  fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
    formatter.write_str(self.0)
  }
}
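
// Minimal sketch of what these impls buy: a `syn::Path` (or `Ident`) can be
// compared against a `Symbol` constant directly when filtering attributes.
fn is_pb_attribute(attr: &syn::Attribute) -> bool {
  attr.path == PB_ATTRS // via `impl PartialEq<Symbol> for Path`
}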

View file

@@ -3,151 +3,154 @@ use syn::{self, AngleBracketedGenericArguments, PathSegment};

#[derive(Eq, PartialEq, Debug)]
pub enum PrimitiveTy {
  Map(MapInfo),
  Vec,
  Opt,
  Other,
}

#[derive(Debug)]
pub struct TyInfo<'a> {
  pub ident: &'a syn::Ident,
  pub ty: &'a syn::Type,
  pub primitive_ty: PrimitiveTy,
  pub bracket_ty_info: Box<Option<TyInfo<'a>>>,
}

#[derive(Debug, Eq, PartialEq)]
pub struct MapInfo {
  pub key: String,
  pub value: String,
}

impl MapInfo {
  fn new(key: String, value: String) -> Self {
    MapInfo { key, value }
  }
}

impl<'a> TyInfo<'a> {
  #[allow(dead_code)]
  pub fn bracketed_ident(&'a self) -> &'a syn::Ident {
    match self.bracket_ty_info.as_ref() {
      Some(b_ty) => b_ty.ident,
      None => {
        panic!()
      },
    }
  }
}

pub fn parse_ty<'a>(
  ast_result: &ASTResult,
  ty: &'a syn::Type,
) -> Result<Option<TyInfo<'a>>, String> {
  // Type -> TypePath -> Path -> PathSegment -> PathArguments ->
  // AngleBracketedGenericArguments -> GenericArgument -> Type.
  if let syn::Type::Path(ref p) = ty {
    if p.path.segments.len() != 1 {
      return Ok(None);
    }

    let seg = match p.path.segments.last() {
      Some(seg) => seg,
      None => return Ok(None),
    };

    let _is_option = seg.ident == "Option";

    return if let syn::PathArguments::AngleBracketed(ref bracketed) = seg.arguments {
      match seg.ident.to_string().as_ref() {
        "HashMap" => generate_hashmap_ty_info(ast_result, ty, seg, bracketed),
        "Vec" => generate_vec_ty_info(ast_result, seg, bracketed),
        "Option" => generate_option_ty_info(ast_result, ty, seg, bracketed),
        _ => {
          let msg = format!("Unsupported type: {}", seg.ident);
          ast_result.error_spanned_by(&seg.ident, &msg);
          return Err(msg);
        },
      }
    } else {
      return Ok(Some(TyInfo {
        ident: &seg.ident,
        ty,
        primitive_ty: PrimitiveTy::Other,
        bracket_ty_info: Box::new(None),
      }));
    };
  }
  Err("Unsupported inner type, get inner type fail".to_string())
}

fn parse_bracketed(bracketed: &AngleBracketedGenericArguments) -> Vec<&syn::Type> {
  bracketed
    .args
    .iter()
    .flat_map(|arg| {
      if let syn::GenericArgument::Type(ref ty_in_bracket) = arg {
        Some(ty_in_bracket)
      } else {
        None
      }
    })
    .collect::<Vec<&syn::Type>>()
}

pub fn generate_hashmap_ty_info<'a>(
  ast_result: &ASTResult,
  ty: &'a syn::Type,
  path_segment: &'a PathSegment,
  bracketed: &'a AngleBracketedGenericArguments,
) -> Result<Option<TyInfo<'a>>, String> {
  // A map takes exactly two generic arguments: key and value
  if bracketed.args.len() != 2 {
    return Ok(None);
  }
  let types = parse_bracketed(bracketed);
  let key = parse_ty(ast_result, types[0])?.unwrap().ident.to_string();
  let value = parse_ty(ast_result, types[1])?.unwrap().ident.to_string();
  let bracket_ty_info = Box::new(parse_ty(ast_result, types[1])?);
  Ok(Some(TyInfo {
    ident: &path_segment.ident,
    ty,
    primitive_ty: PrimitiveTy::Map(MapInfo::new(key, value)),
    bracket_ty_info,
  }))
}

fn generate_option_ty_info<'a>(
  ast_result: &ASTResult,
  ty: &'a syn::Type,
  path_segment: &'a PathSegment,
  bracketed: &'a AngleBracketedGenericArguments,
) -> Result<Option<TyInfo<'a>>, String> {
  assert_eq!(path_segment.ident.to_string(), "Option".to_string());
  let types = parse_bracketed(bracketed);
  let bracket_ty_info = Box::new(parse_ty(ast_result, types[0])?);
  Ok(Some(TyInfo {
    ident: &path_segment.ident,
    ty,
    primitive_ty: PrimitiveTy::Opt,
    bracket_ty_info,
  }))
}

fn generate_vec_ty_info<'a>(
  ast_result: &ASTResult,
  path_segment: &'a PathSegment,
  bracketed: &'a AngleBracketedGenericArguments,
) -> Result<Option<TyInfo<'a>>, String> {
  if bracketed.args.len() != 1 {
    return Ok(None);
  }
  if let syn::GenericArgument::Type(ref bracketed_type) = bracketed.args.first().unwrap() {
    let bracketed_ty_info = Box::new(parse_ty(ast_result, bracketed_type)?);
    return Ok(Some(TyInfo {
      ident: &path_segment.ident,
      ty: bracketed_type,
      primitive_ty: PrimitiveTy::Vec,
      bracket_ty_info: bracketed_ty_info,
    }));
  }
  Ok(None)
}
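
// Illustrative test for `parse_ty` on a nested generic type. `ASTResult::new()`
// is assumed here; the real constructor may differ.
#[test]
fn parse_option_vec_sketch() {
  let ast_result = ASTResult::new();
  let ty: syn::Type = syn::parse_str("Option<Vec<String>>").unwrap();
  let info = parse_ty(&ast_result, &ty).unwrap().unwrap();
  // The outer layer is reported as an `Option`...
  assert_eq!(info.primitive_ty, PrimitiveTy::Opt);
  // ...and `bracket_ty_info` drills into the inner `Vec<String>`.
  let inner = info.bracket_ty_info.as_ref().as_ref().unwrap();
  assert_eq!(inner.primitive_ty, PrimitiveTy::Vec);
}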

View file

@@ -1,7 +1,9 @@
use crate::errors::{SyncError, SyncResult};
use crate::util::cal_diff;
use flowy_sync::util::make_operations_from_revisions;
use grid_model::{
  gen_block_id, gen_row_id, CellRevision, DatabaseBlockRevision, RowChangeset, RowRevision,
};
use lib_infra::util::md5;
use lib_ot::core::{DeltaBuilder, DeltaOperations, EmptyAttributes, OperationTransform};
use revision_model::Revision;
@@ -14,425 +16,463 @@ pub type GridBlockOperationsBuilder = DeltaBuilder;

#[derive(Debug, Clone)]
pub struct GridBlockRevisionPad {
  block: DatabaseBlockRevision,
  operations: GridBlockOperations,
}

impl std::ops::Deref for GridBlockRevisionPad {
  type Target = DatabaseBlockRevision;

  fn deref(&self) -> &Self::Target {
    &self.block
  }
}

impl GridBlockRevisionPad {
  pub fn duplicate_data(&self, duplicated_block_id: &str) -> DatabaseBlockRevision {
    let duplicated_rows = self
      .block
      .rows
      .iter()
      .map(|row| {
        let mut duplicated_row = row.as_ref().clone();
        duplicated_row.id = gen_row_id();
        duplicated_row.block_id = duplicated_block_id.to_string();
        Arc::new(duplicated_row)
      })
      .collect::<Vec<Arc<RowRevision>>>();
    DatabaseBlockRevision {
      block_id: duplicated_block_id.to_string(),
      rows: duplicated_rows,
    }
  }

  pub fn from_operations(operations: GridBlockOperations) -> SyncResult<Self> {
    let s = operations.content()?;
    let revision: DatabaseBlockRevision = serde_json::from_str(&s).map_err(|e| {
      let msg = format!("Deserialize operations to GridBlockRevision failed: {}", e);
      tracing::error!("{}", s);
      SyncError::internal().context(msg)
    })?;
    Ok(Self {
      block: revision,
      operations,
    })
  }

  pub fn from_revisions(_grid_id: &str, revisions: Vec<Revision>) -> SyncResult<Self> {
    let operations: GridBlockOperations = make_operations_from_revisions(revisions)?;
    Self::from_operations(operations)
  }

  #[tracing::instrument(level = "trace", skip(self, row), err)]
  pub fn add_row_rev(
    &mut self,
    row: RowRevision,
    start_row_id: Option<String>,
  ) -> SyncResult<Option<GridBlockRevisionChangeset>> {
    self.modify(|rows| {
      if let Some(start_row_id) = start_row_id {
        if !start_row_id.is_empty() {
          if let Some(index) = rows.iter().position(|row| row.id == start_row_id) {
            rows.insert(index + 1, Arc::new(row));
            return Ok(Some(()));
          }
        }
      }

      rows.push(Arc::new(row));
      Ok(Some(()))
    })
  }

  pub fn delete_rows(
    &mut self,
    row_ids: Vec<Cow<'_, String>>,
  ) -> SyncResult<Option<GridBlockRevisionChangeset>> {
    self.modify(|rows| {
      rows.retain(|row| !row_ids.contains(&Cow::Borrowed(&row.id)));
      Ok(Some(()))
    })
  }

  pub fn get_row_rev(&self, row_id: &str) -> Option<(usize, Arc<RowRevision>)> {
    for (index, row) in self.block.rows.iter().enumerate() {
      if row.id == row_id {
        return Some((index, row.clone()));
      }
    }
    None
  }

  pub fn get_row_revs<T>(
    &self,
    row_ids: Option<Vec<Cow<'_, T>>>,
  ) -> SyncResult<Vec<Arc<RowRevision>>>
  where
    T: AsRef<str> + ToOwned + ?Sized,
  {
    match row_ids {
      None => Ok(self.block.rows.clone()),
      Some(row_ids) => {
        let row_map = self
          .block
          .rows
          .iter()
          .map(|row| (row.id.as_str(), row.clone()))
          .collect::<HashMap<&str, Arc<RowRevision>>>();

        Ok(
          row_ids
            .iter()
            .flat_map(|row_id| {
              let row_id = row_id.as_ref().as_ref();
              match row_map.get(row_id) {
                None => {
                  tracing::error!("Can't find the row with id: {}", row_id);
                  None
                },
                Some(row) => Some(row.clone()),
              }
            })
            .collect::<Vec<_>>(),
        )
      },
    }
  }

  pub fn get_cell_revs(
    &self,
    field_id: &str,
    row_ids: Option<Vec<Cow<'_, String>>>,
  ) -> SyncResult<Vec<CellRevision>> {
    let rows = self.get_row_revs(row_ids)?;
    let cell_revs = rows
      .iter()
      .flat_map(|row| {
        let cell_rev = row.cells.get(field_id)?;
        Some(cell_rev.clone())
      })
      .collect::<Vec<CellRevision>>();
    Ok(cell_revs)
  }

  pub fn number_of_rows(&self) -> i32 {
    self.block.rows.len() as i32
  }

  pub fn index_of_row(&self, row_id: &str) -> Option<usize> {
    self.block.rows.iter().position(|row| row.id == row_id)
  }

  pub fn update_row(
    &mut self,
    changeset: RowChangeset,
  ) -> SyncResult<Option<GridBlockRevisionChangeset>> {
    let row_id = changeset.row_id.clone();
    self.modify_row(&row_id, |row| {
      let mut is_changed = None;
      if let Some(height) = changeset.height {
        row.height = height;
        is_changed = Some(());
      }

      if let Some(visibility) = changeset.visibility {
        row.visibility = visibility;
        is_changed = Some(());
      }

      if !changeset.cell_by_field_id.is_empty() {
        is_changed = Some(());
        changeset
          .cell_by_field_id
          .into_iter()
          .for_each(|(field_id, cell)| {
            row.cells.insert(field_id, cell);
          })
      }

      Ok(is_changed)
    })
  }

  pub fn move_row(
    &mut self,
    row_id: &str,
    from: usize,
    to: usize,
  ) -> SyncResult<Option<GridBlockRevisionChangeset>> {
    self.modify(|row_revs| {
      if let Some(position) = row_revs.iter().position(|row_rev| row_rev.id == row_id) {
        debug_assert_eq!(from, position);
        let row_rev = row_revs.remove(position);
        if to > row_revs.len() {
          Err(SyncError::out_of_bound())
        } else {
          row_revs.insert(to, row_rev);
          Ok(Some(()))
        }
      } else {
        Ok(None)
      }
    })
  }

  pub fn modify<F>(&mut self, f: F) -> SyncResult<Option<GridBlockRevisionChangeset>>
  where
    F: for<'a> FnOnce(&'a mut Vec<Arc<RowRevision>>) -> SyncResult<Option<()>>,
  {
    let cloned_self = self.clone();
    match f(&mut self.block.rows)? {
      None => Ok(None),
      Some(_) => {
        let old = cloned_self.revision_json()?;
        let new = self.revision_json()?;
        match cal_diff::<EmptyAttributes>(old, new) {
          None => Ok(None),
          Some(operations) => {
            tracing::trace!(
              "[GridBlockRevision] Composing operations {}",
              operations.json_str()
            );
            self.operations = self.operations.compose(&operations)?;
            Ok(Some(GridBlockRevisionChangeset {
              operations,
              md5: md5(&self.operations.json_bytes()),
            }))
          },
        }
      },
    }
  }

  fn modify_row<F>(&mut self, row_id: &str, f: F) -> SyncResult<Option<GridBlockRevisionChangeset>>
  where
    F: FnOnce(&mut RowRevision) -> SyncResult<Option<()>>,
  {
    self.modify(|rows| {
      if let Some(row_rev) = rows.iter_mut().find(|row_rev| row_id == row_rev.id) {
        f(Arc::make_mut(row_rev))
      } else {
        tracing::warn!("[BlockMetaPad]: Can't find any row with id: {}", row_id);
        Ok(None)
      }
    })
  }

  pub fn revision_json(&self) -> SyncResult<String> {
    serde_json::to_string(&self.block)
      .map_err(|e| SyncError::internal().context(format!("serial block to json failed: {}", e)))
  }

  pub fn operations_json_str(&self) -> String {
    self.operations.json_str()
  }
}
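
// Sketch of driving `modify` directly, inside a function that returns
// `SyncResult`: returning `Ok(None)` from the closure means "nothing changed",
// so no diff is computed and no changeset is produced.
let changeset = pad.modify(|rows| {
  if rows.is_empty() {
    Ok(None) // no-op: `cal_diff` is never reached
  } else {
    rows.reverse();
    Ok(Some(()))
  }
})?;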

pub struct GridBlockRevisionChangeset {
  pub operations: GridBlockOperations,
  /// md5: the md5 of the grid after applying the change.
  pub md5: String,
}

pub fn make_database_block_operations(block_rev: &DatabaseBlockRevision) -> GridBlockOperations {
  let json = serde_json::to_string(&block_rev).unwrap();
  GridBlockOperationsBuilder::new().insert(&json).build()
}

pub fn make_grid_block_revisions(
  _user_id: &str,
  grid_block_meta_data: &DatabaseBlockRevision,
) -> Vec<Revision> {
  let operations = make_database_block_operations(grid_block_meta_data);
  let bytes = operations.json_bytes();
  let revision = Revision::initial_revision(&grid_block_meta_data.block_id, bytes);
  vec![revision]
}

impl std::default::Default for GridBlockRevisionPad {
  fn default() -> Self {
    let block_revision = DatabaseBlockRevision {
      block_id: gen_block_id(),
      rows: vec![],
    };

    let operations = make_database_block_operations(&block_revision);
    GridBlockRevisionPad {
      block: block_revision,
      operations,
    }
  }
}

#[cfg(test)]
mod tests {
  use crate::client_database::{GridBlockOperations, GridBlockRevisionPad};
  use grid_model::{RowChangeset, RowRevision};
  use std::borrow::Cow;

  #[test]
  fn block_meta_add_row() {
    let mut pad = test_pad();
    let row = RowRevision {
      id: "1".to_string(),
      block_id: pad.block_id.clone(),
      cells: Default::default(),
      height: 0,
      visibility: false,
    };

    let change = pad.add_row_rev(row.clone(), None).unwrap().unwrap();
    assert_eq!(pad.rows.first().unwrap().as_ref(), &row);
    assert_eq!(
      change.operations.json_str(),
      r#"[{"retain":24},{"insert":"{\"id\":\"1\",\"block_id\":\"1\",\"cells\":[],\"height\":0,\"visibility\":false}"},{"retain":2}]"#
    );
  }

  #[test]
  fn block_meta_insert_row() {
    let mut pad = test_pad();
    let row_1 = test_row_rev("1", &pad);
    let row_2 = test_row_rev("2", &pad);
    let row_3 = test_row_rev("3", &pad);

    let change = pad.add_row_rev(row_1.clone(), None).unwrap().unwrap();
    assert_eq!(
      change.operations.json_str(),
      r#"[{"retain":24},{"insert":"{\"id\":\"1\",\"block_id\":\"1\",\"cells\":[],\"height\":0,\"visibility\":false}"},{"retain":2}]"#
    );

    let change = pad.add_row_rev(row_2.clone(), None).unwrap().unwrap();
    assert_eq!(
      change.operations.json_str(),
      r#"[{"retain":90},{"insert":",{\"id\":\"2\",\"block_id\":\"1\",\"cells\":[],\"height\":0,\"visibility\":false}"},{"retain":2}]"#
    );

    let change = pad
      .add_row_rev(row_3.clone(), Some("2".to_string()))
      .unwrap()
      .unwrap();
    assert_eq!(
      change.operations.json_str(),
      r#"[{"retain":157},{"insert":",{\"id\":\"3\",\"block_id\":\"1\",\"cells\":[],\"height\":0,\"visibility\":false}"},{"retain":2}]"#
    );

    assert_eq!(*pad.rows[0], row_1);
    assert_eq!(*pad.rows[1], row_2);
    assert_eq!(*pad.rows[2], row_3);
  }

  fn test_row_rev(id: &str, pad: &GridBlockRevisionPad) -> RowRevision {
    RowRevision {
      id: id.to_string(),
      block_id: pad.block_id.clone(),
      cells: Default::default(),
      height: 0,
      visibility: false,
    }
  }

  #[test]
  fn block_meta_insert_row2() {
    let mut pad = test_pad();
    let row_1 = test_row_rev("1", &pad);
    let row_2 = test_row_rev("2", &pad);
    let row_3 = test_row_rev("3", &pad);

    let _ = pad.add_row_rev(row_1.clone(), None).unwrap().unwrap();
    let _ = pad.add_row_rev(row_2.clone(), None).unwrap().unwrap();
    let _ = pad
      .add_row_rev(row_3.clone(), Some("1".to_string()))
      .unwrap()
      .unwrap();

    assert_eq!(*pad.rows[0], row_1);
    assert_eq!(*pad.rows[1], row_3);
    assert_eq!(*pad.rows[2], row_2);
  }

  #[test]
  fn block_meta_insert_row3() {
    let mut pad = test_pad();
    let row_1 = test_row_rev("1", &pad);
    let row_2 = test_row_rev("2", &pad);
    let row_3 = test_row_rev("3", &pad);

    let _ = pad.add_row_rev(row_1.clone(), None).unwrap().unwrap();
    let _ = pad.add_row_rev(row_2.clone(), None).unwrap().unwrap();
    let _ = pad
      .add_row_rev(row_3.clone(), Some("".to_string()))
      .unwrap()
      .unwrap();

    assert_eq!(*pad.rows[0], row_1);
    assert_eq!(*pad.rows[1], row_2);
    assert_eq!(*pad.rows[2], row_3);
  }

  #[test]
  fn block_meta_delete_row() {
    let mut pad = test_pad();
    let pre_json_str = pad.operations_json_str();
    let row = RowRevision {
      id: "1".to_string(),
      block_id: pad.block_id.clone(),
      cells: Default::default(),
      height: 0,
      visibility: false,
    };

    let _ = pad.add_row_rev(row.clone(), None).unwrap().unwrap();
    let change = pad
      .delete_rows(vec![Cow::Borrowed(&row.id)])
      .unwrap()
      .unwrap();
    assert_eq!(
      change.operations.json_str(),
      r#"[{"retain":24},{"delete":66},{"retain":2}]"#
    );

    assert_eq!(pad.operations_json_str(), pre_json_str);
  }

  #[test]
  fn block_meta_update_row() {
    let mut pad = test_pad();
    let row = RowRevision {
      id: "1".to_string(),
      block_id: pad.block_id.clone(),
      cells: Default::default(),
      height: 0,
      visibility: false,
    };

    let changeset = RowChangeset {
      row_id: row.id.clone(),
      height: Some(100),
      visibility: Some(true),
      cell_by_field_id: Default::default(),
    };

    let _ = pad.add_row_rev(row, None).unwrap().unwrap();
    let change = pad.update_row(changeset).unwrap().unwrap();

    assert_eq!(
      change.operations.json_str(),
      r#"[{"retain":69},{"insert":"10"},{"retain":15},{"insert":"tru"},{"delete":4},{"retain":4}]"#
    );

    assert_eq!(
      pad.revision_json().unwrap(),
      r#"{"block_id":"1","rows":[{"id":"1","block_id":"1","cells":[],"height":100,"visibility":true}]}"#
    );
  }

  fn test_pad() -> GridBlockRevisionPad {
    let operations =
      GridBlockOperations::from_json(r#"[{"insert":"{\"block_id\":\"1\",\"rows\":[]}"}]"#).unwrap();
    GridBlockRevisionPad::from_operations(operations).unwrap()
  }
}
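
// A further test in the style of the module above could pin down `move_row`.
// Sketch only: it would live inside `mod tests` and reuse its helpers.
#[test]
fn block_meta_move_row_sketch() {
  let mut pad = test_pad();
  let row_1 = test_row_rev("1", &pad);
  let row_2 = test_row_rev("2", &pad);
  let _ = pad.add_row_rev(row_1.clone(), None).unwrap().unwrap();
  let _ = pad.add_row_rev(row_2.clone(), None).unwrap().unwrap();

  // `move_row` debug-asserts that `from` matches the row's current position
  // before removing and reinserting it.
  let _ = pad.move_row("1", 0, 1).unwrap().unwrap();
  assert_eq!(*pad.rows[0], row_2);
  assert_eq!(*pad.rows[1], row_1);
}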

View file

@@ -1,70 +1,75 @@
use crate::errors::{SyncError, SyncResult};
use grid_model::{
  BuildDatabaseContext, DatabaseBlockRevision, FieldRevision, GridBlockMetaRevision, RowRevision,
};
use std::sync::Arc;

pub struct DatabaseBuilder {
  build_context: BuildDatabaseContext,
}

impl std::default::Default for DatabaseBuilder {
  fn default() -> Self {
    let mut build_context = BuildDatabaseContext::new();

    let block_meta = GridBlockMetaRevision::new();
    let block_meta_data = DatabaseBlockRevision {
      block_id: block_meta.block_id.clone(),
      rows: vec![],
    };

    build_context.block_metas.push(block_meta);
    build_context.blocks.push(block_meta_data);

    DatabaseBuilder { build_context }
  }
}

impl DatabaseBuilder {
  pub fn new() -> Self {
    Self::default()
  }

  pub fn add_field(&mut self, field: FieldRevision) {
    self.build_context.field_revs.push(Arc::new(field));
  }

  pub fn add_row(&mut self, row_rev: RowRevision) {
    let block_meta_rev = self.build_context.block_metas.first_mut().unwrap();
    let block_rev = self.build_context.blocks.first_mut().unwrap();
    block_rev.rows.push(Arc::new(row_rev));
    block_meta_rev.row_count += 1;
  }

  pub fn add_empty_row(&mut self) {
    let row = RowRevision::new(self.block_id());
    self.add_row(row);
  }

  pub fn field_revs(&self) -> &Vec<Arc<FieldRevision>> {
    &self.build_context.field_revs
  }

  pub fn block_id(&self) -> &str {
    &self.build_context.block_metas.first().unwrap().block_id
  }

  pub fn build(self) -> BuildDatabaseContext {
    self.build_context
  }
}

#[allow(dead_code)]
fn check_rows(fields: &[FieldRevision], rows: &[RowRevision]) -> SyncResult<()> {
  let field_ids = fields
    .iter()
    .map(|field| &field.id)
    .collect::<Vec<&String>>();
  for row in rows {
    let cell_field_ids = row.cells.keys().into_iter().collect::<Vec<&String>>();
    if cell_field_ids != field_ids {
      let msg = format!("{:?} contains invalid cells", row);
      return Err(SyncError::internal().context(msg));
    }
  }
  Ok(())
}
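
// Usage sketch for `DatabaseBuilder`, using only the API defined above:
// `Default` seeds one block meta plus its block, so rows can be added at once.
let mut builder = DatabaseBuilder::new();
builder.add_empty_row();
builder.add_empty_row();
let block_id = builder.block_id().to_string();
let build_context = builder.build();
assert_eq!(build_context.blocks.first().unwrap().rows.len(), 2);
assert_eq!(build_context.blocks.first().unwrap().block_id, block_id);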

View file

@ -2,8 +2,8 @@ use crate::errors::{internal_sync_error, SyncError, SyncResult};
use crate::util::cal_diff; use crate::util::cal_diff;
use flowy_sync::util::make_operations_from_revisions; use flowy_sync::util::make_operations_from_revisions;
use grid_model::{ use grid_model::{
gen_block_id, gen_grid_id, DatabaseRevision, FieldRevision, FieldTypeRevision, GridBlockMetaRevision, gen_block_id, gen_grid_id, DatabaseRevision, FieldRevision, FieldTypeRevision,
GridBlockMetaRevisionChangeset, GridBlockMetaRevision, GridBlockMetaRevisionChangeset,
}; };
use lib_infra::util::md5; use lib_infra::util::md5;
use lib_infra::util::move_vec_element; use lib_infra::util::move_vec_element;
@ -17,264 +17,301 @@ pub type DatabaseOperationsBuilder = DeltaOperationBuilder<EmptyAttributes>;
#[derive(Clone)] #[derive(Clone)]
pub struct DatabaseRevisionPad { pub struct DatabaseRevisionPad {
grid_rev: Arc<DatabaseRevision>, grid_rev: Arc<DatabaseRevision>,
operations: DatabaseOperations, operations: DatabaseOperations,
} }
pub trait JsonDeserializer { pub trait JsonDeserializer {
fn deserialize(&self, type_option_data: Vec<u8>) -> SyncResult<String>; fn deserialize(&self, type_option_data: Vec<u8>) -> SyncResult<String>;
} }
impl DatabaseRevisionPad { impl DatabaseRevisionPad {
pub fn grid_id(&self) -> String { pub fn grid_id(&self) -> String {
self.grid_rev.grid_id.clone() self.grid_rev.grid_id.clone()
} }
pub async fn duplicate_grid_block_meta(&self) -> (Vec<FieldRevision>, Vec<GridBlockMetaRevision>) { pub async fn duplicate_grid_block_meta(
let fields = self &self,
.grid_rev ) -> (Vec<FieldRevision>, Vec<GridBlockMetaRevision>) {
.fields let fields = self
.iter() .grid_rev
.map(|field_rev| field_rev.as_ref().clone()) .fields
.collect(); .iter()
.map(|field_rev| field_rev.as_ref().clone())
.collect();
let blocks = self let blocks = self
.grid_rev .grid_rev
.blocks .blocks
.iter() .iter()
.map(|block| { .map(|block| {
let mut duplicated_block = (**block).clone(); let mut duplicated_block = (**block).clone();
duplicated_block.block_id = gen_block_id(); duplicated_block.block_id = gen_block_id();
duplicated_block duplicated_block
}) })
.collect::<Vec<GridBlockMetaRevision>>(); .collect::<Vec<GridBlockMetaRevision>>();
(fields, blocks) (fields, blocks)
} }
pub fn from_operations(operations: DatabaseOperations) -> SyncResult<Self> { pub fn from_operations(operations: DatabaseOperations) -> SyncResult<Self> {
let content = operations.content()?; let content = operations.content()?;
let grid: DatabaseRevision = serde_json::from_str(&content).map_err(|e| { let grid: DatabaseRevision = serde_json::from_str(&content).map_err(|e| {
let msg = format!("Deserialize operations to grid failed: {}", e); let msg = format!("Deserialize operations to grid failed: {}", e);
tracing::error!("{}", msg); tracing::error!("{}", msg);
SyncError::internal().context(msg) SyncError::internal().context(msg)
})?; })?;
Ok(Self { Ok(Self {
grid_rev: Arc::new(grid), grid_rev: Arc::new(grid),
operations, operations,
}) })
} }
pub fn from_revisions(revisions: Vec<Revision>) -> SyncResult<Self> { pub fn from_revisions(revisions: Vec<Revision>) -> SyncResult<Self> {
let operations: DatabaseOperations = make_operations_from_revisions(revisions)?; let operations: DatabaseOperations = make_operations_from_revisions(revisions)?;
Self::from_operations(operations) Self::from_operations(operations)
} }
#[tracing::instrument(level = "debug", skip_all, err)] #[tracing::instrument(level = "debug", skip_all, err)]
pub fn create_field_rev( pub fn create_field_rev(
&mut self, &mut self,
new_field_rev: FieldRevision, new_field_rev: FieldRevision,
start_field_id: Option<String>, start_field_id: Option<String>,
) -> SyncResult<Option<DatabaseRevisionChangeset>> { ) -> SyncResult<Option<DatabaseRevisionChangeset>> {
self.modify_grid(|grid_meta| { self.modify_grid(|grid_meta| {
// Check if the field exists or not // Check if the field exists or not
if grid_meta if grid_meta
.fields .fields
.iter() .iter()
.any(|field_rev| field_rev.id == new_field_rev.id) .any(|field_rev| field_rev.id == new_field_rev.id)
{ {
tracing::error!("Duplicate grid field"); tracing::error!("Duplicate grid field");
return Ok(None); return Ok(None);
} }
let insert_index = match start_field_id { let insert_index = match start_field_id {
None => None, None => None,
Some(start_field_id) => grid_meta.fields.iter().position(|field| field.id == start_field_id), Some(start_field_id) => grid_meta
}; .fields
let new_field_rev = Arc::new(new_field_rev); .iter()
match insert_index { .position(|field| field.id == start_field_id),
None => grid_meta.fields.push(new_field_rev), };
Some(index) => grid_meta.fields.insert(index, new_field_rev), let new_field_rev = Arc::new(new_field_rev);
} match insert_index {
None => grid_meta.fields.push(new_field_rev),
Some(index) => grid_meta.fields.insert(index, new_field_rev),
}
Ok(Some(()))
})
}
pub fn delete_field_rev(
&mut self,
field_id: &str,
) -> SyncResult<Option<DatabaseRevisionChangeset>> {
self.modify_grid(|grid_meta| {
match grid_meta
.fields
.iter()
.position(|field| field.id == field_id)
{
None => Ok(None),
Some(index) => {
if grid_meta.fields[index].is_primary {
Err(SyncError::can_not_delete_primary_field())
} else {
grid_meta.fields.remove(index);
Ok(Some(())) Ok(Some(()))
}) }
} },
}
})
}
pub fn delete_field_rev(&mut self, field_id: &str) -> SyncResult<Option<DatabaseRevisionChangeset>> { pub fn duplicate_field_rev(
self.modify_grid( &mut self,
|grid_meta| match grid_meta.fields.iter().position(|field| field.id == field_id) { field_id: &str,
None => Ok(None), duplicated_field_id: &str,
Some(index) => { ) -> SyncResult<Option<DatabaseRevisionChangeset>> {
if grid_meta.fields[index].is_primary { self.modify_grid(|grid_meta| {
Err(SyncError::can_not_delete_primary_field()) match grid_meta
} else { .fields
grid_meta.fields.remove(index); .iter()
Ok(Some(())) .position(|field| field.id == field_id)
} {
} None => Ok(None),
}, Some(index) => {
) let mut duplicate_field_rev = grid_meta.fields[index].as_ref().clone();
} duplicate_field_rev.id = duplicated_field_id.to_string();
duplicate_field_rev.name = format!("{} (copy)", duplicate_field_rev.name);
pub fn duplicate_field_rev( grid_meta
&mut self,
field_id: &str,
duplicated_field_id: &str,
) -> SyncResult<Option<DatabaseRevisionChangeset>> {
self.modify_grid(
|grid_meta| match grid_meta.fields.iter().position(|field| field.id == field_id) {
None => Ok(None),
Some(index) => {
let mut duplicate_field_rev = grid_meta.fields[index].as_ref().clone();
duplicate_field_rev.id = duplicated_field_id.to_string();
duplicate_field_rev.name = format!("{} (copy)", duplicate_field_rev.name);
grid_meta.fields.insert(index + 1, Arc::new(duplicate_field_rev));
Ok(Some(()))
}
},
)
}
/// Modifies the current field type of the [FieldTypeRevision]
///
/// # Arguments
///
/// * `field_id`: the id of the field
/// * `field_type`: the new field type of the field
/// * `make_default_type_option`: create the field type's type-option data
/// * `type_option_transform`: create the field type's type-option data
///
///
pub fn switch_to_field<DT, TT, T>(
&mut self,
field_id: &str,
new_field_type: T,
make_default_type_option: DT,
type_option_transform: TT,
) -> SyncResult<Option<DatabaseRevisionChangeset>>
where
DT: FnOnce() -> String,
TT: FnOnce(FieldTypeRevision, Option<String>, String) -> String,
T: Into<FieldTypeRevision>,
{
let new_field_type = new_field_type.into();
self.modify_grid(|grid_meta| {
match grid_meta.fields.iter_mut().find(|field_rev| field_rev.id == field_id) {
None => {
tracing::warn!("Can not find the field with id: {}", field_id);
Ok(None)
}
Some(field_rev) => {
let mut_field_rev = Arc::make_mut(field_rev);
let old_field_type_rev = mut_field_rev.ty;
let old_field_type_option = mut_field_rev
.get_type_option_str(mut_field_rev.ty)
.map(|value| value.to_owned());
match mut_field_rev.get_type_option_str(new_field_type) {
Some(new_field_type_option) => {
let transformed_type_option = type_option_transform(
old_field_type_rev,
old_field_type_option,
new_field_type_option.to_owned(),
);
mut_field_rev.insert_type_option_str(&new_field_type, transformed_type_option);
}
None => {
// If the type-option data isn't exist before, creating the default type-option data.
let new_field_type_option = make_default_type_option();
let transformed_type_option =
type_option_transform(old_field_type_rev, old_field_type_option, new_field_type_option);
mut_field_rev.insert_type_option_str(&new_field_type, transformed_type_option);
}
}
mut_field_rev.ty = new_field_type;
Ok(Some(()))
}
}
})
}
pub fn replace_field_rev(
&mut self,
field_rev: Arc<FieldRevision>,
) -> SyncResult<Option<DatabaseRevisionChangeset>> {
self.modify_grid(
|grid_meta| match grid_meta.fields.iter().position(|field| field.id == field_rev.id) {
None => Ok(None),
Some(index) => {
grid_meta.fields.remove(index);
grid_meta.fields.insert(index, field_rev);
Ok(Some(()))
}
},
)
}
pub fn move_field(
&mut self,
field_id: &str,
from_index: usize,
to_index: usize,
) -> SyncResult<Option<DatabaseRevisionChangeset>> {
self.modify_grid(|grid_meta| {
match move_vec_element(
&mut grid_meta.fields,
|field| field.id == field_id,
from_index,
to_index,
)
.map_err(internal_sync_error)?
{
true => Ok(Some(())),
false => Ok(None),
}
})
}
pub fn contain_field(&self, field_id: &str) -> bool {
self.grid_rev.fields.iter().any(|field| field.id == field_id)
}
pub fn get_field_rev(&self, field_id: &str) -> Option<(usize, &Arc<FieldRevision>)> {
self.grid_rev
.fields .fields
.iter() .insert(index + 1, Arc::new(duplicate_field_rev));
.enumerate() Ok(Some(()))
.find(|(_, field)| field.id == field_id) },
}
})
}
/// Modifies the field's current field type, switching it to the given [FieldTypeRevision]
///
/// # Arguments
///
/// * `field_id`: the id of the field
/// * `new_field_type`: the new field type of the field
/// * `make_default_type_option`: creates the default type-option data for the new field type
/// * `type_option_transform`: transforms the existing type-option data into the new field
/// type's type-option data
///
pub fn switch_to_field<DT, TT, T>(
&mut self,
field_id: &str,
new_field_type: T,
make_default_type_option: DT,
type_option_transform: TT,
) -> SyncResult<Option<DatabaseRevisionChangeset>>
where
DT: FnOnce() -> String,
TT: FnOnce(FieldTypeRevision, Option<String>, String) -> String,
T: Into<FieldTypeRevision>,
{
let new_field_type = new_field_type.into();
self.modify_grid(|grid_meta| {
match grid_meta
.fields
.iter_mut()
.find(|field_rev| field_rev.id == field_id)
{
None => {
tracing::warn!("Can not find the field with id: {}", field_id);
Ok(None)
},
Some(field_rev) => {
let mut_field_rev = Arc::make_mut(field_rev);
let old_field_type_rev = mut_field_rev.ty;
let old_field_type_option = mut_field_rev
.get_type_option_str(mut_field_rev.ty)
.map(|value| value.to_owned());
match mut_field_rev.get_type_option_str(new_field_type) {
Some(new_field_type_option) => {
let transformed_type_option = type_option_transform(
old_field_type_rev,
old_field_type_option,
new_field_type_option.to_owned(),
);
mut_field_rev.insert_type_option_str(&new_field_type, transformed_type_option);
},
None => {
// If the type-option data doesn't exist yet, create the default type-option data.
let new_field_type_option = make_default_type_option();
let transformed_type_option = type_option_transform(
old_field_type_rev,
old_field_type_option,
new_field_type_option,
);
mut_field_rev.insert_type_option_str(&new_field_type, transformed_type_option);
},
}
mut_field_rev.ty = new_field_type;
Ok(Some(()))
},
}
})
}
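As a hedged illustration of the closure contract above (not a caller taken from the codebase: the field id, the numeric field type, and the JSON payloads are placeholders, and FieldTypeRevision is assumed to be a plain numeric alias in grid_model):

let changeset = pad.switch_to_field(
  "f1",                // assumed field id
  1u8,                 // assumed numeric value for the target FieldTypeRevision
  || "{}".to_string(), // default type-option JSON when none was stored before
  |_old_ty, _old_type_option, new_type_option| new_type_option, // keep the stored data as-is
)?;
// `changeset` is Some(..) only when the serialized grid actually changed.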
pub fn replace_field_rev(
&mut self,
field_rev: Arc<FieldRevision>,
) -> SyncResult<Option<DatabaseRevisionChangeset>> {
self.modify_grid(|grid_meta| {
match grid_meta
.fields
.iter()
.position(|field| field.id == field_rev.id)
{
None => Ok(None),
Some(index) => {
grid_meta.fields.remove(index);
grid_meta.fields.insert(index, field_rev);
Ok(Some(()))
},
}
})
}
pub fn move_field(
&mut self,
field_id: &str,
from_index: usize,
to_index: usize,
) -> SyncResult<Option<DatabaseRevisionChangeset>> {
self.modify_grid(|grid_meta| {
match move_vec_element(
&mut grid_meta.fields,
|field| field.id == field_id,
from_index,
to_index,
)
.map_err(internal_sync_error)?
{
true => Ok(Some(())),
false => Ok(None),
}
})
}
pub fn contain_field(&self, field_id: &str) -> bool {
self
.grid_rev
.fields
.iter()
.any(|field| field.id == field_id)
}
pub fn get_field_rev(&self, field_id: &str) -> Option<(usize, &Arc<FieldRevision>)> {
self
.grid_rev
.fields
.iter()
.enumerate()
.find(|(_, field)| field.id == field_id)
}
pub fn get_field_revs(
&self,
field_ids: Option<Vec<String>>,
) -> SyncResult<Vec<Arc<FieldRevision>>> {
match field_ids {
None => Ok(self.grid_rev.fields.clone()),
Some(field_ids) => {
let field_by_field_id = self
.grid_rev
.fields
.iter()
.map(|field| (&field.id, field))
.collect::<HashMap<&String, &Arc<FieldRevision>>>();
let fields = field_ids
.iter()
.flat_map(|field_id| match field_by_field_id.get(&field_id) {
None => {
tracing::error!("Can't find the field with id: {}", field_id);
None
},
Some(field) => Some((*field).clone()),
})
.collect::<Vec<Arc<FieldRevision>>>();
Ok(fields)
},
}
}
pub fn create_block_meta_rev(
&mut self,
block: GridBlockMetaRevision,
) -> SyncResult<Option<DatabaseRevisionChangeset>> {
self.modify_grid(|grid_meta| {
if grid_meta.blocks.iter().any(|b| b.block_id == block.block_id) {
tracing::warn!("Duplicate grid block");
Ok(None)
@ -294,142 +331,158 @@ impl DatabaseRevisionPad {
Ok(Some(()))
}
})
}
pub fn get_block_meta_revs(&self) -> Vec<Arc<GridBlockMetaRevision>> {
self.grid_rev.blocks.clone()
}
pub fn update_block_rev(
&mut self,
changeset: GridBlockMetaRevisionChangeset,
) -> SyncResult<Option<DatabaseRevisionChangeset>> {
let block_id = changeset.block_id.clone();
self.modify_block(&block_id, |block| {
let mut is_changed = None;
if let Some(row_count) = changeset.row_count {
block.row_count = row_count;
is_changed = Some(());
}
if let Some(start_row_index) = changeset.start_row_index {
block.start_row_index = start_row_index;
is_changed = Some(());
}
Ok(is_changed)
})
}
pub fn database_md5(&self) -> String {
md5(&self.operations.json_bytes())
}
pub fn operations_json_str(&self) -> String {
self.operations.json_str()
}
pub fn get_fields(&self) -> &[Arc<FieldRevision>] {
&self.grid_rev.fields
}
fn modify_grid<F>(&mut self, f: F) -> SyncResult<Option<DatabaseRevisionChangeset>>
where
F: FnOnce(&mut DatabaseRevision) -> SyncResult<Option<()>>,
{
let cloned_grid = self.grid_rev.clone();
match f(Arc::make_mut(&mut self.grid_rev))? {
None => Ok(None),
Some(_) => {
let old = make_database_rev_json_str(&cloned_grid)?;
let new = self.json_str()?;
match cal_diff::<EmptyAttributes>(old, new) {
None => Ok(None),
Some(operations) => {
self.operations = self.operations.compose(&operations)?;
Ok(Some(DatabaseRevisionChangeset {
operations,
md5: self.database_md5(),
}))
},
}
},
}
}
fn modify_block<F>(
&mut self,
block_id: &str,
f: F,
) -> SyncResult<Option<DatabaseRevisionChangeset>>
where
F: FnOnce(&mut GridBlockMetaRevision) -> SyncResult<Option<()>>,
{
self.modify_grid(|grid_rev| {
match grid_rev
.blocks
.iter()
.position(|block| block.block_id == block_id)
{
None => {
tracing::warn!("[GridMetaPad]: Can't find any block with id: {}", block_id);
Ok(None)
},
Some(index) => {
let block_rev = Arc::make_mut(&mut grid_rev.blocks[index]);
f(block_rev)
},
}
})
}
pub fn modify_field<F>(
&mut self,
field_id: &str,
f: F,
) -> SyncResult<Option<DatabaseRevisionChangeset>>
where
F: FnOnce(&mut FieldRevision) -> SyncResult<Option<()>>,
{
self.modify_grid(|grid_rev| {
match grid_rev
.fields
.iter()
.position(|field| field.id == field_id)
{
None => {
tracing::warn!("[GridMetaPad]: Can't find any field with id: {}", field_id);
Ok(None)
},
Some(index) => {
let mut_field_rev = Arc::make_mut(&mut grid_rev.fields[index]);
f(mut_field_rev)
},
}
})
}
pub fn json_str(&self) -> SyncResult<String> {
make_database_rev_json_str(&self.grid_rev)
}
}
pub fn make_database_rev_json_str(grid_revision: &DatabaseRevision) -> SyncResult<String> {
let json = serde_json::to_string(grid_revision)
.map_err(|err| internal_sync_error(format!("Serialize grid to json str failed. {:?}", err)))?;
Ok(json)
}
pub struct DatabaseRevisionChangeset {
pub operations: DatabaseOperations,
/// md5: the md5 of the grid after applying the change.
pub md5: String,
}
pub fn make_database_operations(grid_rev: &DatabaseRevision) -> DatabaseOperations {
let json = serde_json::to_string(&grid_rev).unwrap();
DatabaseOperationsBuilder::new().insert(&json).build()
}
pub fn make_database_revisions(_user_id: &str, grid_rev: &DatabaseRevision) -> Vec<Revision> {
let operations = make_database_operations(grid_rev);
let bytes = operations.json_bytes();
let revision = Revision::initial_revision(&grid_rev.grid_id, bytes);
vec![revision]
}
impl std::default::Default for DatabaseRevisionPad {
fn default() -> Self {
let grid = DatabaseRevision::new(&gen_grid_id());
let operations = make_database_operations(&grid);
DatabaseRevisionPad {
grid_rev: Arc::new(grid),
operations,
}
}
}
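A usage sketch, not taken from the repository: every public mutator above funnels through modify_grid, which serializes the grid before and after the closure runs and only emits a DatabaseRevisionChangeset when cal_diff detects a real difference. Assuming this crate is in scope, the behavior for a missing field looks like:

let mut pad = DatabaseRevisionPad::default();
assert!(!pad.contain_field("f1")); // "f1" is a made-up field id
// The closure never runs for an unknown id; a warning is logged and no
// changeset is produced.
let changeset = pad.modify_field("f1", |_field| Ok(Some(())))?;
assert!(changeset.is_none());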

View file

@ -2,8 +2,8 @@ use crate::errors::{internal_sync_error, SyncError, SyncResult};
use crate::util::cal_diff;
use flowy_sync::util::make_operations_from_revisions;
use grid_model::{
DatabaseViewRevision, FieldRevision, FieldTypeRevision, FilterRevision,
GroupConfigurationRevision, LayoutRevision, SortRevision,
};
use lib_infra::util::md5;
use lib_ot::core::{DeltaBuilder, DeltaOperations, EmptyAttributes, OperationTransform};
@ -15,305 +15,334 @@ pub type GridViewOperationsBuilder = DeltaBuilder;
#[derive(Debug, Clone)]
pub struct GridViewRevisionPad {
view: Arc<DatabaseViewRevision>,
operations: GridViewOperations,
}
impl std::ops::Deref for GridViewRevisionPad {
type Target = DatabaseViewRevision;
fn deref(&self) -> &Self::Target {
&self.view
}
}
impl GridViewRevisionPad {
// For the moment, the view_id is equal to grid_id. The grid_id represents the database id.
// A database can be referenced by multiple views.
pub fn new(grid_id: String, view_id: String, layout: LayoutRevision) -> Self {
let view = Arc::new(DatabaseViewRevision::new(grid_id, view_id, layout));
let json = serde_json::to_string(&view).unwrap();
let operations = GridViewOperationsBuilder::new().insert(&json).build();
Self { view, operations }
}
pub fn from_operations(view_id: &str, operations: GridViewOperations) -> SyncResult<Self> {
if operations.is_empty() {
return Ok(GridViewRevisionPad::new(
view_id.to_owned(),
view_id.to_owned(),
LayoutRevision::Grid,
));
}
let s = operations.content()?;
let view: DatabaseViewRevision = serde_json::from_str(&s).map_err(|e| {
let msg = format!("Deserialize operations to GridViewRevision failed: {}", e);
tracing::error!("parsing json: {}", s);
SyncError::internal().context(msg)
})?;
Ok(Self {
view: Arc::new(view),
operations,
})
}
pub fn from_revisions(view_id: &str, revisions: Vec<Revision>) -> SyncResult<Self> {
let operations: GridViewOperations = make_operations_from_revisions(revisions)?;
Self::from_operations(view_id, operations)
}
pub fn get_groups_by_field_revs(
&self,
field_revs: &[Arc<FieldRevision>],
) -> Vec<Arc<GroupConfigurationRevision>> {
self.groups.get_objects_by_field_revs(field_revs)
}
pub fn get_all_groups(&self) -> Vec<Arc<GroupConfigurationRevision>> {
self.groups.get_all_objects()
}
#[tracing::instrument(level = "trace", skip_all, err)]
pub fn insert_or_update_group_configuration(
&mut self,
field_id: &str,
field_type: &FieldTypeRevision,
group_configuration_rev: GroupConfigurationRevision,
) -> SyncResult<Option<GridViewRevisionChangeset>> {
self.modify(|view| {
// Only save one group
view.groups.clear();
view
.groups
.add_object(field_id, field_type, group_configuration_rev);
Ok(Some(()))
})
}
#[tracing::instrument(level = "trace", skip_all)]
pub fn contains_group(&self, field_id: &str, field_type: &FieldTypeRevision) -> bool {
self.view.groups.get_objects(field_id, field_type).is_some()
}
#[tracing::instrument(level = "trace", skip_all, err)]
pub fn with_mut_group<F: FnOnce(&mut GroupConfigurationRevision)>(
&mut self,
field_id: &str,
field_type: &FieldTypeRevision,
configuration_id: &str,
mut_configuration_fn: F,
) -> SyncResult<Option<GridViewRevisionChangeset>> {
self.modify(
|view| match view.groups.get_mut_objects(field_id, field_type) {
None => Ok(None),
Some(configurations_revs) => {
for configuration_rev in configurations_revs {
if configuration_rev.id == configuration_id {
mut_configuration_fn(Arc::make_mut(configuration_rev));
return Ok(Some(()));
}
}
Ok(None)
},
},
)
}
pub fn delete_group(
&mut self,
group_id: &str,
field_id: &str,
field_type: &FieldTypeRevision,
) -> SyncResult<Option<GridViewRevisionChangeset>> {
self.modify(|view| {
if let Some(groups) = view.groups.get_mut_objects(field_id, field_type) {
groups.retain(|group| group.id != group_id);
Ok(Some(()))
} else {
Ok(None)
}
})
}
pub fn get_all_sorts(&self, _field_revs: &[Arc<FieldRevision>]) -> Vec<Arc<SortRevision>> {
self.sorts.get_all_objects()
}
/// For the moment, a field type can have only one sort.
pub fn get_sorts(
&self,
field_id: &str,
field_type_rev: &FieldTypeRevision,
) -> Vec<Arc<SortRevision>> {
self
.sorts
.get_objects(field_id, field_type_rev)
.unwrap_or_default()
}
pub fn get_sort(
&self,
field_id: &str,
field_type_rev: &FieldTypeRevision,
sort_id: &str,
) -> Option<Arc<SortRevision>> {
self
.sorts
.get_object(field_id, field_type_rev, |sort| sort.id == sort_id)
}
pub fn insert_sort(
&mut self,
field_id: &str,
sort_rev: SortRevision,
) -> SyncResult<Option<GridViewRevisionChangeset>> {
self.modify(|view| {
let field_type = sort_rev.field_type;
view.sorts.add_object(field_id, &field_type, sort_rev);
Ok(Some(()))
})
}
pub fn update_sort(
&mut self,
field_id: &str,
sort_rev: SortRevision,
) -> SyncResult<Option<GridViewRevisionChangeset>> {
self.modify(|view| {
if let Some(sort) = view
.sorts
.get_mut_object(field_id, &sort_rev.field_type, |sort| {
sort.id == sort_rev.id
})
{
let sort = Arc::make_mut(sort);
sort.condition = sort_rev.condition;
Ok(Some(()))
} else {
Ok(None)
}
})
}
pub fn delete_sort<T: Into<FieldTypeRevision>>(
&mut self,
sort_id: &str,
field_id: &str,
field_type: T,
) -> SyncResult<Option<GridViewRevisionChangeset>> {
let field_type = field_type.into();
self.modify(|view| {
if let Some(sorts) = view.sorts.get_mut_objects(field_id, &field_type) {
sorts.retain(|sort| sort.id != sort_id);
Ok(Some(()))
} else {
Ok(None)
}
})
}
pub fn delete_all_sorts(&mut self) -> SyncResult<Option<GridViewRevisionChangeset>> {
self.modify(|view| {
view.sorts.clear();
Ok(Some(()))
})
}
pub fn get_all_filters(&self, field_revs: &[Arc<FieldRevision>]) -> Vec<Arc<FilterRevision>> {
self.filters.get_objects_by_field_revs(field_revs)
}
/// For the moment, a field type can have only one filter.
pub fn get_filters(
&self,
field_id: &str,
field_type_rev: &FieldTypeRevision,
) -> Vec<Arc<FilterRevision>> {
self
.filters
.get_objects(field_id, field_type_rev)
.unwrap_or_default()
}
pub fn get_filter(
&self,
field_id: &str,
field_type_rev: &FieldTypeRevision,
filter_id: &str,
) -> Option<Arc<FilterRevision>> {
self
.filters
.get_object(field_id, field_type_rev, |filter| filter.id == filter_id)
}
pub fn insert_filter(
&mut self,
field_id: &str,
filter_rev: FilterRevision,
) -> SyncResult<Option<GridViewRevisionChangeset>> {
self.modify(|view| {
let field_type = filter_rev.field_type;
view.filters.add_object(field_id, &field_type, filter_rev);
Ok(Some(()))
})
}
pub fn update_filter(
&mut self,
field_id: &str,
filter_rev: FilterRevision,
) -> SyncResult<Option<GridViewRevisionChangeset>> {
self.modify(|view| {
if let Some(filter) = view
.filters
.get_mut_object(field_id, &filter_rev.field_type, |filter| {
filter.id == filter_rev.id
})
{
let filter = Arc::make_mut(filter);
filter.condition = filter_rev.condition;
filter.content = filter_rev.content;
Ok(Some(()))
} else {
Ok(None)
}
})
}
pub fn delete_filter<T: Into<FieldTypeRevision>>(
&mut self,
filter_id: &str,
field_id: &str,
field_type: T,
) -> SyncResult<Option<GridViewRevisionChangeset>> {
let field_type = field_type.into();
self.modify(|view| {
if let Some(filters) = view.filters.get_mut_objects(field_id, &field_type) {
filters.retain(|filter| filter.id != filter_id);
Ok(Some(()))
} else {
Ok(None)
}
})
}
pub fn json_str(&self) -> SyncResult<String> {
make_grid_view_rev_json_str(&self.view)
}
pub fn layout(&self) -> LayoutRevision {
self.layout.clone()
}
fn modify<F>(&mut self, f: F) -> SyncResult<Option<GridViewRevisionChangeset>>
where
F: FnOnce(&mut DatabaseViewRevision) -> SyncResult<Option<()>>,
{
let cloned_view = self.view.clone();
match f(Arc::make_mut(&mut self.view))? {
None => Ok(None),
Some(_) => {
let old = make_grid_view_rev_json_str(&cloned_view)?;
let new = self.json_str()?;
match cal_diff::<EmptyAttributes>(old, new) {
None => Ok(None),
Some(operations) => {
self.operations = self.operations.compose(&operations)?;
let md5 = md5(&self.operations.json_bytes());
Ok(Some(GridViewRevisionChangeset { operations, md5 }))
},
}
},
}
}
}
#[derive(Debug)]
pub struct GridViewRevisionChangeset {
pub operations: GridViewOperations,
pub md5: String,
}
pub fn make_grid_view_rev_json_str(grid_revision: &DatabaseViewRevision) -> SyncResult<String> {
let json = serde_json::to_string(grid_revision).map_err(|err| {
internal_sync_error(format!("Serialize grid view to json str failed. {:?}", err))
})?;
Ok(json)
}
pub fn make_grid_view_operations(grid_view: &DatabaseViewRevision) -> GridViewOperations {
let json = serde_json::to_string(grid_view).unwrap();
GridViewOperationsBuilder::new().insert(&json).build()
}

View file

@ -1,9 +1,9 @@
use crate::{
client_document::{
history::{History, UndoResult},
view::{ViewExtensions, RECORD_THRESHOLD},
},
errors::SyncError,
};
use bytes::Bytes;
use lib_infra::util::md5;
@ -12,227 +12,252 @@ use lib_ot::{core::*, text_delta::DeltaTextOperations};
use tokio::sync::mpsc;
pub trait InitialDocument {
fn json_str() -> String;
}
pub struct EmptyDocument();
impl InitialDocument for EmptyDocument {
fn json_str() -> String {
DeltaTextOperations::default().json_str()
}
}
pub struct NewlineDocument();
impl InitialDocument for NewlineDocument {
fn json_str() -> String {
initial_delta_document_content()
}
}
pub fn initial_delta_document_content() -> String {
DeltaTextOperationBuilder::new()
.insert("\n")
.build()
.json_str()
}
pub struct ClientDocument {
operations: DeltaTextOperations,
history: History,
view: ViewExtensions,
last_edit_time: usize,
notify: Option<mpsc::UnboundedSender<()>>,
}
impl ClientDocument {
pub fn new<C: InitialDocument>() -> Self {
let content = C::json_str();
Self::from_json(&content).unwrap()
}
pub fn from_operations(operations: DeltaTextOperations) -> Self {
ClientDocument {
operations,
history: History::new(),
view: ViewExtensions::new(),
last_edit_time: 0,
notify: None,
}
}
pub fn from_json(json: &str) -> Result<Self, SyncError> {
let operations = DeltaTextOperations::from_json(json)?;
Ok(Self::from_operations(operations))
}
pub fn get_operations_json(&self) -> String {
self.operations.json_str()
}
pub fn to_bytes(&self) -> Bytes {
self.operations.json_bytes()
}
pub fn to_content(&self) -> String {
self.operations.content().unwrap()
}
pub fn get_operations(&self) -> &DeltaTextOperations {
&self.operations
}
pub fn document_md5(&self) -> String {
let bytes = self.to_bytes();
md5(&bytes)
}
pub fn set_notify(&mut self, notify: mpsc::UnboundedSender<()>) {
self.notify = Some(notify);
}
pub fn set_operations(&mut self, operations: DeltaTextOperations) {
tracing::trace!("document: {}", operations.json_str());
self.operations = operations;
match &self.notify {
None => {},
Some(notify) => {
let _ = notify.send(());
},
}
}
pub fn compose_operations(&mut self, operations: DeltaTextOperations) -> Result<(), SyncError> {
tracing::trace!(
"{} compose {}",
&self.operations.json_str(),
operations.json_str()
);
let composed_operations = self.operations.compose(&operations)?;
let mut undo_operations = operations.invert(&self.operations);
let now = chrono::Utc::now().timestamp_millis() as usize;
if now - self.last_edit_time < RECORD_THRESHOLD {
if let Some(last_operation) = self.history.undo() {
tracing::trace!("compose previous change");
tracing::trace!("current = {}", undo_operations);
tracing::trace!("previous = {}", last_operation);
undo_operations = undo_operations.compose(&last_operation)?;
}
} else {
self.last_edit_time = now;
} }
if !undo_operations.is_empty() {
tracing::trace!("add history operations: {}", undo_operations);
self.history.record(undo_operations);
}
self.set_operations(composed_operations);
Ok(())
}
pub fn insert<T: ToString>(
&mut self,
index: usize,
data: T,
) -> Result<DeltaTextOperations, SyncError> {
let text = data.to_string();
let interval = Interval::new(index, index);
validate_interval(&self.operations, &interval)?;
let operations = self.view.insert(&self.operations, &text, interval)?;
self.compose_operations(operations.clone())?;
Ok(operations)
}
pub fn delete(&mut self, interval: Interval) -> Result<DeltaTextOperations, SyncError> {
validate_interval(&self.operations, &interval)?;
debug_assert!(!interval.is_empty());
let operations = self.view.delete(&self.operations, interval)?;
if !operations.is_empty() {
self.compose_operations(operations.clone())?;
}
Ok(operations)
}
pub fn format(
&mut self,
interval: Interval,
attribute: AttributeEntry,
) -> Result<DeltaTextOperations, SyncError> {
validate_interval(&self.operations, &interval)?;
tracing::trace!("format {} with {:?}", interval, attribute);
let operations = self
.view
.format(&self.operations, attribute, interval)
.unwrap();
self.compose_operations(operations.clone())?;
Ok(operations)
}
pub fn replace<T: ToString>(
&mut self,
interval: Interval,
data: T,
) -> Result<DeltaTextOperations, SyncError> {
validate_interval(&self.operations, &interval)?;
let mut operations = DeltaTextOperations::default();
let text = data.to_string();
if !text.is_empty() {
operations = self.view.insert(&self.operations, &text, interval)?;
self.compose_operations(operations.clone())?;
}
if !interval.is_empty() {
let delete = self.delete(interval)?;
operations = operations.compose(&delete)?;
}
Ok(operations)
}
pub fn can_undo(&self) -> bool {
self.history.can_undo()
}
pub fn can_redo(&self) -> bool {
self.history.can_redo()
}
pub fn undo(&mut self) -> Result<UndoResult, SyncError> {
match self.history.undo() {
None => Err(SyncError::undo().context("Undo stack is empty")),
Some(undo_operations) => {
let (new_operations, inverted_operations) = self.invert(&undo_operations)?;
self.set_operations(new_operations);
self.history.add_redo(inverted_operations);
Ok(UndoResult {
operations: undo_operations,
})
},
}
}
pub fn redo(&mut self) -> Result<UndoResult, SyncError> {
match self.history.redo() {
None => Err(SyncError::redo()),
Some(redo_operations) => {
let (new_operations, inverted_operations) = self.invert(&redo_operations)?;
self.set_operations(new_operations);
self.history.add_undo(inverted_operations);
Ok(UndoResult {
operations: redo_operations,
})
},
}
}
pub fn is_empty(&self) -> bool {
// The document is empty if its text is equal to the initial text.
self.operations.json_str() == NewlineDocument::json_str()
}
}
impl ClientDocument {
fn invert(
&self,
operations: &DeltaTextOperations,
) -> Result<(DeltaTextOperations, DeltaTextOperations), SyncError> {
// c = a.compose(b)
// d = b.invert(a)
// a = c.compose(d)
let new_operations = self.operations.compose(operations)?;
let inverted_operations = operations.invert(&self.operations);
Ok((new_operations, inverted_operations))
}
}
fn validate_interval(
operations: &DeltaTextOperations,
interval: &Interval,
) -> Result<(), SyncError> {
if operations.utf16_target_len < interval.end {
tracing::error!(
"{:?} out of bounds. should 0..{}",
interval,
operations.utf16_target_len
);
return Err(SyncError::out_of_bound());
}
Ok(())
}
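The invert helper leans on the operational-transform identities noted in its comment (c = a.compose(b), d = b.invert(a), a = c.compose(d)), which is what makes undo and redo symmetric. A minimal round-trip sketch built only from the APIs shown above, assuming this crate is in scope and that inversion restores the initial delta exactly:

let mut doc = ClientDocument::new::<NewlineDocument>();
doc.insert(0, "abc")?; // composes the insert and records its inverse in history
assert!(doc.can_undo());
doc.undo()?; // applies the inverse; the redo stack now holds the insert
assert!(doc.is_empty());
doc.redo()?;
assert_eq!(doc.to_content(), "abc\n");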

View file

@ -1,21 +1,21 @@
use crate::client_document::DeleteExt;
use lib_ot::{
core::{DeltaOperationBuilder, Interval},
text_delta::DeltaTextOperations,
};
pub struct DefaultDelete {}
impl DeleteExt for DefaultDelete {
fn ext_name(&self) -> &str {
"DefaultDelete"
}
fn apply(&self, _delta: &DeltaTextOperations, interval: Interval) -> Option<DeltaTextOperations> {
Some(
DeltaOperationBuilder::new()
.retain(interval.start)
.delete(interval.size())
.build(),
)
}
}
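A hedged sketch of the resulting delta: since apply ignores the incoming document, deleting the interval [2, 5) always yields a retain-then-delete pair.

let op = DefaultDelete {}.apply(&DeltaTextOperations::default(), Interval::new(2, 5)).unwrap();
// Equivalent to DeltaOperationBuilder::new().retain(2).delete(3).build()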

View file

@ -1,62 +1,65 @@
use crate::{client_document::DeleteExt, util::is_newline};
use lib_ot::{
core::{
DeltaOperationBuilder, Interval, OperationAttributes, OperationIterator, Utf16CodeUnitMetric,
NEW_LINE,
},
text_delta::{empty_attributes, DeltaTextOperations},
};
pub struct PreserveLineFormatOnMerge {}
impl DeleteExt for PreserveLineFormatOnMerge {
fn ext_name(&self) -> &str {
"PreserveLineFormatOnMerge"
}
fn apply(&self, delta: &DeltaTextOperations, interval: Interval) -> Option<DeltaTextOperations> {
if interval.is_empty() {
return None;
}
// Seek to the interval start, e.g. the position where the user pressed backspace
let mut iter = OperationIterator::from_offset(delta, interval.start);
// op will be the "\n"
let newline_op = iter.next_op_with_len(1)?;
if !is_newline(newline_op.get_data()) {
return None;
}
iter.seek::<Utf16CodeUnitMetric>(interval.size() - 1);
let mut new_delta = DeltaOperationBuilder::new()
.retain(interval.start)
.delete(interval.size())
.build();
while iter.has_next() {
match iter.next() {
None => tracing::error!("op must not be None when has_next() returns true"),
Some(op) => {
match op.get_data().find(NEW_LINE) {
None => {
new_delta.retain(op.len(), empty_attributes());
continue;
},
Some(line_break) => {
let mut attributes = op.get_attributes();
attributes.remove_all_value();
if newline_op.has_attribute() {
attributes.extend(newline_op.get_attributes());
}
new_delta.retain(line_break, empty_attributes());
new_delta.retain(1, attributes);
break;
},
}
},
}
}
Some(new_delta)
}
}
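A hedged walk-through with an illustrative document, to make the intent concrete:

// Document: [{"insert":"abc"},{"insert":"\n","attributes":{"header":1}},{"insert":"def\n"}]
// Deleting interval [3, 4) removes the formatted "\n" that ends the "abc" line.
// The extension scans forward to the next "\n" and re-applies the deleted
// newline's attributes to it, producing roughly:
//   retain(3) . delete(1) . retain(3) . retain(1, {"header":1})
// so the merged line keeps its header format instead of losing it.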

View file

@ -1,61 +1,63 @@
use lib_ot::core::AttributeEntry;
use lib_ot::text_delta::is_block;
use lib_ot::{
core::{DeltaOperationBuilder, Interval, OperationIterator},
text_delta::{empty_attributes, AttributeScope, DeltaTextOperations},
};
use crate::{
client_document::{extensions::helper::line_break, FormatExt},
util::find_newline,
};
pub struct ResolveBlockFormat {}
impl FormatExt for ResolveBlockFormat {
fn ext_name(&self) -> &str {
"ResolveBlockFormat"
}
fn apply(
&self,
delta: &DeltaTextOperations,
interval: Interval,
attribute: &AttributeEntry,
) -> Option<DeltaTextOperations> {
if !is_block(&attribute.key) {
return None;
}
let mut new_delta = DeltaOperationBuilder::new().retain(interval.start).build();
let mut iter = OperationIterator::from_offset(delta, interval.start);
let mut start = 0;
let end = interval.size();
while start < end && iter.has_next() {
let next_op = iter.next_op_with_len(end - start).unwrap();
match find_newline(next_op.get_data()) {
None => new_delta.retain(next_op.len(), empty_attributes()),
Some(_) => {
let tmp_delta = line_break(&next_op, attribute, AttributeScope::Block);
new_delta.extend(tmp_delta);
},
}
start += next_op.len();
}
while iter.has_next() {
let op = iter
.next_op()
.expect("Unexpected None, iter.has_next() must return op");
match find_newline(op.get_data()) {
None => new_delta.retain(op.len(), empty_attributes()),
Some(line_break) => {
new_delta.retain(line_break, empty_attributes());
new_delta.retain(1, attribute.clone().into());
break;
},
}
}
Some(new_delta)
}
}

View file

@ -1,48 +1,48 @@
use lib_ot::core::AttributeEntry;
use lib_ot::text_delta::is_inline;
use lib_ot::{
core::{DeltaOperationBuilder, Interval, OperationIterator},
text_delta::{AttributeScope, DeltaTextOperations},
};
use crate::{
client_document::{extensions::helper::line_break, FormatExt},
util::find_newline,
};
pub struct ResolveInlineFormat {}
impl FormatExt for ResolveInlineFormat {
fn ext_name(&self) -> &str {
"ResolveInlineFormat"
}
fn apply(
&self,
delta: &DeltaTextOperations,
interval: Interval,
attribute: &AttributeEntry,
) -> Option<DeltaTextOperations> {
if !is_inline(&attribute.key) {
return None;
}
let mut new_delta = DeltaOperationBuilder::new().retain(interval.start).build();
let mut iter = OperationIterator::from_offset(delta, interval.start);
let mut start = 0;
let end = interval.size();
while start < end && iter.has_next() {
let next_op = iter.next_op_with_len(end - start).unwrap();
match find_newline(next_op.get_data()) {
None => new_delta.retain(next_op.len(), attribute.clone().into()),
Some(_) => {
let tmp_delta = line_break(&next_op, attribute, AttributeScope::Inline);
new_delta.extend(tmp_delta);
},
}
start += next_op.len();
}
Some(new_delta)
}
}

View file

@ -1,42 +1,44 @@
use crate::util::find_newline;
use lib_ot::core::AttributeEntry;
use lib_ot::text_delta::{
empty_attributes, AttributeScope, DeltaTextOperation, DeltaTextOperations,
};
pub(crate) fn line_break(
op: &DeltaTextOperation,
attribute: &AttributeEntry,
scope: AttributeScope,
) -> DeltaTextOperations {
let mut new_delta = DeltaTextOperations::new();
let mut start = 0;
let end = op.len();
let mut s = op.get_data();
while let Some(line_break) = find_newline(s) {
match scope {
AttributeScope::Inline => {
new_delta.retain(line_break - start, attribute.clone().into());
new_delta.retain(1, empty_attributes());
},
AttributeScope::Block => {
new_delta.retain(line_break - start, empty_attributes());
new_delta.retain(1, attribute.clone().into());
},
_ => {
tracing::error!("Unsupported parser line break for {:?}", scope);
},
}
start = line_break + 1;
s = &s[start..s.len()];
}
if start < end {
match scope {
AttributeScope::Inline => new_delta.retain(end - start, attribute.clone().into()),
AttributeScope::Block => new_delta.retain(end - start, empty_attributes()),
_ => tracing::error!("Unsupported parser line break for {:?}", scope),
}
}
new_delta
}

View file

@ -5,56 +5,56 @@ use lib_ot::text_delta::{attributes_except_header, BuildInTextAttributeKey, Delt
pub struct AutoExitBlock {}
impl InsertExt for AutoExitBlock {
fn ext_name(&self) -> &str {
"AutoExitBlock"
}
fn apply(
&self,
delta: &DeltaTextOperations,
replace_len: usize,
text: &str,
index: usize,
) -> Option<DeltaTextOperations> {
// Auto exit block is triggered by entering two consecutive newlines
if !is_newline(text) {
return None;
}
if !is_empty_line_at_index(delta, index) {
return None;
}
let mut iter = OperationIterator::from_offset(delta, index);
let next = iter.next_op()?;
let mut attributes = next.get_attributes();
let block_attributes = attributes_except_header(&next);
if block_attributes.is_empty() {
return None;
}
if next.len() > 1 {
return None;
}
match iter.next_op_with_newline() {
None => {},
Some((newline_op, _)) => {
let newline_attributes = attributes_except_header(&newline_op);
if block_attributes == newline_attributes {
return None;
}
},
}
attributes.retain_values(&[BuildInTextAttributeKey::Header.as_ref()]);
Some(
DeltaOperationBuilder::new()
.retain(index + replace_len)
.retain_with_attributes(1, attributes)
.build(),
)
}
}

View file

@ -1,94 +1,94 @@
use crate::{client_document::InsertExt, util::is_whitespace};
use lib_ot::core::AttributeHashMap;
use lib_ot::{
core::{count_utf16_code_units, DeltaOperationBuilder, OperationIterator},
text_delta::{empty_attributes, BuildInTextAttribute, DeltaTextOperations},
};
use std::cmp::min;
use url::Url;
pub struct AutoFormatExt {}
impl InsertExt for AutoFormatExt {
fn ext_name(&self) -> &str {
"AutoFormatExt"
}
fn apply(
&self,
delta: &DeltaTextOperations,
replace_len: usize,
text: &str,
index: usize,
) -> Option<DeltaTextOperations> {
// Entering whitespace triggers the auto format
if !is_whitespace(text) {
return None;
}
let mut iter = OperationIterator::new(delta);
if let Some(prev) = iter.next_op_with_len(index) {
match AutoFormat::parse(prev.get_data()) {
None => {},
Some(formatter) => {
let mut new_attributes = prev.get_attributes();
// format_len should not be greater than index. The url crate appends "/" to
// the end of the input string, which can make format_len greater than the
// length of the input string
let format_len = min(index, formatter.format_len());
let format_attributes = formatter.to_attributes();
format_attributes.iter().for_each(|(k, v)| {
if !new_attributes.contains_key(k) {
new_attributes.insert(k.clone(), v.clone());
}
});
let next_attributes = match iter.next_op() {
None => empty_attributes(),
Some(op) => op.get_attributes(),
};
return Some(
DeltaOperationBuilder::new()
.retain(index + replace_len - min(index, format_len))
.retain_with_attributes(format_len, format_attributes)
.insert_with_attributes(text, next_attributes)
.build(),
);
},
}
}
None
}
}
pub enum AutoFormatter { pub enum AutoFormatter {
Url(Url), Url(Url),
} }
impl AutoFormatter { impl AutoFormatter {
pub fn to_attributes(&self) -> AttributeHashMap { pub fn to_attributes(&self) -> AttributeHashMap {
match self { match self {
AutoFormatter::Url(url) => BuildInTextAttribute::Link(url.as_str()).into(), AutoFormatter::Url(url) => BuildInTextAttribute::Link(url.as_str()).into(),
}
} }
}
pub fn format_len(&self) -> usize { pub fn format_len(&self) -> usize {
let s = match self { let s = match self {
AutoFormatter::Url(url) => url.to_string(), AutoFormatter::Url(url) => url.to_string(),
}; };
count_utf16_code_units(&s) count_utf16_code_units(&s)
} }
} }
pub struct AutoFormat {} pub struct AutoFormat {}
impl AutoFormat { impl AutoFormat {
fn parse(s: &str) -> Option<AutoFormatter> { fn parse(s: &str) -> Option<AutoFormatter> {
if let Ok(url) = Url::parse(s) { if let Ok(url) = Url::parse(s) {
return Some(AutoFormatter::Url(url)); return Some(AutoFormatter::Url(url));
}
None
} }
None
}
} }
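
A side note on the clamp min(index, formatter.format_len()) above: Url::to_string can yield more characters than the user actually typed, because the url crate normalizes a bare authority by appending "/". A minimal standalone sketch of that behavior (requires the url crate; the values are illustrative):

// Sketch: why format_len can exceed the typed text length.
use url::Url;

fn main() {
  let typed = "https://appflowy.io"; // what the user typed: 19 UTF-16 code units
  let parsed = Url::parse(typed).unwrap();
  // The url crate appends a trailing "/" when the path is empty.
  assert_eq!(parsed.to_string(), "https://appflowy.io/");
  let format_len = parsed.to_string().encode_utf16().count();
  assert_eq!(format_len, typed.encode_utf16().count() + 1); // 20 vs 19
}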
View file
@ -1,50 +1,50 @@
use crate::client_document::InsertExt;
use lib_ot::core::AttributeHashMap;
use lib_ot::{
  core::{DeltaOperationBuilder, OperationAttributes, OperationIterator, NEW_LINE},
  text_delta::{BuildInTextAttributeKey, DeltaTextOperations},
};

pub struct DefaultInsertAttribute {}
impl InsertExt for DefaultInsertAttribute {
  fn ext_name(&self) -> &str {
    "DefaultInsertAttribute"
  }

  fn apply(
    &self,
    delta: &DeltaTextOperations,
    replace_len: usize,
    text: &str,
    index: usize,
  ) -> Option<DeltaTextOperations> {
    let iter = OperationIterator::new(delta);
    let mut attributes = AttributeHashMap::new();

    // Ensure that each line split by "\n" keeps the block attributes. For example,
    // inserting "\n" into "123456" at index 3 yields:
    //
    // [{"insert":"123"},{"insert":"\n","attributes":{"header":1}},
    // {"insert":"456"},{"insert":"\n","attributes":{"header":1}}]
    if text.ends_with(NEW_LINE) {
      match iter.last() {
        None => {},
        Some(op) => {
          if op
            .get_attributes()
            .contains_key(BuildInTextAttributeKey::Header.as_ref())
          {
            attributes.extend(op.get_attributes());
          }
        },
      }
    }

    Some(
      DeltaOperationBuilder::new()
        .retain(index + replace_len)
        .insert_with_attributes(text, attributes)
        .build(),
    )
  }
}
View file
@ -16,34 +16,34 @@ mod reset_format_on_new_line;
pub struct InsertEmbedsExt {}
impl InsertExt for InsertEmbedsExt {
  fn ext_name(&self) -> &str {
    "InsertEmbedsExt"
  }

  fn apply(
    &self,
    _delta: &DeltaTextOperations,
    _replace_len: usize,
    _text: &str,
    _index: usize,
  ) -> Option<DeltaTextOperations> {
    None
  }
}

pub struct ForceNewlineForInsertsAroundEmbedExt {}
impl InsertExt for ForceNewlineForInsertsAroundEmbedExt {
  fn ext_name(&self) -> &str {
    "ForceNewlineForInsertsAroundEmbedExt"
  }

  fn apply(
    &self,
    _delta: &DeltaTextOperations,
    _replace_len: usize,
    _text: &str,
    _index: usize,
  ) -> Option<DeltaTextOperations> {
    None
  }
}
View file
@ -1,68 +1,72 @@
use crate::{client_document::InsertExt, util::is_newline};
use lib_ot::core::AttributeHashMap;
use lib_ot::{
  core::{DeltaOperationBuilder, OperationIterator, NEW_LINE},
  text_delta::{
    attributes_except_header, empty_attributes, BuildInTextAttributeKey, DeltaTextOperations,
  },
};

pub struct PreserveBlockFormatOnInsert {}
impl InsertExt for PreserveBlockFormatOnInsert {
  fn ext_name(&self) -> &str {
    "PreserveBlockFormatOnInsert"
  }

  fn apply(
    &self,
    delta: &DeltaTextOperations,
    replace_len: usize,
    text: &str,
    index: usize,
  ) -> Option<DeltaTextOperations> {
    if !is_newline(text) {
      return None;
    }

    let mut iter = OperationIterator::from_offset(delta, index);
    match iter.next_op_with_newline() {
      None => {},
      Some((newline_op, offset)) => {
        let newline_attributes = newline_op.get_attributes();
        let block_attributes = attributes_except_header(&newline_op);
        if block_attributes.is_empty() {
          return None;
        }

        let mut reset_attribute = AttributeHashMap::new();
        if newline_attributes.contains_key(BuildInTextAttributeKey::Header.as_ref()) {
          reset_attribute.insert(BuildInTextAttributeKey::Header, 1);
        }

        let lines: Vec<_> = text.split(NEW_LINE).collect();
        let mut new_delta = DeltaOperationBuilder::new()
          .retain(index + replace_len)
          .build();
        lines.iter().enumerate().for_each(|(i, line)| {
          if !line.is_empty() {
            new_delta.insert(line, empty_attributes());
          }

          if i == 0 {
            new_delta.insert(NEW_LINE, newline_attributes.clone());
          } else if i < lines.len() - 1 {
            new_delta.insert(NEW_LINE, block_attributes.clone());
          } else {
            // do nothing
          }
        });

        if !reset_attribute.is_empty() {
          new_delta.retain(offset, empty_attributes());
          let len = newline_op.get_data().find(NEW_LINE).unwrap();
          new_delta.retain(len, empty_attributes());
          new_delta.retain(1, reset_attribute);
        }

        return Some(new_delta);
      },
    }

    None
  }
}
View file
@ -1,109 +1,109 @@
use crate::{
  client_document::InsertExt,
  util::{contain_newline, is_newline},
};
use lib_ot::{
  core::{DeltaOperationBuilder, OpNewline, OperationIterator, NEW_LINE},
  text_delta::{empty_attributes, BuildInTextAttributeKey, DeltaTextOperations},
};

pub struct PreserveInlineFormat {}
impl InsertExt for PreserveInlineFormat {
  fn ext_name(&self) -> &str {
    "PreserveInlineFormat"
  }

  fn apply(
    &self,
    delta: &DeltaTextOperations,
    replace_len: usize,
    text: &str,
    index: usize,
  ) -> Option<DeltaTextOperations> {
    if contain_newline(text) {
      return None;
    }

    let mut iter = OperationIterator::new(delta);
    let prev = iter.next_op_with_len(index)?;
    if OpNewline::parse(&prev).is_contain() {
      return None;
    }

    let mut attributes = prev.get_attributes();
    if attributes.is_empty() || !attributes.contains_key(BuildInTextAttributeKey::Link.as_ref()) {
      return Some(
        DeltaOperationBuilder::new()
          .retain(index + replace_len)
          .insert_with_attributes(text, attributes)
          .build(),
      );
    }

    let next = iter.next_op();
    match &next {
      None => attributes = empty_attributes(),
      Some(next) => {
        if OpNewline::parse(next).is_equal() {
          attributes = empty_attributes();
        }
      },
    }

    let new_delta = DeltaOperationBuilder::new()
      .retain(index + replace_len)
      .insert_with_attributes(text, attributes)
      .build();
    Some(new_delta)
  }
}

pub struct PreserveLineFormatOnSplit {}
impl InsertExt for PreserveLineFormatOnSplit {
  fn ext_name(&self) -> &str {
    "PreserveLineFormatOnSplit"
  }

  fn apply(
    &self,
    delta: &DeltaTextOperations,
    replace_len: usize,
    text: &str,
    index: usize,
  ) -> Option<DeltaTextOperations> {
    if !is_newline(text) {
      return None;
    }

    let mut iter = OperationIterator::new(delta);
    let prev = iter.next_op_with_len(index)?;
    if OpNewline::parse(&prev).is_end() {
      return None;
    }

    let next = iter.next_op()?;
    let newline_status = OpNewline::parse(&next);
    if newline_status.is_end() {
      return None;
    }

    let mut new_delta = DeltaTextOperations::new();
    new_delta.retain(index + replace_len, empty_attributes());

    if newline_status.is_contain() {
      debug_assert!(!next.has_attribute());
      new_delta.insert(NEW_LINE, empty_attributes());
      return Some(new_delta);
    }

    match iter.next_op_with_newline() {
      None => {},
      Some((newline_op, _)) => {
        new_delta.insert(NEW_LINE, newline_op.get_attributes());
      },
    }

    Some(new_delta)
  }
}
View file
@ -1,50 +1,50 @@
use crate::{client_document::InsertExt, util::is_newline};
use lib_ot::core::AttributeHashMap;
use lib_ot::{
  core::{DeltaOperationBuilder, OperationIterator, Utf16CodeUnitMetric, NEW_LINE},
  text_delta::{BuildInTextAttributeKey, DeltaTextOperations},
};

pub struct ResetLineFormatOnNewLine {}
impl InsertExt for ResetLineFormatOnNewLine {
  fn ext_name(&self) -> &str {
    "ResetLineFormatOnNewLine"
  }

  fn apply(
    &self,
    delta: &DeltaTextOperations,
    replace_len: usize,
    text: &str,
    index: usize,
  ) -> Option<DeltaTextOperations> {
    if !is_newline(text) {
      return None;
    }

    let mut iter = OperationIterator::new(delta);
    iter.seek::<Utf16CodeUnitMetric>(index);
    let next_op = iter.next_op()?;
    if !next_op.get_data().starts_with(NEW_LINE) {
      return None;
    }

    let mut reset_attribute = AttributeHashMap::new();
    if next_op
      .get_attributes()
      .contains_key(BuildInTextAttributeKey::Header.as_ref())
    {
      reset_attribute.remove_value(BuildInTextAttributeKey::Header);
    }

    let len = index + replace_len;
    Some(
      DeltaOperationBuilder::new()
        .retain(len)
        .insert_with_attributes(NEW_LINE, next_op.get_attributes())
        .retain_with_attributes(1, reset_attribute)
        .trim()
        .build(),
    )
  }
}
View file
@ -14,27 +14,27 @@ pub type FormatExtension = Box<dyn FormatExt + Send + Sync>;
pub type DeleteExtension = Box<dyn DeleteExt + Send + Sync>;

pub trait InsertExt {
  fn ext_name(&self) -> &str;
  fn apply(
    &self,
    delta: &DeltaTextOperations,
    replace_len: usize,
    text: &str,
    index: usize,
  ) -> Option<DeltaTextOperations>;
}

pub trait FormatExt {
  fn ext_name(&self) -> &str;
  fn apply(
    &self,
    delta: &DeltaTextOperations,
    interval: Interval,
    attribute: &AttributeEntry,
  ) -> Option<DeltaTextOperations>;
}

pub trait DeleteExt {
  fn ext_name(&self) -> &str;
  fn apply(&self, delta: &DeltaTextOperations, interval: Interval) -> Option<DeltaTextOperations>;
}
View file
@ -4,77 +4,77 @@ const MAX_UNDOES: usize = 20;
#[derive(Debug, Clone)]
pub struct UndoResult {
  pub operations: DeltaTextOperations,
}

#[derive(Debug, Clone)]
pub struct History {
  #[allow(dead_code)]
  cur_undo: usize,
  undoes: Vec<DeltaTextOperations>,
  redoes: Vec<DeltaTextOperations>,
  capacity: usize,
}

impl std::default::Default for History {
  fn default() -> Self {
    History {
      cur_undo: 1,
      undoes: Vec::new(),
      redoes: Vec::new(),
      capacity: MAX_UNDOES,
    }
  }
}

impl History {
  pub fn new() -> Self {
    History::default()
  }

  pub fn can_undo(&self) -> bool {
    !self.undoes.is_empty()
  }

  pub fn can_redo(&self) -> bool {
    !self.redoes.is_empty()
  }

  pub fn add_undo(&mut self, delta: DeltaTextOperations) {
    self.undoes.push(delta);
  }

  pub fn add_redo(&mut self, delta: DeltaTextOperations) {
    self.redoes.push(delta);
  }

  pub fn record(&mut self, delta: DeltaTextOperations) {
    if delta.ops.is_empty() {
      return;
    }

    self.redoes.clear();
    self.add_undo(delta);

    if self.undoes.len() > self.capacity {
      self.undoes.remove(0);
    }
  }

  pub fn undo(&mut self) -> Option<DeltaTextOperations> {
    if !self.can_undo() {
      return None;
    }
    let delta = self.undoes.pop().unwrap();
    Some(delta)
  }

  pub fn redo(&mut self) -> Option<DeltaTextOperations> {
    if !self.can_redo() {
      return None;
    }

    let delta = self.redoes.pop().unwrap();
    Some(delta)
  }
}
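
The undo/redo bookkeeping above is small enough to sketch on its own: record pushes onto the undo stack, clears the redo stack, and evicts the oldest entry once MAX_UNDOES is exceeded. A minimal standalone illustration with a generic payload standing in for DeltaTextOperations (the type below is hypothetical, not part of this commit):

// Sketch of History's record/undo semantics with a generic payload.
struct MiniHistory<T> {
  undoes: Vec<T>,
  redoes: Vec<T>,
  capacity: usize,
}

impl<T> MiniHistory<T> {
  fn new(capacity: usize) -> Self {
    Self {
      undoes: Vec::new(),
      redoes: Vec::new(),
      capacity,
    }
  }

  // A new change invalidates anything that could be redone.
  fn record(&mut self, change: T) {
    self.redoes.clear();
    self.undoes.push(change);
    if self.undoes.len() > self.capacity {
      self.undoes.remove(0); // drop the oldest entry once over capacity
    }
  }

  fn undo(&mut self) -> Option<T> {
    self.undoes.pop()
  }
}

fn main() {
  let mut history = MiniHistory::new(2);
  history.record("a");
  history.record("b");
  history.record("c"); // "a" is evicted: capacity is 2
  assert_eq!(history.undo(), Some("c"));
  assert_eq!(history.undo(), Some("b"));
  assert_eq!(history.undo(), None);
}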
View file
@ -1,116 +1,119 @@
use crate::client_document::*;
use lib_ot::core::AttributeEntry;
use lib_ot::{
  core::{trim, Interval},
  errors::{ErrorBuilder, OTError, OTErrorCode},
  text_delta::DeltaTextOperations,
};

pub const RECORD_THRESHOLD: usize = 400; // in milliseconds

pub struct ViewExtensions {
  insert_exts: Vec<InsertExtension>,
  format_exts: Vec<FormatExtension>,
  delete_exts: Vec<DeleteExtension>,
}

impl ViewExtensions {
  pub(crate) fn new() -> Self {
    Self {
      insert_exts: construct_insert_exts(),
      format_exts: construct_format_exts(),
      delete_exts: construct_delete_exts(),
    }
  }

  pub(crate) fn insert(
    &self,
    operations: &DeltaTextOperations,
    text: &str,
    interval: Interval,
  ) -> Result<DeltaTextOperations, OTError> {
    let mut new_operations = None;
    for ext in &self.insert_exts {
      if let Some(mut operations) = ext.apply(operations, interval.size(), text, interval.start) {
        trim(&mut operations);
        tracing::trace!("[{}] applied, delta: {}", ext.ext_name(), operations);
        new_operations = Some(operations);
        break;
      }
    }

    match new_operations {
      None => Err(ErrorBuilder::new(OTErrorCode::ApplyInsertFail).build()),
      Some(new_operations) => Ok(new_operations),
    }
  }

  pub(crate) fn delete(
    &self,
    delta: &DeltaTextOperations,
    interval: Interval,
  ) -> Result<DeltaTextOperations, OTError> {
    let mut new_delta = None;
    for ext in &self.delete_exts {
      if let Some(mut delta) = ext.apply(delta, interval) {
        trim(&mut delta);
        tracing::trace!("[{}] applied, delta: {}", ext.ext_name(), delta);
        new_delta = Some(delta);
        break;
      }
    }

    match new_delta {
      None => Err(ErrorBuilder::new(OTErrorCode::ApplyDeleteFail).build()),
      Some(new_delta) => Ok(new_delta),
    }
  }

  pub(crate) fn format(
    &self,
    operations: &DeltaTextOperations,
    attribute: AttributeEntry,
    interval: Interval,
  ) -> Result<DeltaTextOperations, OTError> {
    let mut new_operations = None;
    for ext in &self.format_exts {
      if let Some(mut operations) = ext.apply(operations, interval, &attribute) {
        trim(&mut operations);
        tracing::trace!("[{}] applied, delta: {}", ext.ext_name(), operations);
        new_operations = Some(operations);
        break;
      }
    }

    match new_operations {
      None => Err(ErrorBuilder::new(OTErrorCode::ApplyFormatFail).build()),
      Some(new_operations) => Ok(new_operations),
    }
  }
}

fn construct_insert_exts() -> Vec<InsertExtension> {
  vec![
    Box::new(InsertEmbedsExt {}),
    Box::new(ForceNewlineForInsertsAroundEmbedExt {}),
    Box::new(AutoExitBlock {}),
    Box::new(PreserveBlockFormatOnInsert {}),
    Box::new(PreserveLineFormatOnSplit {}),
    Box::new(ResetLineFormatOnNewLine {}),
    Box::new(AutoFormatExt {}),
    Box::new(PreserveInlineFormat {}),
    Box::new(DefaultInsertAttribute {}),
  ]
}

fn construct_format_exts() -> Vec<FormatExtension> {
  vec![
    // Box::new(FormatLinkAtCaretPositionExt {}),
    Box::new(ResolveBlockFormat {}),
    Box::new(ResolveInlineFormat {}),
  ]
}

fn construct_delete_exts() -> Vec<DeleteExtension> {
  vec![
    Box::new(PreserveLineFormatOnMerge {}),
    Box::new(DefaultDelete {}),
  ]
}
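
Ordering matters in construct_insert_exts: the loops in insert/delete/format take the first extension that returns Some, so DefaultInsertAttribute, whose apply always returns Some, must stay last. A minimal standalone sketch of this first-match dispatch (the trait and types below are hypothetical stand-ins, not this crate's API):

// Sketch: extensions tried in order; the first Some(..) wins.
trait Ext {
  fn apply(&self, input: &str) -> Option<String>;
}

struct Upper;
impl Ext for Upper {
  fn apply(&self, input: &str) -> Option<String> {
    // Only handles inputs it recognizes; otherwise defers to later extensions.
    input.starts_with('u').then(|| input.to_uppercase())
  }
}

struct Fallback;
impl Ext for Fallback {
  fn apply(&self, input: &str) -> Option<String> {
    Some(input.to_string()) // always applies, so it must come last
  }
}

fn run(exts: &[Box<dyn Ext>], input: &str) -> Option<String> {
  exts.iter().find_map(|ext| ext.apply(input))
}

fn main() {
  let exts: Vec<Box<dyn Ext>> = vec![Box::new(Upper), Box::new(Fallback)];
  assert_eq!(run(&exts, "urgent").as_deref(), Some("URGENT"));
  assert_eq!(run(&exts, "plain").as_deref(), Some("plain"));
}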
View file
@ -1,7 +1,7 @@
use crate::client_folder::FolderOperations;
use crate::{
  client_folder::{default_folder_operations, FolderPad},
  errors::SyncResult,
};
use flowy_sync::util::make_operations_from_revisions;
use folder_model::{TrashRevision, WorkspaceRevision};
@ -10,40 +10,40 @@ use serde::{Deserialize, Serialize};
#[derive(Serialize, Deserialize)]
pub(crate) struct FolderPadBuilder {
  workspaces: Vec<WorkspaceRevision>,
  trash: Vec<TrashRevision>,
}

impl FolderPadBuilder {
  pub(crate) fn new() -> Self {
    Self {
      workspaces: vec![],
      trash: vec![],
    }
  }

  #[allow(dead_code)]
  pub(crate) fn with_workspace(mut self, workspaces: Vec<WorkspaceRevision>) -> Self {
    self.workspaces = workspaces;
    self
  }

  #[allow(dead_code)]
  pub(crate) fn with_trash(mut self, trash: Vec<TrashRevision>) -> Self {
    self.trash = trash;
    self
  }

  pub(crate) fn build_with_revisions(self, revisions: Vec<Revision>) -> SyncResult<FolderPad> {
    let mut operations: FolderOperations = make_operations_from_revisions(revisions)?;
    if operations.is_empty() {
      operations = default_folder_operations();
    }
    FolderPad::from_operations(operations)
  }

  #[allow(dead_code)]
  pub(crate) fn build(self) -> SyncResult<FolderPad> {
    FolderPad::new(self.workspaces, self.trash)
  }
}
View file
@ -10,133 +10,138 @@ use std::sync::Arc;
pub type AtomicNodeTree = RwLock<NodeTree>;

pub struct FolderNodePad {
  pub tree: Arc<AtomicNodeTree>,
  pub node_id: NodeId,
  pub workspaces: WorkspaceList,
  pub trash: TrashList,
}

#[derive(Clone, Node)]
#[node_type = "workspaces"]
pub struct WorkspaceList {
  pub tree: Arc<AtomicNodeTree>,
  pub node_id: Option<NodeId>,
  #[node(child_name = "workspace")]
  inner: Vec<WorkspaceNode>,
}

impl std::ops::Deref for WorkspaceList {
  type Target = Vec<WorkspaceNode>;

  fn deref(&self) -> &Self::Target {
    &self.inner
  }
}

impl std::ops::DerefMut for WorkspaceList {
  fn deref_mut(&mut self) -> &mut Self::Target {
    &mut self.inner
  }
}

#[derive(Clone, Node)]
#[node_type = "trash"]
pub struct TrashList {
  pub tree: Arc<AtomicNodeTree>,
  pub node_id: Option<NodeId>,
  #[node(child_name = "trash")]
  inner: Vec<TrashNode>,
}

impl FolderNodePad {
  pub fn new() -> Self {
    Self::default()
  }

  pub fn get_workspace(&self, workspace_id: &str) -> Option<&WorkspaceNode> {
    self
      .workspaces
      .iter()
      .find(|workspace| workspace.id == workspace_id)
  }

  pub fn get_mut_workspace(&mut self, workspace_id: &str) -> Option<&mut WorkspaceNode> {
    self
      .workspaces
      .iter_mut()
      .find(|workspace| workspace.id == workspace_id)
  }

  pub fn add_workspace(&mut self, mut workspace: WorkspaceNode) {
    let path = workspaces_path().clone_with(self.workspaces.len());
    let op = NodeOperation::Insert {
      path: path.clone(),
      nodes: vec![workspace.to_node_data()],
    };
    self.tree.write().apply_op(op).unwrap();

    let node_id = self.tree.read().node_id_at_path(path).unwrap();
    workspace.node_id = Some(node_id);
    self.workspaces.push(workspace);
  }

  pub fn to_json(&self, pretty: bool) -> SyncResult<String> {
    self
      .tree
      .read()
      .to_json(pretty)
      .map_err(|e| SyncError::serde().context(e))
  }
}

impl std::default::Default for FolderNodePad {
  fn default() -> Self {
    let tree = Arc::new(RwLock::new(NodeTree::default()));

    // Workspace
    let mut workspaces = WorkspaceList {
      tree: tree.clone(),
      node_id: None,
      inner: vec![],
    };
    let workspace_node = workspaces.to_node_data();

    // Trash
    let mut trash = TrashList {
      tree: tree.clone(),
      node_id: None,
      inner: vec![],
    };
    let trash_node = trash.to_node_data();

    let folder_node = NodeDataBuilder::new("folder")
      .add_node_data(workspace_node)
      .add_node_data(trash_node)
      .build();

    let operation = NodeOperation::Insert {
      path: folder_path(),
      nodes: vec![folder_node],
    };
    tree.write().apply_op(operation).unwrap();
    let node_id = tree.read().node_id_at_path(folder_path()).unwrap();
    workspaces.node_id = Some(tree.read().node_id_at_path(workspaces_path()).unwrap());
    trash.node_id = Some(tree.read().node_id_at_path(trash_path()).unwrap());

    Self {
      tree,
      node_id,
      workspaces,
      trash,
    }
  }
}

fn folder_path() -> Path {
  vec![0].into()
}

fn workspaces_path() -> Path {
  folder_path().clone_with(0)
}

fn trash_path() -> Path {
  folder_path().clone_with(1)
}
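
For orientation, the helpers above pin the tree layout: the folder root lives at path [0], the workspace list at [0, 0], trash at [0, 1], and the i-th workspace at [0, 0, i], which is exactly what the AssertPathOfWorkspace expectations in the tests further down assert. A small sketch of that addressing with plain vectors (the real code builds lib_ot Path values via clone_with):

// Sketch of the node addressing used by folder_path/workspaces_path.
fn folder_path() -> Vec<usize> {
  vec![0] // the "folder" root node
}

fn workspaces_path() -> Vec<usize> {
  let mut path = folder_path();
  path.push(0); // the workspace list is the folder's first child
  path
}

fn workspace_path(i: usize) -> Vec<usize> {
  let mut path = workspaces_path();
  path.push(i); // i-th workspace under the list
  path
}

fn main() {
  assert_eq!(workspace_path(0), vec![0, 0, 0]);
  assert_eq!(workspace_path(2), vec![0, 0, 2]);
}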
View file
@ -7,14 +7,14 @@ use std::sync::Arc;
#[derive(Clone, Node)]
#[node_type = "trash"]
pub struct TrashNode {
  pub tree: Arc<AtomicNodeTree>,
  pub node_id: Option<NodeId>,
  #[node(get_value_with = "get_attributes_str_value")]
  #[node(set_value_with = "set_attributes_str_value")]
  pub id: String,
  #[node(get_value_with = "get_attributes_str_value")]
  #[node(set_value_with = "set_attributes_str_value")]
  pub name: String,
}
View file
@ -3,52 +3,70 @@ use crate::errors::SyncResult;
use lib_ot::core::{AttributeHashMap, AttributeValue, Changeset, NodeId, NodeOperation};
use std::sync::Arc;

pub fn get_attributes_str_value(
  tree: Arc<AtomicNodeTree>,
  node_id: &NodeId,
  key: &str,
) -> Option<String> {
  tree
    .read()
    .get_node(*node_id)
    .and_then(|node| node.attributes.get(key).cloned())
    .and_then(|value| value.str_value())
}

pub fn set_attributes_str_value(
  tree: Arc<AtomicNodeTree>,
  node_id: &NodeId,
  key: &str,
  value: String,
) -> SyncResult<()> {
  let old_attributes = match get_attributes(tree.clone(), node_id) {
    None => AttributeHashMap::new(),
    Some(attributes) => attributes,
  };
  let mut new_attributes = old_attributes.clone();
  new_attributes.insert(key, value);
  let path = tree.read().path_from_node_id(*node_id);
  let update_operation = NodeOperation::Update {
    path,
    changeset: Changeset::Attributes {
      new: new_attributes,
      old: old_attributes,
    },
  };
  tree.write().apply_op(update_operation)?;
  Ok(())
}

#[allow(dead_code)]
pub fn get_attributes_int_value(
  tree: Arc<AtomicNodeTree>,
  node_id: &NodeId,
  key: &str,
) -> Option<i64> {
  tree
    .read()
    .get_node(*node_id)
    .and_then(|node| node.attributes.get(key).cloned())
    .and_then(|value| value.int_value())
}

pub fn get_attributes(tree: Arc<AtomicNodeTree>, node_id: &NodeId) -> Option<AttributeHashMap> {
  tree
    .read()
    .get_node(*node_id)
    .map(|node| node.attributes.clone())
}

#[allow(dead_code)]
pub fn get_attributes_value(
  tree: Arc<AtomicNodeTree>,
  node_id: &NodeId,
  key: &str,
) -> Option<AttributeValue> {
  tree
    .read()
    .get_node(*node_id)
    .and_then(|node| node.attributes.get(key).cloned())
}
View file
@ -7,55 +7,55 @@ use std::sync::Arc;
#[derive(Clone, Node)]
#[node_type = "workspace"]
pub struct WorkspaceNode {
  pub tree: Arc<AtomicNodeTree>,
  pub node_id: Option<NodeId>,
  #[node(get_value_with = "get_attributes_str_value")]
  #[node(set_value_with = "set_attributes_str_value")]
  pub id: String,
  #[node(get_value_with = "get_attributes_str_value")]
  #[node(set_value_with = "set_attributes_str_value")]
  pub name: String,
  #[node(child_name = "app")]
  pub apps: Vec<AppNode>,
}

impl WorkspaceNode {
  pub fn new(tree: Arc<AtomicNodeTree>, id: String, name: String) -> Self {
    Self {
      tree,
      node_id: None,
      id,
      name,
      apps: vec![],
    }
  }
}

#[derive(Clone, Node)]
#[node_type = "app"]
pub struct AppNode {
  pub tree: Arc<AtomicNodeTree>,
  pub node_id: Option<NodeId>,
  #[node(get_value_with = "get_attributes_str_value")]
  #[node(set_value_with = "set_attributes_str_value")]
  pub id: String,
  #[node(get_value_with = "get_attributes_str_value")]
  #[node(set_value_with = "set_attributes_str_value")]
  pub name: String,
}

impl AppNode {
  pub fn new(tree: Arc<AtomicNodeTree>, id: String, name: String) -> Self {
    Self {
      tree,
      node_id: None,
      id,
      name,
    }
  }
}
View file
@ -2,7 +2,7 @@ pub mod client_database;
pub mod client_document;
pub mod client_folder;
pub mod errors {
  pub use flowy_sync::errors::*;
}
pub mod util;
View file
@ -3,127 +3,127 @@ use dissimilar::Chunk;
use document_model::document::DocumentInfo;
use lib_ot::core::{DeltaOperationBuilder, OTString, OperationAttributes};
use lib_ot::{
  core::{DeltaOperations, OperationTransform, NEW_LINE, WHITESPACE},
  text_delta::DeltaTextOperations,
};
use revision_model::Revision;
use serde::de::DeserializeOwned;

#[inline]
pub fn find_newline(s: &str) -> Option<usize> {
  s.find(NEW_LINE)
}

#[inline]
pub fn is_newline(s: &str) -> bool {
  s == NEW_LINE
}

#[inline]
pub fn is_whitespace(s: &str) -> bool {
  s == WHITESPACE
}

#[inline]
pub fn contain_newline(s: &str) -> bool {
  s.contains(NEW_LINE)
}

pub fn recover_operation_from_revisions<T>(
  revisions: Vec<Revision>,
  validator: impl Fn(&DeltaOperations<T>) -> bool,
) -> Option<(DeltaOperations<T>, i64)>
where
  T: OperationAttributes + DeserializeOwned + OperationAttributes,
{
  let mut new_operations = DeltaOperations::<T>::new();
  let mut rev_id = 0;
  for revision in revisions {
    if let Ok(operations) = DeltaOperations::<T>::from_bytes(revision.bytes) {
      match new_operations.compose(&operations) {
        Ok(composed_operations) => {
          if validator(&composed_operations) {
            rev_id = revision.rev_id;
            new_operations = composed_operations;
          } else {
            break;
          }
        },
        Err(_) => break,
      }
    } else {
      break;
    }
  }
  if new_operations.is_empty() {
    None
  } else {
    Some((new_operations, rev_id))
  }
}

#[inline]
pub fn make_document_info_from_revisions(
  doc_id: &str,
  revisions: Vec<Revision>,
) -> Result<Option<DocumentInfo>, SyncError> {
  if revisions.is_empty() {
    return Ok(None);
  }

  let mut delta = DeltaTextOperations::new();
  let mut base_rev_id = 0;
  let mut rev_id = 0;
  for revision in revisions {
    base_rev_id = revision.base_rev_id;
    rev_id = revision.rev_id;

    if revision.bytes.is_empty() {
      tracing::warn!("revision delta_data is empty");
    }

    let new_delta = DeltaTextOperations::from_bytes(revision.bytes)?;
    delta = delta.compose(&new_delta)?;
  }

  Ok(Some(DocumentInfo {
    doc_id: doc_id.to_owned(),
    data: delta.json_bytes().to_vec(),
    rev_id,
    base_rev_id,
  }))
}

#[inline]
pub fn rev_id_from_str(s: &str) -> Result<i64, SyncError> {
  let rev_id = s
    .to_owned()
    .parse::<i64>()
    .map_err(|e| SyncError::internal().context(format!("Parse rev_id from {} failed. {}", s, e)))?;
  Ok(rev_id)
}

pub fn cal_diff<T: OperationAttributes>(old: String, new: String) -> Option<DeltaOperations<T>> {
  let chunks = dissimilar::diff(&old, &new);
  let mut delta_builder = DeltaOperationBuilder::<T>::new();
  for chunk in &chunks {
    match chunk {
      Chunk::Equal(s) => {
        delta_builder = delta_builder.retain(OTString::from(*s).utf16_len());
      },
      Chunk::Delete(s) => {
        delta_builder = delta_builder.delete(OTString::from(*s).utf16_len());
      },
      Chunk::Insert(s) => {
        delta_builder = delta_builder.insert(s);
      },
    }
  }

  let delta = delta_builder.build();
  if delta.is_empty() {
    None
  } else {
    Some(delta)
  }
}
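
cal_diff maps the dissimilar crate's chunk stream one-to-one onto delta operations: Equal becomes retain, Delete becomes delete, Insert becomes insert, with lengths measured in UTF-16 code units. A standalone sketch of the chunk stream it consumes (requires the dissimilar crate; exact chunk boundaries depend on the diff algorithm):

// Sketch: the Chunk stream that cal_diff turns into retain/delete/insert.
use dissimilar::Chunk;

fn main() {
  let old = "hello world";
  let new = "hello there world";
  for chunk in dissimilar::diff(old, new) {
    match chunk {
      Chunk::Equal(s) => println!("retain {}: {:?}", s.encode_utf16().count(), s),
      Chunk::Delete(s) => println!("delete {}: {:?}", s.encode_utf16().count(), s),
      Chunk::Insert(s) => println!("insert: {:?}", s),
    }
  }
}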
View file
@ -2,57 +2,74 @@ use flowy_client_sync::client_folder::{FolderNodePad, WorkspaceNode};
#[test]
fn client_folder_create_default_folder_test() {
  let folder_pad = FolderNodePad::new();
  let json = folder_pad.to_json(false).unwrap();
  assert_eq!(
    json,
    r#"{"type":"folder","children":[{"type":"workspaces"},{"type":"trash"}]}"#
  );
}

#[test]
fn client_folder_create_default_folder_with_workspace_test() {
  let mut folder_pad = FolderNodePad::new();
  let workspace = WorkspaceNode::new(
    folder_pad.tree.clone(),
    "1".to_string(),
    "workspace name".to_string(),
  );
  folder_pad.workspaces.add_workspace(workspace).unwrap();
  let json = folder_pad.to_json(false).unwrap();
  assert_eq!(
    json,
    r#"{"type":"folder","children":[{"type":"workspaces","children":[{"type":"workspace","attributes":{"id":"1","name":"workspace name"}}]},{"type":"trash"}]}"#
  );

  assert_eq!(
    folder_pad.get_workspace("1").unwrap().get_name().unwrap(),
    "workspace name"
  );
}

#[test]
fn client_folder_delete_workspace_test() {
  let mut folder_pad = FolderNodePad::new();
  let workspace = WorkspaceNode::new(
    folder_pad.tree.clone(),
    "1".to_string(),
    "workspace name".to_string(),
  );
  folder_pad.workspaces.add_workspace(workspace).unwrap();
  folder_pad.workspaces.remove_workspace("1");
  let json = folder_pad.to_json(false).unwrap();
  assert_eq!(
    json,
    r#"{"type":"folder","children":[{"type":"workspaces"},{"type":"trash"}]}"#
  );
}

#[test]
fn client_folder_update_workspace_name_test() {
  let mut folder_pad = FolderNodePad::new();
  let workspace = WorkspaceNode::new(
    folder_pad.tree.clone(),
    "1".to_string(),
    "workspace name".to_string(),
  );
  folder_pad.workspaces.add_workspace(workspace).unwrap();
  folder_pad
    .workspaces
    .get_mut_workspace("1")
    .unwrap()
    .set_name("my first workspace".to_string());

  assert_eq!(
    folder_pad
      .workspaces
      .get_workspace("1")
      .unwrap()
      .get_name()
      .unwrap(),
    "my first workspace"
  );
}
View file
@ -3,87 +3,115 @@ use folder_model::AppRevision;
use lib_ot::core::Path;

pub enum FolderNodePadScript {
  CreateWorkspace {
    id: String,
    name: String,
  },
  DeleteWorkspace {
    id: String,
  },
  AssertPathOfWorkspace {
    id: String,
    expected_path: Path,
  },
  AssertNumberOfWorkspace {
    expected: usize,
  },
  CreateApp {
    id: String,
    name: String,
  },
  DeleteApp {
    id: String,
  },
  UpdateApp {
    id: String,
    name: String,
  },
  AssertApp {
    id: String,
    expected: Option<AppRevision>,
  },
  AssertAppContent {
    id: String,
    name: String,
  },
  // AssertNumberOfApps { expected: usize },
}

pub struct FolderNodePadTest {
  folder_pad: FolderNodePad,
}

impl FolderNodePadTest {
  pub fn new() -> FolderNodePadTest {
    let mut folder_pad = FolderNodePad::default();
    let workspace = WorkspaceNode::new(
      folder_pad.tree.clone(),
      "1".to_string(),
      "workspace name".to_string(),
    );
    folder_pad.workspaces.add_workspace(workspace).unwrap();
    Self { folder_pad }
  }

  pub fn run_scripts(&mut self, scripts: Vec<FolderNodePadScript>) {
    for script in scripts {
      self.run_script(script);
    }
  }

  pub fn run_script(&mut self, script: FolderNodePadScript) {
    match script {
      FolderNodePadScript::CreateWorkspace { id, name } => {
        let workspace = WorkspaceNode::new(self.folder_pad.tree.clone(), id, name);
        self.folder_pad.workspaces.add_workspace(workspace).unwrap();
      },
      FolderNodePadScript::DeleteWorkspace { id } => {
        self.folder_pad.workspaces.remove_workspace(id);
      },
      FolderNodePadScript::AssertPathOfWorkspace { id, expected_path } => {
        let workspace_node: &WorkspaceNode = self.folder_pad.workspaces.get_workspace(id).unwrap();
        let node_id = workspace_node.node_id.unwrap();
        let path = self.folder_pad.tree.read().path_from_node_id(node_id);
        assert_eq!(path, expected_path);
      },
      FolderNodePadScript::AssertNumberOfWorkspace { expected } => {
        assert_eq!(self.folder_pad.workspaces.len(), expected);
      },
      FolderNodePadScript::CreateApp { id, name } => {
        let app_node = AppNode::new(self.folder_pad.tree.clone(), id, name);
        let workspace_node = self.folder_pad.get_mut_workspace("1").unwrap();
        workspace_node.add_app(app_node).unwrap();
      },
      FolderNodePadScript::DeleteApp { id } => {
        let workspace_node = self.folder_pad.get_mut_workspace("1").unwrap();
        workspace_node.remove_app(&id);
      },
      FolderNodePadScript::UpdateApp { id, name } => {
        let workspace_node = self.folder_pad.get_mut_workspace("1").unwrap();
        workspace_node.get_mut_app(&id).unwrap().set_name(name);
      },
      FolderNodePadScript::AssertApp { id, expected } => {
        let workspace_node = self.folder_pad.get_workspace("1").unwrap();
        let app = workspace_node.get_app(&id);
        match expected {
          None => assert!(app.is_none()),
          Some(expected_app) => {
            let app_node = app.unwrap();
            assert_eq!(expected_app.name, app_node.get_name().unwrap());
            assert_eq!(expected_app.id, app_node.get_id().unwrap());
          },
        }
      },
      FolderNodePadScript::AssertAppContent { id, name } => {
        let workspace_node = self.folder_pad.get_workspace("1").unwrap();
        let app = workspace_node.get_app(&id).unwrap();
        assert_eq!(app.get_name().unwrap(), name)
      },
      // FolderNodePadScript::AssertNumberOfApps { expected } => {
      //   let workspace_node = self.folder_pad.get_workspace("1").unwrap();
      //   assert_eq!(workspace_node.apps.len(), expected);
      // },
    }
  }
}
View file
@ -3,84 +3,88 @@ use crate::client_folder::script::FolderNodePadTest;
#[test]
fn client_folder_create_multi_workspaces_test() {
  let mut test = FolderNodePadTest::new();
  test.run_scripts(vec![
    AssertPathOfWorkspace {
      id: "1".to_string(),
      expected_path: vec![0, 0, 0].into(),
    },
    CreateWorkspace {
      id: "a".to_string(),
      name: "workspace a".to_string(),
    },
    AssertPathOfWorkspace {
      id: "a".to_string(),
      expected_path: vec![0, 0, 1].into(),
    },
    CreateWorkspace {
      id: "b".to_string(),
      name: "workspace b".to_string(),
    },
    AssertPathOfWorkspace {
      id: "b".to_string(),
      expected_path: vec![0, 0, 2].into(),
    },
    AssertNumberOfWorkspace { expected: 3 },
    // The path of the workspace 'b' will be changed after deleting the 'a' workspace.
    DeleteWorkspace {
      id: "a".to_string(),
    },
    AssertPathOfWorkspace {
      id: "b".to_string(),
      expected_path: vec![0, 0, 1].into(),
    },
  ]);
}

#[test]
fn client_folder_create_app_test() {
  let mut test = FolderNodePadTest::new();
  test.run_scripts(vec![
    CreateApp {
      id: "1".to_string(),
      name: "my first app".to_string(),
    },
    AssertAppContent {
      id: "1".to_string(),
      name: "my first app".to_string(),
    },
  ]);
}

#[test]
fn client_folder_delete_app_test() {
  let mut test = FolderNodePadTest::new();
  test.run_scripts(vec![
    CreateApp {
      id: "1".to_string(),
      name: "my first app".to_string(),
    },
    DeleteApp {
      id: "1".to_string(),
    },
    AssertApp {
      id: "1".to_string(),
      expected: None,
    },
  ]);
}

#[test]
fn client_folder_update_app_test() {
  let mut test = FolderNodePadTest::new();
  test.run_scripts(vec![
    CreateApp {
      id: "1".to_string(),
      name: "my first app".to_string(),
    },
    UpdateApp {
      id: "1".to_string(),
      name: "TODO".to_string(),
    },
    AssertAppContent {
      id: "1".to_string(),
      name: "TODO".to_string(),
    },
  ]);
}

View file

@ -3,39 +3,39 @@ use quote::format_ident;
#[allow(dead_code)]
pub struct EventASTContext {
  pub event: syn::Ident,
  pub event_ty: syn::Ident,
  pub event_request_struct: syn::Ident,
  pub event_input: Option<syn::Path>,
  pub event_output: Option<syn::Path>,
  pub event_error: String,
}

impl EventASTContext {
  #[allow(dead_code)]
  pub fn from(enum_attrs: &EventEnumAttrs) -> EventASTContext {
    let command_name = enum_attrs.enum_item_name.clone();
    if command_name.is_empty() {
      panic!("Invalid command name: {}", enum_attrs.enum_item_name);
    }

    let event = format_ident!("{}", &command_name);
    let splits = command_name.split('_').collect::<Vec<&str>>();

    let event_ty = format_ident!("{}", enum_attrs.enum_name);
    let event_request_struct = format_ident!("{}Event", &splits.join(""));
    let event_input = enum_attrs.event_input();
    let event_output = enum_attrs.event_output();
    let event_error = enum_attrs.event_error();

    EventASTContext {
      event,
      event_ty,
      event_request_struct,
      event_input,
      event_output,
      event_error,
    }
  }
}
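
A worked example of the naming scheme above, using plain strings instead of syn identifiers: an underscore-separated command name is collapsed into a request-struct name by splitting on '_' and re-joining (the command name here is illustrative):

let command_name = "Workspace_Create";
let splits = command_name.split('_').collect::<Vec<&str>>();
assert_eq!(format!("{}Event", splits.join("")), "WorkspaceCreateEvent");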

View file

@ -10,63 +10,71 @@ use syn::Item;
use walkdir::WalkDir;

pub fn gen(crate_name: &str) {
  if std::env::var("CARGO_MAKE_WORKING_DIRECTORY").is_err() {
    log::warn!("CARGO_MAKE_WORKING_DIRECTORY was not set, skip generate dart pb");
    return;
  }

  if std::env::var("FLUTTER_FLOWY_SDK_PATH").is_err() {
    log::warn!("FLUTTER_FLOWY_SDK_PATH was not set, skip generate dart pb");
    return;
  }

  let crate_path = std::fs::canonicalize(".")
    .unwrap()
    .as_path()
    .display()
    .to_string();
  let event_crates = parse_dart_event_files(vec![crate_path]);
  let event_ast = event_crates
    .iter()
    .flat_map(parse_event_crate)
    .collect::<Vec<_>>();

  let event_render_ctx = ast_to_event_render_ctx(event_ast.as_ref());
  let mut render_result = DART_IMPORTED.to_owned();
  for (index, render_ctx) in event_render_ctx.into_iter().enumerate() {
    let mut event_template = EventTemplate::new();
    if let Some(content) = event_template.render(render_ctx, index) {
      render_result.push_str(content.as_ref())
    }
  }

  let dart_event_folder: PathBuf = [
    &std::env::var("CARGO_MAKE_WORKING_DIRECTORY").unwrap(),
    &std::env::var("FLUTTER_FLOWY_SDK_PATH").unwrap(),
    "lib",
    "dispatch",
    "dart_event",
    crate_name,
  ]
  .iter()
  .collect();

  if !dart_event_folder.as_path().exists() {
    std::fs::create_dir_all(dart_event_folder.as_path()).unwrap();
  }

  let dart_event_file_path =
    path_string_with_component(&dart_event_folder, vec!["dart_event.dart"]);
  println!("cargo:rerun-if-changed={}", dart_event_file_path);

  match std::fs::OpenOptions::new()
    .create(true)
    .write(true)
    .append(false)
    .truncate(true)
    .open(&dart_event_file_path)
  {
    Ok(ref mut file) => {
      file.write_all(render_result.as_bytes()).unwrap();
      File::flush(file).unwrap();
    },
    Err(err) => {
      panic!("Failed to open file: {}, {:?}", dart_event_file_path, err);
    },
  }
}
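
The folder path above is assembled by collecting an array of path components into a PathBuf, which joins them with the platform separator. A self-contained sketch with illustrative components:

use std::path::PathBuf;

let dart_event_folder: PathBuf = ["root", "sdk", "lib", "dispatch", "dart_event", "my_crate"]
  .iter()
  .collect();
// On Unix this yields "root/sdk/lib/dispatch/dart_event/my_crate".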
const DART_IMPORTED: &str = r#"
@ -76,90 +84,93 @@ part of '../../dispatch.dart';
#[derive(Debug)]
pub struct DartEventCrate {
  crate_path: PathBuf,
  event_files: Vec<String>,
}

impl DartEventCrate {
  pub fn from_config(config: &CrateConfig) -> Self {
    DartEventCrate {
      crate_path: config.crate_path.clone(),
      event_files: config.flowy_config.event_files.clone(),
    }
  }
}

pub fn parse_dart_event_files(crate_paths: Vec<String>) -> Vec<DartEventCrate> {
  let mut dart_event_crates: Vec<DartEventCrate> = vec![];
  crate_paths.iter().for_each(|path| {
    let crates = WalkDir::new(path)
      .into_iter()
      .filter_entry(|e| !is_hidden(e))
      .filter_map(|e| e.ok())
      .filter(is_crate_dir)
      .flat_map(|e| parse_crate_config_from(&e))
      .map(|crate_config| DartEventCrate::from_config(&crate_config))
      .collect::<Vec<DartEventCrate>>();
    dart_event_crates.extend(crates);
  });
  dart_event_crates
}

pub fn parse_event_crate(event_crate: &DartEventCrate) -> Vec<EventASTContext> {
  event_crate
    .event_files
    .iter()
    .flat_map(|event_file| {
      let file_path =
        path_string_with_component(&event_crate.crate_path, vec![event_file.as_str()]);

      let file_content = read_file(file_path.as_ref()).unwrap();
      let ast = syn::parse_file(file_content.as_ref()).expect("Unable to parse file");
      ast
        .items
        .iter()
        .flat_map(|item| match item {
          Item::Enum(item_enum) => {
            let ast_result = ASTResult::new();
            let attrs = flowy_ast::enum_from_ast(
              &ast_result,
              &item_enum.ident,
              &item_enum.variants,
              &item_enum.attrs,
            );
            ast_result.check().unwrap();
            attrs
              .iter()
              .filter(|attr| !attr.attrs.event_attrs.ignore)
              .enumerate()
              .map(|(_index, variant)| EventASTContext::from(&variant.attrs))
              .collect::<Vec<_>>()
          },
          _ => vec![],
        })
        .collect::<Vec<_>>()
    })
    .collect::<Vec<EventASTContext>>()
}

pub fn ast_to_event_render_ctx(ast: &[EventASTContext]) -> Vec<EventRenderContext> {
  ast
    .iter()
    .map(|event_ast| {
      let input_deserializer = event_ast
        .event_input
        .as_ref()
        .map(|event_input| event_input.get_ident().unwrap().to_string());

      let output_deserializer = event_ast
        .event_output
        .as_ref()
        .map(|event_output| event_output.get_ident().unwrap().to_string());

      EventRenderContext {
        input_deserializer,
        output_deserializer,
        error_deserializer: event_ast.event_error.clone(),
        event: event_ast.event.to_string(),
        event_ty: event_ast.event_ty.to_string(),
      }
    })
    .collect::<Vec<EventRenderContext>>()
}

View file

@ -2,60 +2,64 @@ use crate::util::get_tera;
use tera::Context;

pub struct EventTemplate {
  tera_context: Context,
}

pub struct EventRenderContext {
  pub input_deserializer: Option<String>,
  pub output_deserializer: Option<String>,
  pub error_deserializer: String,
  pub event: String,
  pub event_ty: String,
}

#[allow(dead_code)]
impl EventTemplate {
  pub fn new() -> Self {
    EventTemplate {
      tera_context: Context::new(),
    }
  }

  pub fn render(&mut self, ctx: EventRenderContext, index: usize) -> Option<String> {
    self.tera_context.insert("index", &index);
    let dart_class_name = format!("{}{}", ctx.event_ty, ctx.event);
    let event = format!("{}.{}", ctx.event_ty, ctx.event);
    self.tera_context.insert("event_class", &dart_class_name);
    self.tera_context.insert("event", &event);

    self
      .tera_context
      .insert("has_input", &ctx.input_deserializer.is_some());
    match ctx.input_deserializer {
      None => self.tera_context.insert("input_deserializer", "Unit"),
      Some(ref input) => self.tera_context.insert("input_deserializer", input),
    }

    // eprintln!(
    //   "😁 {:?} / {:?}",
    //   &ctx.input_deserializer, &ctx.output_deserializer
    // );

    let has_output = ctx.output_deserializer.is_some();
    self.tera_context.insert("has_output", &has_output);

    match ctx.output_deserializer {
      None => self.tera_context.insert("output_deserializer", "Unit"),
      Some(ref output) => self.tera_context.insert("output_deserializer", output),
    }

    self
      .tera_context
      .insert("error_deserializer", &ctx.error_deserializer);

    let tera = get_tera("dart_event");
    match tera.render("event_template.tera", &self.tera_context) {
      Ok(r) => Some(r),
      Err(e) => {
        log::error!("{:?}", e);
        None
      },
    }
  }
}
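
For reference, the Tera flow used by render: values are inserted into a Context and a named template is rendered against it. A minimal sketch with an inline template (the real event_template.tera is loaded via get_tera and is not part of this diff):

use tera::{Context, Tera};

let mut tera = Tera::default();
tera
  .add_raw_template("event.tera", "class {{ event_class }} {}")
  .unwrap();
let mut ctx = Context::new();
ctx.insert("event_class", "FolderCreateAppEvent");
assert_eq!(
  tera.render("event.tera", &ctx).unwrap(),
  "class FolderCreateAppEvent {}"
);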

View file

@ -3,57 +3,62 @@ use std::path::{Path, PathBuf};
#[derive(serde::Deserialize, Clone, Debug)]
pub struct FlowyConfig {
  #[serde(default)]
  pub event_files: Vec<String>,

  // Collect AST from the file or directory specified by proto_input to generate the proto files.
  #[serde(default)]
  pub proto_input: Vec<String>,

  // Output path for the generated proto files. The default value is default_proto_output()
  #[serde(default = "default_proto_output")]
  pub proto_output: String,

  // Create a crate that stores the generated protobuf Rust structures. The default value is default_protobuf_crate()
  #[serde(default = "default_protobuf_crate")]
  pub protobuf_crate_path: String,
}

fn default_proto_output() -> String {
  "resources/proto".to_owned()
}

fn default_protobuf_crate() -> String {
  "src/protobuf".to_owned()
}

impl FlowyConfig {
  pub fn from_toml_file(path: &Path) -> Self {
    let content = fs::read_to_string(path).unwrap();
    let config: FlowyConfig = toml::from_str(content.as_ref()).unwrap();
    config
  }
}

pub struct CrateConfig {
  pub crate_path: PathBuf,
  pub crate_folder: String,
  pub flowy_config: FlowyConfig,
}

pub fn parse_crate_config_from(entry: &walkdir::DirEntry) -> Option<CrateConfig> {
  let mut config_path = entry.path().parent().unwrap().to_path_buf();
  config_path.push("Flowy.toml");

  if !config_path.as_path().exists() {
    return None;
  }
  let crate_path = entry.path().parent().unwrap().to_path_buf();
  let flowy_config = FlowyConfig::from_toml_file(config_path.as_path());
  let crate_folder = crate_path
    .file_stem()
    .unwrap()
    .to_str()
    .unwrap()
    .to_string();

  Some(CrateConfig {
    crate_path,
    crate_folder,
    flowy_config,
  })
}
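
The serde defaults above mean a Flowy.toml can omit most keys. A sketch of deserializing a minimal config (the proto_input value is illustrative):

let content = r#"proto_input = ["src/event_map.rs"]"#;
let config: FlowyConfig = toml::from_str(content).unwrap();
assert_eq!(config.proto_output, "resources/proto"); // filled by default_proto_output()
assert_eq!(config.protobuf_crate_path, "src/protobuf"); // filled by default_protobuf_crate()
assert!(config.event_files.is_empty()); // #[serde(default)]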

View file

@ -16,6 +16,6 @@ pub mod util;
#[derive(serde::Serialize, serde::Deserialize)]
pub struct ProtoCache {
  pub structs: Vec<String>,
  pub enums: Vec<String>,
}

View file

@ -14,151 +14,161 @@ use syn::Item;
use walkdir::WalkDir;

pub fn parse_protobuf_context_from(crate_paths: Vec<String>) -> Vec<ProtobufCrateContext> {
  let crate_infos = parse_crate_info_from_path(crate_paths);
  crate_infos
    .into_iter()
    .map(|crate_info| {
      let proto_output_path = crate_info.proto_output_path();
      let files = crate_info
        .proto_input_paths()
        .iter()
        .flat_map(|proto_crate_path| parse_files_protobuf(proto_crate_path, &proto_output_path))
        .collect::<Vec<ProtoFile>>();
      ProtobufCrateContext::from_crate_info(crate_info, files)
    })
    .collect::<Vec<ProtobufCrateContext>>()
}

fn parse_files_protobuf(proto_crate_path: &Path, proto_output_path: &Path) -> Vec<ProtoFile> {
  let mut gen_proto_vec: Vec<ProtoFile> = vec![];
  // file_stem https://doc.rust-lang.org/std/path/struct.Path.html#method.file_stem
  for (path, file_name) in WalkDir::new(proto_crate_path)
    .into_iter()
    .filter_entry(|e| !is_hidden(e))
    .filter_map(|e| e.ok())
    .filter(|e| !e.file_type().is_dir())
    .map(|e| {
      let path = e.path().to_str().unwrap().to_string();
      let file_name = e.path().file_stem().unwrap().to_str().unwrap().to_string();
      (path, file_name)
    })
  {
    if file_name == "mod" {
      continue;
    }

    // https://docs.rs/syn/1.0.54/syn/struct.File.html
    let ast = syn::parse_file(read_file(&path).unwrap().as_ref())
      .unwrap_or_else(|_| panic!("Unable to parse file at {}", path));
    let structs = get_ast_structs(&ast);
    let proto_file = format!("{}.proto", &file_name);
    let proto_file_path = path_string_with_component(proto_output_path, vec![&proto_file]);
    let proto_syntax = find_proto_syntax(proto_file_path.as_ref());

    let mut proto_content = String::new();

    // The types that are not defined in the current file.
    let mut ref_types: Vec<String> = vec![];
    structs.iter().for_each(|s| {
      let mut struct_template = StructTemplate::new();
      struct_template.set_message_struct_name(&s.name);

      s.fields
        .iter()
        .filter(|field| field.pb_attrs.pb_index().is_some())
        .for_each(|field| {
          ref_types.push(field.ty_as_str());
          struct_template.set_field(field);
        });

      let s = struct_template.render().unwrap();
      proto_content.push_str(s.as_ref());
      proto_content.push('\n');
    });

    let enums = get_ast_enums(&ast);
    enums.iter().for_each(|e| {
      let mut enum_template = EnumTemplate::new();
      enum_template.set_message_enum(e);
      let s = enum_template.render().unwrap();
      proto_content.push_str(s.as_ref());
      ref_types.push(e.name.clone());

      proto_content.push('\n');
    });

    if !enums.is_empty() || !structs.is_empty() {
      let structs: Vec<String> = structs.iter().map(|s| s.name.clone()).collect();
      let enums: Vec<String> = enums.iter().map(|e| e.name.clone()).collect();
      ref_types.retain(|s| !structs.contains(s));
      ref_types.retain(|s| !enums.contains(s));

      let info = ProtoFile {
        file_path: path.clone(),
        file_name: file_name.clone(),
        ref_types,
        structs,
        enums,
        syntax: proto_syntax,
        content: proto_content,
      };
      gen_proto_vec.push(info);
    }
  }

  gen_proto_vec
}

pub fn get_ast_structs(ast: &syn::File) -> Vec<Struct> {
  // let mut content = format!("{:#?}", &ast);
  // let mut file = File::create("./foo.txt").unwrap();
  // file.write_all(content.as_bytes()).unwrap();
  let ast_result = ASTResult::new();
  let mut proto_structs: Vec<Struct> = vec![];
  ast.items.iter().for_each(|item| {
    if let Item::Struct(item_struct) = item {
      let (_, fields) = struct_from_ast(&ast_result, &item_struct.fields);

      if fields
        .iter()
        .filter(|f| f.pb_attrs.pb_index().is_some())
        .count()
        > 0
      {
        proto_structs.push(Struct {
          name: item_struct.ident.to_string(),
          fields,
        });
      }
    }
  });
  ast_result.check().unwrap();
  proto_structs
}

pub fn get_ast_enums(ast: &syn::File) -> Vec<FlowyEnum> {
  let mut flowy_enums: Vec<FlowyEnum> = vec![];
  let ast_result = ASTResult::new();

  ast.items.iter().for_each(|item| {
    // https://docs.rs/syn/1.0.54/syn/enum.Item.html
    if let Item::Enum(item_enum) = item {
      let attrs = flowy_ast::enum_from_ast(
        &ast_result,
        &item_enum.ident,
        &item_enum.variants,
        &ast.attrs,
      );
      flowy_enums.push(FlowyEnum {
        name: item_enum.ident.to_string(),
        attrs,
      });
    }
  });
  ast_result.check().unwrap();
  flowy_enums
}

pub struct FlowyEnum<'a> {
  pub name: String,
  pub attrs: Vec<ASTEnumVariant<'a>>,
}

pub struct Struct<'a> {
  pub name: String,
  pub fields: Vec<ASTField<'a>>,
}
lazy_static! {
@ -167,27 +177,27 @@ lazy_static! {
}
fn find_proto_syntax(path: &str) -> String {
  if !Path::new(path).exists() {
    return String::from("syntax = \"proto3\";\n");
  }

  let mut result = String::new();
  let mut file = File::open(path).unwrap();
  let mut content = String::new();
  file.read_to_string(&mut content).unwrap();

  content.lines().for_each(|line| {
    ////Result<Option<Match<'t>>>
    if let Ok(Some(m)) = SYNTAX_REGEX.find(line) {
      result.push_str(m.as_str());
    }

    // if let Ok(Some(m)) = IMPORT_REGEX.find(line) {
    //   result.push_str(m.as_str());
    //   result.push('\n');
    // }
  });

  result.push('\n');
  result
}
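
The lazy_static block that defines SYNTAX_REGEX sits between the two hunks and is not shown here. The Result<Option<Match>> shape of the find call above suggests the fancy-regex crate rather than regex; a hypothetical definition, for orientation only:

lazy_static! {
  // Assumption: both the crate and the pattern are illustrative, not the verbatim source.
  static ref SYNTAX_REGEX: fancy_regex::Regex =
    fancy_regex::Regex::new(r"syntax.*;").unwrap();
}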

View file

@ -18,255 +18,274 @@ use std::process::Command;
use walkdir::WalkDir;

pub fn gen(crate_name: &str) {
  let crate_path = std::fs::canonicalize(".")
    .unwrap()
    .as_path()
    .display()
    .to_string();

  // 1. generate the proto files to proto_file_dir
  #[cfg(feature = "proto_gen")]
  let proto_crates = gen_proto_files(crate_name, &crate_path);

  for proto_crate in proto_crates {
    let mut proto_file_paths = vec![];
    let mut file_names = vec![];
    let proto_file_output_path = proto_crate
      .proto_output_path()
      .to_str()
      .unwrap()
      .to_string();
    let protobuf_output_path = proto_crate
      .protobuf_crate_path()
      .to_str()
      .unwrap()
      .to_string();

    for (path, file_name) in WalkDir::new(&proto_file_output_path)
      .into_iter()
      .filter_map(|e| e.ok())
      .map(|e| {
        let path = e.path().to_str().unwrap().to_string();
        let file_name = e.path().file_stem().unwrap().to_str().unwrap().to_string();
        (path, file_name)
      })
    {
      if path.ends_with(".proto") {
        // https://stackoverflow.com/questions/49077147/how-can-i-force-build-rs-to-run-again-without-cleaning-my-whole-project
        println!("cargo:rerun-if-changed={}", path);
        proto_file_paths.push(path);
        file_names.push(file_name);
      }
    }
    let protoc_bin_path = protoc_bin_vendored::protoc_bin_path().unwrap();

    // 2. generate the protobuf files(Dart)
    #[cfg(feature = "dart")]
    generate_dart_protobuf_files(
      crate_name,
      &proto_file_output_path,
      &proto_file_paths,
      &file_names,
      &protoc_bin_path,
    );

    #[cfg(feature = "ts")]
    generate_ts_protobuf_files(
      crate_name,
      &proto_file_output_path,
      &proto_file_paths,
      &file_names,
      &protoc_bin_path,
    );

    // 3. generate the protobuf files(Rust)
    generate_rust_protobuf_files(
      &protoc_bin_path,
      &proto_file_paths,
      &proto_file_output_path,
      &protobuf_output_path,
    );
  }
}

fn generate_rust_protobuf_files(
  protoc_bin_path: &Path,
  proto_file_paths: &[String],
  proto_file_output_path: &str,
  protobuf_output_path: &str,
) {
  protoc_rust::Codegen::new()
    .out_dir(protobuf_output_path)
    .protoc_path(protoc_bin_path)
    .inputs(proto_file_paths)
    .include(proto_file_output_path)
    .run()
    .expect("Running rust protoc failed.");
}

#[cfg(feature = "ts")]
fn generate_ts_protobuf_files(
  name: &str,
  proto_file_output_path: &str,
  paths: &[String],
  file_names: &Vec<String>,
  protoc_bin_path: &Path,
) {
  let root = std::env::var("CARGO_MAKE_WORKING_DIRECTORY").unwrap_or("../../".to_string());
  let tauri_backend_service_path = std::env::var("TAURI_BACKEND_SERVICE_PATH")
    .unwrap_or("appflowy_tauri/src/services/backend".to_string());

  let mut output = PathBuf::new();
  output.push(root);
  output.push(tauri_backend_service_path);
  output.push("classes");
  output.push(name);

  if !output.as_path().exists() {
    std::fs::create_dir_all(&output).unwrap();
  }
  let protoc_bin_path = protoc_bin_path.to_str().unwrap().to_owned();
  paths.iter().for_each(|path| {
    let result = cmd_lib::run_cmd! {
      ${protoc_bin_path} --ts_out=${output} --proto_path=${proto_file_output_path} ${path}
    };
    if result.is_err() {
      panic!("Generate ts pb file failed with: {}, {:?}", path, result)
    };
  });

  let ts_index = path_string_with_component(&output, vec!["index.ts"]);
  match std::fs::OpenOptions::new()
    .create(true)
    .write(true)
    .append(false)
    .truncate(true)
    .open(&ts_index)
  {
    Ok(ref mut file) => {
      let mut export = String::new();
      export.push_str("// Auto-generated, do not edit \n");
      for file_name in file_names {
        let c = format!("export * from \"./{}\";\n", file_name);
        export.push_str(c.as_ref());
      }

      file.write_all(export.as_bytes()).unwrap();
      File::flush(file).unwrap();
    },
    Err(err) => {
      panic!("Failed to open file: {}", err);
    },
  }
}

#[cfg(feature = "dart")]
fn generate_dart_protobuf_files(
  name: &str,
  proto_file_output_path: &str,
  paths: &[String],
  file_names: &Vec<String>,
  protoc_bin_path: &Path,
) {
  if std::env::var("CARGO_MAKE_WORKING_DIRECTORY").is_err() {
    log::error!("CARGO_MAKE_WORKING_DIRECTORY was not set, skip generate dart pb");
    return;
  }

  if std::env::var("FLUTTER_FLOWY_SDK_PATH").is_err() {
    log::error!("FLUTTER_FLOWY_SDK_PATH was not set, skip generate dart pb");
    return;
  }

  let mut output = PathBuf::new();
  output.push(std::env::var("CARGO_MAKE_WORKING_DIRECTORY").unwrap());
  output.push(std::env::var("FLUTTER_FLOWY_SDK_PATH").unwrap());
  output.push("lib");
  output.push("protobuf");
  output.push(name);

  if !output.as_path().exists() {
    std::fs::create_dir_all(&output).unwrap();
  }
  check_pb_dart_plugin();
  let protoc_bin_path = protoc_bin_path.to_str().unwrap().to_owned();
  paths.iter().for_each(|path| {
    let result = cmd_lib::run_cmd! {
      ${protoc_bin_path} --dart_out=${output} --proto_path=${proto_file_output_path} ${path}
    };
    if result.is_err() {
      panic!("Generate dart pb file failed with: {}, {:?}", path, result)
    };
  });

  let protobuf_dart = path_string_with_component(&output, vec!["protobuf.dart"]);

  match std::fs::OpenOptions::new()
    .create(true)
    .write(true)
    .append(false)
    .truncate(true)
    .open(&protobuf_dart)
  {
    Ok(ref mut file) => {
      let mut export = String::new();
      export.push_str("// Auto-generated, do not edit \n");
      for file_name in file_names {
        let c = format!("export './{}.pb.dart';\n", file_name);
        export.push_str(c.as_ref());
      }

      file.write_all(export.as_bytes()).unwrap();
      File::flush(file).unwrap();
    },
    Err(err) => {
      panic!("Failed to open file: {}", err);
    },
  }
}

pub fn check_pb_dart_plugin() {
  if cfg!(target_os = "windows") {
    //Command::new("cmd")
    //    .arg("/C")
    //    .arg(cmd)
    //    .status()
    //    .expect("failed to execute process");
    //panic!("{}", format!("\n❌ The protoc-gen-dart was not installed correctly."))
  } else {
    let exit_result = Command::new("sh")
      .arg("-c")
      .arg("command -v protoc-gen-dart")
      .status()
      .expect("failed to execute process");
    if !exit_result.success() {
      let mut msg = "\n❌ Can't find protoc-gen-dart in $PATH:\n".to_string();

      // Print every $PATH entry to help the user diagnose the missing plugin.
      let output = Command::new("sh").arg("-c").arg("echo $PATH").output();
      let paths = String::from_utf8(output.unwrap().stdout)
        .unwrap()
        .split(':')
        .map(|s| s.to_string())
        .collect::<Vec<String>>();
      paths.iter().for_each(|s| msg.push_str(&format!("{}\n", s)));

      if let Ok(output) = Command::new("sh")
        .arg("-c")
        .arg("which protoc-gen-dart")
        .output()
      {
        msg.push_str(&format!(
          "Installed protoc-gen-dart path: {:?}\n",
          String::from_utf8(output.stdout).unwrap()
        ));
      }

      msg.push_str("✅ You can fix that by adding:");
      msg.push_str("\n\texport PATH=\"$PATH\":\"$HOME/.pub-cache/bin\"\n");
      msg.push_str("to your shell's config file (.bashrc, .bash_profile, .zshrc, etc.)");
      panic!("{}", msg)
    }
  }
}

#[cfg(feature = "proto_gen")]
fn gen_proto_files(crate_name: &str, crate_path: &str) -> Vec<ProtobufCrate> {
  let crate_context = ProtoGenerator::gen(crate_name, crate_path);
  let proto_crates = crate_context
    .iter()
    .map(|info| info.protobuf_crate.clone())
    .collect::<Vec<_>>();

  crate_context
    .into_iter()
    .flat_map(|info| info.files)
    .for_each(|file| {
      println!("cargo:rerun-if-changed={}", file.file_path);
    });

  proto_crates
}
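
This gen function is meant to be invoked from a crate's build script. A sketch of the call site, assuming the generator is exposed to build.rs as a build dependency (the module path flowy_codegen::protobuf_file and the crate name are illustrative):

// build.rs
fn main() {
  flowy_codegen::protobuf_file::gen("flowy-folder");
}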

View file

@ -14,148 +14,158 @@ use std::{fs::OpenOptions, io::Write};
pub struct ProtoGenerator();
impl ProtoGenerator {
  pub fn gen(crate_name: &str, crate_path: &str) -> Vec<ProtobufCrateContext> {
    let crate_contexts = parse_protobuf_context_from(vec![crate_path.to_owned()]);
    write_proto_files(&crate_contexts);
    write_rust_crate_mod_file(&crate_contexts);

    let proto_cache = ProtoCache::from_crate_contexts(&crate_contexts);
    let proto_cache_str = serde_json::to_string(&proto_cache).unwrap();

    let crate_cache_dir = path_buf_with_component(&cache_dir(), vec![crate_name]);
    if !crate_cache_dir.as_path().exists() {
      std::fs::create_dir_all(&crate_cache_dir).unwrap();
    }

    let protobuf_cache_path = path_string_with_component(&crate_cache_dir, vec!["proto_cache"]);
    match std::fs::OpenOptions::new()
      .create(true)
      .write(true)
      .append(false)
      .truncate(true)
      .open(&protobuf_cache_path)
    {
      Ok(ref mut file) => {
        file.write_all(proto_cache_str.as_bytes()).unwrap();
        File::flush(file).unwrap();
      },
      Err(_err) => {
        panic!("Failed to open file: {}", protobuf_cache_path);
      },
    }

    crate_contexts
  }
}

fn write_proto_files(crate_contexts: &[ProtobufCrateContext]) {
  let file_path_content_map = crate_contexts
    .iter()
    .flat_map(|ctx| {
      ctx
        .files
        .iter()
        .map(|file| {
          (
            file.file_path.clone(),
            ProtoFileSymbol {
              file_name: file.file_name.clone(),
              symbols: file.symbols(),
            },
          )
        })
        .collect::<HashMap<String, ProtoFileSymbol>>()
    })
    .collect::<HashMap<String, ProtoFileSymbol>>();

  for context in crate_contexts {
    let dir = context.protobuf_crate.proto_output_path();
    context.files.iter().for_each(|file| {
      // syntax
      let mut file_content = file.syntax.clone();

      // import
      file_content.push_str(&gen_import_content(file, &file_path_content_map));

      // content
      file_content.push_str(&file.content);

      let proto_file = format!("{}.proto", &file.file_name);
      let proto_file_path = path_string_with_component(&dir, vec![&proto_file]);
      save_content_to_file_with_diff_prompt(&file_content, proto_file_path.as_ref());
    });
  }
}

fn gen_import_content(
  current_file: &ProtoFile,
  file_path_symbols_map: &HashMap<String, ProtoFileSymbol>,
) -> String {
  let mut import_files: Vec<String> = vec![];
  file_path_symbols_map
    .iter()
    .for_each(|(file_path, proto_file_symbols)| {
      if file_path != &current_file.file_path {
        current_file.ref_types.iter().for_each(|ref_type| {
          if proto_file_symbols.symbols.contains(ref_type) {
            let import_file = format!("import \"{}.proto\";", proto_file_symbols.file_name);
            if !import_files.contains(&import_file) {
              import_files.push(import_file);
            }
          }
        });
      }
    });
  if import_files.len() == 1 {
    format!("{}\n", import_files.pop().unwrap())
  } else {
    import_files.join("\n")
  }
}

struct ProtoFileSymbol {
  file_name: String,
  symbols: Vec<String>,
}

fn write_rust_crate_mod_file(crate_contexts: &[ProtobufCrateContext]) {
  for context in crate_contexts {
    let mod_path = context.protobuf_crate.proto_model_mod_file();
    match OpenOptions::new()
      .create(true)
      .write(true)
      .append(false)
      .truncate(true)
      .open(&mod_path)
    {
      Ok(ref mut file) => {
        let mut mod_file_content = String::new();

        mod_file_content.push_str("#![cfg_attr(rustfmt, rustfmt::skip)]\n");
        mod_file_content.push_str("// Auto-generated, do not edit\n");
        walk_dir(
          context.protobuf_crate.proto_output_path(),
          |e| !e.file_type().is_dir(),
          |_, name| {
            let c = format!("\nmod {};\npub use {}::*;\n", &name, &name);
            mod_file_content.push_str(c.as_ref());
          },
        );
        file.write_all(mod_file_content.as_bytes()).unwrap();
      },
      Err(err) => {
        panic!("Failed to open file: {}", err);
      },
    }
  }
}

impl ProtoCache {
  fn from_crate_contexts(crate_contexts: &[ProtobufCrateContext]) -> Self {
    let proto_files = crate_contexts
      .iter()
      .flat_map(|crate_info| &crate_info.files)
      .collect::<Vec<&ProtoFile>>();

    let structs: Vec<String> = proto_files
      .iter()
      .flat_map(|info| info.structs.clone())
      .collect();
    let enums: Vec<String> = proto_files
      .iter()
      .flat_map(|info| info.enums.clone())
      .collect();
    Self { structs, enums }
  }
}
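
A worked example of gen_import_content (file and symbol names here are illustrative): given grid.proto whose ref_types contain "FieldType", and a symbol-map entry recording that field.proto defines FieldType, the function returns

import "field.proto";

with a trailing newline when exactly one import is emitted; repeated references to the same file are deduplicated before joining.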

View file

@ -9,135 +9,140 @@ use walkdir::WalkDir;
#[derive(Debug)]
pub struct ProtobufCrateContext {
  pub files: Vec<ProtoFile>,
  pub protobuf_crate: ProtobufCrate,
}

impl ProtobufCrateContext {
  pub fn from_crate_info(inner: ProtobufCrate, files: Vec<ProtoFile>) -> Self {
    Self {
      files,
      protobuf_crate: inner,
    }
  }

  pub fn create_crate_mod_file(&self) {
    // mod model;
    // pub use model::*;
    let mod_file_path =
      path_string_with_component(&self.protobuf_crate.protobuf_crate_path(), vec!["mod.rs"]);
    let mut content = "#![cfg_attr(rustfmt, rustfmt::skip)]\n".to_owned();
    content.push_str("// Auto-generated, do not edit\n");
    content.push_str("mod model;\npub use model::*;");
    match OpenOptions::new()
      .create(true)
      .write(true)
      .append(false)
      .truncate(true)
      .open(&mod_file_path)
    {
      Ok(ref mut file) => {
        file.write_all(content.as_bytes()).unwrap();
      },
      Err(err) => {
        panic!("Failed to open protobuf mod file: {}", err);
      },
    }
  }

  #[allow(dead_code)]
  pub fn flutter_mod_dir(&self, root: &str) -> String {
    let crate_module_dir = format!("{}/{}", root, self.protobuf_crate.crate_folder);
    crate_module_dir
  }

  #[allow(dead_code)]
  pub fn flutter_mod_file(&self, root: &str) -> String {
    let crate_module_dir = format!(
      "{}/{}/protobuf.dart",
      root, self.protobuf_crate.crate_folder
    );
    crate_module_dir
  }
}

#[derive(Clone, Debug)]
pub struct ProtobufCrate {
  pub crate_folder: String,
  pub crate_path: PathBuf,
  flowy_config: FlowyConfig,
}

impl ProtobufCrate {
  pub fn from_config(config: CrateConfig) -> Self {
    ProtobufCrate {
      crate_path: config.crate_path,
      crate_folder: config.crate_folder,
      flowy_config: config.flowy_config,
    }
  }

  // Returns the paths of the Rust files that are used to generate the proto files.
  pub fn proto_input_paths(&self) -> Vec<PathBuf> {
    self
      .flowy_config
      .proto_input
      .iter()
      .map(|name| path_buf_with_component(&self.crate_path, vec![name]))
      .collect::<Vec<PathBuf>>()
  }

  // The protobuf_crate_path is used to store the generated protobuf Rust structures.
  pub fn protobuf_crate_path(&self) -> PathBuf {
    let crate_path = PathBuf::from(&self.flowy_config.protobuf_crate_path);
    create_dir_if_not_exist(&crate_path);
    crate_path
  }

  // The proto_output_path is used to store the proto files.
  pub fn proto_output_path(&self) -> PathBuf {
    let output_dir = PathBuf::from(&self.flowy_config.proto_output);
    create_dir_if_not_exist(&output_dir);
    output_dir
  }

  pub fn proto_model_mod_file(&self) -> String {
    path_string_with_component(&self.protobuf_crate_path(), vec!["mod.rs"])
  }
}

#[derive(Debug)]
pub struct ProtoFile {
  pub file_path: String,
  pub file_name: String,
  pub structs: Vec<String>,
  // stores the types referenced by the current file
  pub ref_types: Vec<String>,
  pub enums: Vec<String>,
  // proto syntax. "proto3" or "proto2"
  pub syntax: String,
  // proto message content
  pub content: String,
}

impl ProtoFile {
  pub fn symbols(&self) -> Vec<String> {
    let mut symbols = self.structs.clone();
    let mut enum_symbols = self.enums.clone();
    symbols.append(&mut enum_symbols);
    symbols
  }
}

pub fn parse_crate_info_from_path(roots: Vec<String>) -> Vec<ProtobufCrate> {
  let mut protobuf_crates: Vec<ProtobufCrate> = vec![];
  roots.iter().for_each(|root| {
    let crates = WalkDir::new(root)
      .into_iter()
      .filter_entry(|e| !is_hidden(e))
      .filter_map(|e| e.ok())
      .filter(is_crate_dir)
      .flat_map(|e| parse_crate_config_from(&e))
      .map(ProtobufCrate::from_config)
      .collect::<Vec<ProtobufCrate>>();
    protobuf_crates.extend(crates);
  });
  protobuf_crates
}

View file

@@ -3,33 +3,33 @@ use itertools::Itertools;
use tera::Context;

pub struct ProtobufDeriveMeta {
  context: Context,
  structs: Vec<String>,
  enums: Vec<String>,
}

#[allow(dead_code)]
impl ProtobufDeriveMeta {
  pub fn new(structs: Vec<String>, enums: Vec<String>) -> Self {
    let enums: Vec<_> = enums.into_iter().unique().collect();
    ProtobufDeriveMeta {
      context: Context::new(),
      structs,
      enums,
    }
  }

  pub fn render(&mut self) -> Option<String> {
    self.context.insert("names", &self.structs);
    self.context.insert("enums", &self.enums);

    let tera = get_tera("protobuf_file/template/derive_meta");
    match tera.render("derive_meta.tera", &self.context) {
      Ok(r) => Some(r),
      Err(e) => {
        log::error!("{:?}", e);
        None
      },
    }
  }
}
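A small usage sketch of the renderer above; the type names are illustrative, and duplicate enum entries are dropped by the unique() call in new():

fn main() {
  let structs = vec!["DocumentDataPB".to_string()];
  let enums = vec!["ExportType".to_string(), "ExportType".to_string()];
  let mut meta = ProtobufDeriveMeta::new(structs, enums);
  if let Some(content) = meta.render() {
    println!("{}", content);
  }
}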

View file

@@ -3,36 +3,38 @@ use crate::util::get_tera;
use tera::Context;

pub struct EnumTemplate {
  context: Context,
  items: Vec<String>,
}

#[allow(dead_code)]
impl EnumTemplate {
  pub fn new() -> Self {
    EnumTemplate {
      context: Context::new(),
      items: vec![],
    }
  }

  pub fn set_message_enum(&mut self, flowy_enum: &FlowyEnum) {
    self.context.insert("enum_name", &flowy_enum.name);
    flowy_enum.attrs.iter().for_each(|item| {
      self.items.push(format!(
        "{} = {};",
        item.attrs.enum_item_name, item.attrs.value
      ))
    })
  }

  pub fn render(&mut self) -> Option<String> {
    self.context.insert("items", &self.items);
    let tera = get_tera("protobuf_file/template/proto_file");
    match tera.render("enum.tera", &self.context) {
      Ok(r) => Some(r),
      Err(e) => {
        log::error!("{:?}", e);
        None
      },
    }
  }
}
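To make the template input concrete, here is the shape of the pushed items with made-up variants (FlowyEnum itself is defined elsewhere in the codegen crates):

fn main() {
  let items = vec![
    format!("{} = {};", "Initial", 0),
    format!("{} = {};", "Deleted", 1),
  ];
  // enum.tera receives these lines through the "items" context variable.
  assert_eq!(items[0], "Initial = 0;");
}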

View file

@@ -16,91 +16,95 @@ pub static RUST_TYPE_MAP: phf::Map<&'static str, &'static str> = phf_map! {
};

pub struct StructTemplate {
  context: Context,
  fields: Vec<String>,
}

#[allow(dead_code)]
impl StructTemplate {
  pub fn new() -> Self {
    StructTemplate {
      context: Context::new(),
      fields: vec![],
    }
  }

  pub fn set_message_struct_name(&mut self, name: &str) {
    self.context.insert("struct_name", name);
  }

  pub fn set_field(&mut self, field: &ASTField) {
    // {{ field_type }} {{ field_name }} = {{index}};
    let name = field.name().unwrap().to_string();
    let index = field.pb_attrs.pb_index().unwrap();

    let ty: &str = &field.ty_as_str();
    let mut mapped_ty: &str = ty;

    if RUST_TYPE_MAP.contains_key(ty) {
      mapped_ty = RUST_TYPE_MAP[ty];
    }

    if let Some(ref category) = field.bracket_category {
      match category {
        BracketCategory::Opt => match &field.bracket_inner_ty {
          None => {},
          Some(inner_ty) => match inner_ty.to_string().as_str() {
            //TODO: support hashmap or something else wrapped by Option
            "Vec" => {
              self.fields.push(format!(
                "oneof one_of_{} {{ bytes {} = {}; }};",
                name, name, index
              ));
            },
            _ => {
              self.fields.push(format!(
                "oneof one_of_{} {{ {} {} = {}; }};",
                name, mapped_ty, name, index
              ));
            },
          },
        },
        BracketCategory::Map((k, v)) => {
          let key: &str = k;
          let value: &str = v;
          self.fields.push(format!(
            // map<string, string> attrs = 1;
            "map<{}, {}> {} = {};",
            RUST_TYPE_MAP.get(key).unwrap_or(&key),
            RUST_TYPE_MAP.get(value).unwrap_or(&value),
            name,
            index
          ));
        },
        BracketCategory::Vec => {
          let bracket_ty: &str = &field.bracket_ty.as_ref().unwrap().to_string();
          // Vec<u8>
          if mapped_ty == "u8" && bracket_ty == "Vec" {
            self.fields.push(format!("bytes {} = {};", name, index))
          } else {
            self.fields.push(format!(
              "{} {} {} = {};",
              RUST_TYPE_MAP[bracket_ty], mapped_ty, name, index
            ))
          }
        },
        BracketCategory::Other => self
          .fields
          .push(format!("{} {} = {};", mapped_ty, name, index)),
      }
    }
  }

  pub fn render(&mut self) -> Option<String> {
    self.context.insert("fields", &self.fields);
    let tera = get_tera("protobuf_file/template/proto_file");
    match tera.render("struct.tera", &self.context) {
      Ok(r) => Some(r),
      Err(e) => {
        log::error!("{:?}", e);
        None
      },
    }
  }
}
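Since set_field packs several cases into one match, here is a standalone illustration of the proto field lines each bracket category emits; the field name and index are made up:

fn main() {
  let (name, index) = ("cells", 1);
  // BracketCategory::Other with a type already mapped through RUST_TYPE_MAP:
  assert_eq!(format!("{} {} = {};", "string", name, index), "string cells = 1;");
  // BracketCategory::Vec over u8 collapses to a proto `bytes` field:
  assert_eq!(format!("bytes {} = {};", name, index), "bytes cells = 1;");
  // BracketCategory::Opt wraps the field in a oneof so "unset" stays distinguishable:
  assert_eq!(
    format!("oneof one_of_{} {{ {} {} = {}; }};", name, "string", name, index),
    "oneof one_of_cells { string cells = 1; };"
  );
  // BracketCategory::Map turns a Rust map into a proto map field:
  assert_eq!(
    format!("map<{}, {}> {} = {};", "string", "string", name, index),
    "map<string, string> cells = 1;"
  );
}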

View file

@@ -2,64 +2,69 @@ use crate::util::get_tera;
use tera::Context;

pub struct EventTemplate {
  tera_context: Context,
}

pub struct EventRenderContext {
  pub input_deserializer: Option<String>,
  pub output_deserializer: Option<String>,
  pub error_deserializer: String,
  pub event: String,
  pub event_ty: String,
  pub prefix: String,
}

#[allow(dead_code)]
impl EventTemplate {
  pub fn new() -> Self {
    EventTemplate {
      tera_context: Context::new(),
    }
  }

  pub fn render(&mut self, ctx: EventRenderContext, index: usize) -> Option<String> {
    self.tera_context.insert("index", &index);
    let event_func_name = format!("{}{}", ctx.event_ty, ctx.event);
    self
      .tera_context
      .insert("event_func_name", &event_func_name);
    self
      .tera_context
      .insert("event_name", &format!("{}.{}", ctx.prefix, ctx.event_ty));
    self.tera_context.insert("event", &ctx.event);

    self
      .tera_context
      .insert("has_input", &ctx.input_deserializer.is_some());
    match ctx.input_deserializer {
      None => {},
      Some(ref input) => self
        .tera_context
        .insert("input_deserializer", &format!("{}.{}", ctx.prefix, input)),
    }

    let has_output = ctx.output_deserializer.is_some();
    self.tera_context.insert("has_output", &has_output);

    match ctx.output_deserializer {
      None => self.tera_context.insert("output_deserializer", "void"),
      Some(ref output) => self
        .tera_context
        .insert("output_deserializer", &format!("{}.{}", ctx.prefix, output)),
    }

    self.tera_context.insert(
      "error_deserializer",
      &format!("{}.{}", ctx.prefix, ctx.error_deserializer),
    );

    let tera = get_tera("ts_event");
    match tera.render("event_template.tera", &self.tera_context) {
      Ok(r) => Some(r),
      Err(e) => {
        log::error!("{:?}", e);
        None
      },
    }
  }
}
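A usage sketch with a hypothetical event; the deserializer names are placeholders shaped like the values ast_to_event_render_ctx produces in the generator that drives this template:

fn main() {
  let ctx = EventRenderContext {
    input_deserializer: Some("SignInPayloadPB".to_string()),
    output_deserializer: Some("UserProfilePB".to_string()),
    error_deserializer: "FlowyError".to_string(),
    event: "SignIn".to_string(),
    event_ty: "UserEvent".to_string(),
    prefix: "pb".to_string(),
  };
  let mut template = EventTemplate::new();
  if let Some(ts_code) = template.render(ctx, 0) {
    println!("{}", ts_code);
  }
}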

View file

@@ -13,175 +13,187 @@ use syn::Item;
use walkdir::WalkDir;

pub fn gen(crate_name: &str) {
  let root = std::env::var("CARGO_MAKE_WORKING_DIRECTORY").unwrap_or("../../".to_string());
  let tauri_backend_service_path = std::env::var("TAURI_BACKEND_SERVICE_PATH")
    .unwrap_or("appflowy_tauri/src/services/backend".to_string());

  let crate_path = std::fs::canonicalize(".")
    .unwrap()
    .as_path()
    .display()
    .to_string();
  let event_crates = parse_ts_event_files(vec![crate_path]);
  let event_ast = event_crates
    .iter()
    .flat_map(parse_event_crate)
    .collect::<Vec<_>>();

  let event_render_ctx = ast_to_event_render_ctx(event_ast.as_ref());
  let mut render_result = TS_HEADER.to_string();
  for (index, render_ctx) in event_render_ctx.into_iter().enumerate() {
    let mut event_template = EventTemplate::new();

    if let Some(content) = event_template.render(render_ctx, index) {
      render_result.push_str(content.as_ref())
    }
  }
  render_result.push_str(TS_FOOTER);

  let ts_event_folder: PathBuf = [&root, &tauri_backend_service_path, "events", crate_name]
    .iter()
    .collect();
  if !ts_event_folder.as_path().exists() {
    std::fs::create_dir_all(ts_event_folder.as_path()).unwrap();
  }

  let event_file = "event";
  let event_file_ext = "ts";
  let ts_event_file_path = path_string_with_component(
    &ts_event_folder,
    vec![&format!("{}.{}", event_file, event_file_ext)],
  );
  println!("cargo:rerun-if-changed={}", ts_event_file_path);

  match std::fs::OpenOptions::new()
    .create(true)
    .write(true)
    .append(false)
    .truncate(true)
    .open(&ts_event_file_path)
  {
    Ok(ref mut file) => {
      file.write_all(render_result.as_bytes()).unwrap();
      File::flush(file).unwrap();
    },
    Err(err) => {
      panic!("Failed to open file: {}, {:?}", ts_event_file_path, err);
    },
  }

  let ts_index = path_string_with_component(&ts_event_folder, vec!["index.ts"]);
  match std::fs::OpenOptions::new()
    .create(true)
    .write(true)
    .append(false)
    .truncate(true)
    .open(&ts_index)
  {
    Ok(ref mut file) => {
      let mut export = String::new();
      export.push_str("// Auto-generated, do not edit \n");
      export.push_str(&format!("export * from '../../classes/{}';\n", crate_name));
      export.push_str(&format!("export * from './{}';\n", event_file));

      file.write_all(export.as_bytes()).unwrap();
      File::flush(file).unwrap();
    },
    Err(err) => {
      panic!("Failed to open file: {}", err);
    },
  }
}

#[derive(Debug)]
pub struct TsEventCrate {
  crate_path: PathBuf,
  event_files: Vec<String>,
}

impl TsEventCrate {
  pub fn from_config(config: &CrateConfig) -> Self {
    TsEventCrate {
      crate_path: config.crate_path.clone(),
      event_files: config.flowy_config.event_files.clone(),
    }
  }
}

pub fn parse_ts_event_files(crate_paths: Vec<String>) -> Vec<TsEventCrate> {
  let mut ts_event_crates: Vec<TsEventCrate> = vec![];
  crate_paths.iter().for_each(|path| {
    let crates = WalkDir::new(path)
      .into_iter()
      .filter_entry(|e| !is_hidden(e))
      .filter_map(|e| e.ok())
      .filter(is_crate_dir)
      .flat_map(|e| parse_crate_config_from(&e))
      .map(|crate_config| TsEventCrate::from_config(&crate_config))
      .collect::<Vec<TsEventCrate>>();
    ts_event_crates.extend(crates);
  });
  ts_event_crates
}

pub fn parse_event_crate(event_crate: &TsEventCrate) -> Vec<EventASTContext> {
  event_crate
    .event_files
    .iter()
    .flat_map(|event_file| {
      let file_path =
        path_string_with_component(&event_crate.crate_path, vec![event_file.as_str()]);

      let file_content = read_file(file_path.as_ref()).unwrap();
      let ast = syn::parse_file(file_content.as_ref()).expect("Unable to parse file");
      ast
        .items
        .iter()
        .flat_map(|item| match item {
          Item::Enum(item_enum) => {
            let ast_result = ASTResult::new();
            let attrs = flowy_ast::enum_from_ast(
              &ast_result,
              &item_enum.ident,
              &item_enum.variants,
              &item_enum.attrs,
            );
            ast_result.check().unwrap();
            attrs
              .iter()
              .filter(|attr| !attr.attrs.event_attrs.ignore)
              .enumerate()
              .map(|(_index, variant)| EventASTContext::from(&variant.attrs))
              .collect::<Vec<_>>()
          },
          _ => vec![],
        })
        .collect::<Vec<_>>()
    })
    .collect::<Vec<EventASTContext>>()
}

pub fn ast_to_event_render_ctx(ast: &[EventASTContext]) -> Vec<EventRenderContext> {
  let mut import_objects = HashSet::new();
  ast.iter().for_each(|event_ast| {
    if let Some(input) = event_ast.event_input.as_ref() {
      import_objects.insert(input.get_ident().unwrap().to_string());
    }
    if let Some(output) = event_ast.event_output.as_ref() {
      import_objects.insert(output.get_ident().unwrap().to_string());
    }
  });

  ast
    .iter()
    .map(|event_ast| {
      let input_deserializer = event_ast
        .event_input
        .as_ref()
        .map(|event_input| event_input.get_ident().unwrap().to_string());

      let output_deserializer = event_ast
        .event_output
        .as_ref()
        .map(|event_output| event_output.get_ident().unwrap().to_string());

      EventRenderContext {
        input_deserializer,
        output_deserializer,
        error_deserializer: event_ast.event_error.to_string(),
        event: event_ast.event.to_string(),
        event_ty: event_ast.event_ty.to_string(),
        prefix: "pb".to_string(),
      }
    })
    .collect::<Vec<EventRenderContext>>()
}
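A sketch of how a build script might drive the generator; the crate name is illustrative, and both environment variables fall back to the defaults visible in gen() when unset:

fn main() {
  gen("flowy-user");
}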
const TS_HEADER: &str = r#"

View file

@@ -3,172 +3,188 @@ use similar::{ChangeTag, TextDiff};
use std::path::{Path, PathBuf};
use std::str::FromStr;
use std::{
  fs::{File, OpenOptions},
  io::{Read, Write},
};
use tera::Tera;
use walkdir::WalkDir;

pub fn read_file(path: &str) -> Option<String> {
  let mut file = File::open(path).unwrap_or_else(|_| panic!("Unable to open file at {}", path));
  let mut content = String::new();
  match file.read_to_string(&mut content) {
    Ok(_) => Some(content),
    Err(e) => {
      log::error!("{}, with error: {:?}", path, e);
      Some("".to_string())
    },
  }
}

pub fn save_content_to_file_with_diff_prompt(content: &str, output_file: &str) {
  if Path::new(output_file).exists() {
    let old_content = read_file(output_file).unwrap();
    let new_content = content.to_owned();
    let write_to_file = || match OpenOptions::new()
      .create(true)
      .write(true)
      .append(false)
      .truncate(true)
      .open(output_file)
    {
      Ok(ref mut file) => {
        file.write_all(new_content.as_bytes()).unwrap();
      },
      Err(err) => {
        panic!("Failed to open log file: {}", err);
      },
    };
    if new_content != old_content {
      print_diff(old_content, new_content.clone());
      write_to_file()
    }
  } else {
    match OpenOptions::new()
      .create(true)
      .write(true)
      .open(output_file)
    {
      Ok(ref mut file) => file.write_all(content.as_bytes()).unwrap(),
      Err(err) => panic!("Open or create to {} fail: {}", output_file, err),
    }
  }
}

pub fn print_diff(old_content: String, new_content: String) {
  let diff = TextDiff::from_lines(&old_content, &new_content);
  for op in diff.ops() {
    for change in diff.iter_changes(op) {
      let (sign, style) = match change.tag() {
        ChangeTag::Delete => ("-", Style::new().red()),
        ChangeTag::Insert => ("+", Style::new().green()),
        ChangeTag::Equal => (" ", Style::new()),
      };

      match change.tag() {
        ChangeTag::Delete => {
          print!("{}{}", style.apply_to(sign).bold(), style.apply_to(change));
        },
        ChangeTag::Insert => {
          print!("{}{}", style.apply_to(sign).bold(), style.apply_to(change));
        },
        ChangeTag::Equal => {},
      };
    }
    println!("---------------------------------------------------");
  }
}

#[allow(dead_code)]
pub fn is_crate_dir(e: &walkdir::DirEntry) -> bool {
  let cargo = e.path().file_stem().unwrap().to_str().unwrap().to_string();
  cargo == *"Cargo"
}

#[allow(dead_code)]
pub fn is_proto_file(e: &walkdir::DirEntry) -> bool {
  if e.path().extension().is_none() {
    return false;
  }
  let ext = e.path().extension().unwrap().to_str().unwrap().to_string();
  ext == *"proto"
}

pub fn is_hidden(entry: &walkdir::DirEntry) -> bool {
  entry
    .file_name()
    .to_str()
    .map(|s| s.starts_with('.'))
    .unwrap_or(false)
}

pub fn create_dir_if_not_exist(dir: &Path) {
  if !dir.exists() {
    std::fs::create_dir_all(dir).unwrap();
  }
}

pub fn path_string_with_component(path: &Path, components: Vec<&str>) -> String {
  path_buf_with_component(path, components)
    .to_str()
    .unwrap()
    .to_string()
}

#[allow(dead_code)]
pub fn path_buf_with_component(path: &Path, components: Vec<&str>) -> PathBuf {
  let mut path_buf = path.to_path_buf();
  for component in components {
    path_buf.push(component);
  }
  path_buf
}

#[allow(dead_code)]
pub fn walk_dir<P: AsRef<Path>, F1, F2>(dir: P, filter: F2, mut path_and_name: F1)
where
  F1: FnMut(String, String),
  F2: Fn(&walkdir::DirEntry) -> bool,
{
  for (path, name) in WalkDir::new(dir)
    .into_iter()
    .filter_map(|e| e.ok())
    .filter(|e| filter(e))
    .map(|e| {
      (
        e.path().to_str().unwrap().to_string(),
        e.path().file_stem().unwrap().to_str().unwrap().to_string(),
      )
    })
  {
    path_and_name(path, name);
  }
}

#[allow(dead_code)]
pub fn suffix_relative_to_path(path: &str, base: &str) -> String {
  let base = Path::new(base);
  let path = Path::new(path);
  path
    .strip_prefix(base)
    .unwrap()
    .to_str()
    .unwrap()
    .to_owned()
}

pub fn get_tera(directory: &str) -> Tera {
  let mut root = format!("{}/src/", env!("CARGO_MANIFEST_DIR"));
  root.push_str(directory);

  let root_absolute_path = match std::fs::canonicalize(&root) {
    Ok(p) => p.as_path().display().to_string(),
    Err(e) => {
      panic!("❌ Canonicalize file path {} failed {:?}", root, e);
    },
  };

  let mut template_path = format!("{}/**/*.tera", root_absolute_path);
  if cfg!(windows) {
    // remove "\\?\" prefix on windows
    template_path = format!("{}/**/*.tera", &root_absolute_path[4..]);
  }

  match Tera::new(template_path.as_ref()) {
    Ok(t) => t,
    Err(e) => {
      log::error!("Parsing error(s): {}", e);
      ::std::process::exit(1);
    },
  }
}

pub fn cache_dir() -> PathBuf {
  let mut path_buf = PathBuf::from_str(env!("CARGO_MANIFEST_DIR")).unwrap();
  path_buf.push(".cache");
  path_buf
}
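A quick check of the path helpers above, assuming a Unix-style layout:

fn main() {
  let base = std::path::Path::new("/tmp/appflowy");
  let joined = path_buf_with_component(base, vec!["proto", "model"]);
  assert_eq!(joined.to_str().unwrap(), "/tmp/appflowy/proto/model");
  assert_eq!(
    suffix_relative_to_path("/tmp/appflowy/proto/model", "/tmp/appflowy"),
    "proto/model"
  );
}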

View file

@@ -1,8 +1,8 @@
use bytes::Bytes;
use flowy_client_ws::FlowyWebSocketConnect;
use flowy_document::{
  errors::{internal_error, FlowyError},
  DocumentCloudService, DocumentConfig, DocumentDatabase, DocumentManager, DocumentUser,
};
use flowy_net::ClientServerConfiguration;
use flowy_net::{http_server::document::DocumentCloudServiceImpl, local_server::LocalServer};
@@ -17,98 +17,101 @@ use ws_model::ws_revision::ClientRevisionWSData;

pub struct DocumentDepsResolver();
impl DocumentDepsResolver {
  pub fn resolve(
    local_server: Option<Arc<LocalServer>>,
    ws_conn: Arc<FlowyWebSocketConnect>,
    user_session: Arc<UserSession>,
    server_config: &ClientServerConfiguration,
    document_config: &DocumentConfig,
  ) -> Arc<DocumentManager> {
    let user = Arc::new(BlockUserImpl(user_session.clone()));
    let rev_web_socket = Arc::new(DocumentRevisionWebSocket(ws_conn.clone()));
    let cloud_service: Arc<dyn DocumentCloudService> = match local_server {
      None => Arc::new(DocumentCloudServiceImpl::new(server_config.clone())),
      Some(local_server) => local_server,
    };
    let database = Arc::new(DocumentDatabaseImpl(user_session));

    let manager = Arc::new(DocumentManager::new(
      cloud_service,
      user,
      database,
      rev_web_socket,
      document_config.clone(),
    ));
    let receiver = Arc::new(DocumentWSMessageReceiverImpl(manager.clone()));
    ws_conn.add_ws_message_receiver(receiver).unwrap();

    manager
  }
}

struct BlockUserImpl(Arc<UserSession>);
impl DocumentUser for BlockUserImpl {
  fn user_dir(&self) -> Result<String, FlowyError> {
    let dir = self
      .0
      .user_dir()
      .map_err(|e| FlowyError::unauthorized().context(e))?;

    let doc_dir = format!("{}/document", dir);
    if !Path::new(&doc_dir).exists() {
      std::fs::create_dir_all(&doc_dir)?;
    }
    Ok(doc_dir)
  }

  fn user_id(&self) -> Result<String, FlowyError> {
    self.0.user_id()
  }

  fn token(&self) -> Result<String, FlowyError> {
    self.0.token()
  }
}

struct DocumentDatabaseImpl(Arc<UserSession>);
impl DocumentDatabase for DocumentDatabaseImpl {
  fn db_pool(&self) -> Result<Arc<ConnectionPool>, FlowyError> {
    self.0.db_pool()
  }
}

struct DocumentRevisionWebSocket(Arc<FlowyWebSocketConnect>);
impl RevisionWebSocket for DocumentRevisionWebSocket {
  fn send(&self, data: ClientRevisionWSData) -> BoxResultFuture<(), FlowyError> {
    let bytes: Bytes = data.try_into().unwrap();
    let msg = WebSocketRawMessage {
      channel: WSChannel::Document,
      data: bytes.to_vec(),
    };
    let ws_conn = self.0.clone();
    Box::pin(async move {
      match ws_conn.web_socket().await? {
        None => {},
        Some(sender) => {
          sender.send(msg).map_err(internal_error)?;
        },
      }
      Ok(())
    })
  }

  fn subscribe_state_changed(&self) -> BoxFuture<WSStateReceiver> {
    let ws_conn = self.0.clone();
    Box::pin(async move { ws_conn.subscribe_websocket_state().await })
  }
}

struct DocumentWSMessageReceiverImpl(Arc<DocumentManager>);
impl WSMessageReceiver for DocumentWSMessageReceiverImpl {
  fn source(&self) -> WSChannel {
    WSChannel::Document
  }
  fn receive_message(&self, msg: WebSocketRawMessage) {
    let handler = self.0.clone();
    tokio::spawn(async move {
      handler.receive_ws_data(Bytes::from(msg.data)).await;
    });
  }
}

View file

@@ -10,9 +10,9 @@ use flowy_document::DocumentManager;
use flowy_folder::entities::{ViewDataFormatPB, ViewLayoutTypePB, ViewPB};
use flowy_folder::manager::{ViewDataProcessor, ViewDataProcessorMap};
use flowy_folder::{
  errors::{internal_error, FlowyError},
  event_map::{FolderCouldServiceV1, WorkspaceDatabase, WorkspaceUser},
  manager::FolderManager,
};
use flowy_net::ClientServerConfiguration;
use flowy_net::{http_server::folder::FolderHttpCloudService, local_server::LocalServer};
@@ -30,294 +30,320 @@ use ws_model::ws_revision::ClientRevisionWSData;

pub struct FolderDepsResolver();
impl FolderDepsResolver {
  pub async fn resolve(
    local_server: Option<Arc<LocalServer>>,
    user_session: Arc<UserSession>,
    server_config: &ClientServerConfiguration,
    ws_conn: &Arc<FlowyWebSocketConnect>,
    text_block_manager: &Arc<DocumentManager>,
    grid_manager: &Arc<DatabaseManager>,
  ) -> Arc<FolderManager> {
    let user: Arc<dyn WorkspaceUser> = Arc::new(WorkspaceUserImpl(user_session.clone()));
    let database: Arc<dyn WorkspaceDatabase> = Arc::new(WorkspaceDatabaseImpl(user_session));
    let web_socket = Arc::new(FolderRevisionWebSocket(ws_conn.clone()));
    let cloud_service: Arc<dyn FolderCouldServiceV1> = match local_server {
      None => Arc::new(FolderHttpCloudService::new(server_config.clone())),
      Some(local_server) => local_server,
    };

    let view_data_processor =
      make_view_data_processor(text_block_manager.clone(), grid_manager.clone());
    let folder_manager = Arc::new(
      FolderManager::new(
        user.clone(),
        cloud_service,
        database,
        view_data_processor,
        web_socket,
      )
      .await,
    );

    if let (Ok(user_id), Ok(token)) = (user.user_id(), user.token()) {
      match folder_manager.initialize(&user_id, &token).await {
        Ok(_) => {},
        Err(e) => tracing::error!("Initialize folder manager failed: {}", e),
      }
    }

    let receiver = Arc::new(FolderWSMessageReceiverImpl(folder_manager.clone()));
    ws_conn.add_ws_message_receiver(receiver).unwrap();
    folder_manager
  }
}

fn make_view_data_processor(
  document_manager: Arc<DocumentManager>,
  grid_manager: Arc<DatabaseManager>,
) -> ViewDataProcessorMap {
  let mut map: HashMap<ViewDataFormatPB, Arc<dyn ViewDataProcessor + Send + Sync>> = HashMap::new();

  let document_processor = Arc::new(DocumentViewDataProcessor(document_manager));
  document_processor
    .data_types()
    .into_iter()
    .for_each(|data_type| {
      map.insert(data_type, document_processor.clone());
    });

  let grid_data_impl = Arc::new(GridViewDataProcessor(grid_manager));
  grid_data_impl
    .data_types()
    .into_iter()
    .for_each(|data_type| {
      map.insert(data_type, grid_data_impl.clone());
    });

  Arc::new(map)
}

struct WorkspaceDatabaseImpl(Arc<UserSession>);
impl WorkspaceDatabase for WorkspaceDatabaseImpl {
  fn db_pool(&self) -> Result<Arc<ConnectionPool>, FlowyError> {
    self
      .0
      .db_pool()
      .map_err(|e| FlowyError::internal().context(e))
  }
}

struct WorkspaceUserImpl(Arc<UserSession>);
impl WorkspaceUser for WorkspaceUserImpl {
  fn user_id(&self) -> Result<String, FlowyError> {
    self
      .0
      .user_id()
      .map_err(|e| FlowyError::internal().context(e))
  }

  fn token(&self) -> Result<String, FlowyError> {
    self
      .0
      .token()
      .map_err(|e| FlowyError::internal().context(e))
  }
}

struct FolderRevisionWebSocket(Arc<FlowyWebSocketConnect>);
impl RevisionWebSocket for FolderRevisionWebSocket {
  fn send(&self, data: ClientRevisionWSData) -> BoxResultFuture<(), FlowyError> {
    let bytes: Bytes = data.try_into().unwrap();
    let msg = WebSocketRawMessage {
      channel: WSChannel::Folder,
      data: bytes.to_vec(),
    };
    let ws_conn = self.0.clone();
    Box::pin(async move {
      match ws_conn.web_socket().await? {
        None => {},
        Some(sender) => {
          sender.send(msg).map_err(internal_error)?;
        },
      }
      Ok(())
    })
  }

  fn subscribe_state_changed(&self) -> BoxFuture<WSStateReceiver> {
    let ws_conn = self.0.clone();
    Box::pin(async move { ws_conn.subscribe_websocket_state().await })
  }
}

struct FolderWSMessageReceiverImpl(Arc<FolderManager>);
impl WSMessageReceiver for FolderWSMessageReceiverImpl {
  fn source(&self) -> WSChannel {
    WSChannel::Folder
  }
  fn receive_message(&self, msg: WebSocketRawMessage) {
    let handler = self.0.clone();
    tokio::spawn(async move {
      handler.did_receive_ws_data(Bytes::from(msg.data)).await;
    });
  }
}

struct DocumentViewDataProcessor(Arc<DocumentManager>);
impl ViewDataProcessor for DocumentViewDataProcessor {
  fn create_view(
    &self,
    _user_id: &str,
    view_id: &str,
    layout: ViewLayoutTypePB,
    view_data: Bytes,
  ) -> FutureResult<(), FlowyError> {
    // Only accept Document type
    debug_assert_eq!(layout, ViewLayoutTypePB::Document);
    let view_data = match String::from_utf8(view_data.to_vec()) {
      Ok(content) => match make_transaction_from_document_content(&content) {
        Ok(transaction) => transaction.to_bytes().unwrap_or(vec![]),
        Err(_) => vec![],
      },
      Err(_) => vec![],
    };

    let revision = Revision::initial_revision(view_id, Bytes::from(view_data));
    let view_id = view_id.to_string();
    let manager = self.0.clone();

    FutureResult::new(async move {
      manager.create_document(view_id, vec![revision]).await?;
      Ok(())
    })
  }

  fn close_view(&self, view_id: &str) -> FutureResult<(), FlowyError> {
    let manager = self.0.clone();
    let view_id = view_id.to_string();
    FutureResult::new(async move {
      manager.close_document_editor(view_id).await?;
      Ok(())
    })
  }

  fn get_view_data(&self, view: &ViewPB) -> FutureResult<Bytes, FlowyError> {
    let view_id = view.id.clone();
    let manager = self.0.clone();
    FutureResult::new(async move {
      let editor = manager.open_document_editor(view_id).await?;
      let document_data = Bytes::from(editor.duplicate().await?);
      Ok(document_data)
    })
  }

  fn create_default_view(
    &self,
    user_id: &str,
    view_id: &str,
    layout: ViewLayoutTypePB,
    _data_format: ViewDataFormatPB,
  ) -> FutureResult<Bytes, FlowyError> {
    debug_assert_eq!(layout, ViewLayoutTypePB::Document);
    let _user_id = user_id.to_string();
    let view_id = view_id.to_string();
    let manager = self.0.clone();
    let document_content = self.0.initial_document_content();
    FutureResult::new(async move {
      let delta_data = Bytes::from(document_content);
      let revision = Revision::initial_revision(&view_id, delta_data.clone());
      manager.create_document(view_id, vec![revision]).await?;
      Ok(delta_data)
    })
  }

  fn create_view_with_data(
    &self,
    _user_id: &str,
    _view_id: &str,
    data: Vec<u8>,
    layout: ViewLayoutTypePB,
  ) -> FutureResult<Bytes, FlowyError> {
    debug_assert_eq!(layout, ViewLayoutTypePB::Document);
    FutureResult::new(async move { Ok(Bytes::from(data)) })
  }

  fn data_types(&self) -> Vec<ViewDataFormatPB> {
    vec![ViewDataFormatPB::DeltaFormat, ViewDataFormatPB::NodeFormat]
  }
}

struct GridViewDataProcessor(Arc<DatabaseManager>);
impl ViewDataProcessor for GridViewDataProcessor {
  fn create_view(
    &self,
    _user_id: &str,
    view_id: &str,
    _layout: ViewLayoutTypePB,
    delta_data: Bytes,
  ) -> FutureResult<(), FlowyError> {
    let revision = Revision::initial_revision(view_id, delta_data);
    let view_id = view_id.to_string();
    let grid_manager = self.0.clone();
    FutureResult::new(async move {
      grid_manager
        .create_database(view_id, vec![revision])
        .await?;
      Ok(())
    })
  }

  fn close_view(&self, view_id: &str) -> FutureResult<(), FlowyError> {
    let grid_manager = self.0.clone();
    let view_id = view_id.to_string();
    FutureResult::new(async move {
      grid_manager.close_database(view_id).await?;
      Ok(())
    })
  }

  fn get_view_data(&self, view: &ViewPB) -> FutureResult<Bytes, FlowyError> {
    let grid_manager = self.0.clone();
    let view_id = view.id.clone();
    FutureResult::new(async move {
      let editor = grid_manager.open_database(view_id).await?;
      let delta_bytes = editor.duplicate_grid().await?;
      Ok(delta_bytes.into())
    })
  }

  fn create_default_view(
    &self,
    user_id: &str,
    view_id: &str,
    layout: ViewLayoutTypePB,
    data_format: ViewDataFormatPB,
  ) -> FutureResult<Bytes, FlowyError> {
    debug_assert_eq!(data_format, ViewDataFormatPB::DatabaseFormat);
    let (build_context, layout) = match layout {
      ViewLayoutTypePB::Grid => (make_default_grid(), LayoutTypePB::Grid),
      ViewLayoutTypePB::Board => (make_default_board(), LayoutTypePB::Board),
      ViewLayoutTypePB::Calendar => (make_default_calendar(), LayoutTypePB::Calendar),
      ViewLayoutTypePB::Document => {
        return FutureResult::new(async move {
          Err(FlowyError::internal().context(format!("Can't handle {:?} layout type", layout)))
        });
      },
    };

    let user_id = user_id.to_string();
    let view_id = view_id.to_string();
    let grid_manager = self.0.clone();
    FutureResult::new(async move {
      make_database_view_data(&user_id, &view_id, layout, grid_manager, build_context).await
    })
  }

  fn create_view_with_data(
    &self,
    user_id: &str,
    view_id: &str,
    data: Vec<u8>,
    layout: ViewLayoutTypePB,
  ) -> FutureResult<Bytes, FlowyError> {
    let user_id = user_id.to_string();
    let view_id = view_id.to_string();
    let grid_manager = self.0.clone();
    let layout = match layout {
      ViewLayoutTypePB::Grid => LayoutTypePB::Grid,
      ViewLayoutTypePB::Board => LayoutTypePB::Board,
      ViewLayoutTypePB::Calendar => LayoutTypePB::Calendar,
      ViewLayoutTypePB::Document => {
        return FutureResult::new(async move {
          Err(FlowyError::internal().context(format!("Can't handle {:?} layout type", layout)))
        });
      },
    };

    FutureResult::new(async move {
      let bytes = Bytes::from(data);
      let build_context = BuildDatabaseContext::try_from(bytes)?;
      make_database_view_data(&user_id, &view_id, layout, grid_manager, build_context).await
    })
  }

  fn data_types(&self) -> Vec<ViewDataFormatPB> {
    vec![ViewDataFormatPB::DatabaseFormat]
  }
}
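The registration dance in make_view_data_processor boils down to this reduced, self-contained sketch; all names here are stand-ins, not the real AppFlowy types:

use std::collections::HashMap;
use std::sync::Arc;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
enum DataFormat {
  Delta,
  Node,
}

trait Processor {
  fn data_types(&self) -> Vec<DataFormat>;
}

struct DocProcessor;
impl Processor for DocProcessor {
  fn data_types(&self) -> Vec<DataFormat> {
    vec![DataFormat::Delta, DataFormat::Node]
  }
}

fn main() {
  // Each processor registers itself under every format it supports, so later
  // lookups by format find the right handler.
  let mut map: HashMap<DataFormat, Arc<dyn Processor>> = HashMap::new();
  let doc_processor = Arc::new(DocProcessor);
  doc_processor.data_types().into_iter().for_each(|data_type| {
    map.insert(data_type, doc_processor.clone());
  });
  assert!(map.contains_key(&DataFormat::Delta));
}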

View file

@@ -18,76 +18,81 @@ use ws_model::ws_revision::ClientRevisionWSData;

pub struct GridDepsResolver();
impl GridDepsResolver {
  pub async fn resolve(
    ws_conn: Arc<FlowyWebSocketConnect>,
    user_session: Arc<UserSession>,
    task_scheduler: Arc<RwLock<TaskDispatcher>>,
  ) -> Arc<DatabaseManager> {
    let user = Arc::new(GridUserImpl(user_session.clone()));
    let rev_web_socket = Arc::new(GridRevisionWebSocket(ws_conn));
    let grid_manager = Arc::new(DatabaseManager::new(
      user.clone(),
      rev_web_socket,
      task_scheduler,
      Arc::new(GridDatabaseImpl(user_session)),
    ));

    if let (Ok(user_id), Ok(token)) = (user.user_id(), user.token()) {
      match grid_manager.initialize(&user_id, &token).await {
        Ok(_) => {},
        Err(e) => tracing::error!("Initialize grid manager failed: {}", e),
      }
    }

    grid_manager
  }
}

struct GridDatabaseImpl(Arc<UserSession>);
impl GridDatabase for GridDatabaseImpl {
  fn db_pool(&self) -> Result<Arc<ConnectionPool>, FlowyError> {
    self
      .0
      .db_pool()
      .map_err(|e| FlowyError::internal().context(e))
  }
}

struct GridUserImpl(Arc<UserSession>);
impl DatabaseUser for GridUserImpl {
  fn user_id(&self) -> Result<String, FlowyError> {
    self.0.user_id()
  }

  fn token(&self) -> Result<String, FlowyError> {
    self.0.token()
  }

  fn db_pool(&self) -> Result<Arc<ConnectionPool>, FlowyError> {
    self.0.db_pool()
  }
}

struct GridRevisionWebSocket(Arc<FlowyWebSocketConnect>);
impl RevisionWebSocket for GridRevisionWebSocket {
  fn send(&self, data: ClientRevisionWSData) -> BoxResultFuture<(), FlowyError> {
    let bytes: Bytes = data.try_into().unwrap();
    let msg = WebSocketRawMessage {
      channel: WSChannel::Database,
      data: bytes.to_vec(),
    };
    let ws_conn = self.0.clone();
    Box::pin(async move {
      match ws_conn.web_socket().await? {
        None => {},
        Some(sender) => {
          sender
            .send(msg)
            .map_err(|e| FlowyError::internal().context(e))?;
        },
      }
      Ok(())
    })
  }

  fn subscribe_state_changed(&self) -> BoxFuture<WSStateReceiver> {
    let ws_conn = self.0.clone();
    Box::pin(async move { ws_conn.subscribe_websocket_state().await })
  }
}

View file

@@ -6,13 +6,13 @@ use std::sync::Arc;

pub struct UserDepsResolver();
impl UserDepsResolver {
  pub fn resolve(
    local_server: &Option<Arc<LocalServer>>,
    server_config: &ClientServerConfiguration,
  ) -> Arc<dyn UserCloudService> {
    match local_server.clone() {
      None => Arc::new(UserHttpCloudService::new(server_config)),
      Some(local_server) => local_server,
    }
  }
}

View file

@ -22,11 +22,11 @@ use module::make_plugins;
pub use module::*;
use std::time::Duration;
use std::{
  fmt,
  sync::{
    atomic::{AtomicBool, Ordering},
    Arc,
  },
};
use tokio::sync::{broadcast, RwLock};
use user_model::UserProfile;
@ -35,316 +35,331 @@ static INIT_LOG: AtomicBool = AtomicBool::new(false);
#[derive(Clone)]
pub struct AppFlowyCoreConfig {
  /// Different `AppFlowyCoreConfig` instances should have different names
  name: String,
  /// Panics if the `root` path does not exist
  storage_path: String,
  log_filter: String,
  server_config: ClientServerConfiguration,
  pub document: DocumentConfig,
}
impl fmt::Debug for AppFlowyCoreConfig {
  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    f.debug_struct("AppFlowyCoreConfig")
      .field("storage_path", &self.storage_path)
      .field("server-config", &self.server_config)
      .field("document-config", &self.document)
      .finish()
  }
}

impl AppFlowyCoreConfig {
  pub fn new(root: &str, name: String, server_config: ClientServerConfiguration) -> Self {
    AppFlowyCoreConfig {
      name,
      storage_path: root.to_owned(),
      log_filter: create_log_filter("info".to_owned(), vec![]),
      server_config,
      document: DocumentConfig::default(),
    }
  }

  pub fn with_document_version(mut self, version: DocumentVersionPB) -> Self {
    self.document.version = version;
    self
  }

  pub fn log_filter(mut self, level: &str, with_crates: Vec<String>) -> Self {
    self.log_filter = create_log_filter(level.to_owned(), with_crates);
    self
  }
}
fn create_log_filter(level: String, with_crates: Vec<String>) -> String {
  let level = std::env::var("RUST_LOG").unwrap_or(level);
  let mut filters = with_crates
    .into_iter()
    .map(|crate_name| format!("{}={}", crate_name, level))
    .collect::<Vec<String>>();
  filters.push(format!("flowy_core={}", level));
  filters.push(format!("flowy_folder={}", level));
  filters.push(format!("flowy_user={}", level));
  filters.push(format!("flowy_document={}", level));
  filters.push(format!("flowy_database={}", level));
  filters.push(format!("flowy_sync={}", "info"));
  filters.push(format!("flowy_client_sync={}", "info"));
  filters.push(format!("flowy_notification={}", "info"));
  filters.push(format!("lib_ot={}", level));
  filters.push(format!("lib_ws={}", level));
  filters.push(format!("lib_infra={}", level));
  filters.push(format!("flowy_sync={}", level));
  filters.push(format!("flowy_revision={}", level));
  filters.push(format!("flowy_revision_persistence={}", level));
  filters.push(format!("flowy_task={}", level));
  // filters.push(format!("lib_dispatch={}", level));

  filters.push(format!("dart_ffi={}", "info"));
  filters.push(format!("flowy_sqlite={}", "info"));
  filters.push(format!("flowy_net={}", "info"));
  filters.join(",")
}
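`create_log_filter` produces a `RUST_LOG`-style directive string, one `crate=level` pair per entry, joined by commas. A hypothetical illustration of the output shape (abridged to three crates; the real function pushes many more):

// Hypothetical sketch of the filter string's shape, not the full output.
fn main() {
  let level = "trace";
  let with_crates = vec!["appflowy_tauri".to_string()];
  let mut filters: Vec<String> = with_crates
    .into_iter()
    .map(|c| format!("{}={}", c, level))
    .collect();
  filters.push(format!("flowy_core={}", level));
  filters.push(format!("flowy_folder={}", level));
  // ...the remaining crates are elided here...
  assert_eq!(
    filters.join(","),
    "appflowy_tauri=trace,flowy_core=trace,flowy_folder=trace"
  );
}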
#[derive(Clone)]
pub struct AppFlowyCore {
  #[allow(dead_code)]
  pub config: AppFlowyCoreConfig,
  pub user_session: Arc<UserSession>,
  pub document_manager: Arc<DocumentManager>,
  pub folder_manager: Arc<FolderManager>,
  pub grid_manager: Arc<DatabaseManager>,
  pub event_dispatcher: Arc<AFPluginDispatcher>,
  pub ws_conn: Arc<FlowyWebSocketConnect>,
  pub local_server: Option<Arc<LocalServer>>,
  pub task_dispatcher: Arc<RwLock<TaskDispatcher>>,
}
impl AppFlowyCore {
  pub fn new(config: AppFlowyCoreConfig) -> Self {
    init_log(&config);
    init_kv(&config.storage_path);
    tracing::debug!("🔥 {:?}", config);
    let runtime = tokio_default_runtime().unwrap();
    let task_scheduler = TaskDispatcher::new(Duration::from_secs(2));
    let task_dispatcher = Arc::new(RwLock::new(task_scheduler));
    runtime.spawn(TaskRunner::run(task_dispatcher.clone()));

    let (local_server, ws_conn) = mk_local_server(&config.server_config);
    let (user_session, document_manager, folder_manager, local_server, grid_manager) = runtime
      .block_on(async {
        let user_session = mk_user_session(&config, &local_server, &config.server_config);
        let document_manager = DocumentDepsResolver::resolve(
          local_server.clone(),
          ws_conn.clone(),
          user_session.clone(),
          &config.server_config,
          &config.document,
        );

        let grid_manager = GridDepsResolver::resolve(
          ws_conn.clone(),
          user_session.clone(),
          task_dispatcher.clone(),
        )
        .await;

        let folder_manager = FolderDepsResolver::resolve(
          local_server.clone(),
          user_session.clone(),
          &config.server_config,
          &ws_conn,
          &document_manager,
          &grid_manager,
        )
        .await;

        if let Some(local_server) = local_server.as_ref() {
          local_server.run();
        }
        ws_conn.init().await;
        (
          user_session,
          document_manager,
          folder_manager,
          local_server,
          grid_manager,
        )
      });

    let user_status_listener = UserStatusListener {
      document_manager: document_manager.clone(),
      folder_manager: folder_manager.clone(),
      grid_manager: grid_manager.clone(),
      ws_conn: ws_conn.clone(),
      config: config.clone(),
    };
    let user_status_callback = UserStatusCallbackImpl {
      listener: Arc::new(user_status_listener),
    };
    let cloned_user_session = user_session.clone();
    runtime.block_on(async move {
      cloned_user_session.clone().init(user_status_callback).await;
    });

    let event_dispatcher = Arc::new(AFPluginDispatcher::construct(runtime, || {
      make_plugins(
        &ws_conn,
        &folder_manager,
        &grid_manager,
        &user_session,
        &document_manager,
      )
    }));
    _start_listening(&event_dispatcher, &ws_conn, &folder_manager);

    Self {
      config,
      user_session,
      document_manager,
      folder_manager,
      grid_manager,
      event_dispatcher,
      ws_conn,
      local_server,
      task_dispatcher,
    }
  }

  pub fn dispatcher(&self) -> Arc<AFPluginDispatcher> {
    self.event_dispatcher.clone()
  }
}
fn _start_listening(
  event_dispatcher: &AFPluginDispatcher,
  ws_conn: &Arc<FlowyWebSocketConnect>,
  folder_manager: &Arc<FolderManager>,
) {
  let subscribe_network_type = ws_conn.subscribe_network_ty();
  let folder_manager = folder_manager.clone();
  let cloned_folder_manager = folder_manager;
  let ws_conn = ws_conn.clone();

  event_dispatcher.spawn(async move {
    listen_on_websocket(ws_conn.clone());
  });

  event_dispatcher.spawn(async move {
    _listen_network_status(subscribe_network_type, cloned_folder_manager).await;
  });
}
fn mk_local_server(
  server_config: &ClientServerConfiguration,
) -> (Option<Arc<LocalServer>>, Arc<FlowyWebSocketConnect>) {
  let ws_addr = server_config.ws_addr();
  if cfg!(feature = "http_sync") {
    let ws_conn = Arc::new(FlowyWebSocketConnect::new(ws_addr));
    (None, ws_conn)
  } else {
    let context = flowy_net::local_server::build_server(server_config);
    let local_ws = Arc::new(context.local_ws);
    let ws_conn = Arc::new(FlowyWebSocketConnect::from_local(ws_addr, local_ws));
    (Some(Arc::new(context.local_server)), ws_conn)
  }
}
async fn _listen_network_status(
  mut subscribe: broadcast::Receiver<NetworkType>,
  _core: Arc<FolderManager>,
) {
  while let Ok(_new_type) = subscribe.recv().await {
    // core.network_state_changed(new_type);
  }
}
fn init_kv(root: &str) {
  match flowy_sqlite::kv::KV::init(root) {
    Ok(_) => {},
    Err(e) => tracing::error!("Init kv store failed: {}", e),
  }
}
fn init_log(config: &AppFlowyCoreConfig) {
  if !INIT_LOG.load(Ordering::SeqCst) {
    INIT_LOG.store(true, Ordering::SeqCst);

    let _ = lib_log::Builder::new("AppFlowy-Client", &config.storage_path)
      .env_filter(&config.log_filter)
      .build();
  }
}
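`init_log` guards one-time initialization with a separate `load` and `store` on the `AtomicBool`, which is fine as long as initialization happens from a single thread. For reference, a standalone sketch (not the crate's code) of the same guard written as a single atomic `compare_exchange` step, which also stays correct under concurrent callers:

// Sketch of an atomic once-guard; only the first caller wins the swap.
use std::sync::atomic::{AtomicBool, Ordering};

static INIT: AtomicBool = AtomicBool::new(false);

fn init_once(f: impl FnOnce()) {
  // The check and the set happen as one atomic operation, so exactly one
  // caller observes the `false -> true` transition and runs `f`.
  if INIT
    .compare_exchange(false, true, Ordering::SeqCst, Ordering::SeqCst)
    .is_ok()
  {
    f();
  }
}

fn main() {
  init_once(|| println!("initialized"));
  init_once(|| println!("never printed"));
}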
fn mk_user_session(
  config: &AppFlowyCoreConfig,
  local_server: &Option<Arc<LocalServer>>,
  server_config: &ClientServerConfiguration,
) -> Arc<UserSession> {
  let user_config = UserSessionConfig::new(&config.name, &config.storage_path);
  let cloud_service = UserDepsResolver::resolve(local_server, server_config);
  Arc::new(UserSession::new(user_config, cloud_service))
}
struct UserStatusListener {
  document_manager: Arc<DocumentManager>,
  folder_manager: Arc<FolderManager>,
  grid_manager: Arc<DatabaseManager>,
  ws_conn: Arc<FlowyWebSocketConnect>,
  config: AppFlowyCoreConfig,
}

impl UserStatusListener {
  async fn did_sign_in(&self, token: &str, user_id: &str) -> FlowyResult<()> {
    self.folder_manager.initialize(user_id, token).await?;
    self.document_manager.initialize(user_id).await?;
    self.grid_manager.initialize(user_id, token).await?;
    self
      .ws_conn
      .start(token.to_owned(), user_id.to_owned())
      .await?;
    Ok(())
  }

  async fn did_sign_up(&self, user_profile: &UserProfile) -> FlowyResult<()> {
    let view_data_type = match self.config.document.version {
      DocumentVersionPB::V0 => ViewDataFormatPB::DeltaFormat,
      DocumentVersionPB::V1 => ViewDataFormatPB::NodeFormat,
    };
    self
      .folder_manager
      .initialize_with_new_user(&user_profile.id, &user_profile.token, view_data_type)
      .await?;
    self
      .document_manager
      .initialize_with_new_user(&user_profile.id, &user_profile.token)
      .await?;

    self
      .grid_manager
      .initialize_with_new_user(&user_profile.id, &user_profile.token)
      .await?;

    self
      .ws_conn
      .start(user_profile.token.clone(), user_profile.id.clone())
      .await?;
    Ok(())
  }

  async fn did_expired(&self, _token: &str, user_id: &str) -> FlowyResult<()> {
    self.folder_manager.clear(user_id).await;
    self.ws_conn.stop().await;
    Ok(())
  }
}

struct UserStatusCallbackImpl {
  listener: Arc<UserStatusListener>,
}

impl UserStatusCallback for UserStatusCallbackImpl {
  fn did_sign_in(&self, token: &str, user_id: &str) -> Fut<FlowyResult<()>> {
    let listener = self.listener.clone();
    let token = token.to_owned();
    let user_id = user_id.to_owned();
    to_fut(async move { listener.did_sign_in(&token, &user_id).await })
  }

  fn did_sign_up(&self, user_profile: &UserProfile) -> Fut<FlowyResult<()>> {
    let listener = self.listener.clone();
    let user_profile = user_profile.clone();
    to_fut(async move { listener.did_sign_up(&user_profile).await })
  }

  fn did_expired(&self, token: &str, user_id: &str) -> Fut<FlowyResult<()>> {
    let listener = self.listener.clone();
    let token = token.to_owned();
    let user_id = user_id.to_owned();
    to_fut(async move { listener.did_expired(&token, &user_id).await })
  }
}

View file

@ -7,16 +7,22 @@ use lib_dispatch::prelude::AFPlugin;
use std::sync::Arc;

pub fn make_plugins(
  ws_conn: &Arc<FlowyWebSocketConnect>,
  folder_manager: &Arc<FolderManager>,
  grid_manager: &Arc<DatabaseManager>,
  user_session: &Arc<UserSession>,
  document_manager: &Arc<DocumentManager>,
) -> Vec<AFPlugin> {
  let user_plugin = flowy_user::event_map::init(user_session.clone());
  let folder_plugin = flowy_folder::event_map::init(folder_manager.clone());
  let network_plugin = flowy_net::event_map::init(ws_conn.clone());
  let grid_plugin = flowy_database::event_map::init(grid_manager.clone());
  let document_plugin = flowy_document::event_map::init(document_manager.clone());
  vec![
    user_plugin,
    folder_plugin,
    network_plugin,
    grid_plugin,
    document_plugin,
  ]
}

View file

@ -1,10 +1,10 @@
fn main() {
  let crate_name = env!("CARGO_PKG_NAME");
  flowy_codegen::protobuf_file::gen(crate_name);

  #[cfg(feature = "dart")]
  flowy_codegen::dart_event::gen(crate_name);

  #[cfg(feature = "ts")]
  flowy_codegen::ts_event::gen(crate_name);
}

View file

@ -7,166 +7,169 @@ use std::collections::HashMap;
#[derive(ProtoBuf, Default)]
pub struct CreateSelectOptionPayloadPB {
  #[pb(index = 1)]
  pub field_id: String,

  #[pb(index = 2)]
  pub database_id: String,

  #[pb(index = 3)]
  pub option_name: String,
}

pub struct CreateSelectOptionParams {
  pub field_id: String,
  pub database_id: String,
  pub option_name: String,
}

impl TryInto<CreateSelectOptionParams> for CreateSelectOptionPayloadPB {
  type Error = ErrorCode;

  fn try_into(self) -> Result<CreateSelectOptionParams, Self::Error> {
    let option_name =
      NotEmptyStr::parse(self.option_name).map_err(|_| ErrorCode::SelectOptionNameIsEmpty)?;
    let database_id =
      NotEmptyStr::parse(self.database_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
    let field_id = NotEmptyStr::parse(self.field_id).map_err(|_| ErrorCode::FieldIdIsEmpty)?;
    Ok(CreateSelectOptionParams {
      field_id: field_id.0,
      option_name: option_name.0,
      database_id: database_id.0,
    })
  }
}
#[derive(Debug, Clone, Default, ProtoBuf)]
pub struct CellIdPB {
  #[pb(index = 1)]
  pub database_id: String,

  #[pb(index = 2)]
  pub field_id: String,

  #[pb(index = 3)]
  pub row_id: String,
}

/// Represents the cell identifier. It's used to locate a cell in the
/// corresponding view's row by field id.
pub struct CellIdParams {
  pub database_id: String,
  pub field_id: String,
  pub row_id: String,
}

impl TryInto<CellIdParams> for CellIdPB {
  type Error = ErrorCode;

  fn try_into(self) -> Result<CellIdParams, Self::Error> {
    let database_id =
      NotEmptyStr::parse(self.database_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
    let field_id = NotEmptyStr::parse(self.field_id).map_err(|_| ErrorCode::FieldIdIsEmpty)?;
    let row_id = NotEmptyStr::parse(self.row_id).map_err(|_| ErrorCode::RowIdIsEmpty)?;
    Ok(CellIdParams {
      database_id: database_id.0,
      field_id: field_id.0,
      row_id: row_id.0,
    })
  }
}
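The `TryInto` conversions in this file all parse at the boundary: every id is validated as non-empty before a `*Params` value can exist, with a dedicated error code per field. A self-contained sketch of the pattern, with simplified stand-ins for `NotEmptyStr` and `ErrorCode` (the real ones live elsewhere in the crate):

// Stand-in sketch of the validate-then-convert pattern used by `CellIdPB`.
struct NotEmptyStr(String);

impl NotEmptyStr {
  fn parse(s: String) -> Result<Self, ()> {
    if s.is_empty() { Err(()) } else { Ok(NotEmptyStr(s)) }
  }
}

#[derive(Debug)]
enum ErrorCode {
  DatabaseIdIsEmpty,
  FieldIdIsEmpty,
  RowIdIsEmpty,
}

#[derive(Default)]
struct CellIdPB {
  database_id: String,
  field_id: String,
  row_id: String,
}

struct CellIdParams {
  database_id: String,
  field_id: String,
  row_id: String,
}

impl TryFrom<CellIdPB> for CellIdParams {
  type Error = ErrorCode;

  fn try_from(pb: CellIdPB) -> Result<Self, Self::Error> {
    // Each empty field is rejected with its own error code.
    let database_id =
      NotEmptyStr::parse(pb.database_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
    let field_id = NotEmptyStr::parse(pb.field_id).map_err(|_| ErrorCode::FieldIdIsEmpty)?;
    let row_id = NotEmptyStr::parse(pb.row_id).map_err(|_| ErrorCode::RowIdIsEmpty)?;
    Ok(CellIdParams {
      database_id: database_id.0,
      field_id: field_id.0,
      row_id: row_id.0,
    })
  }
}

fn main() {
  // An all-empty payload fails on the first validated field.
  let err = CellIdParams::try_from(CellIdPB::default());
  assert!(matches!(err, Err(ErrorCode::DatabaseIdIsEmpty)));
}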
/// Represents the data of a cell.
#[derive(Debug, Default, ProtoBuf)]
pub struct CellPB {
  #[pb(index = 1)]
  pub field_id: String,

  #[pb(index = 2)]
  pub row_id: String,

  /// The data is encoded by the helper struct `CellProtobufBlob`.
  /// Check out `CellProtobufBlob` for more information.
  #[pb(index = 3)]
  pub data: Vec<u8>,

  /// The `field_type` will be `None` if the field with `field_id` is not found.
  #[pb(index = 4, one_of)]
  pub field_type: Option<FieldType>,
}

impl CellPB {
  pub fn new(field_id: &str, row_id: &str, field_type: FieldType, data: Vec<u8>) -> Self {
    Self {
      field_id: field_id.to_owned(),
      row_id: row_id.to_string(),
      data,
      field_type: Some(field_type),
    }
  }

  pub fn empty(field_id: &str, row_id: &str) -> Self {
    Self {
      field_id: field_id.to_owned(),
      row_id: row_id.to_owned(),
      data: vec![],
      field_type: None,
    }
  }
}

#[derive(Debug, Default, ProtoBuf)]
pub struct RepeatedCellPB {
  #[pb(index = 1)]
  pub items: Vec<CellPB>,
}

impl std::ops::Deref for RepeatedCellPB {
  type Target = Vec<CellPB>;
  fn deref(&self) -> &Self::Target {
    &self.items
  }
}

impl std::ops::DerefMut for RepeatedCellPB {
  fn deref_mut(&mut self) -> &mut Self::Target {
    &mut self.items
  }
}

impl std::convert::From<Vec<CellPB>> for RepeatedCellPB {
  fn from(items: Vec<CellPB>) -> Self {
    Self { items }
  }
}

#[derive(Debug, Clone, Default, ProtoBuf)]
pub struct CellChangesetPB {
  #[pb(index = 1)]
  pub database_id: String,

  #[pb(index = 2)]
  pub row_id: String,

  #[pb(index = 3)]
  pub field_id: String,

  #[pb(index = 4)]
  pub type_cell_data: String,
}

impl std::convert::From<CellChangesetPB> for RowChangeset {
  fn from(changeset: CellChangesetPB) -> Self {
    let mut cell_by_field_id = HashMap::with_capacity(1);
    let field_id = changeset.field_id;
    let cell_rev = CellRevision {
      type_cell_data: changeset.type_cell_data,
    };
    cell_by_field_id.insert(field_id, cell_rev);

    RowChangeset {
      row_id: changeset.row_id,
      height: None,
      visibility: None,
      cell_by_field_id,
    }
  }
}

View file

@ -5,58 +5,58 @@ use grid_model::FilterRevision;
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct CheckboxFilterPB {
  #[pb(index = 1)]
  pub condition: CheckboxFilterConditionPB,
}

#[derive(Debug, Clone, PartialEq, Eq, ProtoBuf_Enum)]
#[repr(u8)]
pub enum CheckboxFilterConditionPB {
  IsChecked = 0,
  IsUnChecked = 1,
}

impl std::convert::From<CheckboxFilterConditionPB> for u32 {
  fn from(value: CheckboxFilterConditionPB) -> Self {
    value as u32
  }
}

impl std::default::Default for CheckboxFilterConditionPB {
  fn default() -> Self {
    CheckboxFilterConditionPB::IsChecked
  }
}

impl std::convert::TryFrom<u8> for CheckboxFilterConditionPB {
  type Error = ErrorCode;

  fn try_from(value: u8) -> Result<Self, Self::Error> {
    match value {
      0 => Ok(CheckboxFilterConditionPB::IsChecked),
      1 => Ok(CheckboxFilterConditionPB::IsUnChecked),
      _ => Err(ErrorCode::InvalidData),
    }
  }
}

impl FromFilterString for CheckboxFilterPB {
  fn from_filter_rev(filter_rev: &FilterRevision) -> Self
  where
    Self: Sized,
  {
    CheckboxFilterPB {
      condition: CheckboxFilterConditionPB::try_from(filter_rev.condition)
        .unwrap_or(CheckboxFilterConditionPB::IsChecked),
    }
  }
}

impl std::convert::From<&FilterRevision> for CheckboxFilterPB {
  fn from(rev: &FilterRevision) -> Self {
    CheckboxFilterPB {
      condition: CheckboxFilterConditionPB::try_from(rev.condition)
        .unwrap_or(CheckboxFilterConditionPB::IsChecked),
    }
  }
}
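Each filter condition enum in these files round-trips through the `u8` stored in `FilterRevision::condition`, and unknown values degrade to the default via `unwrap_or`. A self-contained sketch of that round trip with a simplified stand-in enum:

// Stand-in sketch of the u8 <-> condition round trip used by the filters.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum CheckboxCondition {
  IsChecked = 0,
  IsUnChecked = 1,
}

impl TryFrom<u8> for CheckboxCondition {
  type Error = ();

  fn try_from(value: u8) -> Result<Self, Self::Error> {
    match value {
      0 => Ok(CheckboxCondition::IsChecked),
      1 => Ok(CheckboxCondition::IsUnChecked),
      _ => Err(()),
    }
  }
}

fn main() {
  // A known discriminant converts back to its variant.
  assert_eq!(CheckboxCondition::try_from(1), Ok(CheckboxCondition::IsUnChecked));
  // Unknown persisted data degrades to the default instead of failing.
  let fallback = CheckboxCondition::try_from(42).unwrap_or(CheckboxCondition::IsChecked);
  assert_eq!(fallback, CheckboxCondition::IsChecked);
}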

View file

@ -5,58 +5,58 @@ use grid_model::FilterRevision;
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct ChecklistFilterPB {
  #[pb(index = 1)]
  pub condition: ChecklistFilterConditionPB,
}

#[derive(Debug, Clone, PartialEq, Eq, ProtoBuf_Enum)]
#[repr(u8)]
pub enum ChecklistFilterConditionPB {
  IsComplete = 0,
  IsIncomplete = 1,
}

impl std::convert::From<ChecklistFilterConditionPB> for u32 {
  fn from(value: ChecklistFilterConditionPB) -> Self {
    value as u32
  }
}

impl std::default::Default for ChecklistFilterConditionPB {
  fn default() -> Self {
    ChecklistFilterConditionPB::IsIncomplete
  }
}

impl std::convert::TryFrom<u8> for ChecklistFilterConditionPB {
  type Error = ErrorCode;

  fn try_from(value: u8) -> Result<Self, Self::Error> {
    match value {
      0 => Ok(ChecklistFilterConditionPB::IsComplete),
      1 => Ok(ChecklistFilterConditionPB::IsIncomplete),
      _ => Err(ErrorCode::InvalidData),
    }
  }
}

impl FromFilterString for ChecklistFilterPB {
  fn from_filter_rev(filter_rev: &FilterRevision) -> Self
  where
    Self: Sized,
  {
    ChecklistFilterPB {
      condition: ChecklistFilterConditionPB::try_from(filter_rev.condition)
        .unwrap_or(ChecklistFilterConditionPB::IsIncomplete),
    }
  }
}

impl std::convert::From<&FilterRevision> for ChecklistFilterPB {
  fn from(rev: &FilterRevision) -> Self {
    ChecklistFilterPB {
      condition: ChecklistFilterConditionPB::try_from(rev.condition)
        .unwrap_or(ChecklistFilterConditionPB::IsIncomplete),
    }
  }
}

View file

@ -7,114 +7,116 @@ use std::str::FromStr;
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct DateFilterPB {
  #[pb(index = 1)]
  pub condition: DateFilterConditionPB,

  #[pb(index = 2, one_of)]
  pub start: Option<i64>,

  #[pb(index = 3, one_of)]
  pub end: Option<i64>,

  #[pb(index = 4, one_of)]
  pub timestamp: Option<i64>,
}

#[derive(Deserialize, Serialize, Default, Clone, Debug)]
pub struct DateFilterContentPB {
  pub start: Option<i64>,
  pub end: Option<i64>,
  pub timestamp: Option<i64>,
}

impl ToString for DateFilterContentPB {
  fn to_string(&self) -> String {
    serde_json::to_string(self).unwrap()
  }
}

impl FromStr for DateFilterContentPB {
  type Err = serde_json::Error;

  fn from_str(s: &str) -> Result<Self, Self::Err> {
    serde_json::from_str(s)
  }
}
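`DateFilterContentPB` serializes to JSON through `ToString` and parses back through `FromStr`. A round-trip sketch with a simplified stand-in struct, assuming the `serde`/`serde_json` dependencies this file already uses:

// Round-trip sketch for the JSON-backed filter content above.
use serde::{Deserialize, Serialize};
use std::str::FromStr;

#[derive(Deserialize, Serialize, Default, Clone, Debug, PartialEq)]
struct DateFilterContent {
  start: Option<i64>,
  end: Option<i64>,
  timestamp: Option<i64>,
}

impl ToString for DateFilterContent {
  fn to_string(&self) -> String {
    serde_json::to_string(self).unwrap()
  }
}

impl FromStr for DateFilterContent {
  type Err = serde_json::Error;

  fn from_str(s: &str) -> Result<Self, Self::Err> {
    serde_json::from_str(s)
  }
}

fn main() {
  let content = DateFilterContent {
    start: Some(1_672_531_200),
    end: None,
    timestamp: None,
  };
  // The content survives a serialize/parse round trip unchanged.
  let json = content.to_string();
  assert_eq!(DateFilterContent::from_str(&json).unwrap(), content);
}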
#[derive(Debug, Clone, PartialEq, Eq, ProtoBuf_Enum)]
#[repr(u8)]
pub enum DateFilterConditionPB {
  DateIs = 0,
  DateBefore = 1,
  DateAfter = 2,
  DateOnOrBefore = 3,
  DateOnOrAfter = 4,
  DateWithIn = 5,
  DateIsEmpty = 6,
  DateIsNotEmpty = 7,
}

impl std::convert::From<DateFilterConditionPB> for u32 {
  fn from(value: DateFilterConditionPB) -> Self {
    value as u32
  }
}

impl std::default::Default for DateFilterConditionPB {
  fn default() -> Self {
    DateFilterConditionPB::DateIs
  }
}

impl std::convert::TryFrom<u8> for DateFilterConditionPB {
  type Error = ErrorCode;

  fn try_from(value: u8) -> Result<Self, Self::Error> {
    match value {
      0 => Ok(DateFilterConditionPB::DateIs),
      1 => Ok(DateFilterConditionPB::DateBefore),
      2 => Ok(DateFilterConditionPB::DateAfter),
      3 => Ok(DateFilterConditionPB::DateOnOrBefore),
      4 => Ok(DateFilterConditionPB::DateOnOrAfter),
      5 => Ok(DateFilterConditionPB::DateWithIn),
      6 => Ok(DateFilterConditionPB::DateIsEmpty),
      7 => Ok(DateFilterConditionPB::DateIsNotEmpty),
      _ => Err(ErrorCode::InvalidData),
    }
  }
}

impl FromFilterString for DateFilterPB {
  fn from_filter_rev(filter_rev: &FilterRevision) -> Self
  where
    Self: Sized,
  {
    let condition = DateFilterConditionPB::try_from(filter_rev.condition)
      .unwrap_or(DateFilterConditionPB::DateIs);
    let mut filter = DateFilterPB {
      condition,
      ..Default::default()
    };

    if let Ok(content) = DateFilterContentPB::from_str(&filter_rev.content) {
      filter.start = content.start;
      filter.end = content.end;
      filter.timestamp = content.timestamp;
    };

    filter
  }
}

impl std::convert::From<&FilterRevision> for DateFilterPB {
  fn from(rev: &FilterRevision) -> Self {
    let condition =
      DateFilterConditionPB::try_from(rev.condition).unwrap_or(DateFilterConditionPB::DateIs);
    let mut filter = DateFilterPB {
      condition,
      ..Default::default()
    };

    if let Ok(content) = DateFilterContentPB::from_str(&rev.content) {
      filter.start = content.start;
      filter.end = content.end;
      filter.timestamp = content.timestamp;
    };

    filter
  }
}

View file

@ -3,52 +3,52 @@ use flowy_derive::ProtoBuf;
#[derive(Debug, Default, ProtoBuf)]
pub struct FilterChangesetNotificationPB {
  #[pb(index = 1)]
  pub view_id: String,

  #[pb(index = 2)]
  pub insert_filters: Vec<FilterPB>,

  #[pb(index = 3)]
  pub delete_filters: Vec<FilterPB>,

  #[pb(index = 4)]
  pub update_filters: Vec<UpdatedFilter>,
}

#[derive(Debug, Default, ProtoBuf)]
pub struct UpdatedFilter {
  #[pb(index = 1)]
  pub filter_id: String,

  #[pb(index = 2, one_of)]
  pub filter: Option<FilterPB>,
}

impl FilterChangesetNotificationPB {
  pub fn from_insert(view_id: &str, filters: Vec<FilterPB>) -> Self {
    Self {
      view_id: view_id.to_string(),
      insert_filters: filters,
      delete_filters: Default::default(),
      update_filters: Default::default(),
    }
  }

  pub fn from_delete(view_id: &str, filters: Vec<FilterPB>) -> Self {
    Self {
      view_id: view_id.to_string(),
      insert_filters: Default::default(),
      delete_filters: filters,
      update_filters: Default::default(),
    }
  }

  pub fn from_update(view_id: &str, filters: Vec<UpdatedFilter>) -> Self {
    Self {
      view_id: view_id.to_string(),
      insert_filters: Default::default(),
      delete_filters: Default::default(),
      update_filters: filters,
    }
  }
}
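Each `from_*` constructor fills exactly one of the three changeset lists and leaves the others empty. A stand-in sketch of that shape (simplified types, not the real `FilterPB`):

// Stand-in sketch of the one-list-per-constructor changeset shape.
#[derive(Debug, Default)]
struct Notification {
  view_id: String,
  insert: Vec<String>,
  delete: Vec<String>,
  update: Vec<String>,
}

impl Notification {
  fn from_insert(view_id: &str, filters: Vec<String>) -> Self {
    Self {
      view_id: view_id.to_string(),
      insert: filters,
      // The remaining lists stay empty.
      ..Default::default()
    }
  }
}

fn main() {
  let n = Notification::from_insert("view-1", vec!["filter-a".into()]);
  assert_eq!(n.insert.len(), 1);
  assert!(n.delete.is_empty() && n.update.is_empty());
}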

View file

@ -5,72 +5,73 @@ use grid_model::FilterRevision;
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct NumberFilterPB {
  #[pb(index = 1)]
  pub condition: NumberFilterConditionPB,

  #[pb(index = 2)]
  pub content: String,
}

#[derive(Debug, Clone, PartialEq, Eq, ProtoBuf_Enum)]
#[repr(u8)]
pub enum NumberFilterConditionPB {
  Equal = 0,
  NotEqual = 1,
  GreaterThan = 2,
  LessThan = 3,
  GreaterThanOrEqualTo = 4,
  LessThanOrEqualTo = 5,
  NumberIsEmpty = 6,
  NumberIsNotEmpty = 7,
}

impl std::default::Default for NumberFilterConditionPB {
  fn default() -> Self {
    NumberFilterConditionPB::Equal
  }
}

impl std::convert::From<NumberFilterConditionPB> for u32 {
  fn from(value: NumberFilterConditionPB) -> Self {
    value as u32
  }
}

impl std::convert::TryFrom<u8> for NumberFilterConditionPB {
  type Error = ErrorCode;

  fn try_from(n: u8) -> Result<Self, Self::Error> {
    match n {
      0 => Ok(NumberFilterConditionPB::Equal),
      1 => Ok(NumberFilterConditionPB::NotEqual),
      2 => Ok(NumberFilterConditionPB::GreaterThan),
      3 => Ok(NumberFilterConditionPB::LessThan),
      4 => Ok(NumberFilterConditionPB::GreaterThanOrEqualTo),
      5 => Ok(NumberFilterConditionPB::LessThanOrEqualTo),
      6 => Ok(NumberFilterConditionPB::NumberIsEmpty),
      7 => Ok(NumberFilterConditionPB::NumberIsNotEmpty),
      _ => Err(ErrorCode::InvalidData),
    }
  }
}

impl FromFilterString for NumberFilterPB {
  fn from_filter_rev(filter_rev: &FilterRevision) -> Self
  where
    Self: Sized,
  {
    NumberFilterPB {
      condition: NumberFilterConditionPB::try_from(filter_rev.condition)
        .unwrap_or(NumberFilterConditionPB::Equal),
      content: filter_rev.content.clone(),
    }
  }
}

impl std::convert::From<&FilterRevision> for NumberFilterPB {
  fn from(rev: &FilterRevision) -> Self {
    NumberFilterPB {
      condition: NumberFilterConditionPB::try_from(rev.condition)
        .unwrap_or(NumberFilterConditionPB::Equal),
      content: rev.content.clone(),
    }
  }
}

View file

@ -6,67 +6,68 @@ use grid_model::FilterRevision;
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct SelectOptionFilterPB {
  #[pb(index = 1)]
  pub condition: SelectOptionConditionPB,

  #[pb(index = 2)]
  pub option_ids: Vec<String>,
}

#[derive(Debug, Clone, PartialEq, Eq, ProtoBuf_Enum)]
#[repr(u8)]
pub enum SelectOptionConditionPB {
  OptionIs = 0,
  OptionIsNot = 1,
  OptionIsEmpty = 2,
  OptionIsNotEmpty = 3,
}

impl std::convert::From<SelectOptionConditionPB> for u32 {
  fn from(value: SelectOptionConditionPB) -> Self {
    value as u32
  }
}

impl std::default::Default for SelectOptionConditionPB {
  fn default() -> Self {
    SelectOptionConditionPB::OptionIs
  }
}

impl std::convert::TryFrom<u8> for SelectOptionConditionPB {
  type Error = ErrorCode;

  fn try_from(value: u8) -> Result<Self, Self::Error> {
    match value {
      0 => Ok(SelectOptionConditionPB::OptionIs),
      1 => Ok(SelectOptionConditionPB::OptionIsNot),
      2 => Ok(SelectOptionConditionPB::OptionIsEmpty),
      3 => Ok(SelectOptionConditionPB::OptionIsNotEmpty),
      _ => Err(ErrorCode::InvalidData),
    }
  }
}

impl FromFilterString for SelectOptionFilterPB {
  fn from_filter_rev(filter_rev: &FilterRevision) -> Self
  where
    Self: Sized,
  {
    let ids = SelectOptionIds::from(filter_rev.content.clone());
    SelectOptionFilterPB {
      condition: SelectOptionConditionPB::try_from(filter_rev.condition)
        .unwrap_or(SelectOptionConditionPB::OptionIs),
      option_ids: ids.into_inner(),
    }
  }
}

impl std::convert::From<&FilterRevision> for SelectOptionFilterPB {
  fn from(rev: &FilterRevision) -> Self {
    let ids = SelectOptionIds::from(rev.content.clone());
    SelectOptionFilterPB {
      condition: SelectOptionConditionPB::try_from(rev.condition)
        .unwrap_or(SelectOptionConditionPB::OptionIs),
      option_ids: ids.into_inner(),
    }
  }
}

View file

@ -5,73 +5,75 @@ use grid_model::FilterRevision;
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct TextFilterPB {
  #[pb(index = 1)]
  pub condition: TextFilterConditionPB,

  #[pb(index = 2)]
  pub content: String,
}

#[derive(Debug, Clone, PartialEq, Eq, ProtoBuf_Enum)]
#[repr(u8)]
pub enum TextFilterConditionPB {
  Is = 0,
  IsNot = 1,
  Contains = 2,
  DoesNotContain = 3,
  StartsWith = 4,
  EndsWith = 5,
  TextIsEmpty = 6,
  TextIsNotEmpty = 7,
}

impl std::convert::From<TextFilterConditionPB> for u32 {
  fn from(value: TextFilterConditionPB) -> Self {
    value as u32
  }
}

impl std::default::Default for TextFilterConditionPB {
  fn default() -> Self {
    TextFilterConditionPB::Is
  }
}

impl std::convert::TryFrom<u8> for TextFilterConditionPB {
  type Error = ErrorCode;

  fn try_from(value: u8) -> Result<Self, Self::Error> {
    match value {
      0 => Ok(TextFilterConditionPB::Is),
      1 => Ok(TextFilterConditionPB::IsNot),
      2 => Ok(TextFilterConditionPB::Contains),
      3 => Ok(TextFilterConditionPB::DoesNotContain),
      4 => Ok(TextFilterConditionPB::StartsWith),
      5 => Ok(TextFilterConditionPB::EndsWith),
      6 => Ok(TextFilterConditionPB::TextIsEmpty),
      7 => Ok(TextFilterConditionPB::TextIsNotEmpty),
      _ => Err(ErrorCode::InvalidData),
    }
  }
}

impl FromFilterString for TextFilterPB {
  fn from_filter_rev(filter_rev: &FilterRevision) -> Self
  where
    Self: Sized,
  {
    TextFilterPB {
      condition: TextFilterConditionPB::try_from(filter_rev.condition)
        .unwrap_or(TextFilterConditionPB::Is),
      content: filter_rev.content.clone(),
    }
  }
}

impl std::convert::From<&FilterRevision> for TextFilterPB {
  fn from(rev: &FilterRevision) -> Self {
    TextFilterPB {
      condition: TextFilterConditionPB::try_from(rev.condition)
        .unwrap_or(TextFilterConditionPB::Is),
      content: rev.content.clone(),
    }
  }
}

View file

@ -1,7 +1,7 @@
use crate::entities::parser::NotEmptyStr; use crate::entities::parser::NotEmptyStr;
use crate::entities::{ use crate::entities::{
CheckboxFilterPB, ChecklistFilterPB, DateFilterContentPB, DateFilterPB, FieldType, NumberFilterPB, CheckboxFilterPB, ChecklistFilterPB, DateFilterContentPB, DateFilterPB, FieldType,
SelectOptionFilterPB, TextFilterPB, NumberFilterPB, SelectOptionFilterPB, TextFilterPB,
}; };
use crate::services::field::SelectOptionIds; use crate::services::field::SelectOptionIds;
use crate::services::filter::FilterType; use crate::services::filter::FilterType;
@ -14,217 +14,221 @@ use std::sync::Arc;
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)] #[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct FilterPB { pub struct FilterPB {
#[pb(index = 1)] #[pb(index = 1)]
pub id: String, pub id: String,
#[pb(index = 2)] #[pb(index = 2)]
pub field_id: String, pub field_id: String,
#[pb(index = 3)] #[pb(index = 3)]
pub field_type: FieldType, pub field_type: FieldType,
#[pb(index = 4)] #[pb(index = 4)]
pub data: Vec<u8>, pub data: Vec<u8>,
} }
impl std::convert::From<&FilterRevision> for FilterPB { impl std::convert::From<&FilterRevision> for FilterPB {
fn from(rev: &FilterRevision) -> Self { fn from(rev: &FilterRevision) -> Self {
let field_type: FieldType = rev.field_type.into(); let field_type: FieldType = rev.field_type.into();
let bytes: Bytes = match field_type { let bytes: Bytes = match field_type {
FieldType::RichText => TextFilterPB::from(rev).try_into().unwrap(), FieldType::RichText => TextFilterPB::from(rev).try_into().unwrap(),
FieldType::Number => NumberFilterPB::from(rev).try_into().unwrap(), FieldType::Number => NumberFilterPB::from(rev).try_into().unwrap(),
FieldType::DateTime => DateFilterPB::from(rev).try_into().unwrap(), FieldType::DateTime => DateFilterPB::from(rev).try_into().unwrap(),
FieldType::SingleSelect => SelectOptionFilterPB::from(rev).try_into().unwrap(), FieldType::SingleSelect => SelectOptionFilterPB::from(rev).try_into().unwrap(),
FieldType::MultiSelect => SelectOptionFilterPB::from(rev).try_into().unwrap(), FieldType::MultiSelect => SelectOptionFilterPB::from(rev).try_into().unwrap(),
FieldType::Checklist => ChecklistFilterPB::from(rev).try_into().unwrap(), FieldType::Checklist => ChecklistFilterPB::from(rev).try_into().unwrap(),
FieldType::Checkbox => CheckboxFilterPB::from(rev).try_into().unwrap(), FieldType::Checkbox => CheckboxFilterPB::from(rev).try_into().unwrap(),
FieldType::URL => TextFilterPB::from(rev).try_into().unwrap(), FieldType::URL => TextFilterPB::from(rev).try_into().unwrap(),
}; };
Self { Self {
id: rev.id.clone(), id: rev.id.clone(),
field_id: rev.field_id.clone(), field_id: rev.field_id.clone(),
field_type: rev.field_type.into(), field_type: rev.field_type.into(),
data: bytes.to_vec(), data: bytes.to_vec(),
}
} }
}
} }
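`FilterPB::from` dispatches on `FieldType` to pick the concrete filter type, then serializes it into the generic `data` bytes. A simplified, self-contained sketch of that dispatch, using plain byte strings instead of the real protobuf encoding:

// Simplified sketch of the field-type dispatch above; the byte encoding
// here is an illustrative string format, not the crate's protobuf.
enum FieldType {
  RichText,
  Checkbox,
}

struct FilterPB {
  field_type_name: &'static str,
  data: Vec<u8>,
}

fn to_filter_pb(field_type: FieldType, content: &str) -> FilterPB {
  // Each field type chooses its own concrete filter and serialization.
  let (name, bytes) = match field_type {
    FieldType::RichText => ("RichText", format!("text:{}", content).into_bytes()),
    FieldType::Checkbox => ("Checkbox", format!("checkbox:{}", content).into_bytes()),
  };
  FilterPB { field_type_name: name, data: bytes }
}

fn main() {
  let pb = to_filter_pb(FieldType::RichText, "AppFlowy");
  assert_eq!(pb.field_type_name, "RichText");
  assert_eq!(pb.data, "text:AppFlowy".as_bytes());
}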
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct RepeatedFilterPB {
  #[pb(index = 1)]
  pub items: Vec<FilterPB>,
}

impl std::convert::From<Vec<Arc<FilterRevision>>> for RepeatedFilterPB {
  fn from(revs: Vec<Arc<FilterRevision>>) -> Self {
    RepeatedFilterPB {
      items: revs.into_iter().map(|rev| rev.as_ref().into()).collect(),
    }
  }
}

impl std::convert::From<Vec<FilterPB>> for RepeatedFilterPB {
  fn from(items: Vec<FilterPB>) -> Self {
    Self { items }
  }
}

#[derive(ProtoBuf, Debug, Default, Clone)]
pub struct DeleteFilterPayloadPB {
  #[pb(index = 1)]
  pub field_id: String,

  #[pb(index = 2)]
  pub field_type: FieldType,

  #[pb(index = 3)]
  pub filter_id: String,

  #[pb(index = 4)]
  pub view_id: String,
}

impl TryInto<DeleteFilterParams> for DeleteFilterPayloadPB {
  type Error = ErrorCode;

  fn try_into(self) -> Result<DeleteFilterParams, Self::Error> {
    let view_id = NotEmptyStr::parse(self.view_id)
      .map_err(|_| ErrorCode::DatabaseViewIdIsEmpty)?
      .0;
    let field_id = NotEmptyStr::parse(self.field_id)
      .map_err(|_| ErrorCode::FieldIdIsEmpty)?
      .0;

    let filter_id = NotEmptyStr::parse(self.filter_id)
      .map_err(|_| ErrorCode::UnexpectedEmptyString)?
      .0;

    let filter_type = FilterType {
      field_id,
      field_type: self.field_type,
    };

    Ok(DeleteFilterParams {
      view_id,
      filter_id,
      filter_type,
    })
  }
}

#[derive(Debug)]
pub struct DeleteFilterParams {
  pub view_id: String,
  pub filter_type: FilterType,
  pub filter_id: String,
}

#[derive(ProtoBuf, Debug, Default, Clone)]
pub struct AlterFilterPayloadPB {
  #[pb(index = 1)]
  pub field_id: String,

  #[pb(index = 2)]
  pub field_type: FieldType,

  /// Create a new filter if the filter_id is None
  #[pb(index = 3, one_of)]
  pub filter_id: Option<String>,

  #[pb(index = 4)]
  pub data: Vec<u8>,

  #[pb(index = 5)]
  pub view_id: String,
}

impl AlterFilterPayloadPB {
  #[allow(dead_code)]
  pub fn new<T: TryInto<Bytes, Error = ::protobuf::ProtobufError>>(
    view_id: &str,
    field_rev: &FieldRevision,
    data: T,
  ) -> Self {
    let data = data.try_into().unwrap_or_else(|_| Bytes::new());
    Self {
      view_id: view_id.to_owned(),
      field_id: field_rev.id.clone(),
      field_type: field_rev.ty.into(),
      filter_id: None,
      data: data.to_vec(),
    }
  }
}

impl TryInto<AlterFilterParams> for AlterFilterPayloadPB {
  type Error = ErrorCode;

  fn try_into(self) -> Result<AlterFilterParams, Self::Error> {
    let view_id = NotEmptyStr::parse(self.view_id)
      .map_err(|_| ErrorCode::DatabaseViewIdIsEmpty)?
      .0;
    let field_id = NotEmptyStr::parse(self.field_id)
      .map_err(|_| ErrorCode::FieldIdIsEmpty)?
      .0;
    let filter_id = match self.filter_id {
      None => None,
      Some(filter_id) => Some(
        NotEmptyStr::parse(filter_id)
          .map_err(|_| ErrorCode::FilterIdIsEmpty)?
          .0,
      ),
    };
    let condition;
    let mut content = "".to_string();
    let bytes: &[u8] = self.data.as_ref();

    match self.field_type {
      FieldType::RichText | FieldType::URL => {
        let filter = TextFilterPB::try_from(bytes).map_err(|_| ErrorCode::ProtobufSerde)?;
condition = filter.condition as u8; condition = filter.condition as u8;
content = filter.content; content = filter.content;
} },
FieldType::Checkbox => { FieldType::Checkbox => {
let filter = CheckboxFilterPB::try_from(bytes).map_err(|_| ErrorCode::ProtobufSerde)?; let filter = CheckboxFilterPB::try_from(bytes).map_err(|_| ErrorCode::ProtobufSerde)?;
condition = filter.condition as u8; condition = filter.condition as u8;
} },
FieldType::Number => { FieldType::Number => {
let filter = NumberFilterPB::try_from(bytes).map_err(|_| ErrorCode::ProtobufSerde)?; let filter = NumberFilterPB::try_from(bytes).map_err(|_| ErrorCode::ProtobufSerde)?;
condition = filter.condition as u8; condition = filter.condition as u8;
content = filter.content; content = filter.content;
} },
FieldType::DateTime => { FieldType::DateTime => {
let filter = DateFilterPB::try_from(bytes).map_err(|_| ErrorCode::ProtobufSerde)?; let filter = DateFilterPB::try_from(bytes).map_err(|_| ErrorCode::ProtobufSerde)?;
condition = filter.condition as u8; condition = filter.condition as u8;
content = DateFilterContentPB { content = DateFilterContentPB {
start: filter.start, start: filter.start,
end: filter.end, end: filter.end,
timestamp: filter.timestamp, timestamp: filter.timestamp,
}
.to_string();
}
FieldType::SingleSelect | FieldType::MultiSelect | FieldType::Checklist => {
let filter = SelectOptionFilterPB::try_from(bytes).map_err(|_| ErrorCode::ProtobufSerde)?;
condition = filter.condition as u8;
content = SelectOptionIds::from(filter.option_ids).to_string();
}
} }
.to_string();
Ok(AlterFilterParams { },
view_id, FieldType::SingleSelect | FieldType::MultiSelect | FieldType::Checklist => {
field_id, let filter = SelectOptionFilterPB::try_from(bytes).map_err(|_| ErrorCode::ProtobufSerde)?;
filter_id, condition = filter.condition as u8;
field_type: self.field_type.into(), content = SelectOptionIds::from(filter.option_ids).to_string();
condition, },
content,
})
} }
Ok(AlterFilterParams {
view_id,
field_id,
filter_id,
field_type: self.field_type.into(),
condition,
content,
})
}
} }
#[derive(Debug)] #[derive(Debug)]
pub struct AlterFilterParams { pub struct AlterFilterParams {
pub view_id: String, pub view_id: String,
pub field_id: String, pub field_id: String,
/// Create a new filter if the filter_id is None /// Create a new filter if the filter_id is None
pub filter_id: Option<String>, pub filter_id: Option<String>,
pub field_type: FieldTypeRevision, pub field_type: FieldTypeRevision,
pub condition: u8, pub condition: u8,
pub content: String, pub content: String,
} }
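The payload-to-params conversions above are the whole validation story for filters: a caller builds the protobuf payload and invokes try_into. A minimal sketch of the happy path (the literal ids are hypothetical, and ErrorCode is assumed to implement Debug so expect() compiles):

// Hypothetical ids; expect() assumes ErrorCode: Debug.
let payload = DeleteFilterPayloadPB {
  field_id: "field_1".to_string(),
  field_type: FieldType::RichText,
  filter_id: "filter_1".to_string(),
  view_id: "view_1".to_string(),
};
let params: DeleteFilterParams = payload.try_into().expect("all ids are non-empty");
assert_eq!(params.filter_id, "filter_1");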
View file
@@ -6,145 +6,150 @@ use flowy_error::ErrorCode;
/// [DatabasePB] describes how many fields and blocks the grid has
#[derive(Debug, Clone, Default, ProtoBuf)]
pub struct DatabasePB {
  #[pb(index = 1)]
  pub id: String,
  #[pb(index = 2)]
  pub fields: Vec<FieldIdPB>,
  #[pb(index = 3)]
  pub rows: Vec<RowPB>,
}

#[derive(ProtoBuf, Default)]
pub struct CreateDatabasePayloadPB {
  #[pb(index = 1)]
  pub name: String,
}

#[derive(Clone, ProtoBuf, Default, Debug)]
pub struct DatabaseIdPB {
  #[pb(index = 1)]
  pub value: String,
}

impl AsRef<str> for DatabaseIdPB {
  fn as_ref(&self) -> &str {
    &self.value
  }
}

#[derive(Debug, Clone, Default, ProtoBuf)]
pub struct MoveFieldPayloadPB {
  #[pb(index = 1)]
  pub view_id: String,
  #[pb(index = 2)]
  pub field_id: String,
  #[pb(index = 3)]
  pub from_index: i32,
  #[pb(index = 4)]
  pub to_index: i32,
}

#[derive(Clone)]
pub struct MoveFieldParams {
  pub view_id: String,
  pub field_id: String,
  pub from_index: i32,
  pub to_index: i32,
}

impl TryInto<MoveFieldParams> for MoveFieldPayloadPB {
  type Error = ErrorCode;

  fn try_into(self) -> Result<MoveFieldParams, Self::Error> {
    let view_id = NotEmptyStr::parse(self.view_id).map_err(|_| ErrorCode::DatabaseViewIdIsEmpty)?;
    let item_id = NotEmptyStr::parse(self.field_id).map_err(|_| ErrorCode::InvalidData)?;
    Ok(MoveFieldParams {
      view_id: view_id.0,
      field_id: item_id.0,
      from_index: self.from_index,
      to_index: self.to_index,
    })
  }
}

#[derive(Debug, Clone, Default, ProtoBuf)]
pub struct MoveRowPayloadPB {
  #[pb(index = 1)]
  pub view_id: String,
  #[pb(index = 2)]
  pub from_row_id: String,
  #[pb(index = 4)]
  pub to_row_id: String,
}

pub struct MoveRowParams {
  pub view_id: String,
  pub from_row_id: String,
  pub to_row_id: String,
}

impl TryInto<MoveRowParams> for MoveRowPayloadPB {
  type Error = ErrorCode;

  fn try_into(self) -> Result<MoveRowParams, Self::Error> {
    let view_id = NotEmptyStr::parse(self.view_id).map_err(|_| ErrorCode::DatabaseViewIdIsEmpty)?;
    let from_row_id = NotEmptyStr::parse(self.from_row_id).map_err(|_| ErrorCode::RowIdIsEmpty)?;
    let to_row_id = NotEmptyStr::parse(self.to_row_id).map_err(|_| ErrorCode::RowIdIsEmpty)?;
    Ok(MoveRowParams {
      view_id: view_id.0,
      from_row_id: from_row_id.0,
      to_row_id: to_row_id.0,
    })
  }
}

#[derive(Debug, Clone, Default, ProtoBuf)]
pub struct MoveGroupRowPayloadPB {
  #[pb(index = 1)]
  pub view_id: String,
  #[pb(index = 2)]
  pub from_row_id: String,
  #[pb(index = 3)]
  pub to_group_id: String,
  #[pb(index = 4, one_of)]
  pub to_row_id: Option<String>,
}

pub struct MoveGroupRowParams {
  pub view_id: String,
  pub from_row_id: String,
  pub to_group_id: String,
  pub to_row_id: Option<String>,
}

impl TryInto<MoveGroupRowParams> for MoveGroupRowPayloadPB {
  type Error = ErrorCode;

  fn try_into(self) -> Result<MoveGroupRowParams, Self::Error> {
    let view_id = NotEmptyStr::parse(self.view_id).map_err(|_| ErrorCode::DatabaseViewIdIsEmpty)?;
    let from_row_id = NotEmptyStr::parse(self.from_row_id).map_err(|_| ErrorCode::RowIdIsEmpty)?;
    let to_group_id =
      NotEmptyStr::parse(self.to_group_id).map_err(|_| ErrorCode::GroupIdIsEmpty)?;
    let to_row_id = match self.to_row_id {
      None => None,
      Some(to_row_id) => Some(
        NotEmptyStr::parse(to_row_id)
          .map_err(|_| ErrorCode::RowIdIsEmpty)?
          .0,
      ),
    };
    Ok(MoveGroupRowParams {
      view_id: view_id.0,
      from_row_id: from_row_id.0,
      to_group_id: to_group_id.0,
      to_row_id,
    })
  }
}
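The move payloads validate the same way, and an empty id short-circuits to a specific ErrorCode before the request reaches the database layer. A sketch of the failure path (hypothetical ids; matches! avoids requiring extra derives on the params type):

// Hypothetical: an empty view_id is rejected up front.
let payload = MoveRowPayloadPB {
  view_id: "".to_string(), // empty on purpose
  from_row_id: "row_a".to_string(),
  to_row_id: "row_b".to_string(),
};
let result: Result<MoveRowParams, ErrorCode> = payload.try_into();
assert!(matches!(result, Err(ErrorCode::DatabaseViewIdIsEmpty)));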
View file
@@ -3,81 +3,83 @@ use grid_model::{GroupRevision, SelectOptionGroupConfigurationRevision};
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct UrlGroupConfigurationPB {
  #[pb(index = 1)]
  hide_empty: bool,
}

#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct TextGroupConfigurationPB {
  #[pb(index = 1)]
  hide_empty: bool,
}

#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct SelectOptionGroupConfigurationPB {
  #[pb(index = 1)]
  hide_empty: bool,
}

impl std::convert::From<SelectOptionGroupConfigurationRevision>
  for SelectOptionGroupConfigurationPB
{
  fn from(rev: SelectOptionGroupConfigurationRevision) -> Self {
    Self {
      hide_empty: rev.hide_empty,
    }
  }
}

#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct GroupRecordPB {
  #[pb(index = 1)]
  group_id: String,
  #[pb(index = 2)]
  visible: bool,
}

impl std::convert::From<GroupRevision> for GroupRecordPB {
  fn from(rev: GroupRevision) -> Self {
    Self {
      group_id: rev.id,
      visible: rev.visible,
    }
  }
}

#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct NumberGroupConfigurationPB {
  #[pb(index = 1)]
  hide_empty: bool,
}

#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct DateGroupConfigurationPB {
  #[pb(index = 1)]
  pub condition: DateCondition,
  #[pb(index = 2)]
  hide_empty: bool,
}

#[derive(Debug, Clone, PartialEq, Eq, ProtoBuf_Enum)]
#[repr(u8)]
pub enum DateCondition {
  Relative = 0,
  Day = 1,
  Week = 2,
  Month = 3,
  Year = 4,
}

impl std::default::Default for DateCondition {
  fn default() -> Self {
    DateCondition::Relative
  }
}

#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct CheckboxGroupConfigurationPB {
  #[pb(index = 1)]
  pub(crate) hide_empty: bool,
}
View file
@@ -9,188 +9,193 @@ use std::sync::Arc;
#[derive(ProtoBuf, Debug, Default, Clone)]
pub struct CreateBoardCardPayloadPB {
  #[pb(index = 1)]
  pub database_id: String,
  #[pb(index = 2)]
  pub group_id: String,
  #[pb(index = 3, one_of)]
  pub start_row_id: Option<String>,
}

impl TryInto<CreateRowParams> for CreateBoardCardPayloadPB {
  type Error = ErrorCode;

  fn try_into(self) -> Result<CreateRowParams, Self::Error> {
    let database_id =
      NotEmptyStr::parse(self.database_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
    let group_id = NotEmptyStr::parse(self.group_id).map_err(|_| ErrorCode::GroupIdIsEmpty)?;
    let start_row_id = match self.start_row_id {
      None => None,
      Some(start_row_id) => Some(
        NotEmptyStr::parse(start_row_id)
          .map_err(|_| ErrorCode::RowIdIsEmpty)?
          .0,
      ),
    };
    Ok(CreateRowParams {
      database_id: database_id.0,
      start_row_id,
      group_id: Some(group_id.0),
      layout: LayoutTypePB::Board,
    })
  }
}

#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct GroupConfigurationPB {
  #[pb(index = 1)]
  pub id: String,
  #[pb(index = 2)]
  pub field_id: String,
}

impl std::convert::From<&GroupConfigurationRevision> for GroupConfigurationPB {
  fn from(rev: &GroupConfigurationRevision) -> Self {
    GroupConfigurationPB {
      id: rev.id.clone(),
      field_id: rev.field_id.clone(),
    }
  }
}

#[derive(ProtoBuf, Debug, Default, Clone)]
pub struct RepeatedGroupPB {
  #[pb(index = 1)]
  pub items: Vec<GroupPB>,
}

impl std::ops::Deref for RepeatedGroupPB {
  type Target = Vec<GroupPB>;
  fn deref(&self) -> &Self::Target {
    &self.items
  }
}

impl std::ops::DerefMut for RepeatedGroupPB {
  fn deref_mut(&mut self) -> &mut Self::Target {
    &mut self.items
  }
}

#[derive(ProtoBuf, Debug, Default, Clone)]
pub struct GroupPB {
  #[pb(index = 1)]
  pub field_id: String,
  #[pb(index = 2)]
  pub group_id: String,
  #[pb(index = 3)]
  pub desc: String,
  #[pb(index = 4)]
  pub rows: Vec<RowPB>,
  #[pb(index = 5)]
  pub is_default: bool,
  #[pb(index = 6)]
  pub is_visible: bool,
}

impl std::convert::From<Group> for GroupPB {
  fn from(group: Group) -> Self {
    Self {
      field_id: group.field_id,
      group_id: group.id,
      desc: group.name,
      rows: group.rows,
      is_default: group.is_default,
      is_visible: group.is_visible,
    }
  }
}

#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct RepeatedGroupConfigurationPB {
  #[pb(index = 1)]
  pub items: Vec<GroupConfigurationPB>,
}

impl std::convert::From<Vec<GroupConfigurationPB>> for RepeatedGroupConfigurationPB {
  fn from(items: Vec<GroupConfigurationPB>) -> Self {
    Self { items }
  }
}

impl std::convert::From<Vec<Arc<GroupConfigurationRevision>>> for RepeatedGroupConfigurationPB {
  fn from(revs: Vec<Arc<GroupConfigurationRevision>>) -> Self {
    RepeatedGroupConfigurationPB {
      items: revs.iter().map(|rev| rev.as_ref().into()).collect(),
    }
  }
}

#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct InsertGroupPayloadPB {
  #[pb(index = 1)]
  pub field_id: String,
  #[pb(index = 2)]
  pub field_type: FieldType,
}

impl TryInto<InsertGroupParams> for InsertGroupPayloadPB {
  type Error = ErrorCode;

  fn try_into(self) -> Result<InsertGroupParams, Self::Error> {
    let field_id = NotEmptyStr::parse(self.field_id)
      .map_err(|_| ErrorCode::FieldIdIsEmpty)?
      .0;
    Ok(InsertGroupParams {
      field_id,
      field_type_rev: self.field_type.into(),
    })
  }
}

pub struct InsertGroupParams {
  pub field_id: String,
  pub field_type_rev: FieldTypeRevision,
}

#[derive(ProtoBuf, Debug, Default, Clone)]
pub struct DeleteGroupPayloadPB {
  #[pb(index = 1)]
  pub field_id: String,
  #[pb(index = 2)]
  pub group_id: String,
  #[pb(index = 3)]
  pub field_type: FieldType,
}

impl TryInto<DeleteGroupParams> for DeleteGroupPayloadPB {
  type Error = ErrorCode;

  fn try_into(self) -> Result<DeleteGroupParams, Self::Error> {
    let field_id = NotEmptyStr::parse(self.field_id)
      .map_err(|_| ErrorCode::FieldIdIsEmpty)?
      .0;
    let group_id = NotEmptyStr::parse(self.group_id)
      .map_err(|_| ErrorCode::FieldIdIsEmpty)?
      .0;
    Ok(DeleteGroupParams {
      field_id,
      field_type_rev: self.field_type.into(),
      group_id,
    })
  }
}

pub struct DeleteGroupParams {
  pub field_id: String,
  pub group_id: String,
  pub field_type_rev: FieldTypeRevision,
}
View file
@@ -6,158 +6,158 @@ use std::fmt::Formatter;
#[derive(Debug, Default, ProtoBuf)]
pub struct GroupRowsNotificationPB {
  #[pb(index = 1)]
  pub group_id: String,
  #[pb(index = 2, one_of)]
  pub group_name: Option<String>,
  #[pb(index = 3)]
  pub inserted_rows: Vec<InsertedRowPB>,
  #[pb(index = 4)]
  pub deleted_rows: Vec<String>,
  #[pb(index = 5)]
  pub updated_rows: Vec<RowPB>,
}

impl std::fmt::Display for GroupRowsNotificationPB {
  fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
    for inserted_row in &self.inserted_rows {
      f.write_fmt(format_args!(
        "Insert: {} row at {:?}",
        inserted_row.row.id, inserted_row.index
      ))?;
    }
    for deleted_row in &self.deleted_rows {
      f.write_fmt(format_args!("Delete: {} row", deleted_row))?;
    }
    Ok(())
  }
}

impl GroupRowsNotificationPB {
  pub fn is_empty(&self) -> bool {
    self.group_name.is_none()
      && self.inserted_rows.is_empty()
      && self.deleted_rows.is_empty()
      && self.updated_rows.is_empty()
  }

  pub fn new(group_id: String) -> Self {
    Self {
      group_id,
      ..Default::default()
    }
  }

  pub fn name(group_id: String, name: &str) -> Self {
    Self {
      group_id,
      group_name: Some(name.to_owned()),
      ..Default::default()
    }
  }

  pub fn insert(group_id: String, inserted_rows: Vec<InsertedRowPB>) -> Self {
    Self {
      group_id,
      inserted_rows,
      ..Default::default()
    }
  }

  pub fn delete(group_id: String, deleted_rows: Vec<String>) -> Self {
    Self {
      group_id,
      deleted_rows,
      ..Default::default()
    }
  }

  pub fn update(group_id: String, updated_rows: Vec<RowPB>) -> Self {
    Self {
      group_id,
      updated_rows,
      ..Default::default()
    }
  }
}

#[derive(Debug, Default, ProtoBuf)]
pub struct MoveGroupPayloadPB {
  #[pb(index = 1)]
  pub view_id: String,
  #[pb(index = 2)]
  pub from_group_id: String,
  #[pb(index = 3)]
  pub to_group_id: String,
}

#[derive(Debug)]
pub struct MoveGroupParams {
  pub view_id: String,
  pub from_group_id: String,
  pub to_group_id: String,
}

impl TryInto<MoveGroupParams> for MoveGroupPayloadPB {
  type Error = ErrorCode;

  fn try_into(self) -> Result<MoveGroupParams, Self::Error> {
    let view_id = NotEmptyStr::parse(self.view_id)
      .map_err(|_| ErrorCode::DatabaseViewIdIsEmpty)?
      .0;
    let from_group_id = NotEmptyStr::parse(self.from_group_id)
      .map_err(|_| ErrorCode::GroupIdIsEmpty)?
      .0;
    let to_group_id = NotEmptyStr::parse(self.to_group_id)
      .map_err(|_| ErrorCode::GroupIdIsEmpty)?
      .0;
    Ok(MoveGroupParams {
      view_id,
      from_group_id,
      to_group_id,
    })
  }
}

#[derive(Debug, Default, ProtoBuf)]
pub struct GroupChangesetPB {
  #[pb(index = 1)]
  pub view_id: String,
  #[pb(index = 2)]
  pub inserted_groups: Vec<InsertedGroupPB>,
  #[pb(index = 3)]
  pub initial_groups: Vec<GroupPB>,
  #[pb(index = 4)]
  pub deleted_groups: Vec<String>,
  #[pb(index = 5)]
  pub update_groups: Vec<GroupPB>,
}

impl GroupChangesetPB {
  pub fn is_empty(&self) -> bool {
    self.initial_groups.is_empty()
      && self.inserted_groups.is_empty()
      && self.deleted_groups.is_empty()
      && self.update_groups.is_empty()
  }
}

#[derive(Debug, Default, ProtoBuf)]
pub struct InsertedGroupPB {
  #[pb(index = 1)]
  pub group: GroupPB,
  #[pb(index = 2)]
  pub index: i32,
}
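The constructors on GroupRowsNotificationPB are convenience builders over Default, and is_empty lets the notification layer skip no-op sends. A hypothetical usage sketch (the ids are made up and the println! stands in for the real notification sender):

// Hypothetical: build a delete-only changeset and send it only if non-empty.
let changeset = GroupRowsNotificationPB::delete("group_1".to_string(), vec!["row_1".to_string()]);
if !changeset.is_empty() {
  println!("{}", changeset); // Display prints "Delete: row_1 row"
}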
View file
@@ -2,16 +2,16 @@
pub struct NotEmptyStr(pub String);

impl NotEmptyStr {
  pub fn parse(s: String) -> Result<Self, String> {
    if s.trim().is_empty() {
      return Err("Input string is empty".to_owned());
    }
    Ok(Self(s))
  }
}

impl AsRef<str> for NotEmptyStr {
  fn as_ref(&self) -> &str {
    &self.0
  }
}
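Every *IsEmpty error in the payload conversions above reduces to this one parser: parse, then map the generic error onto a field-specific ErrorCode. A small sketch of the pattern (hypothetical id; unwrap() assumes ErrorCode: Debug):

// Whitespace-only input counts as empty.
assert!(NotEmptyStr::parse("   ".to_string()).is_err());
// The error mapping mirrors the TryInto impls above.
let view_id = NotEmptyStr::parse("view_1".to_string())
  .map_err(|_| ErrorCode::DatabaseViewIdIsEmpty)
  .unwrap()
  .0;
assert_eq!(view_id, "view_1");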
View file
@@ -8,196 +8,198 @@ use std::sync::Arc;
/// [RowPB] describes a row: it holds the id of its parent block and the row's metadata.
#[derive(Debug, Default, Clone, ProtoBuf, Eq, PartialEq)]
pub struct RowPB {
  #[pb(index = 1)]
  pub block_id: String,
  #[pb(index = 2)]
  pub id: String,
  #[pb(index = 3)]
  pub height: i32,
}

impl RowPB {
  pub fn row_id(&self) -> &str {
    &self.id
  }

  pub fn block_id(&self) -> &str {
    &self.block_id
  }
}

impl std::convert::From<&RowRevision> for RowPB {
  fn from(rev: &RowRevision) -> Self {
    Self {
      block_id: rev.block_id.clone(),
      id: rev.id.clone(),
      height: rev.height,
    }
  }
}

impl std::convert::From<&mut RowRevision> for RowPB {
  fn from(rev: &mut RowRevision) -> Self {
    Self {
      block_id: rev.block_id.clone(),
      id: rev.id.clone(),
      height: rev.height,
    }
  }
}

impl std::convert::From<&Arc<RowRevision>> for RowPB {
  fn from(rev: &Arc<RowRevision>) -> Self {
    Self {
      block_id: rev.block_id.clone(),
      id: rev.id.clone(),
      height: rev.height,
    }
  }
}

#[derive(Debug, Default, ProtoBuf)]
pub struct OptionalRowPB {
  #[pb(index = 1, one_of)]
  pub row: Option<RowPB>,
}

#[derive(Debug, Default, ProtoBuf)]
pub struct RepeatedRowPB {
  #[pb(index = 1)]
  pub items: Vec<RowPB>,
}

impl std::convert::From<Vec<RowPB>> for RepeatedRowPB {
  fn from(items: Vec<RowPB>) -> Self {
    Self { items }
  }
}

#[derive(Debug, Clone, Default, ProtoBuf)]
pub struct InsertedRowPB {
  #[pb(index = 1)]
  pub row: RowPB,
  #[pb(index = 2, one_of)]
  pub index: Option<i32>,
  #[pb(index = 3)]
  pub is_new: bool,
}

impl InsertedRowPB {
  pub fn new(row: RowPB) -> Self {
    Self {
      row,
      index: None,
      is_new: false,
    }
  }

  pub fn with_index(row: RowPB, index: i32) -> Self {
    Self {
      row,
      index: Some(index),
      is_new: false,
    }
  }
}

impl std::convert::From<RowPB> for InsertedRowPB {
  fn from(row: RowPB) -> Self {
    Self {
      row,
      index: None,
      is_new: false,
    }
  }
}

impl std::convert::From<&RowRevision> for InsertedRowPB {
  fn from(row: &RowRevision) -> Self {
    let row_order = RowPB::from(row);
    Self::from(row_order)
  }
}

#[derive(Debug, Clone, Default, ProtoBuf)]
pub struct UpdatedRowPB {
  #[pb(index = 1)]
  pub row: RowPB,

  // The field ids of the cells that were updated in this row.
  #[pb(index = 2)]
  pub field_ids: Vec<String>,
}

#[derive(Debug, Default, Clone, ProtoBuf)]
pub struct RowIdPB {
  #[pb(index = 1)]
  pub database_id: String,
  #[pb(index = 2)]
  pub row_id: String,
}

pub struct RowIdParams {
  pub database_id: String,
  pub row_id: String,
}

impl TryInto<RowIdParams> for RowIdPB {
  type Error = ErrorCode;

  fn try_into(self) -> Result<RowIdParams, Self::Error> {
    let database_id =
      NotEmptyStr::parse(self.database_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
    let row_id = NotEmptyStr::parse(self.row_id).map_err(|_| ErrorCode::RowIdIsEmpty)?;
    Ok(RowIdParams {
      database_id: database_id.0,
      row_id: row_id.0,
    })
  }
}

#[derive(Debug, Default, Clone, ProtoBuf)]
pub struct BlockRowIdPB {
  #[pb(index = 1)]
  pub block_id: String,
  #[pb(index = 2)]
  pub row_id: String,
}

#[derive(ProtoBuf, Default)]
pub struct CreateRowPayloadPB {
  #[pb(index = 1)]
  pub database_id: String,
  #[pb(index = 2, one_of)]
  pub start_row_id: Option<String>,
}

#[derive(Default)]
pub struct CreateRowParams {
  pub database_id: String,
  pub start_row_id: Option<String>,
  pub group_id: Option<String>,
  pub layout: LayoutTypePB,
}

impl TryInto<CreateRowParams> for CreateRowPayloadPB {
  type Error = ErrorCode;

  fn try_into(self) -> Result<CreateRowParams, Self::Error> {
    let database_id =
      NotEmptyStr::parse(self.database_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
    Ok(CreateRowParams {
      database_id: database_id.0,
      start_row_id: self.start_row_id,
      group_id: None,
      layout: LayoutTypePB::Grid,
    })
  }
}
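Unlike the board variant, the grid-side CreateRowPayloadPB conversion pins the layout to Grid and leaves the group unset. A sketch (hypothetical id; expect() assumes ErrorCode: Debug):

// Hypothetical: create a row at the end of a grid.
let payload = CreateRowPayloadPB {
  database_id: "database_1".to_string(),
  start_row_id: None,
};
let params: CreateRowParams = payload.try_into().expect("database_id is non-empty");
assert_eq!(params.layout, LayoutTypePB::Grid);
assert!(params.group_id.is_none());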
View file
@@ -1,8 +1,9 @@
use crate::entities::parser::NotEmptyStr;
use crate::entities::{
  AlterFilterParams, AlterFilterPayloadPB, AlterSortParams, AlterSortPayloadPB, DeleteFilterParams,
  DeleteFilterPayloadPB, DeleteGroupParams, DeleteGroupPayloadPB, DeleteSortParams,
  DeleteSortPayloadPB, InsertGroupParams, InsertGroupPayloadPB, RepeatedFilterPB,
  RepeatedGroupConfigurationPB, RepeatedSortPB,
};
use flowy_derive::{ProtoBuf, ProtoBuf_Enum};
use flowy_error::ErrorCode;

@@ -14,164 +15,164 @@ use strum_macros::EnumIter;
/// [DatabaseViewSettingPB] defines the setting options for the grid, such as the filter, group, and sort.
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct DatabaseViewSettingPB {
  #[pb(index = 1)]
  pub support_layouts: Vec<ViewLayoutPB>,
  #[pb(index = 2)]
  pub current_layout: LayoutTypePB,
  #[pb(index = 3)]
  pub filters: RepeatedFilterPB,
  #[pb(index = 4)]
  pub group_configurations: RepeatedGroupConfigurationPB,
  #[pb(index = 5)]
  pub sorts: RepeatedSortPB,
}

#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct ViewLayoutPB {
  #[pb(index = 1)]
  ty: LayoutTypePB,
}

impl ViewLayoutPB {
  pub fn all() -> Vec<ViewLayoutPB> {
    let mut layouts = vec![];
    for layout_ty in LayoutTypePB::iter() {
      layouts.push(ViewLayoutPB { ty: layout_ty })
    }
    layouts
  }
}

#[derive(Debug, Clone, PartialEq, Eq, ProtoBuf_Enum, EnumIter)]
#[repr(u8)]
pub enum LayoutTypePB {
  Grid = 0,
  Board = 1,
  Calendar = 2,
}

impl std::default::Default for LayoutTypePB {
  fn default() -> Self {
    LayoutTypePB::Grid
  }
}

impl std::convert::From<LayoutRevision> for LayoutTypePB {
  fn from(rev: LayoutRevision) -> Self {
    match rev {
      LayoutRevision::Grid => LayoutTypePB::Grid,
      LayoutRevision::Board => LayoutTypePB::Board,
      LayoutRevision::Calendar => LayoutTypePB::Calendar,
    }
  }
}

impl std::convert::From<LayoutTypePB> for LayoutRevision {
  fn from(layout: LayoutTypePB) -> Self {
    match layout {
      LayoutTypePB::Grid => LayoutRevision::Grid,
      LayoutTypePB::Board => LayoutRevision::Board,
      LayoutTypePB::Calendar => LayoutRevision::Calendar,
    }
  }
}

#[derive(Default, ProtoBuf)]
pub struct DatabaseSettingChangesetPB {
  #[pb(index = 1)]
  pub database_id: String,
  #[pb(index = 2)]
  pub layout_type: LayoutTypePB,
  #[pb(index = 3, one_of)]
  pub alter_filter: Option<AlterFilterPayloadPB>,
  #[pb(index = 4, one_of)]
  pub delete_filter: Option<DeleteFilterPayloadPB>,
  #[pb(index = 5, one_of)]
  pub insert_group: Option<InsertGroupPayloadPB>,
  #[pb(index = 6, one_of)]
  pub delete_group: Option<DeleteGroupPayloadPB>,
  #[pb(index = 7, one_of)]
  pub alter_sort: Option<AlterSortPayloadPB>,
  #[pb(index = 8, one_of)]
  pub delete_sort: Option<DeleteSortPayloadPB>,
}

impl TryInto<DatabaseSettingChangesetParams> for DatabaseSettingChangesetPB {
  type Error = ErrorCode;

  fn try_into(self) -> Result<DatabaseSettingChangesetParams, Self::Error> {
    let database_id = NotEmptyStr::parse(self.database_id)
      .map_err(|_| ErrorCode::ViewIdInvalid)?
      .0;
    let insert_filter = match self.alter_filter {
      None => None,
      Some(payload) => Some(payload.try_into()?),
    };
    let delete_filter = match self.delete_filter {
      None => None,
      Some(payload) => Some(payload.try_into()?),
    };
    let insert_group = match self.insert_group {
      Some(payload) => Some(payload.try_into()?),
      None => None,
    };
    let delete_group = match self.delete_group {
      Some(payload) => Some(payload.try_into()?),
      None => None,
    };
    let alert_sort = match self.alter_sort {
      None => None,
      Some(payload) => Some(payload.try_into()?),
    };
    let delete_sort = match self.delete_sort {
      None => None,
      Some(payload) => Some(payload.try_into()?),
    };
    Ok(DatabaseSettingChangesetParams {
      database_id,
      layout_type: self.layout_type.into(),
      insert_filter,
      delete_filter,
      insert_group,
      delete_group,
      alert_sort,
      delete_sort,
    })
  }
}

pub struct DatabaseSettingChangesetParams {
  pub database_id: String,
  pub layout_type: LayoutRevision,
  pub insert_filter: Option<AlterFilterParams>,
  pub delete_filter: Option<DeleteFilterParams>,
  pub insert_group: Option<InsertGroupParams>,
  pub delete_group: Option<DeleteGroupParams>,
  pub alert_sort: Option<AlterSortParams>,
  pub delete_sort: Option<DeleteSortParams>,
}

impl DatabaseSettingChangesetParams {
  pub fn is_filter_changed(&self) -> bool {
    self.insert_filter.is_some() || self.delete_filter.is_some()
  }
}
View file
@@ -9,227 +9,231 @@ use grid_model::{FieldTypeRevision, SortCondition, SortRevision};
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct SortPB {
  #[pb(index = 1)]
  pub id: String,
  #[pb(index = 2)]
  pub field_id: String,
  #[pb(index = 3)]
  pub field_type: FieldType,
  #[pb(index = 4)]
  pub condition: SortConditionPB,
}

impl std::convert::From<&SortRevision> for SortPB {
  fn from(sort_rev: &SortRevision) -> Self {
    Self {
      id: sort_rev.id.clone(),
      field_id: sort_rev.field_id.clone(),
      field_type: sort_rev.field_type.into(),
      condition: sort_rev.condition.clone().into(),
    }
  }
}

#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct RepeatedSortPB {
  #[pb(index = 1)]
  pub items: Vec<SortPB>,
}

impl std::convert::From<Vec<Arc<SortRevision>>> for RepeatedSortPB {
  fn from(revs: Vec<Arc<SortRevision>>) -> Self {
    RepeatedSortPB {
      items: revs.into_iter().map(|rev| rev.as_ref().into()).collect(),
    }
  }
}

impl std::convert::From<Vec<SortPB>> for RepeatedSortPB {
  fn from(items: Vec<SortPB>) -> Self {
    Self { items }
  }
}

#[derive(Debug, Clone, PartialEq, Eq, ProtoBuf_Enum)]
#[repr(u8)]
pub enum SortConditionPB {
  Ascending = 0,
  Descending = 1,
}

impl std::default::Default for SortConditionPB {
  fn default() -> Self {
    Self::Ascending
  }
}

impl std::convert::From<SortCondition> for SortConditionPB {
  fn from(condition: SortCondition) -> Self {
    match condition {
      SortCondition::Ascending => SortConditionPB::Ascending,
      SortCondition::Descending => SortConditionPB::Descending,
    }
  }
}

#[derive(ProtoBuf, Debug, Default, Clone)]
pub struct AlterSortPayloadPB {
  #[pb(index = 1)]
  pub view_id: String,
  #[pb(index = 2)]
  pub field_id: String,
  #[pb(index = 3)]
  pub field_type: FieldType,
  /// Create a new sort if the sort_id is None
  #[pb(index = 4, one_of)]
  pub sort_id: Option<String>,
  #[pb(index = 5)]
  pub condition: SortConditionPB,
}

impl TryInto<AlterSortParams> for AlterSortPayloadPB {
  type Error = ErrorCode;

  fn try_into(self) -> Result<AlterSortParams, Self::Error> {
    let view_id = NotEmptyStr::parse(self.view_id)
      .map_err(|_| ErrorCode::DatabaseViewIdIsEmpty)?
      .0;
    let field_id = NotEmptyStr::parse(self.field_id)
      .map_err(|_| ErrorCode::FieldIdIsEmpty)?
      .0;
    let sort_id = match self.sort_id {
      None => None,
      Some(sort_id) => Some(
        NotEmptyStr::parse(sort_id)
          .map_err(|_| ErrorCode::SortIdIsEmpty)?
          .0,
      ),
    };
    Ok(AlterSortParams {
      view_id,
      field_id,
      sort_id,
      field_type: self.field_type.into(),
      condition: self.condition as u8,
    })
  }
}

#[derive(Debug)]
pub struct AlterSortParams {
  pub view_id: String,
  pub field_id: String,
  /// Create a new sort if the sort_id is None
  pub sort_id: Option<String>,
  pub field_type: FieldTypeRevision,
  pub condition: u8,
}

#[derive(ProtoBuf, Debug, Default, Clone)]
pub struct DeleteSortPayloadPB {
  #[pb(index = 1)]
  pub view_id: String,
  #[pb(index = 2)]
  pub field_id: String,
  #[pb(index = 3)]
  pub field_type: FieldType,
  #[pb(index = 4)]
  pub sort_id: String,
}

impl TryInto<DeleteSortParams> for DeleteSortPayloadPB {
  type Error = ErrorCode;

  fn try_into(self) -> Result<DeleteSortParams, Self::Error> {
    let view_id = NotEmptyStr::parse(self.view_id)
      .map_err(|_| ErrorCode::DatabaseViewIdIsEmpty)?
      .0;
    let field_id = NotEmptyStr::parse(self.field_id)
      .map_err(|_| ErrorCode::FieldIdIsEmpty)?
      .0;
    let sort_id = NotEmptyStr::parse(self.sort_id)
      .map_err(|_| ErrorCode::UnexpectedEmptyString)?
      .0;
    let sort_type = SortType {
      field_id,
      field_type: self.field_type,
    };
    Ok(DeleteSortParams {
      view_id,
      sort_type,
      sort_id,
    })
  }
}

#[derive(Debug, Clone)]
pub struct DeleteSortParams {
  pub view_id: String,
  pub sort_type: SortType,
  pub sort_id: String,
}

#[derive(Debug, Default, ProtoBuf)]
pub struct SortChangesetNotificationPB {
  #[pb(index = 1)]
  pub view_id: String,
  #[pb(index = 2)]
  pub insert_sorts: Vec<SortPB>,
  #[pb(index = 3)]
  pub delete_sorts: Vec<SortPB>,
  #[pb(index = 4)]
  pub update_sorts: Vec<SortPB>,
}

impl SortChangesetNotificationPB {
  pub fn new(view_id: String) -> Self {
    Self {
      view_id,
      insert_sorts: vec![],
      delete_sorts: vec![],
      update_sorts: vec![],
    }
  }

  pub fn extend(&mut self, other: SortChangesetNotificationPB) {
    self.insert_sorts.extend(other.insert_sorts);
    self.delete_sorts.extend(other.delete_sorts);
    self.update_sorts.extend(other.update_sorts);
  }

  pub fn is_empty(&self) -> bool {
    self.insert_sorts.is_empty() && self.delete_sorts.is_empty() && self.update_sorts.is_empty()
  }
}

#[derive(Debug, Default, ProtoBuf)]
pub struct ReorderAllRowsPB {
  #[pb(index = 1)]
  pub row_orders: Vec<String>,
}

#[derive(Debug, Default, ProtoBuf)]
pub struct ReorderSingleRowPB {
  #[pb(index = 1)]
  pub row_id: String,
  #[pb(index = 2)]
  pub old_index: i32,
  #[pb(index = 3)]
  pub new_index: i32,
}
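Sorts follow the same payload/params split as filters, with the condition flattened to a u8 for the revision model. A hypothetical sketch (made-up ids; expect() assumes ErrorCode: Debug):

// Hypothetical: sort_id = None means "create a new sort".
let payload = AlterSortPayloadPB {
  view_id: "view_1".to_string(),
  field_id: "field_1".to_string(),
  field_type: FieldType::RichText,
  sort_id: None,
  condition: SortConditionPB::Descending,
};
let params: AlterSortParams = payload.try_into().expect("ids are non-empty");
assert_eq!(params.condition, SortConditionPB::Descending as u8);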
View file
@ -3,62 +3,66 @@ use flowy_derive::ProtoBuf;
#[derive(Debug, Default, Clone, ProtoBuf)] #[derive(Debug, Default, Clone, ProtoBuf)]
pub struct ViewRowsVisibilityChangesetPB { pub struct ViewRowsVisibilityChangesetPB {
#[pb(index = 1)] #[pb(index = 1)]
pub view_id: String, pub view_id: String,
#[pb(index = 5)] #[pb(index = 5)]
pub visible_rows: Vec<InsertedRowPB>, pub visible_rows: Vec<InsertedRowPB>,
#[pb(index = 6)] #[pb(index = 6)]
pub invisible_rows: Vec<String>, pub invisible_rows: Vec<String>,
} }
#[derive(Debug, Default, Clone, ProtoBuf)] #[derive(Debug, Default, Clone, ProtoBuf)]
pub struct ViewRowsChangesetPB { pub struct ViewRowsChangesetPB {
#[pb(index = 1)] #[pb(index = 1)]
pub view_id: String, pub view_id: String,
#[pb(index = 2)] #[pb(index = 2)]
pub inserted_rows: Vec<InsertedRowPB>, pub inserted_rows: Vec<InsertedRowPB>,
#[pb(index = 3)] #[pb(index = 3)]
pub deleted_rows: Vec<String>, pub deleted_rows: Vec<String>,
#[pb(index = 4)] #[pb(index = 4)]
pub updated_rows: Vec<UpdatedRowPB>, pub updated_rows: Vec<UpdatedRowPB>,
} }
impl ViewRowsChangesetPB { impl ViewRowsChangesetPB {
pub fn from_insert(view_id: String, inserted_rows: Vec<InsertedRowPB>) -> Self { pub fn from_insert(view_id: String, inserted_rows: Vec<InsertedRowPB>) -> Self {
Self { Self {
view_id, view_id,
inserted_rows, inserted_rows,
..Default::default() ..Default::default()
}
} }
}
pub fn from_delete(view_id: String, deleted_rows: Vec<String>) -> Self { pub fn from_delete(view_id: String, deleted_rows: Vec<String>) -> Self {
Self { Self {
view_id, view_id,
deleted_rows, deleted_rows,
..Default::default() ..Default::default()
}
} }
}
pub fn from_update(view_id: String, updated_rows: Vec<UpdatedRowPB>) -> Self { pub fn from_update(view_id: String, updated_rows: Vec<UpdatedRowPB>) -> Self {
Self { Self {
view_id, view_id,
updated_rows, updated_rows,
..Default::default() ..Default::default()
}
} }
}
pub fn from_move(view_id: String, deleted_rows: Vec<String>, inserted_rows: Vec<InsertedRowPB>) -> Self { pub fn from_move(
Self { view_id: String,
view_id, deleted_rows: Vec<String>,
inserted_rows, inserted_rows: Vec<InsertedRowPB>,
deleted_rows, ) -> Self {
..Default::default() Self {
} view_id,
inserted_rows,
deleted_rows,
..Default::default()
} }
}
} }
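A row move is modeled above as a paired delete and insert. A self-contained sketch with simplified stand-in types (plain strings instead of InsertedRowPB) showing how from_move carries both halves of the move:

#[derive(Debug, Default)]
struct RowsChangeset {
  view_id: String,
  inserted_rows: Vec<String>,
  deleted_rows: Vec<String>,
}

impl RowsChangeset {
  fn from_move(view_id: String, deleted_rows: Vec<String>, inserted_rows: Vec<String>) -> Self {
    Self {
      view_id,
      inserted_rows,
      deleted_rows,
    }
  }
}

fn main() {
  // Moving "row-a": it leaves its old index and reappears at the new one.
  let changeset = RowsChangeset::from_move(
    "view-1".to_string(),
    vec!["row-a".to_string()],
    vec!["row-a".to_string()],
  );
  println!("{changeset:?}");
}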


@@ -2,10 +2,10 @@ use crate::entities::*;
use crate::manager::DatabaseManager; use crate::manager::DatabaseManager;
use crate::services::cell::{FromCellString, ToCellChangesetString, TypeCellData}; use crate::services::cell::{FromCellString, ToCellChangesetString, TypeCellData};
use crate::services::field::{ use crate::services::field::{
default_type_option_builder_from_type, select_type_option_from_field_rev, type_option_builder_from_json_str, default_type_option_builder_from_type, select_type_option_from_field_rev,
DateCellChangeset, DateChangesetPB, SelectOptionCellChangeset, SelectOptionCellChangesetPB, type_option_builder_from_json_str, DateCellChangeset, DateChangesetPB, SelectOptionCellChangeset,
SelectOptionCellChangesetParams, SelectOptionCellDataPB, SelectOptionChangeset, SelectOptionChangesetPB, SelectOptionCellChangesetPB, SelectOptionCellChangesetParams, SelectOptionCellDataPB,
SelectOptionIds, SelectOptionPB, SelectOptionChangeset, SelectOptionChangesetPB, SelectOptionIds, SelectOptionPB,
}; };
use crate::services::row::make_row_from_row_rev; use crate::services::row::make_row_from_row_rev;
use flowy_error::{ErrorCode, FlowyError, FlowyResult}; use flowy_error::{ErrorCode, FlowyError, FlowyResult};
@@ -15,528 +15,562 @@ use std::sync::Arc;
#[tracing::instrument(level = "trace", skip(data, manager), err)] #[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn get_database_data_handler( pub(crate) async fn get_database_data_handler(
data: AFPluginData<DatabaseIdPB>, data: AFPluginData<DatabaseIdPB>,
manager: AFPluginState<Arc<DatabaseManager>>, manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<DatabasePB, FlowyError> { ) -> DataResult<DatabasePB, FlowyError> {
let database_id: DatabaseIdPB = data.into_inner(); let database_id: DatabaseIdPB = data.into_inner();
let editor = manager.open_database(database_id.as_ref()).await?; let editor = manager.open_database(database_id.as_ref()).await?;
let database = editor.get_database(database_id.as_ref()).await?; let database = editor.get_database(database_id.as_ref()).await?;
data_result(database) data_result(database)
} }
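Every handler in this file follows the same shape: unwrap the typed payload, resolve the editor from the manager, delegate, and wrap the result. A dependency-free sketch of that shape (all names here are placeholders, not AppFlowy APIs):

struct PluginData<T>(T);

impl<T> PluginData<T> {
  // Mirrors AFPluginData::into_inner: hand back the typed payload.
  fn into_inner(self) -> T {
    self.0
  }
}

struct Editor;

impl Editor {
  fn get_database(&self, id: &str) -> Result<String, String> {
    Ok(format!("database for {id}"))
  }
}

fn get_database_handler(data: PluginData<String>) -> Result<String, String> {
  let database_id = data.into_inner(); // 1. unwrap the payload
  let editor = Editor; // 2. stand-in for manager.open_database(..)
  editor.get_database(&database_id) // 3. delegate and return the result
}

fn main() {
  println!("{:?}", get_database_handler(PluginData("db-1".to_string())));
}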
#[tracing::instrument(level = "trace", skip(data, manager), err)] #[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn get_database_setting_handler( pub(crate) async fn get_database_setting_handler(
data: AFPluginData<DatabaseIdPB>, data: AFPluginData<DatabaseIdPB>,
manager: AFPluginState<Arc<DatabaseManager>>, manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<DatabaseViewSettingPB, FlowyError> { ) -> DataResult<DatabaseViewSettingPB, FlowyError> {
let database_id: DatabaseIdPB = data.into_inner(); let database_id: DatabaseIdPB = data.into_inner();
let editor = manager.open_database(database_id).await?; let editor = manager.open_database(database_id).await?;
let database_setting = editor.get_setting().await?; let database_setting = editor.get_setting().await?;
data_result(database_setting) data_result(database_setting)
} }
#[tracing::instrument(level = "trace", skip(data, manager), err)] #[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn update_database_setting_handler( pub(crate) async fn update_database_setting_handler(
data: AFPluginData<DatabaseSettingChangesetPB>, data: AFPluginData<DatabaseSettingChangesetPB>,
manager: AFPluginState<Arc<DatabaseManager>>, manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let params: DatabaseSettingChangesetParams = data.into_inner().try_into()?; let params: DatabaseSettingChangesetParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.database_id).await?; let editor = manager.get_database_editor(&params.database_id).await?;
if let Some(insert_params) = params.insert_group { if let Some(insert_params) = params.insert_group {
editor.insert_group(insert_params).await?; editor.insert_group(insert_params).await?;
} }
if let Some(delete_params) = params.delete_group { if let Some(delete_params) = params.delete_group {
editor.delete_group(delete_params).await?; editor.delete_group(delete_params).await?;
} }
if let Some(alter_filter) = params.insert_filter { if let Some(alter_filter) = params.insert_filter {
editor.create_or_update_filter(alter_filter).await?; editor.create_or_update_filter(alter_filter).await?;
} }
if let Some(delete_filter) = params.delete_filter { if let Some(delete_filter) = params.delete_filter {
editor.delete_filter(delete_filter).await?; editor.delete_filter(delete_filter).await?;
} }
if let Some(alter_sort) = params.alert_sort { if let Some(alter_sort) = params.alert_sort {
let _ = editor.create_or_update_sort(alter_sort).await?; let _ = editor.create_or_update_sort(alter_sort).await?;
} }
if let Some(delete_sort) = params.delete_sort { if let Some(delete_sort) = params.delete_sort {
editor.delete_sort(delete_sort).await?; editor.delete_sort(delete_sort).await?;
} }
Ok(()) Ok(())
} }
#[tracing::instrument(level = "trace", skip(data, manager), err)] #[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn get_all_filters_handler( pub(crate) async fn get_all_filters_handler(
data: AFPluginData<DatabaseIdPB>, data: AFPluginData<DatabaseIdPB>,
manager: AFPluginState<Arc<DatabaseManager>>, manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<RepeatedFilterPB, FlowyError> { ) -> DataResult<RepeatedFilterPB, FlowyError> {
let database_id: DatabaseIdPB = data.into_inner(); let database_id: DatabaseIdPB = data.into_inner();
let editor = manager.open_database(database_id).await?; let editor = manager.open_database(database_id).await?;
let filters = RepeatedFilterPB { let filters = RepeatedFilterPB {
items: editor.get_all_filters().await?, items: editor.get_all_filters().await?,
}; };
data_result(filters) data_result(filters)
} }
#[tracing::instrument(level = "trace", skip(data, manager), err)] #[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn get_all_sorts_handler( pub(crate) async fn get_all_sorts_handler(
data: AFPluginData<DatabaseIdPB>, data: AFPluginData<DatabaseIdPB>,
manager: AFPluginState<Arc<DatabaseManager>>, manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<RepeatedSortPB, FlowyError> { ) -> DataResult<RepeatedSortPB, FlowyError> {
let database_id: DatabaseIdPB = data.into_inner(); let database_id: DatabaseIdPB = data.into_inner();
let editor = manager.open_database(database_id.as_ref()).await?; let editor = manager.open_database(database_id.as_ref()).await?;
let sorts = RepeatedSortPB { let sorts = RepeatedSortPB {
items: editor.get_all_sorts(database_id.as_ref()).await?, items: editor.get_all_sorts(database_id.as_ref()).await?,
}; };
data_result(sorts) data_result(sorts)
} }
#[tracing::instrument(level = "trace", skip(data, manager), err)] #[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn delete_all_sorts_handler( pub(crate) async fn delete_all_sorts_handler(
data: AFPluginData<DatabaseIdPB>, data: AFPluginData<DatabaseIdPB>,
manager: AFPluginState<Arc<DatabaseManager>>, manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let database_id: DatabaseIdPB = data.into_inner(); let database_id: DatabaseIdPB = data.into_inner();
let editor = manager.open_database(database_id.as_ref()).await?; let editor = manager.open_database(database_id.as_ref()).await?;
editor.delete_all_sorts(database_id.as_ref()).await?; editor.delete_all_sorts(database_id.as_ref()).await?;
Ok(()) Ok(())
} }
#[tracing::instrument(level = "trace", skip(data, manager), err)] #[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn get_fields_handler( pub(crate) async fn get_fields_handler(
data: AFPluginData<GetFieldPayloadPB>, data: AFPluginData<GetFieldPayloadPB>,
manager: AFPluginState<Arc<DatabaseManager>>, manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<RepeatedFieldPB, FlowyError> { ) -> DataResult<RepeatedFieldPB, FlowyError> {
let params: GetFieldParams = data.into_inner().try_into()?; let params: GetFieldParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.database_id).await?; let editor = manager.get_database_editor(&params.database_id).await?;
let field_revs = editor.get_field_revs(params.field_ids).await?; let field_revs = editor.get_field_revs(params.field_ids).await?;
let repeated_field: RepeatedFieldPB = field_revs.into_iter().map(FieldPB::from).collect::<Vec<_>>().into(); let repeated_field: RepeatedFieldPB = field_revs
data_result(repeated_field) .into_iter()
.map(FieldPB::from)
.collect::<Vec<_>>()
.into();
data_result(repeated_field)
} }
#[tracing::instrument(level = "trace", skip(data, manager), err)] #[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn update_field_handler( pub(crate) async fn update_field_handler(
data: AFPluginData<FieldChangesetPB>, data: AFPluginData<FieldChangesetPB>,
manager: AFPluginState<Arc<DatabaseManager>>, manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let changeset: FieldChangesetParams = data.into_inner().try_into()?; let changeset: FieldChangesetParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&changeset.database_id).await?; let editor = manager.get_database_editor(&changeset.database_id).await?;
editor.update_field(changeset).await?; editor.update_field(changeset).await?;
Ok(()) Ok(())
} }
#[tracing::instrument(level = "trace", skip(data, manager), err)] #[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn update_field_type_option_handler( pub(crate) async fn update_field_type_option_handler(
data: AFPluginData<TypeOptionChangesetPB>, data: AFPluginData<TypeOptionChangesetPB>,
manager: AFPluginState<Arc<DatabaseManager>>, manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let params: TypeOptionChangesetParams = data.into_inner().try_into()?; let params: TypeOptionChangesetParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.database_id).await?; let editor = manager.get_database_editor(&params.database_id).await?;
let old_field_rev = editor.get_field_rev(&params.field_id).await; let old_field_rev = editor.get_field_rev(&params.field_id).await;
editor editor
.update_field_type_option( .update_field_type_option(
&params.database_id, &params.database_id,
&params.field_id, &params.field_id,
params.type_option_data, params.type_option_data,
old_field_rev, old_field_rev,
) )
.await?; .await?;
Ok(()) Ok(())
} }
#[tracing::instrument(level = "trace", skip(data, manager), err)] #[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn delete_field_handler( pub(crate) async fn delete_field_handler(
data: AFPluginData<DeleteFieldPayloadPB>, data: AFPluginData<DeleteFieldPayloadPB>,
manager: AFPluginState<Arc<DatabaseManager>>, manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let params: FieldIdParams = data.into_inner().try_into()?; let params: FieldIdParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.database_id).await?; let editor = manager.get_database_editor(&params.database_id).await?;
editor.delete_field(&params.field_id).await?; editor.delete_field(&params.field_id).await?;
Ok(()) Ok(())
} }
#[tracing::instrument(level = "trace", skip(data, manager), err)] #[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn switch_to_field_handler( pub(crate) async fn switch_to_field_handler(
data: AFPluginData<UpdateFieldTypePayloadPB>, data: AFPluginData<UpdateFieldTypePayloadPB>,
manager: AFPluginState<Arc<DatabaseManager>>, manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let params: EditFieldParams = data.into_inner().try_into()?; let params: EditFieldParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.database_id).await?; let editor = manager.get_database_editor(&params.database_id).await?;
let old_field_rev = editor.get_field_rev(&params.field_id).await; let old_field_rev = editor.get_field_rev(&params.field_id).await;
editor editor
.switch_to_field_type(&params.field_id, &params.field_type) .switch_to_field_type(&params.field_id, &params.field_type)
.await?; .await?;
// Get the field_rev by field_id; if it doesn't exist, create the default FieldRevision from the FieldType. // Get the field_rev by field_id; if it doesn't exist, create the default FieldRevision from the FieldType.
let new_field_rev = editor let new_field_rev = editor
.get_field_rev(&params.field_id) .get_field_rev(&params.field_id)
.await .await
.unwrap_or(Arc::new(editor.next_field_rev(&params.field_type).await?)); .unwrap_or(Arc::new(editor.next_field_rev(&params.field_type).await?));
// Update the type-option data after the field type has been changed // Update the type-option data after the field type has been changed
let type_option_data = get_type_option_data(&new_field_rev, &params.field_type).await?; let type_option_data = get_type_option_data(&new_field_rev, &params.field_type).await?;
editor editor
.update_field_type_option(&params.database_id, &new_field_rev.id, type_option_data, old_field_rev) .update_field_type_option(
.await?; &params.database_id,
&new_field_rev.id,
type_option_data,
old_field_rev,
)
.await?;
Ok(()) Ok(())
} }
#[tracing::instrument(level = "trace", skip(data, manager), err)] #[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn duplicate_field_handler( pub(crate) async fn duplicate_field_handler(
data: AFPluginData<DuplicateFieldPayloadPB>, data: AFPluginData<DuplicateFieldPayloadPB>,
manager: AFPluginState<Arc<DatabaseManager>>, manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let params: FieldIdParams = data.into_inner().try_into()?; let params: FieldIdParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.database_id).await?; let editor = manager.get_database_editor(&params.database_id).await?;
editor.duplicate_field(&params.field_id).await?; editor.duplicate_field(&params.field_id).await?;
Ok(()) Ok(())
} }
/// Return the FieldTypeOptionData if the Field exists; otherwise return a record-not-found error. /// Return the FieldTypeOptionData if the Field exists; otherwise return a record-not-found error.
#[tracing::instrument(level = "trace", skip(data, manager), err)] #[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn get_field_type_option_data_handler( pub(crate) async fn get_field_type_option_data_handler(
data: AFPluginData<TypeOptionPathPB>, data: AFPluginData<TypeOptionPathPB>,
manager: AFPluginState<Arc<DatabaseManager>>, manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<TypeOptionPB, FlowyError> { ) -> DataResult<TypeOptionPB, FlowyError> {
let params: TypeOptionPathParams = data.into_inner().try_into()?; let params: TypeOptionPathParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.database_id).await?; let editor = manager.get_database_editor(&params.database_id).await?;
match editor.get_field_rev(&params.field_id).await { match editor.get_field_rev(&params.field_id).await {
None => Err(FlowyError::record_not_found()), None => Err(FlowyError::record_not_found()),
Some(field_rev) => { Some(field_rev) => {
let field_type = field_rev.ty.into(); let field_type = field_rev.ty.into();
let type_option_data = get_type_option_data(&field_rev, &field_type).await?; let type_option_data = get_type_option_data(&field_rev, &field_type).await?;
let data = TypeOptionPB { let data = TypeOptionPB {
database_id: params.database_id, database_id: params.database_id,
field: field_rev.into(), field: field_rev.into(),
type_option_data, type_option_data,
}; };
data_result(data) data_result(data)
} },
} }
} }
/// Create FieldMeta and save it. Return the FieldTypeOptionData. /// Create FieldMeta and save it. Return the FieldTypeOptionData.
#[tracing::instrument(level = "trace", skip(data, manager), err)] #[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn create_field_type_option_data_handler( pub(crate) async fn create_field_type_option_data_handler(
data: AFPluginData<CreateFieldPayloadPB>, data: AFPluginData<CreateFieldPayloadPB>,
manager: AFPluginState<Arc<DatabaseManager>>, manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<TypeOptionPB, FlowyError> { ) -> DataResult<TypeOptionPB, FlowyError> {
let params: CreateFieldParams = data.into_inner().try_into()?; let params: CreateFieldParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.database_id).await?; let editor = manager.get_database_editor(&params.database_id).await?;
let field_rev = editor let field_rev = editor
.create_new_field_rev_with_type_option(&params.field_type, params.type_option_data) .create_new_field_rev_with_type_option(&params.field_type, params.type_option_data)
.await?; .await?;
let field_type: FieldType = field_rev.ty.into(); let field_type: FieldType = field_rev.ty.into();
let type_option_data = get_type_option_data(&field_rev, &field_type).await?; let type_option_data = get_type_option_data(&field_rev, &field_type).await?;
data_result(TypeOptionPB { data_result(TypeOptionPB {
database_id: params.database_id, database_id: params.database_id,
field: field_rev.into(), field: field_rev.into(),
type_option_data, type_option_data,
}) })
} }
#[tracing::instrument(level = "trace", skip(data, manager), err)] #[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn move_field_handler( pub(crate) async fn move_field_handler(
data: AFPluginData<MoveFieldPayloadPB>, data: AFPluginData<MoveFieldPayloadPB>,
manager: AFPluginState<Arc<DatabaseManager>>, manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let params: MoveFieldParams = data.into_inner().try_into()?; let params: MoveFieldParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.view_id).await?; let editor = manager.get_database_editor(&params.view_id).await?;
editor.move_field(params).await?; editor.move_field(params).await?;
Ok(()) Ok(())
} }
/// The [FieldRevision] contains multiple data entries, each of which belongs to a specific FieldType. /// The [FieldRevision] contains multiple data entries, each of which belongs to a specific FieldType.
async fn get_type_option_data(field_rev: &FieldRevision, field_type: &FieldType) -> FlowyResult<Vec<u8>> { async fn get_type_option_data(
let s = field_rev field_rev: &FieldRevision,
.get_type_option_str(field_type) field_type: &FieldType,
.map(|value| value.to_owned()) ) -> FlowyResult<Vec<u8>> {
.unwrap_or_else(|| { let s = field_rev
default_type_option_builder_from_type(field_type) .get_type_option_str(field_type)
.serializer() .map(|value| value.to_owned())
.json_str() .unwrap_or_else(|| {
}); default_type_option_builder_from_type(field_type)
let field_type: FieldType = field_rev.ty.into(); .serializer()
let builder = type_option_builder_from_json_str(&s, &field_type); .json_str()
let type_option_data = builder.serializer().protobuf_bytes().to_vec(); });
let field_type: FieldType = field_rev.ty.into();
let builder = type_option_builder_from_json_str(&s, &field_type);
let type_option_data = builder.serializer().protobuf_bytes().to_vec();
Ok(type_option_data) Ok(type_option_data)
} }
// #[tracing::instrument(level = "debug", skip(data, manager), err)] // #[tracing::instrument(level = "debug", skip(data, manager), err)]
pub(crate) async fn get_row_handler( pub(crate) async fn get_row_handler(
data: AFPluginData<RowIdPB>, data: AFPluginData<RowIdPB>,
manager: AFPluginState<Arc<DatabaseManager>>, manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<OptionalRowPB, FlowyError> { ) -> DataResult<OptionalRowPB, FlowyError> {
let params: RowIdParams = data.into_inner().try_into()?; let params: RowIdParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.database_id).await?; let editor = manager.get_database_editor(&params.database_id).await?;
let row = editor.get_row_rev(&params.row_id).await?.map(make_row_from_row_rev); let row = editor
.get_row_rev(&params.row_id)
.await?
.map(make_row_from_row_rev);
data_result(OptionalRowPB { row }) data_result(OptionalRowPB { row })
} }
#[tracing::instrument(level = "debug", skip(data, manager), err)] #[tracing::instrument(level = "debug", skip(data, manager), err)]
pub(crate) async fn delete_row_handler( pub(crate) async fn delete_row_handler(
data: AFPluginData<RowIdPB>, data: AFPluginData<RowIdPB>,
manager: AFPluginState<Arc<DatabaseManager>>, manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let params: RowIdParams = data.into_inner().try_into()?; let params: RowIdParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.database_id).await?; let editor = manager.get_database_editor(&params.database_id).await?;
editor.delete_row(&params.row_id).await?; editor.delete_row(&params.row_id).await?;
Ok(()) Ok(())
} }
#[tracing::instrument(level = "debug", skip(data, manager), err)] #[tracing::instrument(level = "debug", skip(data, manager), err)]
pub(crate) async fn duplicate_row_handler( pub(crate) async fn duplicate_row_handler(
data: AFPluginData<RowIdPB>, data: AFPluginData<RowIdPB>,
manager: AFPluginState<Arc<DatabaseManager>>, manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let params: RowIdParams = data.into_inner().try_into()?; let params: RowIdParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.database_id).await?; let editor = manager.get_database_editor(&params.database_id).await?;
editor.duplicate_row(&params.row_id).await?; editor.duplicate_row(&params.row_id).await?;
Ok(()) Ok(())
} }
#[tracing::instrument(level = "debug", skip(data, manager), err)] #[tracing::instrument(level = "debug", skip(data, manager), err)]
pub(crate) async fn move_row_handler( pub(crate) async fn move_row_handler(
data: AFPluginData<MoveRowPayloadPB>, data: AFPluginData<MoveRowPayloadPB>,
manager: AFPluginState<Arc<DatabaseManager>>, manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let params: MoveRowParams = data.into_inner().try_into()?; let params: MoveRowParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.view_id).await?; let editor = manager.get_database_editor(&params.view_id).await?;
editor.move_row(params).await?; editor.move_row(params).await?;
Ok(()) Ok(())
} }
#[tracing::instrument(level = "debug", skip(data, manager), err)] #[tracing::instrument(level = "debug", skip(data, manager), err)]
pub(crate) async fn create_table_row_handler( pub(crate) async fn create_table_row_handler(
data: AFPluginData<CreateRowPayloadPB>, data: AFPluginData<CreateRowPayloadPB>,
manager: AFPluginState<Arc<DatabaseManager>>, manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<RowPB, FlowyError> { ) -> DataResult<RowPB, FlowyError> {
let params: CreateRowParams = data.into_inner().try_into()?; let params: CreateRowParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(params.database_id.as_ref()).await?; let editor = manager
let row = editor.create_row(params).await?; .get_database_editor(params.database_id.as_ref())
data_result(row) .await?;
let row = editor.create_row(params).await?;
data_result(row)
} }
#[tracing::instrument(level = "trace", skip_all, err)] #[tracing::instrument(level = "trace", skip_all, err)]
pub(crate) async fn get_cell_handler( pub(crate) async fn get_cell_handler(
data: AFPluginData<CellIdPB>, data: AFPluginData<CellIdPB>,
manager: AFPluginState<Arc<DatabaseManager>>, manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<CellPB, FlowyError> { ) -> DataResult<CellPB, FlowyError> {
let params: CellIdParams = data.into_inner().try_into()?; let params: CellIdParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.database_id).await?; let editor = manager.get_database_editor(&params.database_id).await?;
match editor.get_cell(&params).await { match editor.get_cell(&params).await {
None => data_result(CellPB::empty(&params.field_id, &params.row_id)), None => data_result(CellPB::empty(&params.field_id, &params.row_id)),
Some(cell) => data_result(cell), Some(cell) => data_result(cell),
} }
} }
#[tracing::instrument(level = "trace", skip_all, err)] #[tracing::instrument(level = "trace", skip_all, err)]
pub(crate) async fn update_cell_handler( pub(crate) async fn update_cell_handler(
data: AFPluginData<CellChangesetPB>, data: AFPluginData<CellChangesetPB>,
manager: AFPluginState<Arc<DatabaseManager>>, manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let changeset: CellChangesetPB = data.into_inner(); let changeset: CellChangesetPB = data.into_inner();
let editor = manager.get_database_editor(&changeset.database_id).await?; let editor = manager.get_database_editor(&changeset.database_id).await?;
editor editor
.update_cell_with_changeset(&changeset.row_id, &changeset.field_id, changeset.type_cell_data) .update_cell_with_changeset(
.await?; &changeset.row_id,
Ok(()) &changeset.field_id,
changeset.type_cell_data,
)
.await?;
Ok(())
} }
#[tracing::instrument(level = "trace", skip_all, err)] #[tracing::instrument(level = "trace", skip_all, err)]
pub(crate) async fn new_select_option_handler( pub(crate) async fn new_select_option_handler(
data: AFPluginData<CreateSelectOptionPayloadPB>, data: AFPluginData<CreateSelectOptionPayloadPB>,
manager: AFPluginState<Arc<DatabaseManager>>, manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<SelectOptionPB, FlowyError> { ) -> DataResult<SelectOptionPB, FlowyError> {
let params: CreateSelectOptionParams = data.into_inner().try_into()?; let params: CreateSelectOptionParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.database_id).await?; let editor = manager.get_database_editor(&params.database_id).await?;
match editor.get_field_rev(&params.field_id).await { match editor.get_field_rev(&params.field_id).await {
None => Err(ErrorCode::InvalidData.into()), None => Err(ErrorCode::InvalidData.into()),
Some(field_rev) => { Some(field_rev) => {
let type_option = select_type_option_from_field_rev(&field_rev)?; let type_option = select_type_option_from_field_rev(&field_rev)?;
let select_option = type_option.create_option(&params.option_name); let select_option = type_option.create_option(&params.option_name);
data_result(select_option) data_result(select_option)
} },
} }
} }
#[tracing::instrument(level = "trace", skip_all, err)] #[tracing::instrument(level = "trace", skip_all, err)]
pub(crate) async fn update_select_option_handler( pub(crate) async fn update_select_option_handler(
data: AFPluginData<SelectOptionChangesetPB>, data: AFPluginData<SelectOptionChangesetPB>,
manager: AFPluginState<Arc<DatabaseManager>>, manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let changeset: SelectOptionChangeset = data.into_inner().try_into()?; let changeset: SelectOptionChangeset = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&changeset.cell_path.database_id).await?; let editor = manager
let field_id = changeset.cell_path.field_id.clone(); .get_database_editor(&changeset.cell_path.database_id)
let (tx, rx) = tokio::sync::oneshot::channel(); .await?;
editor let field_id = changeset.cell_path.field_id.clone();
.modify_field_rev(&field_id, |field_rev| { let (tx, rx) = tokio::sync::oneshot::channel();
let mut type_option = select_type_option_from_field_rev(field_rev)?; editor
let mut cell_changeset_str = None; .modify_field_rev(&field_id, |field_rev| {
let mut is_changed = None; let mut type_option = select_type_option_from_field_rev(field_rev)?;
let mut cell_changeset_str = None;
let mut is_changed = None;
for option in changeset.insert_options { for option in changeset.insert_options {
cell_changeset_str = cell_changeset_str = Some(
Some(SelectOptionCellChangeset::from_insert_option_id(&option.id).to_cell_changeset_str()); SelectOptionCellChangeset::from_insert_option_id(&option.id).to_cell_changeset_str(),
type_option.insert_option(option); );
is_changed = Some(()); type_option.insert_option(option);
} is_changed = Some(());
}
for option in changeset.update_options { for option in changeset.update_options {
type_option.insert_option(option); type_option.insert_option(option);
is_changed = Some(()); is_changed = Some(());
} }
for option in changeset.delete_options { for option in changeset.delete_options {
cell_changeset_str = cell_changeset_str = Some(
Some(SelectOptionCellChangeset::from_delete_option_id(&option.id).to_cell_changeset_str()); SelectOptionCellChangeset::from_delete_option_id(&option.id).to_cell_changeset_str(),
type_option.delete_option(option); );
is_changed = Some(()); type_option.delete_option(option);
} is_changed = Some(());
}
if is_changed.is_some() { if is_changed.is_some() {
field_rev.insert_type_option(&*type_option); field_rev.insert_type_option(&*type_option);
} }
let _ = tx.send(cell_changeset_str); let _ = tx.send(cell_changeset_str);
Ok(is_changed) Ok(is_changed)
}) })
.await?; .await?;
if let Ok(Some(cell_changeset_str)) = rx.await { if let Ok(Some(cell_changeset_str)) = rx.await {
match editor match editor
.update_cell_with_changeset( .update_cell_with_changeset(
&changeset.cell_path.row_id, &changeset.cell_path.row_id,
&changeset.cell_path.field_id, &changeset.cell_path.field_id,
cell_changeset_str, cell_changeset_str,
) )
.await .await
{ {
Ok(_) => {} Ok(_) => {},
Err(e) => tracing::error!("{}", e), Err(e) => tracing::error!("{}", e),
}
} }
Ok(()) }
Ok(())
} }
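modify_field_rev takes a synchronous closure, so the handler above smuggles the computed cell changeset back out through a tokio oneshot channel. A minimal runnable sketch of that pattern (illustrative names only, assuming the tokio crate with rt and macros features):

#[tokio::main]
async fn main() {
  let (tx, rx) = tokio::sync::oneshot::channel::<Option<String>>();

  // The closure runs synchronously and cannot return a value to the async
  // caller directly, so it sends the result through the channel instead.
  let apply = move |cell: &mut String| {
    cell.push_str(":changed");
    let _ = tx.send(Some(cell.clone()));
  };

  let mut cell = String::from("option-id");
  apply(&mut cell);

  // Back in the async context, await whatever the closure produced.
  if let Ok(Some(changeset)) = rx.await {
    println!("apply changeset: {changeset}");
  }
}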
#[tracing::instrument(level = "trace", skip(data, manager), err)] #[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn get_select_option_handler( pub(crate) async fn get_select_option_handler(
data: AFPluginData<CellIdPB>, data: AFPluginData<CellIdPB>,
manager: AFPluginState<Arc<DatabaseManager>>, manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<SelectOptionCellDataPB, FlowyError> { ) -> DataResult<SelectOptionCellDataPB, FlowyError> {
let params: CellIdParams = data.into_inner().try_into()?; let params: CellIdParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.database_id).await?; let editor = manager.get_database_editor(&params.database_id).await?;
match editor.get_field_rev(&params.field_id).await { match editor.get_field_rev(&params.field_id).await {
None => { None => {
tracing::error!("Can't find the select option field with id: {}", params.field_id); tracing::error!(
data_result(SelectOptionCellDataPB::default()) "Can't find the select option field with id: {}",
} params.field_id
Some(field_rev) => { );
// data_result(SelectOptionCellDataPB::default())
let cell_rev = editor.get_cell_rev(&params.row_id, &params.field_id).await?; },
let type_option = select_type_option_from_field_rev(&field_rev)?; Some(field_rev) => {
let type_cell_data: TypeCellData = match cell_rev { //
None => TypeCellData { let cell_rev = editor
cell_str: "".to_string(), .get_cell_rev(&params.row_id, &params.field_id)
field_type: field_rev.ty.into(), .await?;
}, let type_option = select_type_option_from_field_rev(&field_rev)?;
Some(cell_rev) => cell_rev.try_into()?, let type_cell_data: TypeCellData = match cell_rev {
}; None => TypeCellData {
let ids = SelectOptionIds::from_cell_str(&type_cell_data.cell_str)?; cell_str: "".to_string(),
let selected_options = type_option.get_selected_options(ids); field_type: field_rev.ty.into(),
data_result(selected_options) },
} Some(cell_rev) => cell_rev.try_into()?,
} };
let ids = SelectOptionIds::from_cell_str(&type_cell_data.cell_str)?;
let selected_options = type_option.get_selected_options(ids);
data_result(selected_options)
},
}
} }
#[tracing::instrument(level = "trace", skip_all, err)] #[tracing::instrument(level = "trace", skip_all, err)]
pub(crate) async fn update_select_option_cell_handler( pub(crate) async fn update_select_option_cell_handler(
data: AFPluginData<SelectOptionCellChangesetPB>, data: AFPluginData<SelectOptionCellChangesetPB>,
manager: AFPluginState<Arc<DatabaseManager>>, manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let params: SelectOptionCellChangesetParams = data.into_inner().try_into()?; let params: SelectOptionCellChangesetParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.cell_identifier.database_id).await?; let editor = manager
let changeset = SelectOptionCellChangeset { .get_database_editor(&params.cell_identifier.database_id)
insert_option_ids: params.insert_option_ids, .await?;
delete_option_ids: params.delete_option_ids, let changeset = SelectOptionCellChangeset {
}; insert_option_ids: params.insert_option_ids,
delete_option_ids: params.delete_option_ids,
};
editor editor
.update_cell_with_changeset( .update_cell_with_changeset(
&params.cell_identifier.row_id, &params.cell_identifier.row_id,
&params.cell_identifier.field_id, &params.cell_identifier.field_id,
changeset, changeset,
) )
.await?; .await?;
Ok(()) Ok(())
} }
#[tracing::instrument(level = "trace", skip_all, err)] #[tracing::instrument(level = "trace", skip_all, err)]
pub(crate) async fn update_date_cell_handler( pub(crate) async fn update_date_cell_handler(
data: AFPluginData<DateChangesetPB>, data: AFPluginData<DateChangesetPB>,
manager: AFPluginState<Arc<DatabaseManager>>, manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let data = data.into_inner(); let data = data.into_inner();
let cell_path: CellIdParams = data.cell_path.try_into()?; let cell_path: CellIdParams = data.cell_path.try_into()?;
let cell_changeset = DateCellChangeset { let cell_changeset = DateCellChangeset {
date: data.date, date: data.date,
time: data.time, time: data.time,
is_utc: data.is_utc, is_utc: data.is_utc,
}; };
let editor = manager.get_database_editor(&cell_path.database_id).await?; let editor = manager.get_database_editor(&cell_path.database_id).await?;
editor editor
.update_cell(cell_path.row_id, cell_path.field_id, cell_changeset) .update_cell(cell_path.row_id, cell_path.field_id, cell_changeset)
.await?; .await?;
Ok(()) Ok(())
} }
#[tracing::instrument(level = "trace", skip_all, err)] #[tracing::instrument(level = "trace", skip_all, err)]
pub(crate) async fn get_groups_handler( pub(crate) async fn get_groups_handler(
data: AFPluginData<DatabaseIdPB>, data: AFPluginData<DatabaseIdPB>,
manager: AFPluginState<Arc<DatabaseManager>>, manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<RepeatedGroupPB, FlowyError> { ) -> DataResult<RepeatedGroupPB, FlowyError> {
let params: DatabaseIdPB = data.into_inner(); let params: DatabaseIdPB = data.into_inner();
let editor = manager.get_database_editor(&params.value).await?; let editor = manager.get_database_editor(&params.value).await?;
let group = editor.load_groups().await?; let group = editor.load_groups().await?;
data_result(group) data_result(group)
} }
#[tracing::instrument(level = "debug", skip(data, manager), err)] #[tracing::instrument(level = "debug", skip(data, manager), err)]
pub(crate) async fn create_board_card_handler( pub(crate) async fn create_board_card_handler(
data: AFPluginData<CreateBoardCardPayloadPB>, data: AFPluginData<CreateBoardCardPayloadPB>,
manager: AFPluginState<Arc<DatabaseManager>>, manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<RowPB, FlowyError> { ) -> DataResult<RowPB, FlowyError> {
let params: CreateRowParams = data.into_inner().try_into()?; let params: CreateRowParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(params.database_id.as_ref()).await?; let editor = manager
let row = editor.create_row(params).await?; .get_database_editor(params.database_id.as_ref())
data_result(row) .await?;
let row = editor.create_row(params).await?;
data_result(row)
} }
#[tracing::instrument(level = "debug", skip(data, manager), err)] #[tracing::instrument(level = "debug", skip(data, manager), err)]
pub(crate) async fn move_group_handler( pub(crate) async fn move_group_handler(
data: AFPluginData<MoveGroupPayloadPB>, data: AFPluginData<MoveGroupPayloadPB>,
manager: AFPluginState<Arc<DatabaseManager>>, manager: AFPluginState<Arc<DatabaseManager>>,
) -> FlowyResult<()> { ) -> FlowyResult<()> {
let params: MoveGroupParams = data.into_inner().try_into()?; let params: MoveGroupParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(params.view_id.as_ref()).await?; let editor = manager.get_database_editor(params.view_id.as_ref()).await?;
editor.move_group(params).await?; editor.move_group(params).await?;
Ok(()) Ok(())
} }
#[tracing::instrument(level = "debug", skip(data, manager), err)] #[tracing::instrument(level = "debug", skip(data, manager), err)]
pub(crate) async fn move_group_row_handler( pub(crate) async fn move_group_row_handler(
data: AFPluginData<MoveGroupRowPayloadPB>, data: AFPluginData<MoveGroupRowPayloadPB>,
manager: AFPluginState<Arc<DatabaseManager>>, manager: AFPluginState<Arc<DatabaseManager>>,
) -> FlowyResult<()> { ) -> FlowyResult<()> {
let params: MoveGroupRowParams = data.into_inner().try_into()?; let params: MoveGroupRowParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(params.view_id.as_ref()).await?; let editor = manager.get_database_editor(params.view_id.as_ref()).await?;
editor.move_group_row(params).await?; editor.move_group_row(params).await?;
Ok(()) Ok(())
} }


@@ -6,8 +6,10 @@ use std::sync::Arc;
use strum_macros::Display; use strum_macros::Display;
pub fn init(database_manager: Arc<DatabaseManager>) -> AFPlugin { pub fn init(database_manager: Arc<DatabaseManager>) -> AFPlugin {
let mut plugin = AFPlugin::new().name(env!("CARGO_PKG_NAME")).state(database_manager); let mut plugin = AFPlugin::new()
plugin = plugin .name(env!("CARGO_PKG_NAME"))
.state(database_manager);
plugin = plugin
.event(DatabaseEvent::GetDatabase, get_database_data_handler) .event(DatabaseEvent::GetDatabase, get_database_data_handler)
// .event(GridEvent::GetGridBlocks, get_grid_blocks_handler) // .event(GridEvent::GetGridBlocks, get_grid_blocks_handler)
.event(DatabaseEvent::GetDatabaseSetting, get_database_setting_handler) .event(DatabaseEvent::GetDatabaseSetting, get_database_setting_handler)
@@ -47,7 +49,7 @@ pub fn init(database_manager: Arc<DatabaseManager>) -> AFPlugin {
.event(DatabaseEvent::MoveGroupRow, move_group_row_handler) .event(DatabaseEvent::MoveGroupRow, move_group_row_handler)
.event(DatabaseEvent::GetGroup, get_groups_handler); .event(DatabaseEvent::GetGroup, get_groups_handler);
plugin plugin
} }
/// [DatabaseEvent] defines events that are used to interact with the Grid. You could check [this](https://appflowy.gitbook.io/docs/essential-documentation/contribute-to-appflowy/architecture/backend/protobuf) /// [DatabaseEvent] defines events that are used to interact with the Grid. You could check [this](https://appflowy.gitbook.io/docs/essential-documentation/contribute-to-appflowy/architecture/backend/protobuf)
@@ -55,176 +57,176 @@ pub fn init(database_manager: Arc<DatabaseManager>) -> AFPlugin {
#[derive(Clone, Copy, PartialEq, Eq, Debug, Display, Hash, ProtoBuf_Enum, Flowy_Event)] #[derive(Clone, Copy, PartialEq, Eq, Debug, Display, Hash, ProtoBuf_Enum, Flowy_Event)]
#[event_err = "FlowyError"] #[event_err = "FlowyError"]
pub enum DatabaseEvent { pub enum DatabaseEvent {
/// [GetDatabase] event is used to get the [DatabasePB] /// [GetDatabase] event is used to get the [DatabasePB]
/// ///
/// The event handler accepts a [DatabaseIdPB] and returns a [DatabasePB] if there are no errors. /// The event handler accepts a [DatabaseIdPB] and returns a [DatabasePB] if there are no errors.
#[event(input = "DatabaseIdPB", output = "DatabasePB")] #[event(input = "DatabaseIdPB", output = "DatabasePB")]
GetDatabase = 0, GetDatabase = 0,
/// [GetDatabaseSetting] event is used to get the database's settings. /// [GetDatabaseSetting] event is used to get the database's settings.
/// ///
/// The event handler accepts [DatabaseIdPB] and returns [DatabaseViewSettingPB] /// The event handler accepts [DatabaseIdPB] and returns [DatabaseViewSettingPB]
/// if there are no errors. /// if there are no errors.
#[event(input = "DatabaseIdPB", output = "DatabaseViewSettingPB")] #[event(input = "DatabaseIdPB", output = "DatabaseViewSettingPB")]
GetDatabaseSetting = 2, GetDatabaseSetting = 2,
/// [UpdateDatabaseSetting] event is used to update the database's settings. /// [UpdateDatabaseSetting] event is used to update the database's settings.
/// ///
/// The event handler accepts [DatabaseSettingChangesetPB] and returns errors if it fails to modify the grid's settings. /// The event handler accepts [DatabaseSettingChangesetPB] and returns errors if it fails to modify the grid's settings.
#[event(input = "DatabaseSettingChangesetPB")] #[event(input = "DatabaseSettingChangesetPB")]
UpdateDatabaseSetting = 3, UpdateDatabaseSetting = 3,
#[event(input = "DatabaseIdPB", output = "RepeatedFilterPB")] #[event(input = "DatabaseIdPB", output = "RepeatedFilterPB")]
GetAllFilters = 4, GetAllFilters = 4,
#[event(input = "DatabaseIdPB", output = "RepeatedSortPB")] #[event(input = "DatabaseIdPB", output = "RepeatedSortPB")]
GetAllSorts = 5, GetAllSorts = 5,
#[event(input = "DatabaseIdPB")] #[event(input = "DatabaseIdPB")]
DeleteAllSorts = 6, DeleteAllSorts = 6,
/// [GetFields] event is used to get the database's fields. /// [GetFields] event is used to get the database's fields.
/// ///
/// The event handler accepts a [GetFieldPayloadPB] and returns a [RepeatedFieldPB] /// The event handler accepts a [GetFieldPayloadPB] and returns a [RepeatedFieldPB]
/// if there are no errors. /// if there are no errors.
#[event(input = "GetFieldPayloadPB", output = "RepeatedFieldPB")] #[event(input = "GetFieldPayloadPB", output = "RepeatedFieldPB")]
GetFields = 10, GetFields = 10,
/// [UpdateField] event is used to update a field's attributes. /// [UpdateField] event is used to update a field's attributes.
/// ///
/// The event handler accepts a [FieldChangesetPB] and returns errors if it fails to modify the /// The event handler accepts a [FieldChangesetPB] and returns errors if it fails to modify the
/// field. /// field.
#[event(input = "FieldChangesetPB")] #[event(input = "FieldChangesetPB")]
UpdateField = 11, UpdateField = 11,
/// [UpdateFieldTypeOption] event is used to update the field's type-option data. Certain field /// [UpdateFieldTypeOption] event is used to update the field's type-option data. Certain field
/// types have user-defined options such as color, date format, number format, or a list of values /// types have user-defined options such as color, date format, number format, or a list of values
/// for a multi-select list. These options are defined within a specialization of the /// for a multi-select list. These options are defined within a specialization of the
/// FieldTypeOption class. /// FieldTypeOption class.
/// ///
/// Check out [this](https://appflowy.gitbook.io/docs/essential-documentation/contribute-to-appflowy/architecture/frontend/grid#fieldtype) /// Check out [this](https://appflowy.gitbook.io/docs/essential-documentation/contribute-to-appflowy/architecture/frontend/grid#fieldtype)
/// for more information. /// for more information.
/// ///
/// The event handler accepts a [TypeOptionChangesetPB] and returns errors if it fails to modify the /// The event handler accepts a [TypeOptionChangesetPB] and returns errors if it fails to modify the
/// field. /// field.
#[event(input = "TypeOptionChangesetPB")] #[event(input = "TypeOptionChangesetPB")]
UpdateFieldTypeOption = 12, UpdateFieldTypeOption = 12,
/// [DeleteField] event is used to delete a Field. [DeleteFieldPayloadPB] is the context that /// [DeleteField] event is used to delete a Field. [DeleteFieldPayloadPB] is the context that
/// is used to delete the field from the Database. /// is used to delete the field from the Database.
#[event(input = "DeleteFieldPayloadPB")] #[event(input = "DeleteFieldPayloadPB")]
DeleteField = 14, DeleteField = 14,
/// [UpdateFieldType] event is used to update the current Field's type. /// [UpdateFieldType] event is used to update the current Field's type.
/// It will insert a new FieldTypeOptionData if one doesn't already exist for the new FieldType; otherwise /// It will insert a new FieldTypeOptionData if one doesn't already exist for the new FieldType; otherwise
/// it reuses the existing FieldTypeOptionData. You can check the [DatabaseRevisionPad] for more details. /// it reuses the existing FieldTypeOptionData. You can check the [DatabaseRevisionPad] for more details.
#[event(input = "UpdateFieldTypePayloadPB")] #[event(input = "UpdateFieldTypePayloadPB")]
UpdateFieldType = 20, UpdateFieldType = 20,
/// [DuplicateField] event is used to duplicate a Field. The duplicated field data is a /// [DuplicateField] event is used to duplicate a Field. The duplicated field data is a
/// deep copy of the target field. The passed-in [DuplicateFieldPayloadPB] is the context that is /// deep copy of the target field. The passed-in [DuplicateFieldPayloadPB] is the context that is
/// used to duplicate the field. /// used to duplicate the field.
/// ///
/// Return errors if failed to duplicate the field. /// Return errors if failed to duplicate the field.
/// ///
#[event(input = "DuplicateFieldPayloadPB")] #[event(input = "DuplicateFieldPayloadPB")]
DuplicateField = 21, DuplicateField = 21,
/// [MoveItem] event is used to move an item. For the moment, Item has two types defined in /// [MoveItem] event is used to move an item. For the moment, Item has two types defined in
/// [MoveItemTypePB]. /// [MoveItemTypePB].
#[event(input = "MoveFieldPayloadPB")] #[event(input = "MoveFieldPayloadPB")]
MoveField = 22, MoveField = 22,
/// [TypeOptionPathPB] event is used to get the FieldTypeOption data for a specific field type. /// [TypeOptionPathPB] event is used to get the FieldTypeOption data for a specific field type.
/// ///
/// Check out the [TypeOptionPB] for more details. If the [FieldTypeOptionData] doesn't exist /// Check out the [TypeOptionPB] for more details. If the [FieldTypeOptionData] doesn't exist
/// for the target type, the [TypeOptionBuilder] will create the default data for that type. /// for the target type, the [TypeOptionBuilder] will create the default data for that type.
/// ///
/// Return the [TypeOptionPB] if there are no errors. /// Return the [TypeOptionPB] if there are no errors.
#[event(input = "TypeOptionPathPB", output = "TypeOptionPB")] #[event(input = "TypeOptionPathPB", output = "TypeOptionPB")]
GetTypeOption = 23, GetTypeOption = 23,
/// [CreateTypeOption] event is used to create a new FieldTypeOptionData. /// [CreateTypeOption] event is used to create a new FieldTypeOptionData.
#[event(input = "CreateFieldPayloadPB", output = "TypeOptionPB")] #[event(input = "CreateFieldPayloadPB", output = "TypeOptionPB")]
CreateTypeOption = 24, CreateTypeOption = 24,
/// [CreateSelectOption] event is used to create a new select option. Returns a [SelectOptionPB] if /// [CreateSelectOption] event is used to create a new select option. Returns a [SelectOptionPB] if
/// there are no errors. /// there are no errors.
#[event(input = "CreateSelectOptionPayloadPB", output = "SelectOptionPB")] #[event(input = "CreateSelectOptionPayloadPB", output = "SelectOptionPB")]
CreateSelectOption = 30, CreateSelectOption = 30,
/// [GetSelectOptionCellData] event is used to get the select option data for cell editing. /// [GetSelectOptionCellData] event is used to get the select option data for cell editing.
/// [CellIdPB] locates the cell whose data will be read. The return value, [SelectOptionCellDataPB], /// [CellIdPB] locates the cell whose data will be read. The return value, [SelectOptionCellDataPB],
/// contains the available options and the currently selected options. /// contains the available options and the currently selected options.
#[event(input = "CellIdPB", output = "SelectOptionCellDataPB")] #[event(input = "CellIdPB", output = "SelectOptionCellDataPB")]
GetSelectOptionCellData = 31, GetSelectOptionCellData = 31,
/// [UpdateSelectOption] event is used to update a FieldTypeOptionData whose field_type is /// [UpdateSelectOption] event is used to update a FieldTypeOptionData whose field_type is
/// FieldType::SingleSelect or FieldType::MultiSelect. /// FieldType::SingleSelect or FieldType::MultiSelect.
/// ///
/// This event may trigger the DatabaseNotification::DidUpdateCell event. /// This event may trigger the DatabaseNotification::DidUpdateCell event.
/// For example, DatabaseNotification::DidUpdateCell will be triggered if the [SelectOptionChangesetPB] /// For example, DatabaseNotification::DidUpdateCell will be triggered if the [SelectOptionChangesetPB]
/// carries a change that updates the name of the option. /// carries a change that updates the name of the option.
#[event(input = "SelectOptionChangesetPB")] #[event(input = "SelectOptionChangesetPB")]
UpdateSelectOption = 32, UpdateSelectOption = 32,
#[event(input = "CreateRowPayloadPB", output = "RowPB")] #[event(input = "CreateRowPayloadPB", output = "RowPB")]
CreateRow = 50, CreateRow = 50,
/// [GetRow] event is used to get the row data, [RowPB]. [OptionalRowPB] is a wrapper that enables /// [GetRow] event is used to get the row data, [RowPB]. [OptionalRowPB] is a wrapper that enables
/// returning nullable row data. /// returning nullable row data.
#[event(input = "RowIdPB", output = "OptionalRowPB")] #[event(input = "RowIdPB", output = "OptionalRowPB")]
GetRow = 51, GetRow = 51,
#[event(input = "RowIdPB")] #[event(input = "RowIdPB")]
DeleteRow = 52, DeleteRow = 52,
#[event(input = "RowIdPB")] #[event(input = "RowIdPB")]
DuplicateRow = 53, DuplicateRow = 53,
#[event(input = "MoveRowPayloadPB")] #[event(input = "MoveRowPayloadPB")]
MoveRow = 54, MoveRow = 54,
#[event(input = "CellIdPB", output = "CellPB")] #[event(input = "CellIdPB", output = "CellPB")]
GetCell = 70, GetCell = 70,
/// [UpdateCell] event is used to update the cell content. The passed in data, [CellChangesetPB], /// [UpdateCell] event is used to update the cell content. The passed in data, [CellChangesetPB],
/// carries the changes that will be applied to the cell content by calling the `update_cell` function. /// carries the changes that will be applied to the cell content by calling the `update_cell` function.
/// ///
/// The 'content' property of the [CellChangesetPB] is a String type. It can be used directly if the /// The 'content' property of the [CellChangesetPB] is a String type. It can be used directly if the
/// cell uses string data. For example, the TextCell or NumberCell. /// cell uses string data. For example, the TextCell or NumberCell.
/// ///
/// But, it can be treated as a generic type, because we can use [serde] to deserialize the string /// But, it can be treated as a generic type, because we can use [serde] to deserialize the string
/// into a specific data type. For the moment, the 'content' will be deserialized to a concrete type /// into a specific data type. For the moment, the 'content' will be deserialized to a concrete type
/// when the FieldType is SingleSelect, DateTime, or MultiSelect. Please see /// when the FieldType is SingleSelect, DateTime, or MultiSelect. Please see
/// the [UpdateSelectOptionCell] and [UpdateDateCell] events for more details. /// the [UpdateSelectOptionCell] and [UpdateDateCell] events for more details.
#[event(input = "CellChangesetPB")] #[event(input = "CellChangesetPB")]
UpdateCell = 71, UpdateCell = 71,
/// [UpdateSelectOptionCell] event is used to update a select option cell's data. [SelectOptionCellChangesetPB] /// [UpdateSelectOptionCell] event is used to update a select option cell's data. [SelectOptionCellChangesetPB]
/// contains options that will be deleted or inserted. It can be cast to [CellChangesetPB] that /// contains options that will be deleted or inserted. It can be cast to [CellChangesetPB] that
/// will be used by the `update_cell` function. /// will be used by the `update_cell` function.
#[event(input = "SelectOptionCellChangesetPB")] #[event(input = "SelectOptionCellChangesetPB")]
UpdateSelectOptionCell = 72, UpdateSelectOptionCell = 72,
/// [UpdateDateCell] event is used to update a date cell's data. [DateChangesetPB] /// [UpdateDateCell] event is used to update a date cell's data. [DateChangesetPB]
/// contains the date and the time string. It can be cast to [CellChangesetPB] that /// contains the date and the time string. It can be cast to [CellChangesetPB] that
/// will be used by the `update_cell` function. /// will be used by the `update_cell` function.
#[event(input = "DateChangesetPB")] #[event(input = "DateChangesetPB")]
UpdateDateCell = 80, UpdateDateCell = 80,
#[event(input = "DatabaseIdPB", output = "RepeatedGroupPB")] #[event(input = "DatabaseIdPB", output = "RepeatedGroupPB")]
GetGroup = 100, GetGroup = 100,
#[event(input = "CreateBoardCardPayloadPB", output = "RowPB")] #[event(input = "CreateBoardCardPayloadPB", output = "RowPB")]
CreateBoardCard = 110, CreateBoardCard = 110,
#[event(input = "MoveGroupPayloadPB")] #[event(input = "MoveGroupPayloadPB")]
MoveGroup = 111, MoveGroup = 111,
#[event(input = "MoveGroupRowPayloadPB")] #[event(input = "MoveGroupRowPayloadPB")]
MoveGroupRow = 112, MoveGroupRow = 112,
#[event(input = "MoveGroupRowPayloadPB")] #[event(input = "MoveGroupRowPayloadPB")]
GroupByField = 113, GroupByField = 113,
} }
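The Flowy_Event derive generates the glue that routes each variant above to the handler registered in init(). Conceptually, the generated dispatch behaves like this hand-written match (placeholder types, not the real macro output):

#[derive(Debug, Clone, Copy)]
enum Event {
  GetDatabase,
  DeleteRow,
}

fn dispatch(event: Event, payload: &str) -> Result<String, String> {
  match event {
    Event::GetDatabase => Ok(format!("loaded database {payload}")),
    Event::DeleteRow => Ok(format!("deleted row {payload}")),
  }
}

fn main() {
  println!("{:?}", dispatch(Event::GetDatabase, "db-1"));
}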


@@ -1,92 +1,92 @@
#[macro_export] #[macro_export]
macro_rules! impl_into_box_type_option_builder { macro_rules! impl_into_box_type_option_builder {
($target: ident) => { ($target: ident) => {
impl std::convert::From<$target> for BoxTypeOptionBuilder { impl std::convert::From<$target> for BoxTypeOptionBuilder {
fn from(target: $target) -> BoxTypeOptionBuilder { fn from(target: $target) -> BoxTypeOptionBuilder {
Box::new(target) Box::new(target)
} }
} }
}; };
} }
macro_rules! impl_builder_from_json_str_and_from_bytes { macro_rules! impl_builder_from_json_str_and_from_bytes {
($target: ident,$type_option: ident) => { ($target: ident,$type_option: ident) => {
impl $target { impl $target {
pub fn from_protobuf_bytes(bytes: Bytes) -> $target { pub fn from_protobuf_bytes(bytes: Bytes) -> $target {
let type_option = $type_option::from_protobuf_bytes(bytes); let type_option = $type_option::from_protobuf_bytes(bytes);
$target(type_option) $target(type_option)
} }
pub fn from_json_str(s: &str) -> $target { pub fn from_json_str(s: &str) -> $target {
let type_option = $type_option::from_json_str(s); let type_option = $type_option::from_json_str(s);
$target(type_option) $target(type_option)
} }
} }
}; };
} }
#[macro_export] #[macro_export]
macro_rules! impl_type_option { macro_rules! impl_type_option {
($target: ident, $field_type:expr) => { ($target: ident, $field_type:expr) => {
impl std::convert::From<&FieldRevision> for $target { impl std::convert::From<&FieldRevision> for $target {
fn from(field_rev: &FieldRevision) -> $target { fn from(field_rev: &FieldRevision) -> $target {
match field_rev.get_type_option::<$target>($field_type.into()) { match field_rev.get_type_option::<$target>($field_type.into()) {
None => $target::default(), None => $target::default(),
Some(target) => target, Some(target) => target,
}
}
} }
}
}
impl std::convert::From<&std::sync::Arc<FieldRevision>> for $target { impl std::convert::From<&std::sync::Arc<FieldRevision>> for $target {
fn from(field_rev: &std::sync::Arc<FieldRevision>) -> $target { fn from(field_rev: &std::sync::Arc<FieldRevision>) -> $target {
match field_rev.get_type_option::<$target>($field_type.into()) { match field_rev.get_type_option::<$target>($field_type.into()) {
None => $target::default(), None => $target::default(),
Some(target) => target, Some(target) => target,
}
}
} }
}
}
impl std::convert::From<$target> for String { impl std::convert::From<$target> for String {
fn from(type_option: $target) -> String { fn from(type_option: $target) -> String {
type_option.json_str() type_option.json_str()
} }
}
impl TypeOptionDataSerializer for $target {
fn json_str(&self) -> String {
match serde_json::to_string(&self) {
Ok(s) => s,
Err(e) => {
tracing::error!("Field type data serialize to json fail, error: {:?}", e);
serde_json::to_string(&$target::default()).unwrap()
},
} }
}
impl TypeOptionDataSerializer for $target { fn protobuf_bytes(&self) -> Bytes {
fn json_str(&self) -> String { self.clone().try_into().unwrap()
match serde_json::to_string(&self) { }
Ok(s) => s, }
Err(e) => {
tracing::error!("Field type data serialize to json fail, error: {:?}", e);
serde_json::to_string(&$target::default()).unwrap()
}
}
}
fn protobuf_bytes(&self) -> Bytes { impl TypeOptionDataDeserializer for $target {
self.clone().try_into().unwrap() fn from_json_str(s: &str) -> $target {
} match serde_json::from_str(s) {
Ok(obj) => obj,
Err(err) => {
tracing::error!(
"{} type option deserialize from {} failed, {:?}",
stringify!($target),
s,
err
);
$target::default()
},
} }
}
impl TypeOptionDataDeserializer for $target { fn from_protobuf_bytes(bytes: Bytes) -> $target {
fn from_json_str(s: &str) -> $target { $target::try_from(bytes).unwrap_or($target::default())
match serde_json::from_str(s) { }
Ok(obj) => obj, }
Err(err) => { };
tracing::error!(
"{} type option deserialize from {} failed, {:?}",
stringify!($target),
s,
err
);
$target::default()
}
}
}
fn from_protobuf_bytes(bytes: Bytes) -> $target {
$target::try_from(bytes).unwrap_or($target::default())
}
}
};
} }
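
Note: taken together, these macros let a type-option struct pick up the From<&FieldRevision> conversions and the serde-backed serializer/deserializer impls in one line. A minimal usage sketch follows; the struct below is illustrative only (the real type options live in their own modules, and the Bytes conversions assume the struct also derives ProtoBuf):

// Hypothetical type option, shown only to illustrate how the macro is invoked.
#[derive(Default, Clone, Debug, Serialize, Deserialize, ProtoBuf)]
pub struct CheckboxTypeOptionPB {
  #[pb(index = 1)]
  pub is_selected: bool,
}
// One invocation generates all of the impls shown above for this struct.
impl_type_option!(CheckboxTypeOptionPB, FieldType::Checkbox);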

View file

@@ -1,21 +1,23 @@
use crate::entities::LayoutTypePB;
use crate::services::grid_editor::{
  DatabaseRevisionEditor, GridRevisionCloudService, GridRevisionMergeable, GridRevisionSerde,
};
use crate::services::persistence::block_index::BlockIndexCache;
use crate::services::persistence::kv::DatabaseKVPersistence;
use crate::services::persistence::migration::DatabaseMigration;
use crate::services::persistence::rev_sqlite::{
  SQLiteDatabaseRevisionPersistence, SQLiteDatabaseRevisionSnapshotPersistence,
};
use crate::services::persistence::GridDatabase;
use crate::services::view_editor::make_database_view_rev_manager;
use bytes::Bytes;
use flowy_client_sync::client_database::{
  make_database_block_operations, make_database_operations, make_grid_view_operations,
};
use flowy_error::{FlowyError, FlowyResult};
use flowy_revision::{
  RevisionManager, RevisionPersistence, RevisionPersistenceConfiguration, RevisionWebSocket,
};
use flowy_sqlite::ConnectionPool;
use grid_model::{BuildDatabaseContext, DatabaseRevision, DatabaseViewRevision};
use lib_infra::async_trait::async_trait;
@@ -28,228 +30,264 @@ use std::sync::Arc;
use tokio::sync::RwLock;

pub trait DatabaseUser: Send + Sync {
  fn user_id(&self) -> Result<String, FlowyError>;
  fn token(&self) -> Result<String, FlowyError>;
  fn db_pool(&self) -> Result<Arc<ConnectionPool>, FlowyError>;
}

pub struct DatabaseManager {
  database_editors: RwLock<RefCountHashMap<Arc<DatabaseRevisionEditor>>>,
  database_user: Arc<dyn DatabaseUser>,
  block_index_cache: Arc<BlockIndexCache>,
  #[allow(dead_code)]
  kv_persistence: Arc<DatabaseKVPersistence>,
  task_scheduler: Arc<RwLock<TaskDispatcher>>,
  migration: DatabaseMigration,
}

impl DatabaseManager {
  pub fn new(
    grid_user: Arc<dyn DatabaseUser>,
    _rev_web_socket: Arc<dyn RevisionWebSocket>,
    task_scheduler: Arc<RwLock<TaskDispatcher>>,
    database: Arc<dyn GridDatabase>,
  ) -> Self {
    let grid_editors = RwLock::new(RefCountHashMap::new());
    let kv_persistence = Arc::new(DatabaseKVPersistence::new(database.clone()));
    let block_index_cache = Arc::new(BlockIndexCache::new(database.clone()));
    let migration = DatabaseMigration::new(grid_user.clone(), database);
    Self {
      database_editors: grid_editors,
      database_user: grid_user,
      kv_persistence,
      block_index_cache,
      task_scheduler,
      migration,
    }
  }

  pub async fn initialize_with_new_user(&self, _user_id: &str, _token: &str) -> FlowyResult<()> {
    Ok(())
  }

  pub async fn initialize(&self, _user_id: &str, _token: &str) -> FlowyResult<()> {
    Ok(())
  }

  #[tracing::instrument(level = "debug", skip_all, err)]
  pub async fn create_database<T: AsRef<str>>(
    &self,
    database_id: T,
    revisions: Vec<Revision>,
  ) -> FlowyResult<()> {
    let database_id = database_id.as_ref();
    let db_pool = self.database_user.db_pool()?;
    let rev_manager = self.make_database_rev_manager(database_id, db_pool)?;
    rev_manager.reset_object(revisions).await?;

    Ok(())
  }

  #[tracing::instrument(level = "debug", skip_all, err)]
  async fn create_database_view<T: AsRef<str>>(
    &self,
    view_id: T,
    revisions: Vec<Revision>,
  ) -> FlowyResult<()> {
    let view_id = view_id.as_ref();
    let rev_manager = make_database_view_rev_manager(&self.database_user, view_id).await?;
    rev_manager.reset_object(revisions).await?;
    Ok(())
  }

  #[tracing::instrument(level = "debug", skip_all, err)]
  pub async fn create_database_block<T: AsRef<str>>(
    &self,
    block_id: T,
    revisions: Vec<Revision>,
  ) -> FlowyResult<()> {
    let block_id = block_id.as_ref();
    let rev_manager = make_database_block_rev_manager(&self.database_user, block_id)?;
    rev_manager.reset_object(revisions).await?;
    Ok(())
  }

  pub async fn open_database<T: AsRef<str>>(
    &self,
    database_id: T,
  ) -> FlowyResult<Arc<DatabaseRevisionEditor>> {
    let database_id = database_id.as_ref();
    let _ = self.migration.run_v1_migration(database_id).await;
    self.get_or_create_database_editor(database_id).await
  }

  #[tracing::instrument(level = "debug", skip_all, fields(database_id), err)]
  pub async fn close_database<T: AsRef<str>>(&self, database_id: T) -> FlowyResult<()> {
    let database_id = database_id.as_ref();
    tracing::Span::current().record("database_id", database_id);
    self
      .database_editors
      .write()
      .await
      .remove(database_id)
      .await;
    Ok(())
  }

  // #[tracing::instrument(level = "debug", skip(self), err)]
  pub async fn get_database_editor(
    &self,
    database_id: &str,
  ) -> FlowyResult<Arc<DatabaseRevisionEditor>> {
    let read_guard = self.database_editors.read().await;
    let editor = read_guard.get(database_id);
    match editor {
      None => {
        // Drop the read_guard ASAP in case of the following read/write lock
        drop(read_guard);
        self.open_database(database_id).await
      },
      Some(editor) => Ok(editor),
    }
  }

  async fn get_or_create_database_editor(
    &self,
    database_id: &str,
  ) -> FlowyResult<Arc<DatabaseRevisionEditor>> {
    if let Some(editor) = self.database_editors.read().await.get(database_id) {
      return Ok(editor);
    }

    let mut database_editors = self.database_editors.write().await;
    let db_pool = self.database_user.db_pool()?;
    let editor = self.make_database_rev_editor(database_id, db_pool).await?;
    tracing::trace!("Open database: {}", database_id);
    database_editors.insert(database_id.to_string(), editor.clone());
    Ok(editor)
  }

  #[tracing::instrument(level = "trace", skip(self, pool), err)]
  async fn make_database_rev_editor(
    &self,
    database_id: &str,
    pool: Arc<ConnectionPool>,
  ) -> Result<Arc<DatabaseRevisionEditor>, FlowyError> {
    let user = self.database_user.clone();
    let token = user.token()?;
    let cloud = Arc::new(GridRevisionCloudService::new(token));
    let mut rev_manager = self.make_database_rev_manager(database_id, pool.clone())?;
    let database_pad = Arc::new(RwLock::new(
      rev_manager
        .initialize::<GridRevisionSerde>(Some(cloud))
        .await?,
    ));
    let database_editor = DatabaseRevisionEditor::new(
      database_id,
      user,
      database_pad,
      rev_manager,
      self.block_index_cache.clone(),
      self.task_scheduler.clone(),
    )
    .await?;
    Ok(database_editor)
  }

  #[tracing::instrument(level = "trace", skip(self, pool), err)]
  pub fn make_database_rev_manager(
    &self,
    database_id: &str,
    pool: Arc<ConnectionPool>,
  ) -> FlowyResult<RevisionManager<Arc<ConnectionPool>>> {
    let user_id = self.database_user.user_id()?;

    // Create revision persistence
    let disk_cache = SQLiteDatabaseRevisionPersistence::new(&user_id, pool.clone());
    let configuration = RevisionPersistenceConfiguration::new(6, false);
    let rev_persistence =
      RevisionPersistence::new(&user_id, database_id, disk_cache, configuration);

    // Create snapshot persistence
    let snapshot_object_id = format!("grid:{}", database_id);
    let snapshot_persistence =
      SQLiteDatabaseRevisionSnapshotPersistence::new(&snapshot_object_id, pool);

    let rev_compress = GridRevisionMergeable();
    let rev_manager = RevisionManager::new(
      &user_id,
      database_id,
      rev_persistence,
      rev_compress,
      snapshot_persistence,
    );
    Ok(rev_manager)
  }
}
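
Note: get_or_create_database_editor above uses a read-then-write locking pattern: a cheap shared-lock lookup first, then an exclusive lock to build and cache the editor. A standalone sketch of the same pattern, with illustrative types rather than AppFlowy's:

use std::{collections::HashMap, sync::Arc};
use tokio::sync::RwLock;

async fn get_or_create(cache: &RwLock<HashMap<String, Arc<String>>>, key: &str) -> Arc<String> {
  // Fast path: shared read lock, dropped as soon as the lookup finishes.
  if let Some(value) = cache.read().await.get(key) {
    return value.clone();
  }
  // Slow path: exclusive write lock; build the value and cache it.
  let mut write_guard = cache.write().await;
  let value = Arc::new(format!("editor for {}", key));
  write_guard.insert(key.to_string(), value.clone());
  value
}

As in the original, two tasks can race into the slow path; the later insert simply replaces the earlier one, which is harmless here.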
pub async fn make_database_view_data(
  _user_id: &str,
  view_id: &str,
  layout: LayoutTypePB,
  database_manager: Arc<DatabaseManager>,
  build_context: BuildDatabaseContext,
) -> FlowyResult<Bytes> {
  let BuildDatabaseContext {
    field_revs,
    block_metas,
    blocks,
    grid_view_revision_data,
  } = build_context;

  for block_meta_data in &blocks {
    let block_id = &block_meta_data.block_id;

    // Indexing the block's rows
    block_meta_data.rows.iter().for_each(|row| {
      let _ = database_manager
        .block_index_cache
        .insert(&row.block_id, &row.id);
    });

    // Create grid's block
    let grid_block_delta = make_database_block_operations(block_meta_data);
    let block_delta_data = grid_block_delta.json_bytes();
    let revision = Revision::initial_revision(block_id, block_delta_data);
    database_manager
      .create_database_block(&block_id, vec![revision])
      .await?;
  }

  // Will replace the grid_id with the value returned by the gen_grid_id()
  let grid_id = view_id.to_owned();
  let grid_rev = DatabaseRevision::from_build_context(&grid_id, field_revs, block_metas);

  // Create grid
  let grid_rev_delta = make_database_operations(&grid_rev);
  let grid_rev_delta_bytes = grid_rev_delta.json_bytes();
  let revision = Revision::initial_revision(&grid_id, grid_rev_delta_bytes.clone());
  database_manager
    .create_database(&grid_id, vec![revision])
    .await?;

  // Create grid view
  let grid_view = if grid_view_revision_data.is_empty() {
    DatabaseViewRevision::new(grid_id, view_id.to_owned(), layout.into())
  } else {
    DatabaseViewRevision::from_json(grid_view_revision_data)?
  };
  let grid_view_delta = make_grid_view_operations(&grid_view);
  let grid_view_delta_bytes = grid_view_delta.json_bytes();
  let revision = Revision::initial_revision(view_id, grid_view_delta_bytes);
  database_manager
    .create_database_view(view_id, vec![revision])
    .await?;

  Ok(grid_rev_delta_bytes)
}

#[async_trait]
impl RefCountValue for DatabaseRevisionEditor {
  async fn did_remove(&self) {
    self.close().await;
  }
}
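
Note: a typical call site pairs open_database with close_database around edits, holding the returned editor in between. A hedged sketch, assuming a manager and database id are already in hand:

// Hypothetical call site; `manager: Arc<DatabaseManager>` comes from elsewhere.
async fn open_edit_close(manager: Arc<DatabaseManager>, database_id: &str) -> FlowyResult<()> {
  let editor = manager.open_database(database_id).await?;
  // ... drive edits through `editor` (a DatabaseRevisionEditor) ...
  manager.close_database(database_id).await?;
  Ok(())
}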

View file

@@ -4,48 +4,48 @@ const OBSERVABLE_CATEGORY: &str = "Grid";

#[derive(ProtoBuf_Enum, Debug)]
pub enum DatabaseNotification {
  Unknown = 0,
  /// Trigger after inserting/deleting/updating a row
  DidUpdateViewRows = 20,
  /// Trigger when the visibility of the row was changed. For example, updating the filter will trigger the notification
  DidUpdateViewRowsVisibility = 21,
  /// Trigger after inserting/deleting/updating a field
  DidUpdateFields = 22,
  /// Trigger after editing a cell
  DidUpdateCell = 40,
  /// Trigger after editing a field's properties, including rename, update type option, etc.
  DidUpdateField = 50,
  /// Trigger after the number of groups is changed
  DidUpdateGroups = 60,
  /// Trigger after inserting/deleting/updating/moving a row
  DidUpdateGroupRow = 61,
  /// Trigger when setting a new grouping field
  DidGroupByField = 62,
  /// Trigger after inserting/deleting/updating a filter
  DidUpdateFilter = 63,
  /// Trigger after inserting/deleting/updating a sort
  DidUpdateSort = 64,
  /// Trigger after the sort configurations are changed
  DidReorderRows = 65,
  /// Trigger after editing a row that matches the sort rule
  DidReorderSingleRow = 66,
  /// Trigger when the settings of the database are changed
  DidUpdateSettings = 70,
}

impl std::default::Default for DatabaseNotification {
  fn default() -> Self {
    DatabaseNotification::Unknown
  }
}

impl std::convert::From<DatabaseNotification> for i32 {
  fn from(notification: DatabaseNotification) -> Self {
    notification as i32
  }
}

#[tracing::instrument(level = "trace")]
pub fn send_notification(id: &str, ty: DatabaseNotification) -> NotificationBuilder {
  NotificationBuilder::new(id, ty, OBSERVABLE_CATEGORY)
}
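
Note: callers chain the returned NotificationBuilder with send(), as notify_did_update_cell does later in this commit. For example:

// Notify observers that a cell changed; the observable id is "{row_id}:{field_id}".
let id = format!("{}:{}", row_id, field_id);
send_notification(&id, DatabaseNotification::DidUpdateCell).send();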

View file

@@ -4,7 +4,8 @@ use flowy_client_sync::client_database::{GridBlockRevisionChangeset, GridBlockRe
use flowy_client_sync::make_operations_from_revisions;
use flowy_error::{FlowyError, FlowyResult};
use flowy_revision::{
  RevisionCloudService, RevisionManager, RevisionMergeable, RevisionObjectDeserializer,
  RevisionObjectSerializer,
};
use flowy_sqlite::ConnectionPool;
use grid_model::{CellRevision, DatabaseBlockRevision, RowChangeset, RowRevision};
@@ -17,201 +18,218 @@ use std::sync::Arc;
use tokio::sync::RwLock;

pub struct DatabaseBlockRevisionEditor {
  #[allow(dead_code)]
  user_id: String,
  pub block_id: String,
  pad: Arc<RwLock<GridBlockRevisionPad>>,
  rev_manager: Arc<RevisionManager<Arc<ConnectionPool>>>,
}

impl DatabaseBlockRevisionEditor {
  pub async fn new(
    user_id: &str,
    token: &str,
    block_id: &str,
    mut rev_manager: RevisionManager<Arc<ConnectionPool>>,
  ) -> FlowyResult<Self> {
    let cloud = Arc::new(GridBlockRevisionCloudService {
      token: token.to_owned(),
    });
    let block_revision_pad = rev_manager
      .initialize::<DatabaseBlockRevisionSerde>(Some(cloud))
      .await?;
    let pad = Arc::new(RwLock::new(block_revision_pad));
    let rev_manager = Arc::new(rev_manager);
    let user_id = user_id.to_owned();
    let block_id = block_id.to_owned();
    Ok(Self {
      user_id,
      block_id,
      pad,
      rev_manager,
    })
  }

  pub async fn close(&self) {
    self.rev_manager.generate_snapshot().await;
    self.rev_manager.close().await;
  }

  pub async fn duplicate_block(&self, duplicated_block_id: &str) -> DatabaseBlockRevision {
    self.pad.read().await.duplicate_data(duplicated_block_id)
  }

  /// Create a row after the row with prev_row_id. If prev_row_id is None, the row will be appended to the list
  pub(crate) async fn create_row(
    &self,
    row: RowRevision,
    prev_row_id: Option<String>,
  ) -> FlowyResult<(i32, Option<i32>)> {
    let mut row_count = 0;
    let mut row_index = None;
    self
      .modify(|block_pad| {
        if let Some(start_row_id) = prev_row_id.as_ref() {
          match block_pad.index_of_row(start_row_id) {
            None => {},
            Some(index) => row_index = Some(index as i32 + 1),
          }
        }

        let change = block_pad.add_row_rev(row, prev_row_id)?;
        row_count = block_pad.number_of_rows();

        if row_index.is_none() {
          row_index = Some(row_count - 1);
        }
        Ok(change)
      })
      .await?;

    Ok((row_count, row_index))
  }

  pub async fn delete_rows(&self, ids: Vec<Cow<'_, String>>) -> FlowyResult<i32> {
    let mut row_count = 0;
    self
      .modify(|block_pad| {
        let changeset = block_pad.delete_rows(ids)?;
        row_count = block_pad.number_of_rows();
        Ok(changeset)
      })
      .await?;
    Ok(row_count)
  }

  pub async fn update_row(&self, changeset: RowChangeset) -> FlowyResult<()> {
    self
      .modify(|block_pad| Ok(block_pad.update_row(changeset)?))
      .await?;
    Ok(())
  }

  pub async fn move_row(&self, row_id: &str, from: usize, to: usize) -> FlowyResult<()> {
    self
      .modify(|block_pad| Ok(block_pad.move_row(row_id, from, to)?))
      .await?;
    Ok(())
  }

  pub async fn index_of_row(&self, row_id: &str) -> Option<usize> {
    self.pad.read().await.index_of_row(row_id)
  }

  pub async fn number_of_rows(&self) -> i32 {
    self.pad.read().await.rows.len() as i32
  }

  pub async fn get_row_rev(&self, row_id: &str) -> FlowyResult<Option<(usize, Arc<RowRevision>)>> {
    if let Ok(pad) = self.pad.try_read() {
      Ok(pad.get_row_rev(row_id))
    } else {
      tracing::error!("Required grid block read lock failed, retrying");
      let retry = GetRowDataRetryAction {
        row_id: row_id.to_owned(),
        pad: self.pad.clone(),
      };
      match spawn_retry(3, 300, retry).await {
        Ok(value) => Ok(value),
        Err(err) => {
          tracing::error!("Read row revision failed with: {}", err);
          Ok(None)
        },
      }
    }
  }

  pub async fn get_row_revs<T>(
    &self,
    row_ids: Option<Vec<Cow<'_, T>>>,
  ) -> FlowyResult<Vec<Arc<RowRevision>>>
  where
    T: AsRef<str> + ToOwned + ?Sized,
  {
    let row_revs = self.pad.read().await.get_row_revs(row_ids)?;
    Ok(row_revs)
  }

  pub async fn get_cell_revs(
    &self,
    field_id: &str,
    row_ids: Option<Vec<Cow<'_, String>>>,
  ) -> FlowyResult<Vec<CellRevision>> {
    let cell_revs = self.pad.read().await.get_cell_revs(field_id, row_ids)?;
    Ok(cell_revs)
  }

  async fn modify<F>(&self, f: F) -> FlowyResult<()>
  where
    F: for<'a> FnOnce(
      &'a mut GridBlockRevisionPad,
    ) -> FlowyResult<Option<GridBlockRevisionChangeset>>,
  {
    let mut write_guard = self.pad.write().await;
    let changeset = f(&mut write_guard)?;
    match changeset {
      None => {},
      Some(changeset) => {
        self.apply_change(changeset).await?;
      },
    }
    Ok(())
  }

  async fn apply_change(&self, change: GridBlockRevisionChangeset) -> FlowyResult<()> {
    let GridBlockRevisionChangeset {
      operations: delta,
      md5,
    } = change;
    let data = delta.json_bytes();
    let _ = self.rev_manager.add_local_revision(data, md5).await?;
    Ok(())
  }
}

struct GridBlockRevisionCloudService {
  #[allow(dead_code)]
  token: String,
}

impl RevisionCloudService for GridBlockRevisionCloudService {
  #[tracing::instrument(level = "trace", skip(self))]
  fn fetch_object(
    &self,
    _user_id: &str,
    _object_id: &str,
  ) -> FutureResult<Vec<Revision>, FlowyError> {
    FutureResult::new(async move { Ok(vec![]) })
  }
}

struct DatabaseBlockRevisionSerde();
impl RevisionObjectDeserializer for DatabaseBlockRevisionSerde {
  type Output = GridBlockRevisionPad;

  fn deserialize_revisions(object_id: &str, revisions: Vec<Revision>) -> FlowyResult<Self::Output> {
    let pad = GridBlockRevisionPad::from_revisions(object_id, revisions)?;
    Ok(pad)
  }

  fn recover_from_revisions(_revisions: Vec<Revision>) -> Option<(Self::Output, i64)> {
    None
  }
}

impl RevisionObjectSerializer for DatabaseBlockRevisionSerde {
  fn combine_revisions(revisions: Vec<Revision>) -> FlowyResult<Bytes> {
    let operations = make_operations_from_revisions::<EmptyAttributes>(revisions)?;
    Ok(operations.json_bytes())
  }
}

pub struct GridBlockRevisionMergeable();
impl RevisionMergeable for GridBlockRevisionMergeable {
  fn combine_revisions(&self, revisions: Vec<Revision>) -> FlowyResult<Bytes> {
    DatabaseBlockRevisionSerde::combine_revisions(revisions)
  }
}
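
Note: every mutation on the block funnels through the modify helper, so each change holds the write lock exactly once and yields at most one changeset to persist. A condensed sketch of that shape, with simplified stand-in types:

use std::sync::Arc;
use tokio::sync::RwLock;

struct Pad { rows: Vec<String> } // stand-in for GridBlockRevisionPad
struct Changeset;                // stand-in for GridBlockRevisionChangeset

async fn modify<F>(pad: &Arc<RwLock<Pad>>, f: F) -> Option<Changeset>
where
  F: FnOnce(&mut Pad) -> Option<Changeset>,
{
  // Acquire the write lock, apply the mutation, and hand back the changeset;
  // the real code then persists it via RevisionManager::add_local_revision.
  let mut write_guard = pad.write().await;
  f(&mut write_guard)
}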

View file

@@ -4,14 +4,16 @@ use crate::notification::{send_notification, DatabaseNotification};
use crate::services::block_editor::{DatabaseBlockRevisionEditor, GridBlockRevisionMergeable};
use crate::services::persistence::block_index::BlockIndexCache;
use crate::services::persistence::rev_sqlite::{
  SQLiteDatabaseBlockRevisionPersistence, SQLiteDatabaseRevisionSnapshotPersistence,
};
use crate::services::row::{make_row_from_row_rev, DatabaseBlockRow, DatabaseBlockRowRevision};
use dashmap::DashMap;
use flowy_error::FlowyResult;
use flowy_revision::{RevisionManager, RevisionPersistence, RevisionPersistenceConfiguration};
use flowy_sqlite::ConnectionPool;
use grid_model::{
  GridBlockMetaRevision, GridBlockMetaRevisionChangeset, RowChangeset, RowRevision,
};
use std::borrow::Cow;
use std::collections::HashMap;
use std::sync::Arc;
@@ -19,301 +21,335 @@ use tokio::sync::broadcast;

#[derive(Debug, Clone)]
pub enum DatabaseBlockEvent {
  InsertRow {
    block_id: String,
    row: InsertedRowPB,
  },
  UpdateRow {
    block_id: String,
    row: UpdatedRowPB,
  },
  DeleteRow {
    block_id: String,
    row_id: String,
  },
  Move {
    block_id: String,
    deleted_row_id: String,
    inserted_row: InsertedRowPB,
  },
}

type BlockId = String;
pub(crate) struct DatabaseBlockManager {
  user: Arc<dyn DatabaseUser>,
  persistence: Arc<BlockIndexCache>,
  block_editors: DashMap<BlockId, Arc<DatabaseBlockRevisionEditor>>,
  event_notifier: broadcast::Sender<DatabaseBlockEvent>,
}

impl DatabaseBlockManager {
  pub(crate) async fn new(
    user: &Arc<dyn DatabaseUser>,
    block_meta_revs: Vec<Arc<GridBlockMetaRevision>>,
    persistence: Arc<BlockIndexCache>,
    event_notifier: broadcast::Sender<DatabaseBlockEvent>,
  ) -> FlowyResult<Self> {
    let block_editors = make_block_editors(user, block_meta_revs).await?;
    let user = user.clone();
    let manager = Self {
      user,
      block_editors,
      persistence,
      event_notifier,
    };
    Ok(manager)
  }

  pub async fn close(&self) {
    for block_editor in self.block_editors.iter() {
      block_editor.close().await;
    }
  }

  // #[tracing::instrument(level = "trace", skip(self))]
  pub(crate) async fn get_block_editor(
    &self,
    block_id: &str,
  ) -> FlowyResult<Arc<DatabaseBlockRevisionEditor>> {
    debug_assert!(!block_id.is_empty());
    match self.block_editors.get(block_id) {
      None => {
        tracing::error!(
          "This is a fatal error, block with id:{} is not exist",
          block_id
        );
        let editor = Arc::new(make_database_block_editor(&self.user, block_id).await?);
        self
          .block_editors
          .insert(block_id.to_owned(), editor.clone());
        Ok(editor)
      },
      Some(editor) => Ok(editor.clone()),
    }
  }

  pub(crate) async fn get_editor_from_row_id(
    &self,
    row_id: &str,
  ) -> FlowyResult<Arc<DatabaseBlockRevisionEditor>> {
    let block_id = self.persistence.get_block_id(row_id)?;
    self.get_block_editor(&block_id).await
  }

  #[tracing::instrument(level = "trace", skip(self, start_row_id), err)]
  pub(crate) async fn create_row(
    &self,
    row_rev: RowRevision,
    start_row_id: Option<String>,
  ) -> FlowyResult<i32> {
    let block_id = row_rev.block_id.clone();
    self.persistence.insert(&row_rev.block_id, &row_rev.id)?;
    let editor = self.get_block_editor(&row_rev.block_id).await?;

    let mut row = InsertedRowPB::from(&row_rev);
    let (number_of_rows, index) = editor.create_row(row_rev, start_row_id).await?;
    row.index = index;

    let _ = self
      .event_notifier
      .send(DatabaseBlockEvent::InsertRow { block_id, row });
    Ok(number_of_rows)
  }

  pub(crate) async fn insert_row(
    &self,
    rows_by_block_id: HashMap<String, Vec<RowRevision>>,
  ) -> FlowyResult<Vec<GridBlockMetaRevisionChangeset>> {
    let mut changesets = vec![];
    for (block_id, row_revs) in rows_by_block_id {
      let editor = self.get_block_editor(&block_id).await?;
      for row_rev in row_revs {
        self.persistence.insert(&row_rev.block_id, &row_rev.id)?;
        let mut row = InsertedRowPB::from(&row_rev);
        row.index = editor.create_row(row_rev, None).await?.1;
        let _ = self.event_notifier.send(DatabaseBlockEvent::InsertRow {
          block_id: block_id.clone(),
          row,
        });
      }
      changesets.push(GridBlockMetaRevisionChangeset::from_row_count(
        block_id.clone(),
        editor.number_of_rows().await,
      ));
    }

    Ok(changesets)
  }

  pub async fn update_row(&self, changeset: RowChangeset) -> FlowyResult<()> {
    let editor = self.get_editor_from_row_id(&changeset.row_id).await?;
    editor.update_row(changeset.clone()).await?;
    match editor.get_row_rev(&changeset.row_id).await? {
      None => tracing::error!(
        "Update row failed, can't find the row with id: {}",
        changeset.row_id
      ),
      Some((_, row_rev)) => {
        let changed_field_ids = changeset
          .cell_by_field_id
          .keys()
          .cloned()
          .collect::<Vec<String>>();
        let row = UpdatedRowPB {
          row: make_row_from_row_rev(row_rev),
          field_ids: changed_field_ids,
        };

        let _ = self.event_notifier.send(DatabaseBlockEvent::UpdateRow {
          block_id: editor.block_id.clone(),
          row,
        });
      },
    }
    Ok(())
  }

  #[tracing::instrument(level = "trace", skip_all, err)]
  pub async fn delete_row(&self, row_id: &str) -> FlowyResult<Option<Arc<RowRevision>>> {
    let row_id = row_id.to_owned();
    let block_id = self.persistence.get_block_id(&row_id)?;
    let editor = self.get_block_editor(&block_id).await?;
    match editor.get_row_rev(&row_id).await? {
      None => Ok(None),
      Some((_, row_rev)) => {
        let _ = editor.delete_rows(vec![Cow::Borrowed(&row_id)]).await?;
        let _ = self.event_notifier.send(DatabaseBlockEvent::DeleteRow {
          block_id: editor.block_id.clone(),
          row_id: row_rev.id.clone(),
        });

        Ok(Some(row_rev))
      },
    }
  }

  pub(crate) async fn delete_rows(
    &self,
    block_rows: Vec<DatabaseBlockRow>,
  ) -> FlowyResult<Vec<GridBlockMetaRevisionChangeset>> {
    let mut changesets = vec![];
    for block_row in block_rows {
      let editor = self.get_block_editor(&block_row.block_id).await?;
      let row_ids = block_row
        .row_ids
        .into_iter()
        .map(Cow::Owned)
        .collect::<Vec<Cow<String>>>();
      let row_count = editor.delete_rows(row_ids).await?;
      let changeset = GridBlockMetaRevisionChangeset::from_row_count(block_row.block_id, row_count);
      changesets.push(changeset);
    }

    Ok(changesets)
  }

  // This function will be moved to GridViewRevisionEditor
  pub(crate) async fn move_row(
    &self,
    row_rev: Arc<RowRevision>,
    from: usize,
    to: usize,
  ) -> FlowyResult<()> {
    let editor = self.get_editor_from_row_id(&row_rev.id).await?;
    editor.move_row(&row_rev.id, from, to).await?;

    let delete_row_id = row_rev.id.clone();
    let insert_row = InsertedRowPB {
      index: Some(to as i32),
      row: make_row_from_row_rev(row_rev),
      is_new: false,
    };

    let _ = self.event_notifier.send(DatabaseBlockEvent::Move {
      block_id: editor.block_id.clone(),
      deleted_row_id: delete_row_id,
      inserted_row: insert_row,
    });

    Ok(())
  }

  // This function will be moved to GridViewRevisionEditor.
  pub async fn index_of_row(&self, row_id: &str) -> Option<usize> {
    match self.get_editor_from_row_id(row_id).await {
      Ok(editor) => editor.index_of_row(row_id).await,
      Err(_) => None,
    }
  }

  pub async fn update_cell(&self, changeset: CellChangesetPB) -> FlowyResult<()> {
    let row_changeset: RowChangeset = changeset.clone().into();
    self.update_row(row_changeset).await?;
    self.notify_did_update_cell(changeset).await?;
    Ok(())
  }

  pub async fn get_row_rev(&self, row_id: &str) -> FlowyResult<Option<(usize, Arc<RowRevision>)>> {
    let editor = self.get_editor_from_row_id(row_id).await?;
    editor.get_row_rev(row_id).await
  }

  #[allow(dead_code)]
  pub async fn get_row_revs(&self) -> FlowyResult<Vec<Arc<RowRevision>>> {
    let mut row_revs = vec![];
    for iter in self.block_editors.iter() {
      let editor = iter.value();
      row_revs.extend(editor.get_row_revs::<&str>(None).await?);
    }
    Ok(row_revs)
  }

  pub(crate) async fn get_blocks(
    &self,
    block_ids: Option<Vec<String>>,
  ) -> FlowyResult<Vec<DatabaseBlockRowRevision>> {
    let mut blocks = vec![];
    match block_ids {
      None => {
        for iter in self.block_editors.iter() {
          let editor = iter.value();
          let block_id = editor.block_id.clone();
          let row_revs = editor.get_row_revs::<&str>(None).await?;
          blocks.push(DatabaseBlockRowRevision { block_id, row_revs });
        }
      },
      Some(block_ids) => {
        for block_id in block_ids {
          let editor = self.get_block_editor(&block_id).await?;
          let row_revs = editor.get_row_revs::<&str>(None).await?;
          blocks.push(DatabaseBlockRowRevision { block_id, row_revs });
        }
      },
    }
    Ok(blocks)
  }

  async fn notify_did_update_cell(&self, changeset: CellChangesetPB) -> FlowyResult<()> {
    let id = format!("{}:{}", changeset.row_id, changeset.field_id);
    send_notification(&id, DatabaseNotification::DidUpdateCell).send();
    Ok(())
  }
}

/// Initialize each block editor
async fn make_block_editors(
  user: &Arc<dyn DatabaseUser>,
  block_meta_revs: Vec<Arc<GridBlockMetaRevision>>,
) -> FlowyResult<DashMap<String, Arc<DatabaseBlockRevisionEditor>>> {
  let editor_map = DashMap::new();
  for block_meta_rev in block_meta_revs {
    let editor = make_database_block_editor(user, &block_meta_rev.block_id).await?;
    editor_map.insert(block_meta_rev.block_id.clone(), Arc::new(editor));
  }

  Ok(editor_map)
}

async fn make_database_block_editor(
  user: &Arc<dyn DatabaseUser>,
  block_id: &str,
) -> FlowyResult<DatabaseBlockRevisionEditor> {
  tracing::trace!("Open block:{} editor", block_id);
  let token = user.token()?;
  let user_id = user.user_id()?;
  let rev_manager = make_database_block_rev_manager(user, block_id)?;
  DatabaseBlockRevisionEditor::new(&user_id, &token, block_id, rev_manager).await
}

pub fn make_database_block_rev_manager(
  user: &Arc<dyn DatabaseUser>,
  block_id: &str,
) -> FlowyResult<RevisionManager<Arc<ConnectionPool>>> {
  let user_id = user.user_id()?;

  // Create revision persistence
  let pool = user.db_pool()?;
  let disk_cache = SQLiteDatabaseBlockRevisionPersistence::new(&user_id, pool.clone());
  let configuration = RevisionPersistenceConfiguration::new(4, false);
  let rev_persistence = RevisionPersistence::new(&user_id, block_id, disk_cache, configuration);

  // Create snapshot persistence
  let snapshot_object_id = format!("grid_block:{}", block_id);
  let snapshot_persistence =
    SQLiteDatabaseRevisionSnapshotPersistence::new(&snapshot_object_id, pool);

  let rev_compress = GridBlockRevisionMergeable();
  let rev_manager = RevisionManager::new(
    &user_id,
    block_id,
    rev_persistence,
    rev_compress,
    snapshot_persistence,
  );
  Ok(rev_manager)
}
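
Note: DatabaseBlockEvent fans out over a tokio broadcast channel, so any number of views can follow row changes independently. A sketch of a consumer, assuming a handle on the sender to subscribe from:

// `event_notifier` is the broadcast::Sender<DatabaseBlockEvent> owned above.
let mut rx = event_notifier.subscribe();
tokio::spawn(async move {
  while let Ok(event) = rx.recv().await {
    match event {
      DatabaseBlockEvent::InsertRow { block_id, .. } => {
        tracing::trace!("row inserted into block {}", block_id);
      },
      _ => {},
    }
  }
});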

View file

@ -16,90 +16,97 @@ pub struct AnyTypeCache<TypeValueKey>(HashMap<TypeValueKey, TypeValue>);
impl<TypeValueKey> AnyTypeCache<TypeValueKey>
where
  TypeValueKey: Clone + Hash + Eq,
{
  pub fn new() -> Arc<RwLock<AnyTypeCache<TypeValueKey>>> {
    Arc::new(RwLock::new(AnyTypeCache(HashMap::default())))
  }

  pub fn insert<T>(&mut self, key: &TypeValueKey, val: T) -> Option<T>
  where
    T: 'static + Send + Sync,
  {
    self
      .0
      .insert(key.clone(), TypeValue::new(val))
      .and_then(downcast_owned)
  }

  pub fn remove(&mut self, key: &TypeValueKey) {
    self.0.remove(key);
  }

  // pub fn remove<T, K: AsRef<TypeValueKey>>(&mut self, key: K) -> Option<T>
  // where
  //   T: 'static + Send + Sync,
  // {
  //   self.0.remove(key.as_ref()).and_then(downcast_owned)
  // }

  pub fn get<T>(&self, key: &TypeValueKey) -> Option<&T>
  where
    T: 'static + Send + Sync,
  {
    self
      .0
      .get(key)
      .and_then(|type_value| type_value.boxed.downcast_ref())
  }

  pub fn get_mut<T>(&mut self, key: &TypeValueKey) -> Option<&mut T>
  where
    T: 'static + Send + Sync,
  {
    self
      .0
      .get_mut(key)
      .and_then(|type_value| type_value.boxed.downcast_mut())
  }

  pub fn contains(&self, key: &TypeValueKey) -> bool {
    self.0.contains_key(key)
  }

  pub fn is_empty(&self) -> bool {
    self.0.is_empty()
  }
}

fn downcast_owned<T: 'static + Send + Sync>(type_value: TypeValue) -> Option<T> {
  type_value.boxed.downcast().ok().map(|boxed| *boxed)
}

#[derive(Debug)]
struct TypeValue {
  boxed: Box<dyn Any + Send + Sync + 'static>,
  #[allow(dead_code)]
  ty: &'static str,
}

impl TypeValue {
  pub fn new<T>(value: T) -> Self
  where
    T: Send + Sync + 'static,
  {
    Self {
      boxed: Box::new(value),
      ty: type_name::<T>(),
    }
  }
}

impl std::ops::Deref for TypeValue {
  type Target = Box<dyn Any + Send + Sync + 'static>;

  fn deref(&self) -> &Self::Target {
    &self.boxed
  }
}

impl std::ops::DerefMut for TypeValue {
  fn deref_mut(&mut self) -> &mut Self::Target {
    &mut self.boxed
  }
}

// #[cfg(test)]
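
The commented-out `#[cfg(test)]` marker above suggests a removed or planned test module. As an editor's minimal sketch of how `AnyTypeCache` is used (assuming a parking_lot-style `RwLock` whose `write()` returns the guard directly; the `String` key type and the stored values here are illustrative, not from the commit):

#[cfg(test)]
mod tests {
  use super::*;

  #[test]
  fn insert_and_get_typed_values() {
    // One cache instance can hold values of any `Send + Sync` type.
    let cache = AnyTypeCache::<String>::new();
    let mut guard = cache.write();
    guard.insert(&"count".to_string(), 42usize);
    guard.insert(&"name".to_string(), "AppFlowy".to_string());

    // get::<T> only succeeds when T matches the type stored under the key.
    assert_eq!(guard.get::<usize>(&"count".to_string()), Some(&42));
    assert!(guard.get::<usize>(&"name".to_string()).is_none());
    assert!(guard.contains(&"count".to_string()));
  }
}
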


@@ -9,41 +9,41 @@ use std::fmt::Debug;

/// Decode the opaque cell data into readable format content
pub trait CellDataDecoder: TypeOption {
  ///
  /// Tries to decode the opaque cell string to `decoded_field_type`'s cell data. Sometimes, the
  /// `field_type` of the `FieldRevision` is not equal to the `decoded_field_type` (this happens
  /// when switching the field type of the `FieldRevision` to another field type), so the cell
  /// data needs some transformation.
  ///
  /// For example, the current field type of the `FieldRevision` is a checkbox. When switching the
  /// field type from checkbox to single select, it will create two new options, `Yes` and `No`,
  /// if they don't exist. But the data of the cell doesn't change, and we can't iterate over all
  /// the rows to transform the cell data into a form the current field type can parse. One
  /// approach is to transform the cell data when it gets read: for the moment the cell data is a
  /// string, `Yes` or `No`, which is compared with each option's name; if they match, the id of
  /// that option is returned.
  fn decode_cell_str(
    &self,
    cell_str: String,
    decoded_field_type: &FieldType,
    field_rev: &FieldRevision,
  ) -> FlowyResult<<Self as TypeOption>::CellData>;

  /// Same as `decode_cell_str`, but decodes the cell data to a readable `String`.
  /// For example, the string of a multi-select cell is the option names separated by commas.
  fn decode_cell_data_to_str(&self, cell_data: <Self as TypeOption>::CellData) -> String;
}
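
To make the checkbox-to-single-select transformation described above concrete, here is a minimal illustrative sketch; the `SelectOption` struct and the helper are hypothetical stand-ins for this note, not the crate's real types:

struct SelectOption {
  id: String,
  name: String,
}

// Map a raw checkbox cell string ("Yes"/"No") to the id of the option
// whose name matches it, as the doc comment above describes.
fn checkbox_str_to_option_id(cell_str: &str, options: &[SelectOption]) -> Option<String> {
  options
    .iter()
    .find(|option| option.name == cell_str)
    .map(|option| option.id.clone())
}
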
pub trait CellDataChangeset: TypeOption {
  /// The changeset can be parsed into a concrete data struct if `TypeOption::CellChangeset`
  /// implements the `FromCellChangesetString` trait.
  /// For example, `SelectOptionCellChangeset`, `DateCellChangeset`, etc.
  ///
  fn apply_changeset(
    &self,
    changeset: <Self as TypeOption>::CellChangeset,
    type_cell_data: Option<TypeCellData>,
  ) -> FlowyResult<(String, <Self as TypeOption>::CellData)>;
}

/// changeset: It will be deserialized into specific data based on the FieldType.
@@ -53,78 +53,90 @@ pub trait CellDataChangeset: TypeOption {

///
/// cell_rev: It will be None if the cell does not contain any data.
pub fn apply_cell_data_changeset<C: ToCellChangesetString, T: AsRef<FieldRevision>>(
  changeset: C,
  cell_rev: Option<CellRevision>,
  field_rev: T,
  cell_data_cache: Option<AtomicCellDataCache>,
) -> Result<String, FlowyError> {
  let field_rev = field_rev.as_ref();
  let changeset = changeset.to_cell_changeset_str();
  let field_type: FieldType = field_rev.ty.into();
  let type_cell_data = cell_rev.and_then(|cell_rev| match TypeCellData::try_from(cell_rev) {
    Ok(type_cell_data) => Some(type_cell_data),
    Err(_) => None,
  });

  let cell_str = match TypeOptionCellExt::new_with_cell_data_cache(field_rev, cell_data_cache)
    .get_type_option_cell_data_handler(&field_type)
  {
    None => "".to_string(),
    Some(handler) => handler.handle_cell_changeset(changeset, type_cell_data, field_rev)?,
  };
  Ok(TypeCellData::new(cell_str, field_type).to_json())
}

pub fn get_type_cell_protobuf<T: TryInto<TypeCellData, Error = FlowyError> + Debug>(
  data: T,
  field_rev: &FieldRevision,
  cell_data_cache: Option<AtomicCellDataCache>,
) -> (FieldType, CellProtobufBlob) {
  let to_field_type = field_rev.ty.into();
  match data.try_into() {
    Ok(type_cell_data) => {
      let TypeCellData {
        cell_str,
        field_type,
      } = type_cell_data;
      match try_decode_cell_str_to_cell_protobuf(
        cell_str,
        &field_type,
        &to_field_type,
        field_rev,
        cell_data_cache,
      ) {
        Ok(cell_bytes) => (field_type, cell_bytes),
        Err(e) => {
          tracing::error!("Decode cell data failed, {:?}", e);
          (field_type, CellProtobufBlob::default())
        },
      }
    },
    Err(_err) => {
      // It's okay to ignore this error, because it's fine for the current cell not to
      // display the existing cell data. For example, the UI of a text cell will be blank
      // if the type of the cell data is Number.
      (to_field_type, CellProtobufBlob::default())
    },
  }
}

pub fn get_type_cell_data<CellData, Output>(
  data: CellData,
  field_rev: &FieldRevision,
  cell_data_cache: Option<AtomicCellDataCache>,
) -> Option<Output>
where
  CellData: TryInto<TypeCellData, Error = FlowyError> + Debug,
  Output: Default + 'static,
{
  let to_field_type = field_rev.ty.into();
  match data.try_into() {
    Ok(type_cell_data) => {
      let TypeCellData {
        cell_str,
        field_type,
      } = type_cell_data;
      try_decode_cell_str_to_cell_data(
        cell_str,
        &field_type,
        &to_field_type,
        field_rev,
        cell_data_cache,
      )
    },
    Err(_err) => None,
  }
}

/// Decode the opaque cell data from one field type to another using the corresponding `TypeOption`

@@ -145,33 +157,33 @@ where

/// returns: CellProtobufBlob
///
pub fn try_decode_cell_str_to_cell_protobuf(
  cell_str: String,
  from_field_type: &FieldType,
  to_field_type: &FieldType,
  field_rev: &FieldRevision,
  cell_data_cache: Option<AtomicCellDataCache>,
) -> FlowyResult<CellProtobufBlob> {
  match TypeOptionCellExt::new_with_cell_data_cache(field_rev, cell_data_cache)
    .get_type_option_cell_data_handler(to_field_type)
  {
    None => Ok(CellProtobufBlob::default()),
    Some(handler) => handler.handle_cell_str(cell_str, from_field_type, field_rev),
  }
}

pub fn try_decode_cell_str_to_cell_data<T: Default + 'static>(
  cell_str: String,
  from_field_type: &FieldType,
  to_field_type: &FieldType,
  field_rev: &FieldRevision,
  cell_data_cache: Option<AtomicCellDataCache>,
) -> Option<T> {
  let handler = TypeOptionCellExt::new_with_cell_data_cache(field_rev, cell_data_cache)
    .get_type_option_cell_data_handler(to_field_type)?;
  handler
    .get_cell_data(cell_str, from_field_type, field_rev)
    .ok()?
    .unbox_or_none::<T>()
}

/// Returns a string that represents the current field_type's cell data.
/// For example, the string of a multi-select cell is a list of the option
/// names separated by commas.

@@ -187,123 +199,133 @@ pub fn try_decode_cell_str_to_cell_data<T: Default + 'static>(

///
/// returns: String
pub fn stringify_cell_data(
  cell_str: String,
  decoded_field_type: &FieldType,
  field_type: &FieldType,
  field_rev: &FieldRevision,
) -> String {
  match TypeOptionCellExt::new_with_cell_data_cache(field_rev, None)
    .get_type_option_cell_data_handler(field_type)
  {
    None => "".to_string(),
    Some(handler) => handler.stringify_cell_str(cell_str, decoded_field_type, field_rev),
  }
}
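
As an illustration of the multi-select case mentioned in the doc comment (a hypothetical helper; the real conversion happens inside the type option's `stringify_cell_str`):

// Render selected option names as one comma-separated string,
// e.g. ["Work", "Urgent"] -> "Work,Urgent".
fn stringify_multi_select(option_names: &[String]) -> String {
  option_names.join(",")
}
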
pub fn insert_text_cell(s: String, field_rev: &FieldRevision) -> CellRevision {
  let data = apply_cell_data_changeset(s, None, field_rev, None).unwrap();
  CellRevision::new(data)
}

pub fn insert_number_cell(num: i64, field_rev: &FieldRevision) -> CellRevision {
  let data = apply_cell_data_changeset(num.to_string(), None, field_rev, None).unwrap();
  CellRevision::new(data)
}

pub fn insert_url_cell(url: String, field_rev: &FieldRevision) -> CellRevision {
  let data = apply_cell_data_changeset(url, None, field_rev, None).unwrap();
  CellRevision::new(data)
}

pub fn insert_checkbox_cell(is_check: bool, field_rev: &FieldRevision) -> CellRevision {
  let s = if is_check {
    CHECK.to_string()
  } else {
    UNCHECK.to_string()
  };
  let data = apply_cell_data_changeset(s, None, field_rev, None).unwrap();
  CellRevision::new(data)
}

pub fn insert_date_cell(timestamp: i64, field_rev: &FieldRevision) -> CellRevision {
  let cell_data = serde_json::to_string(&DateCellChangeset {
    date: Some(timestamp.to_string()),
    time: None,
    is_utc: true,
  })
  .unwrap();
  let data = apply_cell_data_changeset(cell_data, None, field_rev, None).unwrap();
  CellRevision::new(data)
}
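
For reference, a standalone sketch of the JSON that `insert_date_cell` plausibly produces, assuming `DateCellChangeset` serializes with serde using the field layout shown above (the mirror struct below is illustrative only):

use serde::Serialize;

// Illustrative mirror of the struct literal used by insert_date_cell.
#[derive(Serialize)]
struct DateCellChangesetDemo {
  date: Option<String>,
  time: Option<String>,
  is_utc: bool,
}

fn main() {
  let json = serde_json::to_string(&DateCellChangesetDemo {
    date: Some(1672531200.to_string()),
    time: None,
    is_utc: true,
  })
  .unwrap();
  // Prints: {"date":"1672531200","time":null,"is_utc":true}
  println!("{}", json);
}
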
pub fn insert_select_option_cell(
  option_ids: Vec<String>,
  field_rev: &FieldRevision,
) -> CellRevision {
  let changeset =
    SelectOptionCellChangeset::from_insert_options(option_ids).to_cell_changeset_str();
  let data = apply_cell_data_changeset(changeset, None, field_rev, None).unwrap();
  CellRevision::new(data)
}

pub fn delete_select_option_cell(
  option_ids: Vec<String>,
  field_rev: &FieldRevision,
) -> CellRevision {
  let changeset =
    SelectOptionCellChangeset::from_delete_options(option_ids).to_cell_changeset_str();
  let data = apply_cell_data_changeset(changeset, None, field_rev, None).unwrap();
  CellRevision::new(data)
}

/// Deserialize the String into a cell-specific data type.
pub trait FromCellString {
  fn from_cell_str(s: &str) -> FlowyResult<Self>
  where
    Self: Sized;
}

/// If the changeset applied to the cell is not a `String`, it should implement this trait
/// to deserialize the string into the cell-specific changeset.
pub trait FromCellChangesetString {
  fn from_changeset(changeset: String) -> FlowyResult<Self>
  where
    Self: Sized;
}
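
A hedged sketch of implementing `FromCellChangesetString` for a JSON-backed changeset; the struct, its fields, and the error mapping are assumptions made for illustration, not the crate's real code:

use serde::Deserialize;

#[derive(Deserialize, Debug)]
struct MyDateChangeset {
  date: Option<String>,
  is_utc: bool,
}

impl FromCellChangesetString for MyDateChangeset {
  fn from_changeset(changeset: String) -> FlowyResult<Self>
  where
    Self: Sized,
  {
    // Treat malformed JSON as invalid data (error choice is illustrative).
    serde_json::from_str(&changeset).map_err(|_| ErrorCode::InvalidData.into())
  }
}
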
impl FromCellChangesetString for String {
  fn from_changeset(changeset: String) -> FlowyResult<Self>
  where
    Self: Sized,
  {
    Ok(changeset)
  }
}

pub trait ToCellChangesetString: Debug {
  fn to_cell_changeset_str(&self) -> String;
}

impl ToCellChangesetString for String {
  fn to_cell_changeset_str(&self) -> String {
    self.clone()
  }
}

pub struct AnyCellChangeset<T>(pub Option<T>);

impl<T> AnyCellChangeset<T> {
  pub fn try_into_inner(self) -> FlowyResult<T> {
    match self.0 {
      None => Err(ErrorCode::InvalidData.into()),
      Some(data) => Ok(data),
    }
  }
}

impl<T, C: ToString> std::convert::From<C> for AnyCellChangeset<T>
where
  T: FromCellChangesetString,
{
  fn from(changeset: C) -> Self {
    match T::from_changeset(changeset.to_string()) {
      Ok(data) => AnyCellChangeset(Some(data)),
      Err(e) => {
        tracing::error!("Deserialize CellDataChangeset failed: {}", e);
        AnyCellChangeset(None)
      },
    }
  }
}
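
Putting the pieces together, a minimal illustrative use of this blanket `From` impl (the literal value is for demonstration only): `String` implements `FromCellChangesetString`, so any `ToString` value converts directly.

fn demo() {
  let changeset: AnyCellChangeset<String> = "insert option".into();
  assert_eq!(changeset.try_into_inner().unwrap(), "insert option");
}
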
// impl std::convert::From<String> for AnyCellChangeset<String> {
//   fn from(s: String) -> Self {

Some files were not shown because too many files have changed in this diff.