config document kv store

appflowy 2021-12-22 18:53:52 +08:00
parent eb601932ea
commit cb4398eab0
29 changed files with 503 additions and 238 deletions

View file

@@ -2,7 +2,5 @@
 CREATE TABLE IF NOT EXISTS doc_table(
     id uuid NOT NULL,
     PRIMARY KEY (id),
-    -- data bytea NOT NULL DEFAULT '',
-    data TEXT NOT NULL DEFAULT '',
     rev_id bigint NOT NULL DEFAULT 0
 );

View file

@@ -42,7 +42,7 @@ impl Application {
 pub fn run(listener: TcpListener, app_ctx: AppContext) -> Result<Server, std::io::Error> {
     let domain = domain();
     let secret: String = secret();
-    actix_rt::spawn(period_check(app_ctx.pg_pool.clone()));
+    actix_rt::spawn(period_check(app_ctx.persistence.pg_pool()));
     let server = HttpServer::new(move || {
         App::new()
@@ -54,16 +54,17 @@ pub fn run(listener: TcpListener, app_ctx: AppContext) -> Result<Server, std::io
             .service(ws_scope())
             .service(user_scope())
             .app_data(app_ctx.ws_server.clone())
-            .app_data(app_ctx.pg_pool.clone())
+            .app_data(app_ctx.persistence.clone())
+            .app_data(Data::new(app_ctx.persistence.pg_pool()))
             .app_data(app_ctx.ws_receivers.clone())
-            .app_data(app_ctx.document_mng.clone())
     })
     .listen(listener)?
     .run();
     Ok(server)
 }

-async fn period_check(_pool: Data<PgPool>) {
+#[allow(dead_code)]
+async fn period_check(_pool: PgPool) {
     let mut i = interval(Duration::from_secs(60));
     loop {
         i.tick().await;

View file

@@ -1,10 +1,11 @@
 use crate::services::{
-    document::manager::DocumentManager,
     kv_store::{KVStore, PostgresKV},
     web_socket::{WSServer, WebSocketReceivers},
 };
 use actix::Addr;
 use actix_web::web::Data;
+use crate::services::document::{controller::make_document_ws_receiver, persistence::DocumentKVPersistence};
 use lib_ws::WSModule;
 use sqlx::PgPool;
 use std::sync::Arc;
@@ -12,30 +13,41 @@ use std::sync::Arc;
 #[derive(Clone)]
 pub struct AppContext {
     pub ws_server: Data<Addr<WSServer>>,
-    pub pg_pool: Data<PgPool>,
+    pub persistence: Data<Arc<FlowyPersistence>>,
     pub ws_receivers: Data<WebSocketReceivers>,
-    pub document_mng: Data<Arc<DocumentManager>>,
-    pub kv_store: Data<Arc<dyn KVStore>>,
 }

 impl AppContext {
-    pub fn new(ws_server: Addr<WSServer>, db_pool: PgPool) -> Self {
+    pub fn new(ws_server: Addr<WSServer>, pg_pool: PgPool) -> Self {
         let ws_server = Data::new(ws_server);
-        let pg_pool = Data::new(db_pool);
         let mut ws_receivers = WebSocketReceivers::new();
-        let document_mng = Arc::new(DocumentManager::new(pg_pool.clone()));
-        ws_receivers.set(WSModule::Doc, document_mng.clone());
-        let kv_store = Arc::new(PostgresKV {
-            pg_pool: pg_pool.clone(),
-        });
+        let kv_store = make_document_kv_store(pg_pool.clone());
+        let persistence = Arc::new(FlowyPersistence { pg_pool, kv_store });
+        let document_ws_receiver = make_document_ws_receiver(persistence.clone());
+        ws_receivers.set(WSModule::Doc, document_ws_receiver);

         AppContext {
             ws_server,
-            pg_pool,
+            persistence: Data::new(persistence),
             ws_receivers: Data::new(ws_receivers),
-            document_mng: Data::new(document_mng),
-            kv_store: Data::new(kv_store),
         }
     }
 }

+fn make_document_kv_store(pg_pool: PgPool) -> Arc<DocumentKVPersistence> {
+    let kv_impl = Arc::new(PostgresKV { pg_pool });
+    Arc::new(DocumentKVPersistence::new(kv_impl))
+}
+
+#[derive(Clone)]
+pub struct FlowyPersistence {
+    pg_pool: PgPool,
+    kv_store: Arc<DocumentKVPersistence>,
+}
+
+impl FlowyPersistence {
+    pub fn pg_pool(&self) -> PgPool { self.pg_pool.clone() }
+
+    pub fn kv_store(&self) -> Arc<DocumentKVPersistence> { self.kv_store.clone() }
+}

View file

@@ -1,20 +0,0 @@
-use flowy_collaboration::protobuf::Doc;
-
-pub(crate) const DOC_TABLE: &str = "doc_table";
-
-#[derive(Debug, Clone, sqlx::FromRow)]
-pub struct DocTable {
-    pub(crate) id: uuid::Uuid,
-    pub(crate) data: String,
-    pub(crate) rev_id: i64,
-}
-
-impl std::convert::From<DocTable> for Doc {
-    fn from(table: DocTable) -> Self {
-        let mut doc = Doc::new();
-        doc.set_id(table.id.to_string());
-        doc.set_data(table.data);
-        doc.set_rev_id(table.rev_id);
-        doc
-    }
-}

View file

@@ -1,4 +1,3 @@
-pub mod doc;
 pub mod logged_user;
 pub mod token;
 pub mod user;

View file

@@ -1,8 +1,9 @@
-use crate::services::document::{create_doc_with_transaction, delete_doc};
 use crate::{
     entities::logged_user::LoggedUser,
-    services::core::{trash::read_trash_ids, view::persistence::*},
+    services::{
+        core::{trash::read_trash_ids, view::persistence::*},
+        document::persistence::{create_doc_with_transaction, delete_doc},
+    },
     util::sqlx_ext::{map_sqlx_error, DBTransaction, SqlBuilder},
 };
 use backend_service::errors::{invalid_params, ServerError};

View file

@@ -1,9 +1,6 @@
 use crate::{
     entities::logged_user::LoggedUser,
-    services::{
-        core::view::{create_view, delete_view, persistence::check_view_ids, read_view, update_view},
-        document::manager::DocumentManager,
-    },
+    services::core::view::{create_view, delete_view, persistence::check_view_ids, read_view, update_view},
     util::serde_ext::parse_from_payload,
 };
 use actix_web::{
@@ -20,13 +17,8 @@ use flowy_core_data_model::{
     protobuf::{CreateViewParams, QueryViewRequest, UpdateViewParams, ViewIdentifier},
 };
 use sqlx::PgPool;
-use std::sync::Arc;

-pub async fn create_handler(
-    payload: Payload,
-    pool: Data<PgPool>,
-    _doc_biz: Data<Arc<DocumentManager>>,
-) -> Result<HttpResponse, ServerError> {
+pub async fn create_handler(payload: Payload, pool: Data<PgPool>) -> Result<HttpResponse, ServerError> {
     let params: CreateViewParams = parse_from_payload(payload).await?;
     let mut transaction = pool
         .begin()

View file

@@ -1,46 +1,53 @@
 use crate::services::{
     document::{
-        create_doc,
-        read_doc,
-        update_doc,
+        persistence::{create_doc, read_doc, update_doc},
         ws_actor::{DocumentWebSocketActor, WSActorMessage},
     },
     web_socket::{WSClientData, WebSocketReceiver},
 };
-use actix_web::web::Data;
 use backend_service::errors::ServerError;
+use crate::context::FlowyPersistence;
 use flowy_collaboration::{
-    core::sync::{ServerDocManager, ServerDocPersistence},
+    core::sync::{DocumentPersistence, ServerDocumentManager},
     entities::doc::Doc,
     errors::CollaborateError,
     protobuf::{CreateDocParams, DocIdentifier, UpdateDocParams},
 };
 use lib_infra::future::FutureResultSend;
 use lib_ot::{revision::Revision, rich_text::RichTextDelta};
-use sqlx::PgPool;
 use std::{convert::TryInto, sync::Arc};
 use tokio::sync::{mpsc, oneshot};

-pub struct DocumentManager {
-    ws_sender: mpsc::Sender<WSActorMessage>,
-    pg_pool: Data<PgPool>,
+pub fn make_document_ws_receiver(persistence: Arc<FlowyPersistence>) -> Arc<DocumentWebSocketReceiver> {
+    let document_persistence = Arc::new(DocumentPersistenceImpl(persistence.clone()));
+    let document_manager = Arc::new(ServerDocumentManager::new(document_persistence));
+    let (ws_sender, rx) = tokio::sync::mpsc::channel(100);
+    let actor = DocumentWebSocketActor::new(rx, document_manager);
+    tokio::task::spawn(actor.run());
+    Arc::new(DocumentWebSocketReceiver::new(persistence, ws_sender))
 }

-impl DocumentManager {
-    pub fn new(pg_pool: Data<PgPool>) -> Self {
-        let inner = Arc::new(ServerDocManager::new(Arc::new(DocPersistenceImpl(pg_pool.clone()))));
-        let (ws_sender, rx) = mpsc::channel(100);
-        let actor = DocumentWebSocketActor::new(rx, inner);
-        tokio::task::spawn(actor.run());
-        Self { ws_sender, pg_pool }
+pub struct DocumentWebSocketReceiver {
+    ws_sender: mpsc::Sender<WSActorMessage>,
+    persistence: Arc<FlowyPersistence>,
+}
+
+impl DocumentWebSocketReceiver {
+    pub fn new(persistence: Arc<FlowyPersistence>, ws_sender: mpsc::Sender<WSActorMessage>) -> Self {
+        Self { ws_sender, persistence }
     }
 }

-impl WebSocketReceiver for DocumentManager {
+impl WebSocketReceiver for DocumentWebSocketReceiver {
     fn receive(&self, data: WSClientData) {
         let (ret, rx) = oneshot::channel();
         let sender = self.ws_sender.clone();
-        let pool = self.pg_pool.clone();
+        let pool = self.persistence.pg_pool();

         actix_rt::spawn(async move {
             let msg = WSActorMessage::ClientData {
@@ -60,10 +67,10 @@ impl WebSocketReceiver for DocumentManager {
     }
 }

-struct DocPersistenceImpl(Data<PgPool>);
-impl ServerDocPersistence for DocPersistenceImpl {
+struct DocumentPersistenceImpl(Arc<FlowyPersistence>);
+impl DocumentPersistence for DocumentPersistenceImpl {
     fn update_doc(&self, doc_id: &str, rev_id: i64, delta: RichTextDelta) -> FutureResultSend<(), CollaborateError> {
-        let pg_pool = self.0.clone();
+        let pg_pool = self.0.pg_pool();
         let mut params = UpdateDocParams::new();
         let doc_json = delta.to_json();
         params.set_doc_id(doc_id.to_string());
@@ -71,7 +78,7 @@ impl ServerDocPersistence for DocPersistenceImpl {
         params.set_rev_id(rev_id);

         FutureResultSend::new(async move {
-            let _ = update_doc(pg_pool.get_ref(), params)
+            let _ = update_doc(&pg_pool, params)
                 .await
                 .map_err(server_error_to_collaborate_error)?;
             Ok(())
@@ -83,9 +90,9 @@ impl ServerDocPersistence for DocPersistenceImpl {
             doc_id: doc_id.to_string(),
             ..Default::default()
         };
-        let pg_pool = self.0.clone();
+        let pg_pool = self.0.pg_pool();
         FutureResultSend::new(async move {
-            let mut pb_doc = read_doc(pg_pool.get_ref(), params)
+            let mut pb_doc = read_doc(&pg_pool, params)
                 .await
                 .map_err(server_error_to_collaborate_error)?;
             let doc = (&mut pb_doc)
@@ -96,7 +103,7 @@ impl ServerDocPersistence for DocPersistenceImpl {
     }

     fn create_doc(&self, revision: Revision) -> FutureResultSend<Doc, CollaborateError> {
-        let pg_pool = self.0.clone();
+        let pg_pool = self.0.pg_pool();
         FutureResultSend::new(async move {
             let delta = RichTextDelta::from_bytes(&revision.delta_data)?;
             let doc_json = delta.to_json();
@@ -108,7 +115,7 @@ impl ServerDocPersistence for DocPersistenceImpl {
                 cached_size: Default::default(),
             };
-            let _ = create_doc(pg_pool.get_ref(), params)
+            let _ = create_doc(&pg_pool, params)
                 .await
                 .map_err(server_error_to_collaborate_error)?;
             let doc: Doc = revision.try_into()?;

View file

@@ -1,9 +1,6 @@
 #![allow(clippy::module_inception)]

-pub(crate) use crud::*;
-pub use router::*;
-
-pub mod crud;
-pub mod manager;
-pub mod router;
-mod ws_actor;
+pub(crate) mod controller;
+pub(crate) mod persistence;
+pub(crate) mod router;
+pub(crate) mod ws_actor;

View file

@@ -0,0 +1,48 @@
+use crate::{services::kv_store::KVStore, util::serde_ext::parse_from_bytes};
+use backend_service::errors::ServerError;
+use bytes::Bytes;
+use lib_ot::protobuf::{RepeatedRevision, Revision};
+use protobuf::Message;
+use std::sync::Arc;
+
+pub struct DocumentKVPersistence {
+    inner: Arc<dyn KVStore>,
+}
+
+impl std::ops::Deref for DocumentKVPersistence {
+    type Target = Arc<dyn KVStore>;
+
+    fn deref(&self) -> &Self::Target { &self.inner }
+}
+
+impl std::ops::DerefMut for DocumentKVPersistence {
+    fn deref_mut(&mut self) -> &mut Self::Target { &mut self.inner }
+}
+
+impl DocumentKVPersistence {
+    pub(crate) fn new(kv_store: Arc<dyn KVStore>) -> Self { DocumentKVPersistence { inner: kv_store } }
+
+    pub(crate) async fn set_revision(&self, revision: Revision) -> Result<(), ServerError> {
+        let key = revision.rev_id.to_string();
+        let bytes = revision.write_to_bytes()?;
+        let _ = self.inner.set(&key, Bytes::from(bytes)).await?;
+        Ok(())
+    }
+
+    pub(crate) async fn batch_get_revisions(&self, rev_ids: Vec<i64>) -> Result<RepeatedRevision, ServerError> {
+        let keys = rev_ids
+            .into_iter()
+            .map(|rev_id| rev_id.to_string())
+            .collect::<Vec<String>>();
+        let items = self.inner.batch_get(keys).await?;
+        let revisions = items
+            .into_iter()
+            .filter_map(|kv| parse_from_bytes::<Revision>(&kv.value).ok())
+            .collect::<Vec<Revision>>();
+
+        let mut repeated_revision = RepeatedRevision::new();
+        repeated_revision.set_items(revisions.into());
+        Ok(repeated_revision)
+    }
+}
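
A short sketch of how DocumentKVPersistence round-trips revisions through the KV store (assumed caller lives inside the same crate, since set_revision and batch_get_revisions are pub(crate); the helper function name is made up):

use crate::services::document::persistence::DocumentKVPersistence;
use backend_service::errors::ServerError;
use lib_ot::protobuf::Revision;

// Hypothetical helper: stores one revision under its rev_id key and reads it
// back as part of a RepeatedRevision batch.
async fn roundtrip_revision(kv: &DocumentKVPersistence, revision: Revision) -> Result<(), ServerError> {
    let rev_id = revision.rev_id;
    kv.set_revision(revision).await?;
    let repeated = kv.batch_get_revisions(vec![rev_id]).await?;
    assert_eq!(repeated.get_items().len(), 1);
    Ok(())
}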

View file

@@ -0,0 +1,5 @@
+mod kv;
+mod postgres;
+
+pub use kv::*;
+pub use postgres::*;

View file

@@ -1,20 +1,29 @@
 use crate::{
-    entities::doc::{DocTable, DOC_TABLE},
+    services::kv_store::KVStore,
     util::sqlx_ext::{map_sqlx_error, DBTransaction, SqlBuilder},
 };
 use anyhow::Context;
 use backend_service::errors::ServerError;
 use flowy_collaboration::protobuf::{CreateDocParams, Doc, DocIdentifier, UpdateDocParams};
+use protobuf::Message;
 use sqlx::{postgres::PgArguments, PgPool, Postgres};
 use uuid::Uuid;

+const DOC_TABLE: &str = "doc_table";
+
 #[tracing::instrument(level = "debug", skip(transaction), err)]
 pub(crate) async fn create_doc_with_transaction(
     transaction: &mut DBTransaction<'_>,
     params: CreateDocParams,
+    // kv_store: Data<Arc<dyn KVStore>>,
 ) -> Result<(), ServerError> {
     let uuid = Uuid::parse_str(&params.id)?;
-    let (sql, args) = NewDocSqlBuilder::new(uuid).data(params.data).build()?;
+    let (sql, args) = SqlBuilder::create(DOC_TABLE)
+        .add_field_with_arg("id", uuid)
+        .add_field_with_arg("rev_id", 0)
+        .build()?;
+
+    // TODO kv
     let _ = sqlx::query_with(&sql, args)
         .execute(transaction)
         .await
@@ -51,18 +60,20 @@ pub(crate) async fn read_doc(pool: &PgPool, params: DocIdentifier) -> Result<Doc
     let (sql, args) = builder.build()?;

     // TODO: benchmark the speed of different documents with different size
-    let doc: Doc = sqlx::query_as_with::<Postgres, DocTable, PgArguments>(&sql, args)
+    let _table = sqlx::query_as_with::<Postgres, DocTable, PgArguments>(&sql, args)
         .fetch_one(&mut transaction)
         .await
-        .map_err(map_sqlx_error)?
-        .into();
+        .map_err(map_sqlx_error)?;

-    transaction
-        .commit()
-        .await
-        .context("Failed to commit SQL transaction to read document.")?;
+    // TODO: kv
+    panic!("")

-    Ok(doc)
+    // transaction
+    //     .commit()
+    //     .await
+    //     .context("Failed to commit SQL transaction to read document.")?;
+    //
+    // Ok(doc)
 }

@@ -107,32 +118,17 @@ pub(crate) async fn delete_doc(transaction: &mut DBTransaction<'_>, doc_id: Uuid
     Ok(())
 }

-pub struct NewDocSqlBuilder {
-    table: DocTable,
+#[derive(Debug, Clone, sqlx::FromRow)]
+struct DocTable {
+    id: uuid::Uuid,
+    rev_id: i64,
 }

-impl NewDocSqlBuilder {
-    pub fn new(id: Uuid) -> Self {
-        let table = DocTable {
-            id,
-            data: "".to_owned(),
-            rev_id: 0,
-        };
-        Self { table }
-    }
-
-    pub fn data(mut self, data: String) -> Self {
-        self.table.data = data;
-        self
-    }
-
-    pub fn build(self) -> Result<(String, PgArguments), ServerError> {
-        let (sql, args) = SqlBuilder::create(DOC_TABLE)
-            .add_field_with_arg("id", self.table.id)
-            .add_field_with_arg("data", self.table.data)
-            .add_field_with_arg("rev_id", self.table.rev_id)
-            .build()?;
-
-        Ok((sql, args))
-    }
-}
+// impl std::convert::From<DocTable> for Doc {
+//     fn from(table: DocTable) -> Self {
+//         let mut doc = Doc::new();
+//         doc.set_id(table.id.to_string());
+//         doc.set_rev_id(table.rev_id);
+//         doc
+//     }
+// }

View file

@@ -1,10 +1,11 @@
-use crate::services::document::{create_doc, read_doc, update_doc};
+use crate::{
+    services::document::persistence::{create_doc, read_doc, update_doc},
+    util::serde_ext::parse_from_payload,
+};
 use actix_web::{
     web::{Data, Payload},
     HttpResponse,
 };
-use crate::util::serde_ext::parse_from_payload;
 use backend_service::{errors::ServerError, response::FlowyResponse};
 use flowy_collaboration::protobuf::{CreateDocParams, DocIdentifier, UpdateDocParams};
 use sqlx::PgPool;

View file

@@ -1,16 +1,16 @@
 use crate::{
     services::{
-        document::update_doc,
+        document::persistence::update_doc,
         web_socket::{entities::Socket, WSClientData, WSMessageAdaptor, WSUser},
     },
     util::serde_ext::{md5, parse_from_bytes},
 };
 use actix_rt::task::spawn_blocking;
-use actix_web::web::Data;
 use async_stream::stream;
 use backend_service::errors::{internal_error, Result, ServerError};
 use flowy_collaboration::{
-    core::sync::{RevisionUser, ServerDocManager, SyncResponse},
+    core::sync::{RevisionUser, ServerDocumentManager, SyncResponse},
     protobuf::{DocumentWSData, DocumentWSDataType, NewDocumentUser, UpdateDocParams},
 };
 use futures::stream::StreamExt;
@@ -22,18 +22,18 @@ use tokio::sync::{mpsc, oneshot};
 pub enum WSActorMessage {
     ClientData {
         client_data: WSClientData,
-        pool: Data<PgPool>,
+        pool: PgPool,
         ret: oneshot::Sender<Result<()>>,
     },
 }

 pub struct DocumentWebSocketActor {
     receiver: Option<mpsc::Receiver<WSActorMessage>>,
-    doc_manager: Arc<ServerDocManager>,
+    doc_manager: Arc<ServerDocumentManager>,
 }

 impl DocumentWebSocketActor {
-    pub fn new(receiver: mpsc::Receiver<WSActorMessage>, manager: Arc<ServerDocManager>) -> Self {
+    pub fn new(receiver: mpsc::Receiver<WSActorMessage>, manager: Arc<ServerDocumentManager>) -> Self {
         Self {
             receiver: Some(receiver),
             doc_manager: manager,
@@ -66,7 +66,7 @@ impl DocumentWebSocketActor {
         }
     }

-    async fn handle_client_data(&self, client_data: WSClientData, pg_pool: Data<PgPool>) -> Result<()> {
+    async fn handle_client_data(&self, client_data: WSClientData, pg_pool: PgPool) -> Result<()> {
         let WSClientData { user, socket, data } = client_data;
         let document_data = spawn_blocking(move || {
             let document_data: DocumentWSData = parse_from_bytes(&data)?;
@@ -151,7 +151,7 @@ fn verify_md5(revision: &Revision) -> Result<()> {
 pub struct ServerDocUser {
     pub user: Arc<WSUser>,
     pub(crate) socket: Socket,
-    pub pg_pool: Data<PgPool>,
+    pub pg_pool: PgPool,
 }

 impl RevisionUser for ServerDocUser {
@@ -182,7 +182,7 @@ impl RevisionUser for ServerDocUser {
                 params.set_doc_id(doc_id);
                 params.set_data(doc_json);
                 params.set_rev_id(rev_id);
-                match update_doc(pg_pool.get_ref(), params).await {
+                match update_doc(&pg_pool, params).await {
                     Ok(_) => {},
                     Err(e) => log::error!("{}", e),
                 }

View file

@@ -2,7 +2,7 @@ use crate::{
     services::kv_store::{KVStore, KeyValue},
     util::sqlx_ext::{map_sqlx_error, SqlBuilder},
 };
-use actix_web::web::Data;
 use anyhow::Context;
 use backend_service::errors::ServerError;
 use bytes::Bytes;
@@ -13,7 +13,7 @@ use sqlx::{postgres::PgArguments, Error, PgPool, Postgres, Row};
 const KV_TABLE: &str = "kv_table";

 pub(crate) struct PostgresKV {
-    pub(crate) pg_pool: Data<PgPool>,
+    pub(crate) pg_pool: PgPool,
 }

 impl KVStore for PostgresKV {

View file

@@ -29,7 +29,9 @@ impl std::default::Default for WebSocketReceivers {
 impl WebSocketReceivers {
     pub fn new() -> Self { WebSocketReceivers::default() }

-    pub fn set(&mut self, source: WSModule, handler: Arc<dyn WebSocketReceiver>) { self.inner.insert(source, handler); }
+    pub fn set(&mut self, source: WSModule, receiver: Arc<dyn WebSocketReceiver>) {
+        self.inner.insert(source, receiver);
+    }

     pub fn get(&self, source: &WSModule) -> Option<Arc<dyn WebSocketReceiver>> { self.inner.get(source).cloned() }
 }

View file

@@ -5,7 +5,7 @@ use std::str;
 #[actix_rt::test]
 async fn kv_set_test() {
     let server = spawn_server().await;
-    let kv = server.app_ctx.kv_store.clone();
+    let kv = server.app_ctx.persistence.kv_store();
     let s1 = "123".to_string();
     let key = "1";
@@ -18,7 +18,7 @@ async fn kv_set_test() {
 #[actix_rt::test]
 async fn kv_delete_test() {
     let server = spawn_server().await;
-    let kv = server.app_ctx.kv_store.clone();
+    let kv = server.app_ctx.persistence.kv_store();
     let s1 = "123".to_string();
     let key = "1";
@@ -30,7 +30,7 @@ async fn kv_delete_test() {
 #[actix_rt::test]
 async fn kv_batch_set_test() {
     let server = spawn_server().await;
-    let kv = server.app_ctx.kv_store.clone();
+    let kv = server.app_ctx.persistence.kv_store();
     let kvs = vec![
         KeyValue {
             key: "1".to_string(),

View file

@@ -145,6 +145,47 @@ class Revision extends $pb.GeneratedMessage {
   void clearUserId() => clearField(7);
 }

+class RepeatedRevision extends $pb.GeneratedMessage {
+  static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'RepeatedRevision', createEmptyInstance: create)
+    ..pc<Revision>(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'items', $pb.PbFieldType.PM, subBuilder: Revision.create)
+    ..hasRequiredFields = false
+  ;
+
+  RepeatedRevision._() : super();
+  factory RepeatedRevision({
+    $core.Iterable<Revision>? items,
+  }) {
+    final _result = create();
+    if (items != null) {
+      _result.items.addAll(items);
+    }
+    return _result;
+  }
+  factory RepeatedRevision.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
+  factory RepeatedRevision.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
+  @$core.Deprecated(
+  'Using this can add significant overhead to your binary. '
+  'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
+  'Will be removed in next major version')
+  RepeatedRevision clone() => RepeatedRevision()..mergeFromMessage(this);
+  @$core.Deprecated(
+  'Using this can add significant overhead to your binary. '
+  'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
+  'Will be removed in next major version')
+  RepeatedRevision copyWith(void Function(RepeatedRevision) updates) => super.copyWith((message) => updates(message as RepeatedRevision)) as RepeatedRevision; // ignore: deprecated_member_use
+  $pb.BuilderInfo get info_ => _i;
+  @$core.pragma('dart2js:noInline')
+  static RepeatedRevision create() => RepeatedRevision._();
+  RepeatedRevision createEmptyInstance() => create();
+  static $pb.PbList<RepeatedRevision> createRepeated() => $pb.PbList<RepeatedRevision>();
+  @$core.pragma('dart2js:noInline')
+  static RepeatedRevision getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<RepeatedRevision>(create);
+  static RepeatedRevision? _defaultInstance;
+
+  @$pb.TagNumber(1)
+  $core.List<Revision> get items => $_getList(0);
+}
+
 class RevId extends $pb.GeneratedMessage {
   static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'RevId', createEmptyInstance: create)
     ..aInt64(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'value')

View file

@@ -26,11 +26,11 @@ class RevType extends $pb.ProtobufEnum {
 class RevState extends $pb.ProtobufEnum {
   static const RevState StateLocal = RevState._(0, const $core.bool.fromEnvironment('protobuf.omit_enum_names') ? '' : 'StateLocal');
-  static const RevState Acked = RevState._(1, const $core.bool.fromEnvironment('protobuf.omit_enum_names') ? '' : 'Acked');
+  static const RevState Ack = RevState._(1, const $core.bool.fromEnvironment('protobuf.omit_enum_names') ? '' : 'Ack');

   static const $core.List<RevState> values = <RevState> [
     StateLocal,
-    Acked,
+    Ack,
   ];

   static final $core.Map<$core.int, RevState> _byValue = $pb.ProtobufEnum.initByValue(values);

View file

@@ -24,12 +24,12 @@ const RevState$json = const {
   '1': 'RevState',
   '2': const [
     const {'1': 'StateLocal', '2': 0},
-    const {'1': 'Acked', '2': 1},
+    const {'1': 'Ack', '2': 1},
   ],
 };

 /// Descriptor for `RevState`. Decode as a `google.protobuf.EnumDescriptorProto`.
-final $typed_data.Uint8List revStateDescriptor = $convert.base64Decode('CghSZXZTdGF0ZRIOCgpTdGF0ZUxvY2FsEAASCQoFQWNrZWQQAQ==');
+final $typed_data.Uint8List revStateDescriptor = $convert.base64Decode('CghSZXZTdGF0ZRIOCgpTdGF0ZUxvY2FsEAASBwoDQWNrEAE=');
 @$core.Deprecated('Use revisionDescriptor instead')
 const Revision$json = const {
   '1': 'Revision',
@@ -46,6 +46,16 @@ const Revision$json = const {
 /// Descriptor for `Revision`. Decode as a `google.protobuf.DescriptorProto`.
 final $typed_data.Uint8List revisionDescriptor = $convert.base64Decode('CghSZXZpc2lvbhIeCgtiYXNlX3Jldl9pZBgBIAEoA1IJYmFzZVJldklkEhUKBnJldl9pZBgCIAEoA1IFcmV2SWQSHQoKZGVsdGFfZGF0YRgDIAEoDFIJZGVsdGFEYXRhEhAKA21kNRgEIAEoCVIDbWQ1EhUKBmRvY19pZBgFIAEoCVIFZG9jSWQSGAoCdHkYBiABKA4yCC5SZXZUeXBlUgJ0eRIXCgd1c2VyX2lkGAcgASgJUgZ1c2VySWQ=');
+@$core.Deprecated('Use repeatedRevisionDescriptor instead')
+const RepeatedRevision$json = const {
+  '1': 'RepeatedRevision',
+  '2': const [
+    const {'1': 'items', '3': 1, '4': 3, '5': 11, '6': '.Revision', '10': 'items'},
+  ],
+};
+
+/// Descriptor for `RepeatedRevision`. Decode as a `google.protobuf.DescriptorProto`.
+final $typed_data.Uint8List repeatedRevisionDescriptor = $convert.base64Decode('ChBSZXBlYXRlZFJldmlzaW9uEh8KBWl0ZW1zGAEgAygLMgkuUmV2aXNpb25SBWl0ZW1z');
 @$core.Deprecated('Use revIdDescriptor instead')
 const RevId$json = const {
   '1': 'RevId',

View file

@@ -72,7 +72,7 @@ impl RevisionCache {
         let rev_id = revision.rev_id;
         let record = RevisionRecord {
             revision,
-            state: RevState::Acked,
+            state: RevState::Ack,
         };
         self.memory_cache.add_revision(&record).await;
         let _ = self.latest_rev_id.fetch_update(SeqCst, SeqCst, |_e| Some(rev_id));
@@ -170,7 +170,7 @@ pub struct RevisionRecord {
 }

 impl RevisionRecord {
-    pub fn ack(&mut self) { self.state = RevState::Acked; }
+    pub fn ack(&mut self) { self.state = RevState::Ack; }
 }

 struct RevisionSyncSeq {
struct RevisionSyncSeq { struct RevisionSyncSeq {

View file

@@ -145,7 +145,7 @@ impl RevisionLoader {
                     Ok(_) => {},
                     Err(e) => tracing::error!("{}", e),
                 },
-                RevState::Acked => {},
+                RevState::Ack => {},
             }
         }
         revisions = records.into_iter().map(|record| record.revision).collect::<_>();

View file

@@ -50,7 +50,7 @@ impl std::convert::From<RevTableState> for RevState {
     fn from(s: RevTableState) -> Self {
         match s {
             RevTableState::Local => RevState::StateLocal,
-            RevTableState::Acked => RevState::Acked,
+            RevTableState::Acked => RevState::Ack,
         }
     }
 }
@@ -59,7 +59,7 @@ impl std::convert::From<RevState> for RevTableState {
     fn from(s: RevState) -> Self {
         match s {
             RevState::StateLocal => RevTableState::Local,
-            RevState::Acked => RevTableState::Acked,
+            RevState::Ack => RevTableState::Acked,
         }
     }
 }

View file

@@ -2,7 +2,7 @@ use crate::services::ws::{FlowyError, FlowyWebSocket, FlowyWsSender, WSConnectSt
 use bytes::Bytes;
 use dashmap::DashMap;
 use flowy_collaboration::{
-    core::sync::{RevisionUser, ServerDocManager, ServerDocPersistence, SyncResponse},
+    core::sync::{DocumentPersistence, RevisionUser, ServerDocumentManager, SyncResponse},
     entities::{
         doc::Doc,
         ws::{DocumentWSData, DocumentWSDataBuilder, DocumentWSDataType, NewDocumentUser},
@@ -96,13 +96,13 @@ lazy_static! {
 }

 struct MockDocServer {
-    pub manager: Arc<ServerDocManager>,
+    pub manager: Arc<ServerDocumentManager>,
 }

 impl std::default::Default for MockDocServer {
     fn default() -> Self {
         let persistence = Arc::new(MockDocServerPersistence::default());
-        let manager = Arc::new(ServerDocManager::new(persistence));
+        let manager = Arc::new(ServerDocumentManager::new(persistence));
         MockDocServer { manager }
     }
 }
@@ -160,7 +160,7 @@ impl std::default::Default for MockDocServerPersistence {
     }
 }

-impl ServerDocPersistence for MockDocServerPersistence {
+impl DocumentPersistence for MockDocServerPersistence {
     fn update_doc(&self, _doc_id: &str, _rev_id: i64, _delta: RichTextDelta) -> FutureResultSend<(), CollaborateError> {
         unimplemented!()
     }

View file

@@ -17,40 +17,19 @@ use tokio::{
     task::spawn_blocking,
 };

-pub trait ServerDocPersistence: Send + Sync {
+pub trait DocumentPersistence: Send + Sync {
     fn update_doc(&self, doc_id: &str, rev_id: i64, delta: RichTextDelta) -> FutureResultSend<(), CollaborateError>;
     fn read_doc(&self, doc_id: &str) -> FutureResultSend<Doc, CollaborateError>;
     fn create_doc(&self, revision: Revision) -> FutureResultSend<Doc, CollaborateError>;
 }

-#[rustfmt::skip]
-// ┌─────────────────┐
-// │ServerDocManager │
-// └─────────────────┘
-//          │ 1
-//          ▼ n
-//   ┌───────────────┐
-//   │ OpenDocHandle │
-//   └───────────────┘
-//          │
-//          ▼
-// ┌──────────────────┐
-// │ DocCommandQueue  │
-// └──────────────────┘
-//          │ ┌──────────────────────┐ ┌────────────┐
-//          ▼ ┌────▶│ RevisionSynchronizer │────▶│ Document │
-// ┌────────────────┐ │ └──────────────────────┘ └────────────┘
-// │ServerDocEditor │─────┤
-// └────────────────┘ │ ┌────────┐ ┌────────────┐
-//                    └────▶│ Users  │◆──────│RevisionUser│
-//                          └────────┘ └────────────┘
-pub struct ServerDocManager {
+pub struct ServerDocumentManager {
     open_doc_map: DashMap<String, Arc<OpenDocHandle>>,
-    persistence: Arc<dyn ServerDocPersistence>,
+    persistence: Arc<dyn DocumentPersistence>,
 }

-impl ServerDocManager {
-    pub fn new(persistence: Arc<dyn ServerDocPersistence>) -> Self {
+impl ServerDocumentManager {
+    pub fn new(persistence: Arc<dyn DocumentPersistence>) -> Self {
         Self {
             open_doc_map: DashMap::new(),
             persistence,

View file

@@ -75,6 +75,7 @@ pub fn category_from_str(type_str: &str) -> TypeCategory {
         | "WSError"
         | "WSMessage"
         | "Revision"
+        | "RepeatedRevision"
         | "RevId"
         | "RevisionRange"
         => TypeCategory::Protobuf,

View file

@@ -409,6 +409,172 @@ impl ::protobuf::reflect::ProtobufValue for Revision {
     }
 }

+#[derive(PartialEq,Clone,Default)]
+pub struct RepeatedRevision {
+    // message fields
+    pub items: ::protobuf::RepeatedField<Revision>,
+    // special fields
+    pub unknown_fields: ::protobuf::UnknownFields,
+    pub cached_size: ::protobuf::CachedSize,
+}
+
+impl<'a> ::std::default::Default for &'a RepeatedRevision {
+    fn default() -> &'a RepeatedRevision {
+        <RepeatedRevision as ::protobuf::Message>::default_instance()
+    }
+}
+
+impl RepeatedRevision {
+    pub fn new() -> RepeatedRevision {
+        ::std::default::Default::default()
+    }
+
+    // repeated .Revision items = 1;
+
+    pub fn get_items(&self) -> &[Revision] {
+        &self.items
+    }
+    pub fn clear_items(&mut self) {
+        self.items.clear();
+    }
+
+    // Param is passed by value, moved
+    pub fn set_items(&mut self, v: ::protobuf::RepeatedField<Revision>) {
+        self.items = v;
+    }
+
+    // Mutable pointer to the field.
+    pub fn mut_items(&mut self) -> &mut ::protobuf::RepeatedField<Revision> {
+        &mut self.items
+    }
+
+    // Take field
+    pub fn take_items(&mut self) -> ::protobuf::RepeatedField<Revision> {
+        ::std::mem::replace(&mut self.items, ::protobuf::RepeatedField::new())
+    }
+}
+
+impl ::protobuf::Message for RepeatedRevision {
+    fn is_initialized(&self) -> bool {
+        for v in &self.items {
+            if !v.is_initialized() {
+                return false;
+            }
+        };
+        true
+    }
+
+    fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
+        while !is.eof()? {
+            let (field_number, wire_type) = is.read_tag_unpack()?;
+            match field_number {
+                1 => {
+                    ::protobuf::rt::read_repeated_message_into(wire_type, is, &mut self.items)?;
+                },
+                _ => {
+                    ::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
+                },
+            };
+        }
+        ::std::result::Result::Ok(())
+    }
+
+    // Compute sizes of nested messages
+    #[allow(unused_variables)]
+    fn compute_size(&self) -> u32 {
+        let mut my_size = 0;
+        for value in &self.items {
+            let len = value.compute_size();
+            my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
+        };
+        my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
+        self.cached_size.set(my_size);
+        my_size
+    }
+
+    fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
+        for v in &self.items {
+            os.write_tag(1, ::protobuf::wire_format::WireTypeLengthDelimited)?;
+            os.write_raw_varint32(v.get_cached_size())?;
+            v.write_to_with_cached_sizes(os)?;
+        };
+        os.write_unknown_fields(self.get_unknown_fields())?;
+        ::std::result::Result::Ok(())
+    }
+
+    fn get_cached_size(&self) -> u32 {
+        self.cached_size.get()
+    }
+
+    fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
+        &self.unknown_fields
+    }
+
+    fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
+        &mut self.unknown_fields
+    }
+
+    fn as_any(&self) -> &dyn (::std::any::Any) {
+        self as &dyn (::std::any::Any)
+    }
+    fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
+        self as &mut dyn (::std::any::Any)
+    }
+    fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
+        self
+    }
+
+    fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
+        Self::descriptor_static()
+    }
+
+    fn new() -> RepeatedRevision {
+        RepeatedRevision::new()
+    }
+
+    fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
+        static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
+        descriptor.get(|| {
+            let mut fields = ::std::vec::Vec::new();
+            fields.push(::protobuf::reflect::accessor::make_repeated_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage<Revision>>(
+                "items",
+                |m: &RepeatedRevision| { &m.items },
+                |m: &mut RepeatedRevision| { &mut m.items },
+            ));
+            ::protobuf::reflect::MessageDescriptor::new_pb_name::<RepeatedRevision>(
+                "RepeatedRevision",
+                fields,
+                file_descriptor_proto()
+            )
+        })
+    }
+
+    fn default_instance() -> &'static RepeatedRevision {
+        static instance: ::protobuf::rt::LazyV2<RepeatedRevision> = ::protobuf::rt::LazyV2::INIT;
+        instance.get(RepeatedRevision::new)
+    }
+}
+
+impl ::protobuf::Clear for RepeatedRevision {
+    fn clear(&mut self) {
+        self.items.clear();
+        self.unknown_fields.clear();
+    }
+}
+
+impl ::std::fmt::Debug for RepeatedRevision {
+    fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
+        ::protobuf::text_format::fmt(self, f)
+    }
+}
+
+impl ::protobuf::reflect::ProtobufValue for RepeatedRevision {
+    fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
+        ::protobuf::reflect::ReflectValueRef::Message(self)
+    }
+}
+
 #[derive(PartialEq,Clone,Default)]
 pub struct RevId {
     // message fields
@@ -843,7 +1009,7 @@ impl ::protobuf::reflect::ProtobufValue for RevType {
 #[derive(Clone,PartialEq,Eq,Debug,Hash)]
 pub enum RevState {
     StateLocal = 0,
-    Acked = 1,
+    Ack = 1,
 }

 impl ::protobuf::ProtobufEnum for RevState {
@@ -854,7 +1020,7 @@ impl ::protobuf::ProtobufEnum for RevState {
     fn from_i32(value: i32) -> ::std::option::Option<RevState> {
         match value {
             0 => ::std::option::Option::Some(RevState::StateLocal),
-            1 => ::std::option::Option::Some(RevState::Acked),
+            1 => ::std::option::Option::Some(RevState::Ack),
             _ => ::std::option::Option::None
         }
     }
@@ -862,7 +1028,7 @@ impl ::protobuf::ProtobufEnum for RevState {
     fn values() -> &'static [Self] {
         static values: &'static [RevState] = &[
             RevState::StateLocal,
-            RevState::Acked,
+            RevState::Ack,
         ];
         values
     }
@ -896,57 +1062,63 @@ static file_descriptor_proto_data: &'static [u8] = b"\
\x05revId\x12\x1d\n\ndelta_data\x18\x03\x20\x01(\x0cR\tdeltaData\x12\x10\ \x05revId\x12\x1d\n\ndelta_data\x18\x03\x20\x01(\x0cR\tdeltaData\x12\x10\
\n\x03md5\x18\x04\x20\x01(\tR\x03md5\x12\x15\n\x06doc_id\x18\x05\x20\x01\ \n\x03md5\x18\x04\x20\x01(\tR\x03md5\x12\x15\n\x06doc_id\x18\x05\x20\x01\
(\tR\x05docId\x12\x18\n\x02ty\x18\x06\x20\x01(\x0e2\x08.RevTypeR\x02ty\ (\tR\x05docId\x12\x18\n\x02ty\x18\x06\x20\x01(\x0e2\x08.RevTypeR\x02ty\
\x12\x17\n\x07user_id\x18\x07\x20\x01(\tR\x06userId\"\x1d\n\x05RevId\x12\ \x12\x17\n\x07user_id\x18\x07\x20\x01(\tR\x06userId\"3\n\x10RepeatedRevi\
\x14\n\x05value\x18\x01\x20\x01(\x03R\x05value\"N\n\rRevisionRange\x12\ sion\x12\x1f\n\x05items\x18\x01\x20\x03(\x0b2\t.RevisionR\x05items\"\x1d\
\x15\n\x06doc_id\x18\x01\x20\x01(\tR\x05docId\x12\x14\n\x05start\x18\x02\ \n\x05RevId\x12\x14\n\x05value\x18\x01\x20\x01(\x03R\x05value\"N\n\rRevi\
\x20\x01(\x03R\x05start\x12\x10\n\x03end\x18\x03\x20\x01(\x03R\x03end*\ sionRange\x12\x15\n\x06doc_id\x18\x01\x20\x01(\tR\x05docId\x12\x14\n\x05\
\x20\n\x07RevType\x12\t\n\x05Local\x10\0\x12\n\n\x06Remote\x10\x01*%\n\ start\x18\x02\x20\x01(\x03R\x05start\x12\x10\n\x03end\x18\x03\x20\x01(\
\x08RevState\x12\x0e\n\nStateLocal\x10\0\x12\t\n\x05Acked\x10\x01J\x8b\ \x03R\x03end*\x20\n\x07RevType\x12\t\n\x05Local\x10\0\x12\n\n\x06Remote\
\x07\n\x06\x12\x04\0\0\x1a\x01\n\x08\n\x01\x0c\x12\x03\0\0\x12\n\n\n\x02\ \x10\x01*#\n\x08RevState\x12\x0e\n\nStateLocal\x10\0\x12\x07\n\x03Ack\
\x04\0\x12\x04\x02\0\n\x01\n\n\n\x03\x04\0\x01\x12\x03\x02\x08\x10\n\x0b\ \x10\x01J\xe8\x07\n\x06\x12\x04\0\0\x1d\x01\n\x08\n\x01\x0c\x12\x03\0\0\
\n\x04\x04\0\x02\0\x12\x03\x03\x04\x1a\n\x0c\n\x05\x04\0\x02\0\x05\x12\ \x12\n\n\n\x02\x04\0\x12\x04\x02\0\n\x01\n\n\n\x03\x04\0\x01\x12\x03\x02\
\x03\x03\x04\t\n\x0c\n\x05\x04\0\x02\0\x01\x12\x03\x03\n\x15\n\x0c\n\x05\ \x08\x10\n\x0b\n\x04\x04\0\x02\0\x12\x03\x03\x04\x1a\n\x0c\n\x05\x04\0\
\x04\0\x02\0\x03\x12\x03\x03\x18\x19\n\x0b\n\x04\x04\0\x02\x01\x12\x03\ \x02\0\x05\x12\x03\x03\x04\t\n\x0c\n\x05\x04\0\x02\0\x01\x12\x03\x03\n\
\x04\x04\x15\n\x0c\n\x05\x04\0\x02\x01\x05\x12\x03\x04\x04\t\n\x0c\n\x05\ \x15\n\x0c\n\x05\x04\0\x02\0\x03\x12\x03\x03\x18\x19\n\x0b\n\x04\x04\0\
\x04\0\x02\x01\x01\x12\x03\x04\n\x10\n\x0c\n\x05\x04\0\x02\x01\x03\x12\ \x02\x01\x12\x03\x04\x04\x15\n\x0c\n\x05\x04\0\x02\x01\x05\x12\x03\x04\
\x03\x04\x13\x14\n\x0b\n\x04\x04\0\x02\x02\x12\x03\x05\x04\x19\n\x0c\n\ \x04\t\n\x0c\n\x05\x04\0\x02\x01\x01\x12\x03\x04\n\x10\n\x0c\n\x05\x04\0\
\x05\x04\0\x02\x02\x05\x12\x03\x05\x04\t\n\x0c\n\x05\x04\0\x02\x02\x01\ \x02\x01\x03\x12\x03\x04\x13\x14\n\x0b\n\x04\x04\0\x02\x02\x12\x03\x05\
\x12\x03\x05\n\x14\n\x0c\n\x05\x04\0\x02\x02\x03\x12\x03\x05\x17\x18\n\ \x04\x19\n\x0c\n\x05\x04\0\x02\x02\x05\x12\x03\x05\x04\t\n\x0c\n\x05\x04\
\x0b\n\x04\x04\0\x02\x03\x12\x03\x06\x04\x13\n\x0c\n\x05\x04\0\x02\x03\ \0\x02\x02\x01\x12\x03\x05\n\x14\n\x0c\n\x05\x04\0\x02\x02\x03\x12\x03\
\x05\x12\x03\x06\x04\n\n\x0c\n\x05\x04\0\x02\x03\x01\x12\x03\x06\x0b\x0e\ \x05\x17\x18\n\x0b\n\x04\x04\0\x02\x03\x12\x03\x06\x04\x13\n\x0c\n\x05\
\n\x0c\n\x05\x04\0\x02\x03\x03\x12\x03\x06\x11\x12\n\x0b\n\x04\x04\0\x02\ \x04\0\x02\x03\x05\x12\x03\x06\x04\n\n\x0c\n\x05\x04\0\x02\x03\x01\x12\
\x04\x12\x03\x07\x04\x16\n\x0c\n\x05\x04\0\x02\x04\x05\x12\x03\x07\x04\n\ \x03\x06\x0b\x0e\n\x0c\n\x05\x04\0\x02\x03\x03\x12\x03\x06\x11\x12\n\x0b\
\n\x0c\n\x05\x04\0\x02\x04\x01\x12\x03\x07\x0b\x11\n\x0c\n\x05\x04\0\x02\ \n\x04\x04\0\x02\x04\x12\x03\x07\x04\x16\n\x0c\n\x05\x04\0\x02\x04\x05\
\x04\x03\x12\x03\x07\x14\x15\n\x0b\n\x04\x04\0\x02\x05\x12\x03\x08\x04\ \x12\x03\x07\x04\n\n\x0c\n\x05\x04\0\x02\x04\x01\x12\x03\x07\x0b\x11\n\
\x13\n\x0c\n\x05\x04\0\x02\x05\x06\x12\x03\x08\x04\x0b\n\x0c\n\x05\x04\0\ \x0c\n\x05\x04\0\x02\x04\x03\x12\x03\x07\x14\x15\n\x0b\n\x04\x04\0\x02\
\x02\x05\x01\x12\x03\x08\x0c\x0e\n\x0c\n\x05\x04\0\x02\x05\x03\x12\x03\ \x05\x12\x03\x08\x04\x13\n\x0c\n\x05\x04\0\x02\x05\x06\x12\x03\x08\x04\
\x08\x11\x12\n\x0b\n\x04\x04\0\x02\x06\x12\x03\t\x04\x17\n\x0c\n\x05\x04\ \x0b\n\x0c\n\x05\x04\0\x02\x05\x01\x12\x03\x08\x0c\x0e\n\x0c\n\x05\x04\0\
\0\x02\x06\x05\x12\x03\t\x04\n\n\x0c\n\x05\x04\0\x02\x06\x01\x12\x03\t\ \x02\x05\x03\x12\x03\x08\x11\x12\n\x0b\n\x04\x04\0\x02\x06\x12\x03\t\x04\
\x0b\x12\n\x0c\n\x05\x04\0\x02\x06\x03\x12\x03\t\x15\x16\n\n\n\x02\x04\ \x17\n\x0c\n\x05\x04\0\x02\x06\x05\x12\x03\t\x04\n\n\x0c\n\x05\x04\0\x02\
\x01\x12\x04\x0b\0\r\x01\n\n\n\x03\x04\x01\x01\x12\x03\x0b\x08\r\n\x0b\n\ \x06\x01\x12\x03\t\x0b\x12\n\x0c\n\x05\x04\0\x02\x06\x03\x12\x03\t\x15\
\x04\x04\x01\x02\0\x12\x03\x0c\x04\x14\n\x0c\n\x05\x04\x01\x02\0\x05\x12\ \x16\n\n\n\x02\x04\x01\x12\x04\x0b\0\r\x01\n\n\n\x03\x04\x01\x01\x12\x03\
\x03\x0c\x04\t\n\x0c\n\x05\x04\x01\x02\0\x01\x12\x03\x0c\n\x0f\n\x0c\n\ \x0b\x08\x18\n\x0b\n\x04\x04\x01\x02\0\x12\x03\x0c\x04\x20\n\x0c\n\x05\
\x05\x04\x01\x02\0\x03\x12\x03\x0c\x12\x13\n\n\n\x02\x04\x02\x12\x04\x0e\ \x04\x01\x02\0\x04\x12\x03\x0c\x04\x0c\n\x0c\n\x05\x04\x01\x02\0\x06\x12\
\0\x12\x01\n\n\n\x03\x04\x02\x01\x12\x03\x0e\x08\x15\n\x0b\n\x04\x04\x02\ \x03\x0c\r\x15\n\x0c\n\x05\x04\x01\x02\0\x01\x12\x03\x0c\x16\x1b\n\x0c\n\
\x02\0\x12\x03\x0f\x04\x16\n\x0c\n\x05\x04\x02\x02\0\x05\x12\x03\x0f\x04\ \x05\x04\x01\x02\0\x03\x12\x03\x0c\x1e\x1f\n\n\n\x02\x04\x02\x12\x04\x0e\
\n\n\x0c\n\x05\x04\x02\x02\0\x01\x12\x03\x0f\x0b\x11\n\x0c\n\x05\x04\x02\ \0\x10\x01\n\n\n\x03\x04\x02\x01\x12\x03\x0e\x08\r\n\x0b\n\x04\x04\x02\
\x02\0\x03\x12\x03\x0f\x14\x15\n\x0b\n\x04\x04\x02\x02\x01\x12\x03\x10\ \x02\0\x12\x03\x0f\x04\x14\n\x0c\n\x05\x04\x02\x02\0\x05\x12\x03\x0f\x04\
\x04\x14\n\x0c\n\x05\x04\x02\x02\x01\x05\x12\x03\x10\x04\t\n\x0c\n\x05\ \t\n\x0c\n\x05\x04\x02\x02\0\x01\x12\x03\x0f\n\x0f\n\x0c\n\x05\x04\x02\
\x04\x02\x02\x01\x01\x12\x03\x10\n\x0f\n\x0c\n\x05\x04\x02\x02\x01\x03\ \x02\0\x03\x12\x03\x0f\x12\x13\n\n\n\x02\x04\x03\x12\x04\x11\0\x15\x01\n\
\x12\x03\x10\x12\x13\n\x0b\n\x04\x04\x02\x02\x02\x12\x03\x11\x04\x12\n\ \n\n\x03\x04\x03\x01\x12\x03\x11\x08\x15\n\x0b\n\x04\x04\x03\x02\0\x12\
\x0c\n\x05\x04\x02\x02\x02\x05\x12\x03\x11\x04\t\n\x0c\n\x05\x04\x02\x02\ \x03\x12\x04\x16\n\x0c\n\x05\x04\x03\x02\0\x05\x12\x03\x12\x04\n\n\x0c\n\
\x02\x01\x12\x03\x11\n\r\n\x0c\n\x05\x04\x02\x02\x02\x03\x12\x03\x11\x10\ \x05\x04\x03\x02\0\x01\x12\x03\x12\x0b\x11\n\x0c\n\x05\x04\x03\x02\0\x03\
\x11\n\n\n\x02\x05\0\x12\x04\x13\0\x16\x01\n\n\n\x03\x05\0\x01\x12\x03\ \x12\x03\x12\x14\x15\n\x0b\n\x04\x04\x03\x02\x01\x12\x03\x13\x04\x14\n\
\x13\x05\x0c\n\x0b\n\x04\x05\0\x02\0\x12\x03\x14\x04\x0e\n\x0c\n\x05\x05\ \x0c\n\x05\x04\x03\x02\x01\x05\x12\x03\x13\x04\t\n\x0c\n\x05\x04\x03\x02\
\0\x02\0\x01\x12\x03\x14\x04\t\n\x0c\n\x05\x05\0\x02\0\x02\x12\x03\x14\ \x01\x01\x12\x03\x13\n\x0f\n\x0c\n\x05\x04\x03\x02\x01\x03\x12\x03\x13\
\x0c\r\n\x0b\n\x04\x05\0\x02\x01\x12\x03\x15\x04\x0f\n\x0c\n\x05\x05\0\ \x12\x13\n\x0b\n\x04\x04\x03\x02\x02\x12\x03\x14\x04\x12\n\x0c\n\x05\x04\
\x02\x01\x01\x12\x03\x15\x04\n\n\x0c\n\x05\x05\0\x02\x01\x02\x12\x03\x15\ \x03\x02\x02\x05\x12\x03\x14\x04\t\n\x0c\n\x05\x04\x03\x02\x02\x01\x12\
\r\x0e\n\n\n\x02\x05\x01\x12\x04\x17\0\x1a\x01\n\n\n\x03\x05\x01\x01\x12\ \x03\x14\n\r\n\x0c\n\x05\x04\x03\x02\x02\x03\x12\x03\x14\x10\x11\n\n\n\
\x03\x17\x05\r\n\x0b\n\x04\x05\x01\x02\0\x12\x03\x18\x04\x13\n\x0c\n\x05\ \x02\x05\0\x12\x04\x16\0\x19\x01\n\n\n\x03\x05\0\x01\x12\x03\x16\x05\x0c\
\x05\x01\x02\0\x01\x12\x03\x18\x04\x0e\n\x0c\n\x05\x05\x01\x02\0\x02\x12\ \n\x0b\n\x04\x05\0\x02\0\x12\x03\x17\x04\x0e\n\x0c\n\x05\x05\0\x02\0\x01\
\x03\x18\x11\x12\n\x0b\n\x04\x05\x01\x02\x01\x12\x03\x19\x04\x0e\n\x0c\n\ \x12\x03\x17\x04\t\n\x0c\n\x05\x05\0\x02\0\x02\x12\x03\x17\x0c\r\n\x0b\n\
\x05\x05\x01\x02\x01\x01\x12\x03\x19\x04\t\n\x0c\n\x05\x05\x01\x02\x01\ \x04\x05\0\x02\x01\x12\x03\x18\x04\x0f\n\x0c\n\x05\x05\0\x02\x01\x01\x12\
\x02\x12\x03\x19\x0c\rb\x06proto3\ \x03\x18\x04\n\n\x0c\n\x05\x05\0\x02\x01\x02\x12\x03\x18\r\x0e\n\n\n\x02\
\x05\x01\x12\x04\x1a\0\x1d\x01\n\n\n\x03\x05\x01\x01\x12\x03\x1a\x05\r\n\
\x0b\n\x04\x05\x01\x02\0\x12\x03\x1b\x04\x13\n\x0c\n\x05\x05\x01\x02\0\
\x01\x12\x03\x1b\x04\x0e\n\x0c\n\x05\x05\x01\x02\0\x02\x12\x03\x1b\x11\
\x12\n\x0b\n\x04\x05\x01\x02\x01\x12\x03\x1c\x04\x0c\n\x0c\n\x05\x05\x01\
\x02\x01\x01\x12\x03\x1c\x04\x07\n\x0c\n\x05\x05\x01\x02\x01\x02\x12\x03\
\x1c\n\x0bb\x06proto3\
"; ";
static file_descriptor_proto_lazy: ::protobuf::rt::LazyV2<::protobuf::descriptor::FileDescriptorProto> = ::protobuf::rt::LazyV2::INIT; static file_descriptor_proto_lazy: ::protobuf::rt::LazyV2<::protobuf::descriptor::FileDescriptorProto> = ::protobuf::rt::LazyV2::INIT;

View file

@@ -9,6 +9,9 @@ message Revision {
     RevType ty = 6;
     string user_id = 7;
 }
+message RepeatedRevision {
+    repeated Revision items = 1;
+}
 message RevId {
     int64 value = 1;
 }
@@ -23,5 +26,5 @@ enum RevType {
 }
 enum RevState {
     StateLocal = 0;
-    Acked = 1;
+    Ack = 1;
 }

View file

@@ -92,6 +92,26 @@ impl Revision {
     }
 }

+#[derive(PartialEq, Debug, Default, ProtoBuf, Clone)]
+pub struct RepeatedRevision {
+    #[pb(index = 1)]
+    pub items: Vec<Revision>,
+}
+
+impl std::ops::Deref for RepeatedRevision {
+    type Target = Vec<Revision>;
+    fn deref(&self) -> &Self::Target { &self.items }
+}
+
+impl std::ops::DerefMut for RepeatedRevision {
+    fn deref_mut(&mut self) -> &mut Self::Target { &mut self.items }
+}
+
+impl RepeatedRevision {
+    pub fn into_inner(self) -> Vec<Revision> { self.items }
+}
+
 #[derive(Clone, Debug, ProtoBuf, Default)]
 pub struct RevId {
     #[pb(index = 1)]
@@ -167,5 +187,5 @@ pub fn md5<T: AsRef<[u8]>>(data: T) -> String {
 #[derive(Debug, Clone, Eq, PartialEq)]
 pub enum RevState {
     StateLocal = 0,
-    Acked = 1,
+    Ack = 1,
 }
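
For orientation, a small sketch of how the new RepeatedRevision entity from the @@ -92,6 +92,26 @@ hunk above can be handled. Both helper functions are hypothetical; they rely only on the Deref impl, into_inner(), and the public rev_id field of Revision:

// Hypothetical helper: Deref lets a RepeatedRevision be read like a Vec<Revision>.
fn newest_rev_id(revisions: &RepeatedRevision) -> Option<i64> {
    revisions.iter().map(|revision| revision.rev_id).max()
}

// Hypothetical helper: into_inner() returns the plain Vec<Revision> when ownership is needed.
fn into_vec(revisions: RepeatedRevision) -> Vec<Revision> {
    revisions.into_inner()
}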