chore: port UUIDs for everything except tests

This commit is contained in:
Bartosz Sypytkowski 2025-03-20 12:14:21 +01:00
parent d2c8238664
commit 0cedc39856
16 changed files with 363 additions and 334 deletions

View file

@ -81,7 +81,7 @@ impl DatabaseCollabService for TestDatabaseCollabService {
.flat_map(|(object_id, result)| match result {
Success { encode_collab_v1 } => match EncodedCollab::decode_from_bytes(&encode_collab_v1)
{
Ok(encode) => Some((object_id, encode)),
Ok(encode) => Some((object_id.to_string(), encode)),
Err(err) => {
error!("Failed to decode collab: {}", err);
None

View file

@ -1068,17 +1068,17 @@ impl TestClient {
pub async fn duplicate_published_to_workspace(
&self,
dest_workspace_id: &str,
src_view_id: &str,
dest_view_id: &str,
dest_workspace_id: Uuid,
src_view_id: Uuid,
dest_view_id: Uuid,
) {
self
.api_client
.duplicate_published_to_workspace(
dest_workspace_id,
&PublishedDuplicate {
published_view_id: src_view_id.to_string(),
dest_view_id: dest_view_id.to_string(),
published_view_id: src_view_id,
dest_view_id,
},
)
.await

View file

@ -1,3 +1,4 @@
use crate::{log_request_id, Client};
use bytes::Bytes;
use client_api_entity::publish_dto::DuplicatePublishedPageResponse;
use client_api_entity::workspace_dto::{PublishInfoView, PublishedView};
@ -10,8 +11,7 @@ use client_api_entity::{
use reqwest::Method;
use shared_entity::response::{AppResponse, AppResponseError};
use tracing::instrument;
use crate::{log_request_id, Client};
use uuid::Uuid;
// Publisher API
impl Client {
@ -409,7 +409,7 @@ impl Client {
pub async fn duplicate_published_to_workspace(
&self,
workspace_id: &str,
workspace_id: Uuid,
publish_duplicate: &PublishedDuplicate,
) -> Result<DuplicatePublishedPageResponse, AppResponseError> {
let url = format!(

View file

@ -258,8 +258,9 @@ pub struct QuerySnapshotParams {
pub snapshot_id: i64,
}
#[derive(Debug, Clone, Validate)]
#[derive(Debug, Clone, Validate, Serialize, Deserialize)]
pub struct QueryCollabParams {
#[serde(with = "uuid_str")]
pub workspace_id: Uuid,
#[validate(nested)]
pub inner: QueryCollab,
@ -361,7 +362,7 @@ pub enum QueryCollabResult {
}
#[derive(Serialize, Deserialize)]
pub struct BatchQueryCollabResult(pub HashMap<String, QueryCollabResult>);
pub struct BatchQueryCollabResult(pub HashMap<Uuid, QueryCollabResult>);
#[derive(Serialize, Deserialize)]
pub struct WorkspaceUsage {

View file

@ -133,7 +133,7 @@ pub trait CollabStorage: Send + Sync + 'static {
workspace_id: Uuid,
queries: Vec<QueryCollab>,
from_editing_collab: bool,
) -> HashMap<String, QueryCollabResult>;
) -> HashMap<Uuid, QueryCollabResult>;
/// Deletes a collaboration from the storage.
///

View file

@ -1,8 +1,8 @@
use std::collections::HashMap;
use serde::{Deserialize, Serialize};
use super::workspace_dto::{ViewIcon, ViewLayout};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
/// Copied from AppFlowy-IO/AppFlowy/frontend/rust-lib/flowy-folder-pub/src/entities.rs
/// TODO(zack): make AppFlowy use from this crate instead
@ -47,20 +47,20 @@ pub struct PublishDatabaseData {
/// The encoded collab data for the database rows
/// Use the row_id as the key
pub database_row_collabs: HashMap<String, Vec<u8>>,
pub database_row_collabs: HashMap<Uuid, Vec<u8>>,
/// The encoded collab data for the documents inside the database rows
/// It's not used for now
pub database_row_document_collabs: HashMap<String, Vec<u8>>,
pub database_row_document_collabs: HashMap<Uuid, Vec<u8>>,
/// Visible view ids
pub visible_database_view_ids: Vec<String>,
pub visible_database_view_ids: Vec<Uuid>,
/// Relation view id map
pub database_relations: HashMap<String, String>,
pub database_relations: HashMap<Uuid, Uuid>,
}
#[derive(Default, Deserialize, Serialize, Clone, Debug, Eq, PartialEq)]
pub struct DuplicatePublishedPageResponse {
pub view_id: String,
pub view_id: Uuid,
}

View file

@ -1,3 +1,4 @@
use app_error::AppError;
use chrono::{DateTime, Utc};
use collab_entity::{CollabType, EncodedCollab};
use database_entity::dto::{AFRole, AFWebUser, AFWorkspaceInvitationStatus, PublishInfo};
@ -239,7 +240,7 @@ pub struct CreatePageDatabaseViewParams {
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PageCollabData {
pub encoded_collab: Vec<u8>,
pub row_data: HashMap<String, Vec<u8>>,
pub row_data: HashMap<Uuid, Vec<u8>>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
@ -252,8 +253,8 @@ pub struct PageCollab {
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PublishedDuplicate {
pub published_view_id: String,
pub dest_view_id: String,
pub published_view_id: Uuid,
pub dest_view_id: Uuid,
}
#[derive(Default, Debug, Clone, Serialize, Deserialize)]
@ -333,7 +334,7 @@ pub struct PublishInfoView {
#[derive(Debug, Serialize, Deserialize)]
pub struct PublishPageParams {
pub publish_name: Option<String>,
pub visible_database_view_ids: Option<Vec<String>>,
pub visible_database_view_ids: Option<Vec<Uuid>>,
pub comments_enabled: Option<bool>,
pub duplicate_enabled: Option<bool>,
}
@ -422,8 +423,12 @@ impl ListDatabaseRowDetailParam {
with_doc: Some(with_doc),
}
}
pub fn into_ids(&self) -> Vec<&str> {
self.ids.split(',').collect()
pub fn into_ids(&self) -> Result<Vec<Uuid>, AppError> {
let mut res = Vec::new();
for uuid in self.ids.split(',') {
res.push(Uuid::parse_str(uuid)?);
}
Ok(res)
}
}

View file

@ -162,6 +162,6 @@ impl WorkspaceTemplateBuilder {
}
}
pub fn gen_view_id() -> String {
uuid::Uuid::new_v4().to_string()
pub fn gen_view_id() -> Uuid {
uuid::Uuid::new_v4()
}

View file

@ -443,7 +443,7 @@ where
workspace_id: Uuid,
queries: Vec<QueryCollab>,
from_editing_collab: bool,
) -> HashMap<String, QueryCollabResult> {
) -> HashMap<Uuid, QueryCollabResult> {
if queries.is_empty() {
return HashMap::new();
}
@ -504,9 +504,6 @@ where
.await,
);
results
.into_iter()
.map(|(k, v)| (k.to_string(), v))
.collect()
}
async fn delete_collab(&self, workspace_id: &Uuid, uid: &i64, object_id: &Uuid) -> AppResult<()> {

View file

@ -1499,7 +1499,7 @@ async fn delete_all_pages_from_trash_handler(
async fn publish_page_handler(
user_uuid: UserUuid,
path: web::Path<(Uuid, String)>,
path: web::Path<(Uuid, Uuid)>,
payload: Json<PublishPageParams>,
state: Data<AppState>,
) -> Result<Json<AppResponse<()>>> {
@ -1526,7 +1526,7 @@ async fn publish_page_handler(
uid,
*user_uuid,
workspace_id,
&view_id,
view_id,
visible_database_view_ids,
publish_name,
comments_enabled.unwrap_or(true),
@ -1767,7 +1767,7 @@ async fn update_collab_handler(
let (params, workspace_id) = payload.into_inner().split();
let uid = state.user_cache.get_user_uid(&user_uuid).await?;
let create_params = CreateCollabParams::from((workspace_id.to_string(), params));
let create_params = CreateCollabParams::from((workspace_id, params));
let (params, workspace_id) = create_params.split();
if state
.indexer_scheduler
@ -1984,7 +1984,7 @@ async fn post_published_duplicate_handler(
state.collab_access_control_storage.clone(),
uid,
params.published_view_id,
workspace_id.into_inner().to_string(),
workspace_id.into_inner(),
params.dest_view_id,
)
.await?;
@ -2003,7 +2003,7 @@ async fn list_published_collab_info_handler(
let publish_infos = biz::workspace::publish::list_collab_publish_info(
state.published_collab_store.as_ref(),
&state.collab_access_control_storage,
&workspace_id.into_inner(),
workspace_id.into_inner(),
)
.await?;
@ -2400,7 +2400,7 @@ async fn get_workspace_publish_outline_handler(
async fn list_database_handler(
user_uuid: UserUuid,
workspace_id: web::Path<String>,
workspace_id: web::Path<Uuid>,
state: Data<AppState>,
) -> Result<Json<AppResponse<Vec<AFDatabase>>>> {
let uid = state.user_cache.get_user_uid(&user_uuid).await?;
@ -2498,7 +2498,6 @@ async fn put_database_row_handler(
hash[10], hash[11], hash[12], hash[13], hash[14], hash[15],
])
};
let row_id_str = row_id.to_string();
biz::collab::ops::upsert_database_row(
state.collab_access_control_storage.clone(),
@ -2506,12 +2505,12 @@ async fn put_database_row_handler(
workspace_id,
db_id,
uid,
&row_id_str,
row_id,
cells,
document,
)
.await?;
Ok(Json(AppResponse::Ok().with_data(row_id_str)))
Ok(Json(AppResponse::Ok().with_data(row_id.to_string())))
}
async fn get_database_fields_handler(
@ -2594,7 +2593,7 @@ async fn list_database_row_id_updated_handler(
async fn list_database_row_details_handler(
user_uuid: UserUuid,
path_param: web::Path<(Uuid, String)>,
path_param: web::Path<(Uuid, Uuid)>,
state: Data<AppState>,
param: web::Query<ListDatabaseRowDetailParam>,
) -> Result<Json<AppResponse<Vec<AFDatabaseRowDetail>>>> {
@ -2602,17 +2601,7 @@ async fn list_database_row_details_handler(
let uid = state.user_cache.get_user_uid(&user_uuid).await?;
let list_db_row_query = param.into_inner();
let with_doc = list_db_row_query.with_doc.unwrap_or_default();
let row_ids = list_db_row_query.into_ids();
if let Err(e) = Uuid::parse_str(&db_id) {
return Err(AppError::InvalidRequest(format!("invalid database id `{}`: {}", db_id, e)).into());
}
for id in row_ids.iter() {
if let Err(e) = Uuid::parse_str(id) {
return Err(AppError::InvalidRequest(format!("invalid row id `{}`: {}", id, e)).into());
}
}
let row_ids = list_db_row_query.into_ids()?;
state
.workspace_access_control
@ -2679,7 +2668,7 @@ async fn parser_realtime_msg(
#[instrument(level = "debug", skip_all)]
async fn get_collab_embed_info_handler(
path: web::Path<(String, String)>,
path: web::Path<(String, Uuid)>,
state: Data<AppState>,
) -> Result<Json<AppResponse<AFCollabEmbedInfo>>> {
let (_, object_id) = path.into_inner();
@ -2787,8 +2776,8 @@ async fn collab_full_sync_handler(
let (tx, rx) = tokio::sync::oneshot::channel();
let message = ClientHttpUpdateMessage {
user,
workspace_id: workspace_id.to_string(),
object_id: object_id.to_string(),
workspace_id,
object_id,
collab_type,
update: Bytes::from(doc_state),
state_vector: Some(Bytes::from(sv)),

View file

@ -193,10 +193,11 @@ impl PostgresDatabaseCollabService {
impl DatabaseCollabService for PostgresDatabaseCollabService {
async fn build_collab(
&self,
object_id: Uuid,
object_id: &str,
object_type: CollabType,
encoded_collab: Option<(EncodedCollab, bool)>,
) -> Result<Collab, DatabaseError> {
let object_id = Uuid::parse_str(object_id)?;
match encoded_collab {
None => Collab::new_with_source(
CollabOrigin::Empty,
@ -219,18 +220,25 @@ impl DatabaseCollabService for PostgresDatabaseCollabService {
async fn get_collabs(
&self,
object_ids: Vec<Uuid>,
object_ids: Vec<String>,
collab_type: CollabType,
) -> Result<EncodeCollabByOid, DatabaseError> {
let mut object_uuids = Vec::with_capacity(object_ids.len());
for object_id in object_ids {
object_uuids.push(Uuid::parse_str(&object_id)?);
}
let encoded_collabs = batch_get_latest_collab_encoded(
&self.collab_storage,
GetCollabOrigin::Server,
self.workspace_id,
&object_ids,
&object_uuids,
collab_type,
)
.await
.unwrap();
.unwrap()
.into_iter()
.map(|(k, v)| (k.to_string(), v))
.collect();
Ok(encoded_collabs)
}

View file

@ -362,7 +362,7 @@ pub async fn list_database(
let db_metas = ws_body.get_all_meta(&ws_body_collab.transact());
let folder =
get_latest_collab_folder(collab_storage, GetCollabOrigin::User { uid }, &workspace_id).await?;
get_latest_collab_folder(collab_storage, GetCollabOrigin::User { uid }, workspace_id).await?;
let trash = folder
.get_all_trash_sections()
@ -422,22 +422,24 @@ pub async fn insert_database_row(
workspace_uuid: Uuid,
database_uuid: Uuid,
uid: i64,
new_db_row_id: Option<&str>,
new_db_row_id: Option<Uuid>,
cell_value_by_id: HashMap<String, serde_json::Value>,
row_doc_content: Option<String>,
) -> Result<String, AppError> {
let new_db_row_id: RowId = new_db_row_id
.map(|id| RowId::from(id.to_string()))
.unwrap_or_else(gen_row_id);
let new_db_row_id = new_db_row_id.unwrap_or_else(Uuid::new_v4);
let new_db_row_id_str = RowId::from(new_db_row_id.to_string());
let creation_time = Utc::now();
let mut new_db_row_collab =
Collab::new_with_origin(CollabOrigin::Empty, new_db_row_id.clone(), vec![], false);
let mut new_db_row_collab = Collab::new_with_origin(
CollabOrigin::Empty,
new_db_row_id.to_string(),
vec![],
false,
);
let new_db_row_body = DatabaseRowBody::create(
new_db_row_id.clone(),
new_db_row_id_str.clone(),
&mut new_db_row_collab,
Row::empty(new_db_row_id.clone(), &database_uuid.to_string()),
Row::empty(new_db_row_id_str, &database_uuid.to_string()),
);
new_db_row_body.update(&mut new_db_row_collab.transact_mut(), |row_update| {
row_update.set_created_at(Utc::now().timestamp());
@ -446,8 +448,7 @@ pub async fn insert_database_row(
let new_row_doc_creation: Option<(Uuid, CreatedRowDocument)> = match row_doc_content {
Some(row_doc_content) if !row_doc_content.is_empty() => {
// update row to indicate that the document is not empty
let is_document_empty_id =
meta_id_from_row_id(&new_db_row_id.parse()?, RowMetaKey::IsDocumentEmpty);
let is_document_empty_id = meta_id_from_row_id(&new_db_row_id, RowMetaKey::IsDocumentEmpty);
new_db_row_body.get_meta().insert(
&mut new_db_row_collab.transact_mut(),
is_document_empty_id,
@ -460,10 +461,11 @@ pub async fn insert_database_row(
.map_err(|err| AppError::Internal(anyhow::anyhow!("Failed to get document id: {:?}", err)))?
.ok_or_else(|| AppError::Internal(anyhow::anyhow!("Failed to get document id")))?;
let new_doc_id = Uuid::parse_str(&new_doc_id)?;
let created_row_doc = create_row_document(
workspace_uuid,
uid,
&new_doc_id,
new_doc_id,
&collab_storage,
row_doc_content,
)
@ -488,7 +490,7 @@ pub async fn insert_database_row(
let ts_now = creation_time.timestamp();
let row_order = db_body
.create_row(CreateRowParams {
id: new_db_row_id.clone(),
id: new_db_row_id.to_string().into(),
database_id: database_uuid.to_string(),
cells: new_db_row_body
.cells(&new_db_row_collab.transact())
@ -582,7 +584,7 @@ pub async fn insert_database_row(
workspace_uuid,
&uid,
CollabParams {
object_id: database_uuid.to_string(),
object_id: database_uuid,
encoded_collab_v1: updated_db_collab.into(),
collab_type: CollabType::Database,
},
@ -592,7 +594,7 @@ pub async fn insert_database_row(
.await?;
db_txn.commit().await?;
broadcast_update_with_timeout(collab_storage, database_uuid.to_string(), db_collab_update).await;
broadcast_update_with_timeout(collab_storage, database_uuid, db_collab_update).await;
Ok(new_db_row_id.to_string())
}
@ -603,7 +605,7 @@ pub async fn upsert_database_row(
workspace_uuid: Uuid,
database_uuid: Uuid,
uid: i64,
row_id: &str,
row_id: Uuid,
cell_value_by_id: HashMap<String, serde_json::Value>,
row_doc_content: Option<String>,
) -> Result<(), AppError> {
@ -644,13 +646,13 @@ pub async fn upsert_database_row(
.await?;
// determine if there are any document changes
let doc_changes: Option<(String, DocChanges)> = get_database_row_doc_changes(
let doc_changes: Option<(Uuid, DocChanges)> = get_database_row_doc_changes(
&collab_storage,
workspace_uuid,
row_doc_content,
&db_row_body,
&mut db_row_txn,
row_id,
&row_id,
uid,
)
.await?;
@ -667,7 +669,7 @@ pub async fn upsert_database_row(
workspace_uuid,
&uid,
CollabParams {
object_id: row_id.to_string(),
object_id: row_id,
encoded_collab_v1: db_row_ec_v1.into(),
collab_type: CollabType::DatabaseRow,
},
@ -675,12 +677,7 @@ pub async fn upsert_database_row(
"inserting new database row from server",
)
.await?;
broadcast_update_with_timeout(
collab_storage.clone(),
row_id.to_string(),
db_row_collab_updates,
)
.await;
broadcast_update_with_timeout(collab_storage.clone(), row_id, db_row_collab_updates).await;
// handle document changes
if let Some((doc_id, doc_changes)) = doc_changes {
@ -691,7 +688,7 @@ pub async fn upsert_database_row(
workspace_uuid,
&uid,
CollabParams {
object_id: doc_id.clone(),
object_id: doc_id,
encoded_collab_v1: updated_doc.into(),
collab_type: CollabType::Document,
},
@ -862,7 +859,7 @@ pub async fn list_database_row_details(
uid: i64,
workspace_uuid: Uuid,
database_uuid: Uuid,
row_ids: &[&str],
row_ids: &[Uuid],
unsupported_field_types: &[FieldType],
with_doc: bool,
) -> Result<Vec<AFDatabaseRowDetail>, AppError> {
@ -884,7 +881,7 @@ pub async fn list_database_row_details(
let query_collabs: Vec<QueryCollab> = row_ids
.iter()
.map(|id| QueryCollab {
object_id: id.to_string(),
object_id: *id,
collab_type: CollabType::DatabaseRow,
})
.collect();
@ -901,14 +898,20 @@ pub async fn list_database_row_details(
return None;
},
};
let collab =
match Collab::new_with_source(CollabOrigin::Server, &id, ec.into(), vec![], false) {
Ok(collab) => collab,
Err(err) => {
tracing::error!("Failed to create collab: {:?}", err);
return None;
},
};
let id = id.to_string();
let collab = match Collab::new_with_source(
CollabOrigin::Server,
&id.to_string(),
ec.into(),
vec![],
false,
) {
Ok(collab) => collab,
Err(err) => {
tracing::error!("Failed to create collab: {:?}", err);
return None;
},
};
let row_detail = match RowDetail::from_collab(&collab) {
Some(row_detail) => row_detail,
None => {
@ -941,8 +944,10 @@ pub async fn list_database_row_details(
.flat_map(|row| {
row.id.parse::<Uuid>().ok().map(|row_uuid| {
(
row.id.clone(),
meta_id_from_row_id(&row_uuid, RowMetaKey::DocumentId),
row_uuid,
meta_id_from_row_id(&row_uuid, RowMetaKey::DocumentId)
.parse::<Uuid>()
.unwrap(),
)
})
})
@ -950,7 +955,7 @@ pub async fn list_database_row_details(
let query_db_docs = doc_id_by_row_id
.values()
.map(|doc_id| QueryCollab {
object_id: doc_id.to_string(),
object_id: *doc_id,
collab_type: CollabType::Document,
})
.collect::<Vec<_>>();
@ -969,16 +974,18 @@ pub async fn list_database_row_details(
fn fill_in_db_row_doc(
row_detail: &mut AFDatabaseRowDetail,
doc_id_by_row_id: &HashMap<String, String>,
query_res: &mut HashMap<String, QueryCollabResult>,
doc_id_by_row_id: &HashMap<Uuid, Uuid>,
query_res: &mut HashMap<Uuid, QueryCollabResult>,
) -> Result<(), AppError> {
let doc_id = doc_id_by_row_id.get(&row_detail.id).ok_or_else(|| {
AppError::Internal(anyhow::anyhow!(
"Failed to get document id for row id: {}",
row_detail.id
))
})?;
let res = query_res.remove(doc_id.as_str()).ok_or_else(|| {
let doc_id = doc_id_by_row_id
.get(&row_detail.id.parse()?)
.ok_or_else(|| {
AppError::Internal(anyhow::anyhow!(
"Failed to get document id for row id: {}",
row_detail.id
))
})?;
let res = query_res.remove(doc_id).ok_or_else(|| {
AppError::Internal(anyhow::anyhow!(
"Failed to get document collab for row id: {}",
row_detail.id
@ -990,13 +997,19 @@ fn fill_in_db_row_doc(
QueryCollabResult::Failed { error } => return Err(AppError::Internal(anyhow::anyhow!(error))),
};
let ec = EncodedCollab::decode_from_bytes(&ec_bytes)?;
let doc_collab = Collab::new_with_source(CollabOrigin::Server, doc_id, ec.into(), vec![], false)
.map_err(|err| {
AppError::Internal(anyhow::anyhow!(
"Failed to create document collab: {:?}",
err
))
})?;
let doc_collab = Collab::new_with_source(
CollabOrigin::Server,
&doc_id.to_string(),
ec.into(),
vec![],
false,
)
.map_err(|err| {
AppError::Internal(anyhow::anyhow!(
"Failed to create document collab: {:?}",
err
))
})?;
let doc = Document::open(doc_collab)
.map_err(|err| AppError::Internal(anyhow::anyhow!("Failed to open document: {:?}", err)))?;
let plain_text = doc.to_plain_text(true, false).map_err(|err| {

View file

@ -275,7 +275,7 @@ pub async fn batch_get_latest_collab_encoded(
workspace_id: Uuid,
oid_list: &[Uuid],
collab_type: CollabType,
) -> Result<HashMap<String, EncodedCollab>, AppError> {
) -> Result<HashMap<Uuid, EncodedCollab>, AppError> {
let uid = match collab_origin {
GetCollabOrigin::User { uid } => uid,
_ => 0,
@ -291,7 +291,7 @@ pub async fn batch_get_latest_collab_encoded(
.batch_get_collab(&uid, workspace_id, queries, true)
.await;
let encoded_collabs = tokio::task::spawn_blocking(move || {
let collabs: HashMap<String, EncodedCollab> = query_collab_results
let collabs: HashMap<_, EncodedCollab> = query_collab_results
.into_par_iter()
.filter_map(|(oid, query_collab_result)| match query_collab_result {
QueryCollabResult::Success { encode_collab_v1 } => {
@ -523,15 +523,16 @@ pub async fn write_to_database_row(
pub async fn create_row_document(
workspace_id: Uuid,
uid: i64,
new_doc_id: &str,
new_doc_id: Uuid,
collab_storage: &CollabAccessControlStorage,
row_doc_content: String,
) -> Result<CreatedRowDocument, AppError> {
let md_importer = MDImporter::new(None);
let new_doc_id_str = new_doc_id.to_string();
let doc_data = md_importer
.import(new_doc_id, row_doc_content)
.import(&new_doc_id_str, row_doc_content)
.map_err(|e| AppError::Internal(anyhow::anyhow!("Failed to import markdown: {:?}", e)))?;
let doc = Document::create(new_doc_id, doc_data)
let doc = Document::create(&new_doc_id_str, doc_data)
.map_err(|e| AppError::Internal(anyhow::anyhow!("Failed to create document: {:?}", e)))?;
let doc_ec = doc.encode_collab().map_err(|e| {
AppError::Internal(anyhow::anyhow!("Failed to encode document collab: {:?}", e))
@ -543,7 +544,11 @@ pub async fn create_row_document(
let mut folder_txn = folder.collab.transact_mut();
folder.body.views.insert(
&mut folder_txn,
collab_folder::View::orphan_view(new_doc_id, collab_folder::ViewLayout::Document, Some(uid)),
collab_folder::View::orphan_view(
&new_doc_id_str,
collab_folder::ViewLayout::Document,
Some(uid),
),
None,
);
folder_txn.encode_update_v1()
@ -573,9 +578,9 @@ pub async fn get_database_row_doc_changes(
row_doc_content: Option<String>,
db_row_body: &DatabaseRowBody,
db_row_txn: &mut yrs::TransactionMut<'_>,
row_id: &str,
row_id: &Uuid,
uid: i64,
) -> Result<Option<(String, DocChanges)>, AppError> {
) -> Result<Option<(Uuid, DocChanges)>, AppError> {
let row_doc_content = match row_doc_content {
Some(row_doc_content) if !row_doc_content.is_empty() => row_doc_content,
_ => return Ok(None),
@ -587,11 +592,12 @@ pub async fn get_database_row_doc_changes(
match doc_id {
Some(doc_id) => {
let doc_uuid = Uuid::parse_str(&doc_id)?;
let cur_doc = get_latest_collab_document(
collab_storage,
GetCollabOrigin::Server,
workspace_id,
doc_id,
doc_uuid,
)
.await?;
@ -628,11 +634,14 @@ pub async fn get_database_row_doc_changes(
}
let updated_doc = collab_to_bin(cur_doc_collab, CollabType::Document).await?;
Ok(Some((doc_id, DocChanges::Update(updated_doc, doc_update))))
Ok(Some((
doc_uuid,
DocChanges::Update(updated_doc, doc_update),
)))
},
None => {
// update row to indicate that the document is not empty
let is_document_empty_id = meta_id_from_row_id(&row_id.parse()?, RowMetaKey::IsDocumentEmpty);
let is_document_empty_id = meta_id_from_row_id(row_id, RowMetaKey::IsDocumentEmpty);
db_row_body
.get_meta()
.insert(db_row_txn, is_document_empty_id, false);
@ -643,10 +652,11 @@ pub async fn get_database_row_doc_changes(
.map_err(|err| AppError::Internal(anyhow::anyhow!("Failed to get document id: {:?}", err)))?
.ok_or_else(|| AppError::Internal(anyhow::anyhow!("Failed to get document id")))?;
let new_doc_id = Uuid::parse_str(&new_doc_id)?;
let created_row_doc: CreatedRowDocument = create_row_document(
workspace_id,
uid,
&new_doc_id,
new_doc_id,
collab_storage,
row_doc_content,
)

View file

@ -168,9 +168,10 @@ fn duplicate_database_data_with_context(
database_id: database_id.clone(),
view_id: context
.view_id_mapping
.get(&view.id)
.get(&Uuid::parse_str(&view.id).unwrap())
.cloned()
.unwrap_or_else(gen_database_view_id),
.unwrap_or_else(Uuid::new_v4)
.to_string(),
name: view.name.clone(),
layout: view.layout,
layout_settings: view.layout_settings.clone(),
@ -208,7 +209,7 @@ async fn duplicate_database(
});
for database_view_id in &duplicate_context.database_view_ids {
let database_id = workspace_database
.get_database_meta_with_view_id(database_view_id)
.get_database_meta_with_view_id(&database_view_id.to_string())
.ok_or_else(|| {
AppError::Internal(anyhow!("Database view id {} not found", database_view_id))
})?
@ -247,8 +248,9 @@ async fn duplicate_database(
))
})?;
let mut collab_params_list = vec![];
let database_id = Uuid::parse_str(&duplicated_database.get_database_id())?;
collab_params_list.push(CollabParams {
object_id: duplicated_database.get_database_id().clone(),
object_id: database_id,
encoded_collab_v1: encoded_database
.encoded_database_collab
.encoded_collab
@ -257,8 +259,9 @@ async fn duplicate_database(
collab_type: CollabType::Database,
});
for row in encoded_database.encoded_row_collabs {
let row_id = Uuid::parse_str(&row.object_id.clone())?;
collab_params_list.push(CollabParams {
object_id: row.object_id.clone(),
object_id: row_id,
encoded_collab_v1: row.encoded_collab.encode_to_bytes()?.into(),
collab_type: CollabType::DatabaseRow,
});
@ -275,12 +278,13 @@ async fn duplicate_database(
);
txn.encode_update_v1()
};
let workspace_database_id = Uuid::parse_str(workspace_database.collab.object_id())?;
update_workspace_database_data(
appflowy_web_metrics,
server.clone(),
user.clone(),
workspace_id,
workspace_database.collab.object_id(),
workspace_database_id,
encoded_update,
)
.await?;
@ -321,7 +325,7 @@ async fn duplicate_document(
))
})?;
let new_collab_param =
duplicate_document_encoded_collab(&collab_id, new_collab_id, encoded_collab)?;
duplicate_document_encoded_collab(&collab_id, *new_collab_id, encoded_collab)?;
collab_params_list.push(new_collab_param);
},
QueryCollabResult::Failed { error: _ } => {
@ -336,10 +340,10 @@ async fn duplicate_document(
}
struct DuplicateContext {
view_id_mapping: HashMap<String, String>,
view_id_mapping: HashMap<Uuid, Uuid>,
duplicated_views: Vec<View>,
database_view_ids: HashSet<String>,
document_view_ids: HashSet<String>,
database_view_ids: HashSet<Uuid>,
document_view_ids: HashSet<Uuid>,
}
fn duplicate_views(views: &[View], suffix: &str) -> Result<DuplicateContext, AppError> {
@ -350,21 +354,23 @@ fn duplicate_views(views: &[View], suffix: &str) -> Result<DuplicateContext, App
)))?
.parent_view_id
.clone();
let mut view_id_mapping: HashMap<String, String> = HashMap::new();
let mut view_id_mapping = HashMap::new();
let mut duplicated_views = vec![];
let mut database_view_ids = HashSet::new();
let mut document_view_ids = HashSet::new();
for view in views {
let duplicated_view_id = Uuid::new_v4().to_string();
view_id_mapping.insert(view.id.clone(), duplicated_view_id);
let view_id = Uuid::parse_str(&view.id)?;
let duplicated_view_id = Uuid::new_v4();
view_id_mapping.insert(view_id, duplicated_view_id);
}
for (index, view) in views.iter().enumerate() {
let orig_parent_view_id = view.parent_view_id.clone();
let duplicated_parent_view_id = if orig_parent_view_id == root_parent_id {
let view_id = Uuid::parse_str(&view.id)?;
let orig_parent_view_id = Uuid::parse_str(&view.parent_view_id)?;
let duplicated_parent_view_id = if view.parent_view_id == root_parent_id {
orig_parent_view_id
} else {
view_id_mapping
.get(&view.parent_view_id)
.get(&orig_parent_view_id)
.cloned()
.ok_or(AppError::Internal(anyhow::anyhow!(
"Failed to find duplicated parent view id {}",
@ -380,13 +386,14 @@ fn duplicate_views(views: &[View], suffix: &str) -> Result<DuplicateContext, App
}
}
duplicated_view.id = view_id_mapping
.get(&view.id)
.get(&view_id)
.cloned()
.ok_or(AppError::Internal(anyhow::anyhow!(
"Failed to find duplicated view id {}",
view.id
)))?;
duplicated_view.parent_view_id = duplicated_parent_view_id.clone();
)))?
.to_string();
duplicated_view.parent_view_id = duplicated_parent_view_id.to_string();
if index == 0 {
duplicated_view.name = format!("{}{}", duplicated_view.name, suffix);
}
@ -400,10 +407,10 @@ fn duplicate_views(views: &[View], suffix: &str) -> Result<DuplicateContext, App
duplicated_views.push(duplicated_view);
match &view.layout {
layout if layout.is_document() => {
document_view_ids.insert(view.id.clone());
document_view_ids.insert(view_id);
},
layout if layout.is_database() => {
database_view_ids.insert(view.id.clone());
database_view_ids.insert(view_id);
},
_ => (),
}

View file

@ -1089,12 +1089,14 @@ async fn create_database_page(
let row_collab_params_list = encoded_database
.encoded_row_collabs
.iter()
.map(|row_collab| CollabParams {
object_id: row_collab.object_id,
encoded_collab_v1: row_collab.encoded_collab.encode_to_bytes().unwrap().into(),
collab_type: CollabType::DatabaseRow,
.flat_map(|row_collab| {
Some(CollabParams {
object_id: Uuid::parse_str(&row_collab.object_id).ok()?,
encoded_collab_v1: row_collab.encoded_collab.encode_to_bytes().unwrap().into(),
collab_type: CollabType::DatabaseRow,
})
})
.collect_vec();
.collect();
let mut transaction = pg_pool.begin().await?;
let start = Instant::now();
@ -1460,8 +1462,8 @@ pub async fn publish_page(
uid: i64,
user_uuid: Uuid,
workspace_id: Uuid,
view_id: &str,
visible_database_view_ids: Option<Vec<String>>,
view_id: Uuid,
visible_database_view_ids: Option<Vec<Uuid>>,
publish_name: Option<impl ToString>,
comments_enabled: bool,
duplicate_enabled: bool,
@ -1473,7 +1475,7 @@ pub async fn publish_page(
)
.await?;
let view = folder
.get_view(view_id)
.get_view(&view_id.to_string())
.ok_or(AppError::InvalidFolderView(format!(
"View {} not found",
view_id
@ -1527,10 +1529,10 @@ pub async fn publish_page(
.publish_collabs(
vec![PublishCollabItem {
meta: PublishCollabMetadata {
view_id: Uuid::parse_str(view_id).unwrap(),
view_id,
publish_name: publish_name
.map(|name| name.to_string())
.unwrap_or_else(|| generate_publish_name(view_id, &view.name)),
.unwrap_or_else(|| generate_publish_name(&view.id, &view.name)),
metadata: serde_json::value::to_value(metadata).unwrap(),
},
data: publish_data,
@ -1548,7 +1550,7 @@ async fn generate_publish_data_for_document(
collab_access_control_storage: &CollabAccessControlStorage,
uid: i64,
workspace_id: Uuid,
view_id: &str,
view_id: Uuid,
) -> Result<Vec<u8>, AppError> {
let collab = get_latest_collab_encoded(
collab_access_control_storage,
@ -1566,8 +1568,8 @@ async fn generate_publish_data_for_database(
collab_storage: &CollabAccessControlStorage,
uid: i64,
workspace_id: Uuid,
view_id: &str,
visible_database_view_ids: Option<Vec<String>>,
view_id: Uuid,
visible_database_view_ids: Option<Vec<Uuid>>,
) -> Result<Vec<u8>, AppError> {
let (_, ws_db) = get_latest_workspace_database(
collab_storage,
@ -1578,7 +1580,7 @@ async fn generate_publish_data_for_database(
.await?;
let db_oid = {
ws_db
.get_database_meta_with_view_id(view_id)
.get_database_meta_with_view_id(&view_id.to_string())
.ok_or(AppError::NoRequiredData(format!(
"Database view {} not found",
view_id
@ -1598,7 +1600,7 @@ async fn generate_publish_data_for_database(
.views
.get_row_orders(&txn, &inline_view_id)
.iter()
.map(|ro| ro.id.to_string())
.flat_map(|ro| Uuid::parse_str(&ro.id))
.collect()
};
let encoded_rows = batch_get_latest_collab_encoded(
@ -1609,20 +1611,20 @@ async fn generate_publish_data_for_database(
CollabType::DatabaseRow,
)
.await?;
let row_data: HashMap<String, Vec<u8>> = encoded_rows
let row_data: HashMap<_, Vec<u8>> = encoded_rows
.into_iter()
.map(|(oid, encoded_collab)| (oid, encoded_collab.doc_state.to_vec()))
.collect();
let row_document_ids = row_ids
let row_document_ids: Vec<_> = row_ids
.iter()
.filter_map(|row_id| {
db_body
.block
.get_row_document_id(&RowId::from(row_id.to_owned()))
.map(|doc_id| doc_id.to_string())
.and_then(|doc_id| Uuid::parse_str(&doc_id).ok())
})
.collect_vec();
.collect();
let encoded_row_documents = batch_get_latest_collab_encoded(
collab_storage,
GetCollabOrigin::User { uid },
@ -1631,7 +1633,7 @@ async fn generate_publish_data_for_database(
CollabType::Document,
)
.await?;
let row_document_data: HashMap<String, Vec<u8>> = encoded_row_documents
let row_document_data: HashMap<_, _> = encoded_row_documents
.into_iter()
.map(|(oid, encoded_collab)| (oid, encoded_collab.doc_state.to_vec()))
.collect();
@ -1640,8 +1642,8 @@ async fn generate_publish_data_for_database(
database_collab: collab_to_doc_state(db_collab, CollabType::Database).await?,
database_row_collabs: row_data,
database_row_document_collabs: row_document_data,
visible_database_view_ids: visible_database_view_ids.unwrap_or(vec![view_id.to_string()]),
database_relations: HashMap::from([(db_oid, view_id.to_string())]),
visible_database_view_ids: visible_database_view_ids.unwrap_or(vec![view_id]),
database_relations: HashMap::from([(db_oid, view_id)]),
};
Ok(serde_json::ser::to_vec(&data)?)
}
@ -1843,19 +1845,19 @@ async fn get_page_collab_data_for_database(
let txn = db_collab.transact();
db_body.get_inline_view_id(&txn)
};
let row_ids: Vec<RowId> = {
let row_ids: Vec<_> = {
let txn = db_collab.transact();
db_body
.views
.get_row_orders(&txn, &inline_view_id)
.iter()
.map(|ro| ro.id.clone())
.flat_map(|ro| Uuid::parse_str(&ro.id).ok())
.collect()
};
let queries: Vec<QueryCollab> = row_ids
.iter()
.map(|row_id| QueryCollab {
object_id: row_id.to_string(),
object_id: *row_id,
collab_type: CollabType::DatabaseRow,
})
.collect();
@ -1863,7 +1865,7 @@ async fn get_page_collab_data_for_database(
.batch_get_collab(&uid, workspace_id, queries, true)
.await;
let row_data = tokio::task::spawn_blocking(move || {
let row_collabs: HashMap<String, Vec<u8>> = row_query_collab_results
let row_collabs: HashMap<_, _> = row_query_collab_results
.into_par_iter()
.filter_map(|(row_id, query_collab_result)| match query_collab_result {
QueryCollabResult::Success { encode_collab_v1 } => {

View file

@ -56,10 +56,10 @@ pub async fn duplicate_published_collab_to_workspace(
bucket_client: AwsS3BucketClientImpl,
collab_storage: Arc<CollabAccessControlStorage>,
dest_uid: i64,
publish_view_id: String,
dest_workspace_id: String,
dest_view_id: String,
) -> Result<String, AppError> {
publish_view_id: Uuid,
dest_workspace_id: Uuid,
dest_view_id: Uuid,
) -> Result<Uuid, AppError> {
let copier = PublishCollabDuplicator::new(
pg_pool.clone(),
bucket_client,
@ -70,7 +70,7 @@ pub async fn duplicate_published_collab_to_workspace(
);
let time_now = chrono::Utc::now().timestamp_millis();
let root_view_id_for_duplicate = copier.duplicate(&publish_view_id).await?;
let root_view_id_for_duplicate = copier.duplicate(publish_view_id).await?;
let elapsed = chrono::Utc::now().timestamp_millis() - time_now;
tracing::info!(
"duplicate_published_collab_to_workspace: elapsed time: {}ms",
@ -85,18 +85,18 @@ pub struct PublishCollabDuplicator {
collab_storage: Arc<CollabAccessControlStorage>,
/// A map to store the old view_id that was duplicated and new view_id assigned.
/// If value is none, it means the view_id is not published.
duplicated_refs: HashMap<String, Option<String>>,
duplicated_refs: HashMap<Uuid, Option<Uuid>>,
/// published_database_id -> view_id
duplicated_db_main_view: HashMap<String, String>,
duplicated_db_main_view: HashMap<Uuid, Uuid>,
/// published_database_view_id -> new_view_id
duplicated_db_view: HashMap<String, String>,
duplicated_db_view: HashMap<Uuid, Uuid>,
/// published_database_row_id -> new_row_id
duplicated_db_row: HashMap<String, String>,
duplicated_db_row: HashMap<Uuid, Uuid>,
/// new views to be added to the folder
/// view_id -> view
views_to_add: HashMap<String, View>,
views_to_add: HashMap<Uuid, View>,
/// A list of database linked views to be added to workspace database
workspace_databases: HashMap<String, Vec<String>>,
workspace_databases: HashMap<Uuid, Vec<Uuid>>,
/// A list of collab objects to added to the workspace (oid -> collab)
collabs_to_insert: HashMap<Uuid, (CollabType, Vec<u8>)>,
/// time of duplication
@ -111,7 +111,7 @@ pub struct PublishCollabDuplicator {
/// workspace to duplicate into
dest_workspace_id: Uuid,
/// view of workspace to duplicate into
dest_view_id: String,
dest_view_id: Uuid,
}
impl PublishCollabDuplicator {
@ -120,8 +120,8 @@ impl PublishCollabDuplicator {
bucket_client: AwsS3BucketClientImpl,
collab_storage: Arc<CollabAccessControlStorage>,
dest_uid: i64,
dest_workspace_id: String,
dest_view_id: String,
dest_workspace_id: Uuid,
dest_view_id: Uuid,
) -> Self {
let ts_now = chrono::Utc::now().timestamp();
Self {
@ -142,14 +142,11 @@ impl PublishCollabDuplicator {
}
}
async fn duplicate(mut self, publish_view_id: &str) -> Result<String, AppError> {
async fn duplicate(mut self, publish_view_id: Uuid) -> Result<Uuid, AppError> {
// new view after deep copy
// this is the root of the document/database duplicated
let root_view_id = gen_view_id();
let mut root_view = match self
.deep_copy(root_view_id.clone(), publish_view_id)
.await?
{
let mut root_view = match self.deep_copy(root_view_id, publish_view_id).await? {
Some(v) => v,
None => {
return Err(AppError::RecordNotFound(
@ -157,7 +154,7 @@ impl PublishCollabDuplicator {
))
},
};
root_view.parent_view_id.clone_from(&self.dest_view_id);
root_view.parent_view_id = self.dest_view_id.to_string();
// destructuring self to own inner values, avoids cloning
let PublishCollabDuplicator {
@ -201,15 +198,15 @@ impl PublishCollabDuplicator {
// update database if any
if !workspace_databases.is_empty() {
let ws_db_oid = select_workspace_database_oid(&pg_pool, &dest_workspace_id.parse()?).await?;
let ws_db_oid = select_workspace_database_oid(&pg_pool, &dest_workspace_id).await?;
let ws_db_collab = {
let ws_database_ec = get_latest_collab_encoded(
&collab_storage,
GetCollabOrigin::User {
uid: duplicator_uid,
},
&dest_workspace_id,
&ws_db_oid,
dest_workspace_id,
ws_db_oid,
CollabType::WorkspaceDatabase,
)
.await?;
@ -223,7 +220,15 @@ impl PublishCollabDuplicator {
let ws_db_updates = {
let view_ids_by_database_id = workspace_databases
.into_iter()
.map(|(database_id, view_ids)| (database_id, view_ids.into_iter().collect()))
.map(|(database_id, view_ids)| {
(
database_id.to_string(),
view_ids
.into_iter()
.map(|view_id| view_id.to_string())
.collect(),
)
})
.collect::<HashMap<_, _>>();
ws_db
@ -246,7 +251,7 @@ impl PublishCollabDuplicator {
collab_storage
.upsert_new_collab_with_transaction(
&dest_workspace_id,
dest_workspace_id,
&duplicator_uid,
CollabParams {
object_id: ws_db_oid.clone(),
@ -257,7 +262,7 @@ impl PublishCollabDuplicator {
"duplicate workspace database collab",
)
.await?;
broadcast_update(&collab_storage, &ws_db_oid, ws_db_updates).await?;
broadcast_update(&collab_storage, ws_db_oid, ws_db_updates).await?;
}
let collab_folder_encoded = get_latest_collab_encoded(
@ -265,19 +270,18 @@ impl PublishCollabDuplicator {
GetCollabOrigin::User {
uid: duplicator_uid,
},
&dest_workspace_id,
&dest_workspace_id,
dest_workspace_id,
dest_workspace_id,
CollabType::Folder,
)
.await?;
let cloned_dest_workspace_id = dest_workspace_id.clone();
let mut folder = tokio::task::spawn_blocking(move || {
Folder::from_collab_doc_state(
duplicator_uid,
CollabOrigin::Server,
collab_folder_encoded.into(),
&cloned_dest_workspace_id,
&dest_workspace_id.to_string(),
vec![],
)
.map_err(|e| AppError::Unhandled(e.to_string()))
@ -289,8 +293,8 @@ impl PublishCollabDuplicator {
let mut folder_txn = folder.collab.transact_mut();
let mut duplicated_view_ids = HashSet::new();
duplicated_view_ids.insert(root_view.id.clone());
duplicated_view_ids.insert(dest_view_id);
duplicated_view_ids.insert(root_view.id.parse().unwrap());
folder.body.views.insert(&mut folder_txn, root_view, None);
// when child views are added, it must have a parent view that is previously added
@ -302,10 +306,10 @@ impl PublishCollabDuplicator {
let mut inserted = vec![];
for (view_id, view) in views_to_add.iter() {
let parent_view_id = Uuid::parse_str(&view.parent_view_id).unwrap();
// allow to insert if parent view is already inserted
// or if view is standalone (view_id == parent_view_id)
if duplicated_view_ids.contains(&view.parent_view_id) || *view_id == view.parent_view_id
{
if duplicated_view_ids.contains(&parent_view_id) || *view_id == parent_view_id {
folder
.body
.views
@ -337,10 +341,10 @@ impl PublishCollabDuplicator {
collab_storage
.upsert_new_collab_with_transaction(
&dest_workspace_id,
dest_workspace_id,
&duplicator_uid,
CollabParams {
object_id: dest_workspace_id.clone(),
object_id: dest_workspace_id,
encoded_collab_v1: updated_encoded_collab.await?.into(),
collab_type: CollabType::Folder,
},
@ -377,8 +381,8 @@ impl PublishCollabDuplicator {
/// Caller should set the parent_view_id to the parent view.
async fn deep_copy(
&mut self,
new_view_id: String,
publish_view_id: &str,
new_view_id: Uuid,
publish_view_id: Uuid,
) -> Result<Option<View>, AppError> {
tracing::info!(
"deep_copy: new_view_id: {}, publish_view_id: {}",
@ -388,7 +392,7 @@ impl PublishCollabDuplicator {
// attempt to get metadata and doc_state for published view
let (metadata, published_blob) = match self
.get_published_data_for_view_id(&publish_view_id.parse()?)
.get_published_data_for_view_id(&publish_view_id)
.await?
{
Some(published_data) => published_data,
@ -405,11 +409,11 @@ impl PublishCollabDuplicator {
// so we insert this knowledge into the duplicated_refs
self
.duplicated_refs
.insert(publish_view_id.to_string(), new_view_id.clone().into());
.insert(publish_view_id, new_view_id.clone().into());
match metadata.view.layout {
ViewLayout::Document => {
let doc_collab = collab_from_doc_state(published_blob, "")?;
let doc_collab = collab_from_doc_state(published_blob, &Uuid::default())?;
let doc = Document::open(doc_collab).map_err(|e| AppError::Unhandled(e.to_string()))?;
let new_doc_view = self
.deep_copy_doc(publish_view_id, new_view_id, doc, metadata)
@ -417,7 +421,7 @@ impl PublishCollabDuplicator {
Ok(Some(new_doc_view))
},
ViewLayout::Grid | ViewLayout::Board | ViewLayout::Calendar => {
let pub_view_id = metadata.view.view_id.clone();
let pub_view_id: Uuid = metadata.view.view_id.parse()?;
let db_payload = serde_json::from_slice::<PublishDatabaseData>(&published_blob)?;
let new_db_view = self
.deep_copy_database_view(new_view_id, db_payload, &metadata, &pub_view_id)
@ -433,13 +437,12 @@ impl PublishCollabDuplicator {
async fn deep_copy_doc<'a>(
&mut self,
pub_view_id: &str,
dup_view_id: String,
pub_view_id: Uuid,
dup_view_id: Uuid,
doc: Document,
metadata: PublishViewMetaData,
) -> Result<View, AppError> {
let mut ret_view =
self.new_folder_view(dup_view_id.clone(), &metadata.view, ViewLayout::Document);
let mut ret_view = self.new_folder_view(dup_view_id, &metadata.view, ViewLayout::Document);
let mut doc_data = doc
.get_document_data()
@ -450,7 +453,7 @@ impl PublishCollabDuplicator {
}
if let Err(err) = self
.deep_copy_doc_databases(pub_view_id, &mut doc_data, &mut ret_view)
.deep_copy_doc_databases(&pub_view_id, &mut doc_data, &mut ret_view)
.await
{
tracing::error!("failed to deep copy doc databases: {}", err);
@ -467,7 +470,7 @@ impl PublishCollabDuplicator {
let new_doc_bin = collab_to_bin(new_doc.split().0, CollabType::Document).await?;
self
.collabs_to_insert
.insert(ret_view.id.clone(), (CollabType::Document, new_doc_bin));
.insert(ret_view.id.parse()?, (CollabType::Document, new_doc_bin));
}
Ok(ret_view)
}
@ -504,10 +507,13 @@ impl PublishCollabDuplicator {
for page_id in page_ids {
let page_id_str = match page_id.as_str() {
Some(page_id_str) => page_id_str,
Some(page_id_str) => Uuid::parse_str(page_id_str)?,
None => continue,
};
if let Some(new_page_id) = self.deep_copy_view(page_id_str, &ret_view.id).await? {
if let Some(new_page_id) = self
.deep_copy_view(page_id_str, ret_view.id.parse()?)
.await?
{
*page_id = serde_json::json!(new_page_id);
} else {
tracing::warn!("deep_copy_doc_pages: view not found: {}", page_id_str);
@ -528,10 +534,10 @@ impl PublishCollabDuplicator {
/// from param `parent_view_id`)
async fn deep_copy_view(
&mut self,
pub_view_id: &str,
parent_view_id: &String,
) -> Result<Option<String>, AppError> {
match self.duplicated_refs.get(pub_view_id) {
pub_view_id: Uuid,
parent_view_id: Uuid,
) -> Result<Option<Uuid>, AppError> {
match self.duplicated_refs.get(&pub_view_id) {
Some(new_view_id) => {
if let Some(vid) = new_view_id {
Ok(Some(vid.clone()))
@ -543,17 +549,15 @@ impl PublishCollabDuplicator {
// Call deep_copy and await the result
if let Some(mut new_view) = Box::pin(self.deep_copy(gen_view_id(), pub_view_id)).await? {
if new_view.parent_view_id.is_empty() {
new_view.parent_view_id.clone_from(parent_view_id);
new_view.parent_view_id = parent_view_id.to_string();
}
self
.duplicated_refs
.insert(pub_view_id.to_string(), Some(new_view.id.clone()));
let ret_view_id = new_view.id.clone();
self.views_to_add.insert(new_view.id.clone(), new_view);
Ok(Some(ret_view_id))
let new_view_id = Uuid::parse_str(&new_view.id)?;
self.duplicated_refs.insert(pub_view_id, Some(new_view_id));
self.views_to_add.insert(new_view_id, new_view);
Ok(Some(new_view_id))
} else {
tracing::warn!("view not found in deep_copy: {}", pub_view_id);
self.duplicated_refs.insert(pub_view_id.to_string(), None);
self.duplicated_refs.insert(pub_view_id, None);
Ok(None)
}
},
@ -562,7 +566,7 @@ impl PublishCollabDuplicator {
async fn deep_copy_doc_databases(
&mut self,
pub_view_id: &str,
pub_view_id: &Uuid,
doc_data: &mut DocumentData,
ret_view: &mut View,
) -> Result<(), AppError> {
@ -573,21 +577,23 @@ impl PublishCollabDuplicator {
for (block_id, block) in db_blocks {
tracing::info!("deep_copy_doc_databases: block_id: {}", block_id);
let block_view_id = block
let block_view_id: Uuid = block
.data
.get("view_id")
.ok_or_else(|| AppError::RecordNotFound("view_id not found in block data".to_string()))?
.as_str()
.ok_or_else(|| AppError::RecordNotFound("view_id not a string".to_string()))?;
.ok_or_else(|| AppError::RecordNotFound("view_id not a string".to_string()))?
.parse()?;
let block_parent_id = block
let block_parent_id: Uuid = block
.data
.get("parent_id")
.ok_or_else(|| AppError::RecordNotFound("view_id not found in block data".to_string()))?
.as_str()
.ok_or_else(|| AppError::RecordNotFound("view_id not a string".to_string()))?;
.ok_or_else(|| AppError::RecordNotFound("view_id not a string".to_string()))?
.parse()?;
if pub_view_id == block_parent_id {
if pub_view_id == &block_parent_id {
// inline database in doc
if let Some(new_view_id) = self
.deep_copy_inline_database_in_doc(block_view_id, &ret_view.id)
@ -612,11 +618,11 @@ impl PublishCollabDuplicator {
{
block.data.insert(
"view_id".to_string(),
serde_json::Value::String(new_view_id),
serde_json::Value::String(new_view_id.to_string()),
);
block.data.insert(
"parent_id".to_string(),
serde_json::Value::String(new_parent_id),
serde_json::Value::String(new_parent_id.to_string()),
);
} else {
tracing::warn!("deep_copy_doc_databases: view not found: {}", block_view_id);
@ -632,13 +638,10 @@ impl PublishCollabDuplicator {
/// parent_view_id is assumed to be doc itself
async fn deep_copy_inline_database_in_doc<'a>(
&mut self,
view_id: &str,
view_id: Uuid,
doc_view_id: &String,
) -> Result<Option<String>, AppError> {
let (metadata, published_blob) = match self
.get_published_data_for_view_id(&view_id.parse()?)
.await?
{
let (metadata, published_blob) = match self.get_published_data_for_view_id(&view_id).await? {
Some(published_data) => published_data,
None => {
tracing::warn!("No published collab data found for view_id: {}", view_id);
@ -648,14 +651,13 @@ impl PublishCollabDuplicator {
let published_db = serde_json::from_slice::<PublishDatabaseData>(&published_blob)?;
let mut parent_view = self
.deep_copy_database_view(gen_view_id(), published_db, &metadata, view_id)
.deep_copy_database_view(gen_view_id(), published_db, &metadata, &view_id)
.await?;
let parent_view_id = parent_view.id.clone();
if parent_view.parent_view_id.is_empty() {
parent_view.parent_view_id.clone_from(doc_view_id);
self
.views_to_add
.insert(parent_view.id.clone(), parent_view);
parent_view.parent_view_id = doc_view_id.clone();
let parent_view_id = Uuid::parse_str(&parent_view.id)?;
self.views_to_add.insert(parent_view_id, parent_view);
}
Ok(Some(parent_view_id))
}
@ -664,14 +666,11 @@ impl PublishCollabDuplicator {
/// returns new (view_id, parent_id)
async fn deep_copy_ref_database_in_doc<'a>(
&mut self,
view_id: &str,
parent_id: &str,
view_id: Uuid,
parent_id: Uuid,
doc_view_id: &String,
) -> Result<Option<(String, String)>, AppError> {
let (metadata, published_blob) = match self
.get_published_data_for_view_id(&view_id.parse()?)
.await?
{
) -> Result<Option<(Uuid, Uuid)>, AppError> {
let (metadata, published_blob) = match self.get_published_data_for_view_id(&view_id).await? {
Some(published_data) => published_data,
None => {
tracing::warn!("No published collab data found for view_id: {}", view_id);
@ -681,31 +680,27 @@ impl PublishCollabDuplicator {
let published_db = serde_json::from_slice::<PublishDatabaseData>(&published_blob)?;
let mut parent_view = self
.deep_copy_database_view(gen_view_id(), published_db, &metadata, parent_id)
.deep_copy_database_view(gen_view_id(), published_db, &metadata, &parent_id)
.await?;
let parent_view_id = parent_view.id.clone();
let parent_view_id: Uuid = parent_view.id.parse()?;
if parent_view.parent_view_id.is_empty() {
parent_view.parent_view_id.clone_from(doc_view_id);
self
.views_to_add
.insert(parent_view.id.clone(), parent_view);
parent_view.parent_view_id = doc_view_id.to_string();
self.views_to_add.insert(parent_view_id, parent_view);
}
let duplicated_view_id = match self.duplicated_db_view.get(view_id) {
Some(v) => v.clone(),
let duplicated_view_id = match self.duplicated_db_view.get(&view_id) {
Some(v) => *v,
None => {
let view_info_by_id = view_info_by_view_id(&metadata);
let view_info = view_info_by_id.get(view_id).ok_or_else(|| {
let view_info = view_info_by_id.get(&view_id).ok_or_else(|| {
AppError::RecordNotFound(format!("metadata not found for view: {}", view_id))
})?;
let mut new_folder_db_view =
self.new_folder_view(view_id.to_string(), view_info, view_info.layout.clone());
new_folder_db_view
.parent_view_id
.clone_from(&parent_view_id);
let new_folder_db_view_id = new_folder_db_view.id.clone();
self.new_folder_view(view_id, view_info, view_info.layout.clone());
new_folder_db_view.parent_view_id = parent_view_id.to_string();
let new_folder_db_view_id: Uuid = new_folder_db_view.id.parse()?;
self
.views_to_add
.insert(new_folder_db_view.id.clone(), new_folder_db_view);
.insert(new_folder_db_view_id, new_folder_db_view);
new_folder_db_view_id
},
};
@ -720,18 +715,21 @@ impl PublishCollabDuplicator {
async fn deep_copy_database<'a>(
&mut self,
published_db: &PublishDatabaseData,
pub_view_id: &str,
new_view_id: String,
) -> Result<(String, String, bool), AppError> {
pub_view_id: &Uuid,
new_view_id: Uuid,
) -> Result<(Uuid, Uuid, bool), AppError> {
// collab of database
let mut db_collab = collab_from_doc_state(published_db.database_collab.clone(), "")?;
let mut db_collab =
collab_from_doc_state(published_db.database_collab.clone(), &Uuid::default())?;
let db_body = DatabaseBody::from_collab(
&db_collab,
Arc::new(NoPersistenceDatabaseCollabService),
None,
)
.ok_or_else(|| AppError::RecordNotFound("no database body found".to_string()))?;
let pub_db_id = db_body.get_database_id(&db_collab.context.transact());
let pub_db_id: Uuid = db_body
.get_database_id(&db_collab.context.transact())
.parse()?;
// check if the database is already duplicated
if let Some(db_id) = self.duplicated_refs.get(&pub_db_id).cloned().flatten() {
@ -747,9 +745,10 @@ impl PublishCollabDuplicator {
// this will mark the database as duplicated
let txn = db_collab.context.transact();
let mut db_views = db_body.views.get_all_views(&txn);
let mut new_db_view_ids: Vec<String> = Vec::with_capacity(db_views.len());
let mut new_db_view_ids: Vec<_> = Vec::with_capacity(db_views.len());
for db_view in db_views.iter_mut() {
let new_db_view_id = if db_view.id == pub_view_id {
let db_view_id: Uuid = db_view.id.parse()?;
let new_db_view_id = if &db_view_id == pub_view_id {
self
.duplicated_db_main_view
.insert(pub_db_id.clone(), new_view_id.clone());
@ -759,7 +758,7 @@ impl PublishCollabDuplicator {
};
self
.duplicated_db_view
.insert(db_view.id.clone(), new_db_view_id.clone());
.insert(db_view_id, new_db_view_id.clone());
new_db_view_ids.push(new_db_view_id);
}
@ -767,7 +766,7 @@ impl PublishCollabDuplicator {
if !self.duplicated_db_main_view.contains_key(&pub_db_id) {
self
.duplicated_db_main_view
.insert(pub_db_id.clone(), db_body.get_inline_view_id(&txn));
.insert(pub_db_id.clone(), db_body.get_inline_view_id(&txn).parse()?);
};
// Add this database as linked view
@ -780,10 +779,10 @@ impl PublishCollabDuplicator {
// this will mark the rows as duplicated
for pub_row_id in published_db.database_row_collabs.keys() {
// assign a new id for the row
let dup_row_id = gen_row_id();
let dup_row_id = Uuid::new_v4();
self
.duplicated_db_row
.insert(pub_row_id.clone(), dup_row_id.to_string());
.insert(pub_row_id.clone(), dup_row_id);
}
{
@ -795,20 +794,16 @@ impl PublishCollabDuplicator {
if *key == FieldType::Relation.type_id() {
if let Some(pub_db_id) = type_option_value.get_mut("database_id") {
if let Any::String(pub_db_id_str) = pub_db_id {
if let Some(pub_rel_db_view) =
published_db.database_relations.get(pub_db_id_str.as_ref())
{
let pub_db_uuid = Uuid::parse_str(&pub_db_id_str)?;
if let Some(&pub_rel_db_view) = published_db.database_relations.get(&pub_db_uuid) {
if let Some(_dup_view_id) = self
.deep_copy_view(pub_rel_db_view, &self.dest_view_id.to_string())
.deep_copy_view(pub_rel_db_view, self.dest_view_id)
.await?
{
if let Some(dup_db_id) = self
.duplicated_refs
.get(pub_db_id_str.as_ref())
.cloned()
.flatten()
if let Some(dup_db_id) =
self.duplicated_refs.get(&pub_db_uuid).cloned().flatten()
{
*pub_db_id = Any::String(dup_db_id.into());
*pub_db_id = Any::from(dup_db_id.to_string());
db_body.fields.update_field(&mut txn, &field.id, |f| {
f.set_type_option(
FieldType::Relation.into(),
@ -841,7 +836,7 @@ impl PublishCollabDuplicator {
let mut txn = db_row_collab.context.transact_mut();
// update database_id
db_row_body.update(&mut txn, |u| {
u.set_database_id(new_db_id.clone());
u.set_database_id(new_db_id.to_string());
});
// get row document id before the id update
@ -856,6 +851,7 @@ impl PublishCollabDuplicator {
// duplicate row document if exists
if let Some(pub_row_doc_id) = pub_row_doc_id {
let pub_row_doc_id: Uuid = pub_row_doc_id.parse()?;
if let Some(row_doc_doc_state) = published_db
.database_row_document_collabs
.get(&pub_row_doc_id)
@ -865,15 +861,15 @@ impl PublishCollabDuplicator {
let pub_doc =
Document::open(pub_doc_collab).map_err(|e| AppError::Unhandled(e.to_string()))?;
let dup_row_doc_id =
meta_id_from_row_id(&dup_row_id.parse()?, RowMetaKey::DocumentId);
meta_id_from_row_id(&dup_row_id, RowMetaKey::DocumentId).parse()?;
let mut new_doc_view = Box::pin(self.deep_copy_doc(
&pub_row_doc_id,
dup_row_doc_id.clone(),
pub_row_doc_id,
dup_row_doc_id,
pub_doc,
PublishViewMetaData::default(),
))
.await?;
new_doc_view.parent_view_id.clone_from(&dup_row_doc_id); // orphan folder view
new_doc_view.parent_view_id = dup_row_doc_id.to_string(); // orphan folder view
self
.views_to_add
.insert(dup_row_doc_id.clone(), new_doc_view);
@ -938,14 +934,11 @@ impl PublishCollabDuplicator {
}
rel_row_ids.remove_range(&mut txn, 0, num_refs);
for pub_row_id in pub_row_ids {
let dup_row_id =
self
.duplicated_db_row
.get(pub_row_id.as_ref())
.ok_or_else(|| {
AppError::RecordNotFound(format!("row not found: {}", pub_row_id))
})?;
let _ = rel_row_ids.push_back(&mut txn, dup_row_id.as_str());
let pub_row_id = Uuid::parse_str(&pub_row_id)?;
let dup_row_id = self.duplicated_db_row.get(&pub_row_id).ok_or_else(|| {
AppError::RecordNotFound(format!("row not found: {}", pub_row_id))
})?;
let _ = rel_row_ids.push_back(&mut txn, dup_row_id.to_string());
}
}
}
@ -953,32 +946,33 @@ impl PublishCollabDuplicator {
// write new row collab to storage
let db_row_ec_bytes = collab_to_bin(db_row_collab, CollabType::DatabaseRow).await?;
self.collabs_to_insert.insert(
dup_row_id.to_string(),
(CollabType::DatabaseRow, db_row_ec_bytes),
);
self
.collabs_to_insert
.insert(dup_row_id, (CollabType::DatabaseRow, db_row_ec_bytes));
}
// accumulate list of database views (Board, Cal, ...) to be linked to the database
{
let mut txn = db_collab.context.transact_mut();
db_body.root.insert(&mut txn, "id", new_db_id.clone());
db_body.root.insert(&mut txn, "id", new_db_id.to_string());
let mut db_views = db_body.views.get_all_views(&txn);
for db_view in db_views.iter_mut() {
let new_db_view_id = self.duplicated_db_view.get(&db_view.id).ok_or_else(|| {
let db_view_id = Uuid::parse_str(&db_view.id)?;
let new_db_view_id = self.duplicated_db_view.get(&db_view_id).ok_or_else(|| {
AppError::Unhandled(format!(
"view not found in duplicated_db_view: {}",
db_view.id
))
})?;
db_view.id.clone_from(new_db_view_id);
db_view.database_id.clone_from(&new_db_id);
db_view.id = new_db_view_id.to_string();
db_view.database_id = new_db_id.to_string();
// update all views's row's id
for row_order in db_view.row_orders.iter_mut() {
if let Some(new_id) = self.duplicated_db_row.get(row_order.id.as_str()) {
let row_order_id = Uuid::parse_str(&row_order.id)?;
if let Some(new_id) = self.duplicated_db_row.get(&row_order_id) {
row_order.id = new_id.clone().into();
} else {
// skip if row not found
@ -989,7 +983,9 @@ impl PublishCollabDuplicator {
}
// update database metas iid
db_body.metas.insert(&mut txn, "iid", new_view_id);
db_body
.metas
.insert(&mut txn, "iid", new_view_id.to_string());
// insert updated views back to db
db_body.views.clear(&mut txn);
@ -1012,10 +1008,10 @@ impl PublishCollabDuplicator {
/// If the database is already duplicated before, does not return the view with `new_view_id`
async fn deep_copy_database_view<'a>(
&mut self,
new_view_id: String,
new_view_id: Uuid,
published_db: PublishDatabaseData,
metadata: &PublishViewMetaData,
pub_view_id: &str,
pub_view_id: &Uuid,
) -> Result<View, AppError> {
// flatten nested view info into a map
let view_info_by_id = view_info_by_view_id(metadata);
@ -1037,7 +1033,7 @@ impl PublishCollabDuplicator {
None => {
let main_view_id = self
.duplicated_db_main_view
.get(pub_db_id.as_str())
.get(&pub_db_id)
.ok_or_else(|| {
AppError::RecordNotFound(format!("main view not found: {}", pub_view_id))
})?;
@ -1047,8 +1043,9 @@ impl PublishCollabDuplicator {
})?;
let mut view = self.new_folder_view(dup_view_id, view_info, view_info.layout.clone());
if *main_view_id != view.id {
view.parent_view_id.clone_from(main_view_id);
let main_view_id = main_view_id.to_string();
if main_view_id != view.id {
view.parent_view_id = main_view_id;
}
return Ok(view);
},
@ -1062,7 +1059,7 @@ impl PublishCollabDuplicator {
// create the main view
let main_view_id = self
.duplicated_db_main_view
.get(pub_db_id.as_str())
.get(&pub_db_id)
.ok_or_else(|| AppError::RecordNotFound(format!("main view not found: {}", pub_view_id)))?;
let main_view_info = view_info_by_id.get(pub_view_id).ok_or_else(|| {
@ -1076,7 +1073,7 @@ impl PublishCollabDuplicator {
// create other visible view which are child to the main view
for vis_view_id in published_db.visible_database_view_ids {
if vis_view_id == pub_view_id {
if &vis_view_id == pub_view_id {
// skip main view
continue;
}
@ -1097,10 +1094,10 @@ impl PublishCollabDuplicator {
})?,
child_view_info.layout.clone(),
);
child_folder_view.parent_view_id.clone_from(main_view_id);
child_folder_view.parent_view_id = main_view_id.to_string();
self
.views_to_add
.insert(child_folder_view.id.clone(), child_folder_view);
.insert(child_folder_view.id.parse()?, child_folder_view);
}
Ok(main_folder_view)
@ -1109,12 +1106,12 @@ impl PublishCollabDuplicator {
/// creates a new folder view without parent_view_id set
fn new_folder_view(
&self,
new_view_id: String,
new_view_id: Uuid,
view_info: &PublishViewInfo,
layout: ViewLayout,
) -> View {
View {
id: new_view_id,
id: new_view_id.to_string(),
parent_view_id: "".to_string(), // to be filled by caller
name: view_info.name.clone(),
children: RepeatedViewIdentifier { items: vec![] }, // fill in while iterating children
@ -1155,17 +1152,17 @@ impl PublishCollabDuplicator {
}
}
fn view_info_by_view_id(meta: &PublishViewMetaData) -> HashMap<String, PublishViewInfo> {
fn view_info_by_view_id(meta: &PublishViewMetaData) -> HashMap<Uuid, PublishViewInfo> {
let mut acc = HashMap::new();
acc.insert(meta.view.view_id.clone(), meta.view.clone());
acc.insert(meta.view.view_id.parse().unwrap(), meta.view.clone());
add_to_view_info(&mut acc, &meta.child_views);
add_to_view_info(&mut acc, &meta.ancestor_views);
acc
}
fn add_to_view_info(acc: &mut HashMap<String, PublishViewInfo>, view_infos: &[PublishViewInfo]) {
fn add_to_view_info(acc: &mut HashMap<Uuid, PublishViewInfo>, view_infos: &[PublishViewInfo]) {
for view_info in view_infos {
acc.insert(view_info.view_id.clone(), view_info.clone());
acc.insert(view_info.view_id.parse().unwrap(), view_info.clone());
if let Some(child_views) = &view_info.child_views {
add_to_view_info(acc, child_views);
}