chore: cargo clippy fix

Bartosz Sypytkowski 2025-03-21 07:26:32 +01:00
parent b2aa5c39c1
commit 6d2e8b7164
20 changed files with 109 additions and 114 deletions
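
Most of the changes below address a handful of clippy lints: clone_on_copy (object_id, view_id and workspace_id here are uuid::Uuid values, which are Copy, so .clone() is redundant), needless_borrow (taking & of something that is already a reference), redundant_field_names, and map_entry (contains_key followed by insert). Where a value such as collab_type is apparently no longer Copy, an explicit .clone() is added instead. A minimal sketch of the clone_on_copy case, not taken from this repository (QueryCollab is pared down to one field):

use uuid::Uuid;

// Uuid implements Copy, so clippy::clone_on_copy suggests passing it by value.
struct QueryCollab {
    object_id: Uuid,
}

fn query_for(view_id: Uuid) -> QueryCollab {
    QueryCollab {
        object_id: view_id, // previously written as `view_id.clone()`
    }
}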


@ -826,8 +826,8 @@ async fn create_collab_handler(
if let Ok(text) = Document::open(collab).and_then(|doc| doc.to_plain_text(false, true)) {
let pending = UnindexedCollabTask::new(
workspace_id,
params.object_id.clone(),
params.collab_type,
params.object_id,
params.collab_type.clone(),
UnindexedData::Text(text),
);
state
@ -980,8 +980,8 @@ async fn batch_create_collab_handler(
.map(|text| {
UnindexedCollabTask::new(
workspace_id,
value.1.object_id.clone(),
value.1.collab_type,
value.1.object_id,
value.1.collab_type.clone(),
UnindexedData::Text(text),
)
})
@ -1797,8 +1797,8 @@ async fn update_collab_handler(
if let Ok(text) = Document::open(collab).and_then(|doc| doc.to_plain_text(false, true)) {
let pending = UnindexedCollabTask::new(
workspace_id,
params.object_id.clone(),
params.collab_type,
params.object_id,
params.collab_type.clone(),
UnindexedData::Text(text),
);
state
@ -2483,9 +2483,8 @@ async fn put_database_row_handler(
let row_id = {
let mut hasher = Sha256::new();
// TODO: check if it is safe to use workspace_id directly
hasher.update(workspace_id.to_string());
hasher.update(&db_id);
hasher.update(workspace_id);
hasher.update(db_id);
hasher.update(pre_hash);
let hash = hasher.finalize();
Uuid::from_bytes([
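
The hashing hunk above feeds the hasher the values directly: sha2's update accepts anything that is AsRef<[u8]>, and a Uuid exposes its 16 raw bytes that way, so the to_string() round-trip is unnecessary. As the TODO notes, hashing the raw bytes is not byte-for-byte the same as hashing the hyphenated string, so the derived id differs; a hedged sketch of that difference (names are illustrative):

use sha2::{Digest, Sha256};
use uuid::Uuid;

// The two digests differ: one consumes 16 raw bytes, the other the
// 36-character hyphenated string representation.
fn digest_raw(id: Uuid) -> Vec<u8> {
    let mut hasher = Sha256::new();
    hasher.update(id); // Uuid: AsRef<[u8]>
    hasher.finalize().to_vec()
}

fn digest_string(id: Uuid) -> Vec<u8> {
    let mut hasher = Sha256::new();
    hasher.update(id.to_string());
    hasher.finalize().to_vec()
}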


@ -114,13 +114,13 @@ fn to_folder_view(
) -> Option<FolderView> {
let is_trash = private_space_and_trash_views
.view_ids_in_trash
.contains(&view_id);
.contains(view_id);
let is_my_private_space = private_space_and_trash_views
.my_private_space_ids
.contains(&view_id);
.contains(view_id);
let is_other_private_space = private_space_and_trash_views
.other_private_space_ids
.contains(&view_id);
.contains(view_id);
if depth > max_depth || is_other_private_space || is_trash {
return None;
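
The contains changes in this file are clippy's needless_borrow: the id being checked is already a reference, so the extra & only adds a layer for the compiler to peel off. A standalone sketch with assumed types (the real code works on folder view ids):

use uuid::Uuid;

// `contains` already takes its argument by reference, so borrowing an
// existing `&Uuid` again is redundant.
fn is_in_trash(view_ids_in_trash: &[Uuid], view_id: &Uuid) -> bool {
    view_ids_in_trash.contains(view_id) // was `contains(&view_id)`
}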


@ -299,7 +299,7 @@ async fn duplicate_document(
.document_view_ids
.iter()
.map(|id| QueryCollab {
object_id: id.clone(),
object_id: *id,
collab_type: CollabType::Document,
})
.collect();


@ -1077,7 +1077,7 @@ async fn create_database_page(
let workspace_database_update =
add_new_database_to_workspace(&mut workspace_database, &database_id, view_id).await?;
let database_collab_params = CollabParams {
object_id: database_id.clone(),
object_id: database_id,
encoded_collab_v1: encoded_database
.encoded_database_collab
.encoded_collab


@ -179,7 +179,7 @@ impl PublishCollabDuplicator {
let start = Instant::now();
for (oid, (collab_type, encoded_collab)) in collabs_to_insert.into_iter() {
let params = CollabParams {
object_id: oid.clone(),
object_id: oid,
encoded_collab_v1: encoded_collab.into(),
collab_type,
};
@ -253,7 +253,7 @@ impl PublishCollabDuplicator {
dest_workspace_id,
&duplicator_uid,
CollabParams {
object_id: ws_db_oid.clone(),
object_id: ws_db_oid,
encoded_collab_v1: Bytes::from(updated_ws_w_db_collab),
collab_type: CollabType::WorkspaceDatabase,
},
@ -313,8 +313,8 @@ impl PublishCollabDuplicator {
.body
.views
.insert(&mut folder_txn, view.clone(), None);
duplicated_view_ids.insert(view_id.clone());
inserted.push(view_id.clone());
duplicated_view_ids.insert(*view_id);
inserted.push(*view_id);
}
}
if inserted.is_empty() {
@ -408,7 +408,7 @@ impl PublishCollabDuplicator {
// so we insert this knowledge into the duplicated_refs
self
.duplicated_refs
.insert(publish_view_id, new_view_id.clone().into());
.insert(publish_view_id, new_view_id.into());
match metadata.view.layout {
ViewLayout::Document => {
@ -539,7 +539,7 @@ impl PublishCollabDuplicator {
match self.duplicated_refs.get(&pub_view_id) {
Some(new_view_id) => {
if let Some(vid) = new_view_id {
Ok(Some(vid.clone()))
Ok(Some(*vid))
} else {
Ok(None)
}
@ -737,7 +737,7 @@ impl PublishCollabDuplicator {
let new_db_id = gen_view_id();
self
.duplicated_refs
.insert(pub_db_id.clone(), Some(new_db_id.clone()));
.insert(pub_db_id, Some(new_db_id));
{
// assign new id to all views of database.
@ -750,28 +750,26 @@ impl PublishCollabDuplicator {
let new_db_view_id = if &db_view_id == pub_view_id {
self
.duplicated_db_main_view
.insert(pub_db_id.clone(), new_view_id.clone());
new_view_id.clone()
.insert(pub_db_id, new_view_id);
new_view_id
} else {
gen_view_id()
};
self
.duplicated_db_view
.insert(db_view_id, new_db_view_id.clone());
.insert(db_view_id, new_db_view_id);
new_db_view_ids.push(new_db_view_id);
}
// if there is no main view id, use the inline view id
if !self.duplicated_db_main_view.contains_key(&pub_db_id) {
self
.duplicated_db_main_view
.insert(pub_db_id.clone(), db_body.get_inline_view_id(&txn).parse()?);
if let std::collections::hash_map::Entry::Vacant(e) = self.duplicated_db_main_view.entry(pub_db_id) {
e.insert(db_body.get_inline_view_id(&txn).parse()?);
};
// Add this database as linked view
self
.workspace_databases
.insert(new_db_id.clone(), new_db_view_ids);
.insert(new_db_id, new_db_view_ids);
}
// assign new id to all rows of database.
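
The contains_key-then-insert rewrite above follows clippy's map_entry lint: the Entry API looks the key up once and only writes if the slot was vacant, instead of hashing the key twice. Roughly, with simplified stand-in types:

use std::collections::hash_map::Entry;
use std::collections::HashMap;
use uuid::Uuid;

// Stand-in for duplicated_db_main_view; only inserts when no main view
// has been recorded for this database yet.
fn set_main_view_if_absent(main_views: &mut HashMap<Uuid, Uuid>, pub_db_id: Uuid, inline_view_id: Uuid) {
    if let Entry::Vacant(e) = main_views.entry(pub_db_id) {
        e.insert(inline_view_id);
    }
}
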
@ -781,7 +779,7 @@ impl PublishCollabDuplicator {
let dup_row_id = Uuid::new_v4();
self
.duplicated_db_row
.insert(pub_row_id.clone(), dup_row_id);
.insert(*pub_row_id, dup_row_id);
}
{
@ -793,7 +791,7 @@ impl PublishCollabDuplicator {
if *key == FieldType::Relation.type_id() {
if let Some(pub_db_id) = type_option_value.get_mut("database_id") {
if let Any::String(pub_db_id_str) = pub_db_id {
let pub_db_uuid = Uuid::parse_str(&pub_db_id_str)?;
let pub_db_uuid = Uuid::parse_str(pub_db_id_str)?;
if let Some(&pub_rel_db_view) = published_db.database_relations.get(&pub_db_uuid) {
if let Some(_dup_view_id) = self
.deep_copy_view(pub_rel_db_view, self.dest_view_id)
@ -821,14 +819,13 @@ impl PublishCollabDuplicator {
// duplicate db collab rows
for (pub_row_id, row_bin_data) in &published_db.database_row_collabs {
let dup_row_id = self
let dup_row_id = *self
.duplicated_db_row
.get(pub_row_id)
.ok_or_else(|| AppError::RecordNotFound(format!("row not found: {}", pub_row_id)))?
.clone();
.ok_or_else(|| AppError::RecordNotFound(format!("row not found: {}", pub_row_id)))?;
let mut db_row_collab = collab_from_doc_state(row_bin_data.clone(), &dup_row_id)?;
let mut db_row_body = DatabaseRowBody::open(pub_row_id.clone().into(), &mut db_row_collab)
let mut db_row_body = DatabaseRowBody::open((*pub_row_id).into(), &mut db_row_collab)
.map_err(|e| AppError::Unhandled(e.to_string()))?;
{
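
Several hunks in this file swap a .clone() on a map lookup for a plain dereference: HashMap::get returns &Uuid, and since Uuid is Copy, * (or .copied()) takes the value out without cloning. A small sketch with an assumed map shape:

use std::collections::HashMap;
use uuid::Uuid;

// Assumed shape of duplicated_db_row: published row id -> duplicated row id.
fn dup_row_id(duplicated_db_row: &HashMap<Uuid, Uuid>, pub_row_id: &Uuid) -> Option<Uuid> {
    duplicated_db_row.get(pub_row_id).copied() // same as `.map(|id| *id)`
}
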
@ -845,7 +842,7 @@ impl PublishCollabDuplicator {
// updates row id along with meta keys
db_row_body
.update_id(&mut txn, dup_row_id.clone().into())
.update_id(&mut txn, dup_row_id.into())
.map_err(|e| AppError::Unhandled(format!("failed to update row id: {:?}", e)))?;
// duplicate row document if exists
@ -871,7 +868,7 @@ impl PublishCollabDuplicator {
new_doc_view.parent_view_id = dup_row_doc_id.to_string(); // orphan folder view
self
.views_to_add
.insert(dup_row_doc_id.clone(), new_doc_view);
.insert(dup_row_doc_id, new_doc_view);
},
Err(err) => tracing::error!("failed to open row document: {}", err),
};
@ -972,7 +969,7 @@ impl PublishCollabDuplicator {
for row_order in db_view.row_orders.iter_mut() {
let row_order_id = Uuid::parse_str(&row_order.id)?;
if let Some(new_id) = self.duplicated_db_row.get(&row_order_id) {
row_order.id = new_id.clone().into();
row_order.id = (*new_id).into();
} else {
// skip if row not found
tracing::warn!("row not found: {}", row_order.id);
@ -997,7 +994,7 @@ impl PublishCollabDuplicator {
let db_encoded_collab = collab_to_bin(db_collab, CollabType::Database).await?;
self
.collabs_to_insert
.insert(new_db_id.clone(), (CollabType::Database, db_encoded_collab));
.insert(new_db_id, (CollabType::Database, db_encoded_collab));
Ok((pub_db_id, new_db_id, false))
}
@ -1065,7 +1062,7 @@ impl PublishCollabDuplicator {
AppError::RecordNotFound(format!("metadata not found for view: {}", pub_view_id))
})?;
let main_folder_view = self.new_folder_view(
main_view_id.clone(),
*main_view_id,
main_view_info,
main_view_info.layout.clone(),
);
@ -1087,7 +1084,7 @@ impl PublishCollabDuplicator {
})?;
let mut child_folder_view = self.new_folder_view(
child_view_id.clone(),
*child_view_id,
view_info_by_id.get(&vis_view_id).ok_or_else(|| {
AppError::RecordNotFound(format!("metadata not found for view: {}", vis_view_id))
})?,


@ -68,7 +68,7 @@ async fn chat_with_multiple_selected_source_test() {
.iter()
.map(|doc| EmbeddedCollabQuery {
collab_type: CollabType::Document,
object_id: doc.object_id.clone(),
object_id: doc.object_id,
})
.collect();
test_client


@ -15,7 +15,7 @@ async fn generate_chat_message_answer_test() {
completion_type: Some(CompletionType::SpellingAndGrammar),
metadata: Some(CompletionMetadata {
object_id: doc_id,
workspace_id: Some(workspace_id.clone()),
workspace_id: Some(workspace_id),
rag_ids: None,
completion_history: None,
custom_prompt: None,


@ -26,8 +26,8 @@ async fn get_collab_response_compatible_test() {
// after 0.3.22, we use [CollabResponse] instead of EncodedCollab as the response
let collab_resp = test_client
.get_collab(
workspace_id.clone(),
workspace_id.clone(),
workspace_id,
workspace_id,
CollabType::Folder,
)
.await
@ -62,7 +62,7 @@ async fn create_collab_params_compatibility_serde_test() {
.unwrap();
let old_version_value = json!(InsertCollabParams {
object_id: object_id.clone(),
object_id: object_id,
encoded_collab_v1: encoded_collab_v1.clone(),
workspace_id: WORKSPACE_ID,
collab_type: CollabType::Document,
@ -130,7 +130,7 @@ async fn create_collab_compatibility_with_json_params_test() {
.json(&QueryCollabParams {
workspace_id,
inner: QueryCollab {
object_id: object_id.clone(),
object_id: object_id,
collab_type: CollabType::Unknown,
},
})
@ -182,8 +182,8 @@ async fn batch_insert_document_collab_test() {
let params = params_list
.iter()
.map(|params| QueryCollab {
object_id: params.object_id.clone(),
collab_type: params.collab_type,
object_id: params.object_id,
collab_type: params.collab_type.clone(),
})
.collect::<Vec<_>>();


@ -372,8 +372,9 @@ async fn multiple_user_with_read_and_write_permission_edit_same_collab_test() {
// simulate multiple users edit the same collab. All of them have read and write permission
for i in 0..3 {
let owner = arc_owner.clone();
let object_id = object_id.clone();
let workspace_id = workspace_id.clone();
let object_id = object_id;
let collab_type = collab_type.clone();
let workspace_id = workspace_id;
let task = tokio::spawn(async move {
let mut new_member = TestClient::new_user().await;
// sleep 2 secs to make sure it do not trigger register user too fast in gotrue
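
In these concurrent-edit tests the Copy ids lose their clones, while collab_type (apparently not Copy) gains one clone per spawned task so the original stays usable across loop iterations. A rough sketch of the pattern, with String standing in for the real CollabType:

use uuid::Uuid;

// Copy ids can be handed to each task as plain copies; the non-Copy value
// still needs one clone per task. Assumes a Tokio runtime is running.
fn spawn_editors(object_id: Uuid, collab_type: String) {
    for _ in 0..3 {
        let collab_type = collab_type.clone();
        tokio::spawn(async move {
            let _ = (object_id, collab_type);
        });
    }
}
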
@ -450,8 +451,9 @@ async fn multiple_user_with_read_only_permission_edit_same_collab_test() {
for i in 0..5 {
let owner = arc_owner.clone();
let object_id = object_id.clone();
let workspace_id = workspace_id.clone();
let object_id = object_id;
let collab_type = collab_type.clone();
let workspace_id = workspace_id;
let task = tokio::spawn(async move {
let mut new_user = TestClient::new_user().await;
// sleep 2 secs to make sure it do not trigger register user too fast in gotrue


@ -24,9 +24,9 @@ async fn success_insert_collab_test() {
let object_id = Uuid::new_v4();
let encode_collab = test_encode_collab_v1(&object_id, "title", "hello world");
c.create_collab(CreateCollabParams {
object_id: object_id.clone(),
object_id: object_id,
collab_type: CollabType::Unknown,
workspace_id: workspace_id.clone(),
workspace_id: workspace_id,
encoded_collab_v1: encode_collab.encode_to_bytes().unwrap(),
})
.await
@ -67,24 +67,24 @@ async fn success_batch_get_collab_test() {
let mut expected_results = HashMap::new();
for query in queries.iter() {
let object_id = query.object_id.clone();
let object_id = query.object_id;
let encode_collab = test_encode_collab_v1(&object_id, "title", "hello world")
.encode_to_bytes()
.unwrap();
let collab_type = query.collab_type;
expected_results.insert(
object_id.clone(),
object_id,
QueryCollabResult::Success {
encode_collab_v1: encode_collab.clone(),
},
);
c.create_collab(CreateCollabParams {
object_id: object_id.clone(),
object_id: object_id,
encoded_collab_v1: encode_collab.clone(),
collab_type,
workspace_id: workspace_id.clone(),
collab_type: collab_type.clone(),
workspace_id: workspace_id,
})
.await
.unwrap();
@ -117,31 +117,31 @@ async fn success_part_batch_get_collab_test() {
let mut expected_results = HashMap::new();
for (index, query) in queries.iter().enumerate() {
let object_id = query.object_id.clone();
let collab_type = query.collab_type;
let object_id = query.object_id;
let collab_type = query.collab_type.clone();
let encode_collab = test_encode_collab_v1(&object_id, "title", "hello world")
.encode_to_bytes()
.unwrap();
if index == 1 {
expected_results.insert(
object_id.clone(),
object_id,
QueryCollabResult::Failed {
error: "Record not found".to_string(),
},
);
} else {
expected_results.insert(
object_id.clone(),
object_id,
QueryCollabResult::Success {
encode_collab_v1: encode_collab.clone(),
},
);
c.create_collab(CreateCollabParams {
object_id: object_id.clone(),
object_id: object_id,
encoded_collab_v1: encode_collab.clone(),
collab_type,
workspace_id: workspace_id.clone(),
collab_type: collab_type.clone(),
workspace_id: workspace_id,
})
.await
.unwrap();
@ -164,17 +164,17 @@ async fn success_delete_collab_test() {
.unwrap();
c.create_collab(CreateCollabParams {
object_id: object_id.clone(),
object_id: object_id,
encoded_collab_v1: encode_collab,
collab_type: CollabType::Unknown,
workspace_id: workspace_id.clone(),
workspace_id: workspace_id,
})
.await
.unwrap();
c.delete_collab(DeleteCollabParams {
object_id: object_id.clone(),
workspace_id: workspace_id.clone(),
object_id: object_id,
workspace_id: workspace_id,
})
.await
.unwrap();
@ -200,7 +200,7 @@ async fn fail_insert_collab_with_empty_payload_test() {
object_id: Uuid::new_v4(),
encoded_collab_v1: vec![],
collab_type: CollabType::Document,
workspace_id: workspace_id.clone(),
workspace_id: workspace_id,
})
.await
.unwrap_err();
@ -347,8 +347,8 @@ async fn insert_empty_data_test() {
CollabType::DatabaseRow,
] {
let params = CreateCollabParams {
workspace_id: workspace_id.clone(),
object_id: object_id.clone(),
workspace_id: workspace_id,
object_id: object_id,
encoded_collab_v1: vec![],
collab_type,
};
@ -382,8 +382,8 @@ async fn insert_invalid_data_test() {
CollabType::DatabaseRow,
] {
let params = CreateCollabParams {
workspace_id: workspace_id.clone(),
object_id: object_id.clone(),
workspace_id: workspace_id,
object_id: object_id,
encoded_collab_v1: encoded_collab_v1.clone(),
collab_type,
};
@ -420,8 +420,8 @@ async fn insert_folder_data_success_test() {
for template in templates.into_iter() {
let data = template.encoded_collab.encode_to_bytes().unwrap();
let params = CreateCollabParams {
workspace_id: workspace_id.clone(),
object_id: object_id.clone(),
workspace_id: workspace_id,
object_id: object_id,
encoded_collab_v1: data,
collab_type: template.collab_type,
};


@ -43,10 +43,10 @@ async fn web_and_native_app_edit_same_collab_test() {
let collab_doc_state = web_client
.api_client
.get_collab(QueryCollabParams {
workspace_id: workspace_id.clone(),
workspace_id: workspace_id,
inner: QueryCollab {
object_id: object_id.clone(),
collab_type,
object_id: object_id,
collab_type: collab_type.clone(),
},
})
.await


@ -306,7 +306,7 @@ async fn perform_upload_test(
async fn invalid_test() {
let (c1, _user1) = generate_unique_registered_user_client().await;
let workspace_id = workspace_id_from_client(&c1).await;
let parent_dir = workspace_id.clone();
let parent_dir = workspace_id;
let file_id = uuid::Uuid::new_v4().to_string();
let mime = mime::TEXT_PLAIN_UTF_8;


@ -142,7 +142,7 @@ async fn simulate_30_put_blob_request_test() {
let mut handles = vec![];
for _ in 0..30 {
let cloned_client = c1.clone();
let cloned_workspace_id = workspace_id.clone();
let cloned_workspace_id = workspace_id;
let handle = tokio::spawn(async move {
let mime = mime::TEXT_PLAIN_UTF_8;
let file_id = Uuid::new_v4().to_string();


@ -87,7 +87,7 @@ async fn test_embedding_when_create_document() {
.collect::<Vec<String>>()
.join("\n");
let params = CalculateSimilarityParams {
workspace_id: workspace_id,
workspace_id,
input: previews,
expected: r#"
"Kathryns Journey to Becoming a Tennis Player Kathryns love for tennis began on a warm summer day w

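The workspace_id change above is clippy's redundant_field_names: when a local variable already has the field's name, struct init shorthand drops the repetition. For example (the struct is reduced to the relevant fields):

use uuid::Uuid;

struct CalculateSimilarityParams {
    workspace_id: Uuid,
    input: String,
}

fn build_params(workspace_id: Uuid, input: String) -> CalculateSimilarityParams {
    CalculateSimilarityParams {
        workspace_id, // shorthand for `workspace_id: workspace_id`
        input,
    }
}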

@ -32,7 +32,7 @@ async fn insert_collab_sql_test(pool: PgPool) {
for &data_size in &data_sizes {
let encoded_collab_v1 = generate_random_bytes(data_size);
let object_id = uuid::Uuid::new_v4();
object_ids.push(object_id.clone());
object_ids.push(object_id);
let mut txn = pool.begin().await.unwrap();
let params = CollabParams {
object_id,
@ -85,7 +85,7 @@ async fn insert_bulk_collab_sql_test(pool: PgPool) {
for &data_size in &data_sizes {
let encoded_collab_v1 = generate_random_bytes(data_size);
let object_id = uuid::Uuid::new_v4();
object_ids.push(object_id.clone());
object_ids.push(object_id);
let params = CollabParams {
object_id,
@ -165,7 +165,7 @@ async fn test_bulk_insert_duplicate_oid_partition_key(pool: PgPool) {
// Two items with the same oid and partition_key
let collab_params_list = vec![
CollabParams {
object_id: object_id.clone(),
object_id: object_id,
collab_type: CollabType::Unknown,
encoded_collab_v1: encoded_collab_v1.clone().into(),
},


@ -21,8 +21,7 @@ async fn access_request_test() {
.iter()
.find(|v| v.name == "To-dos")
.unwrap()
.view_id
.clone();
.view_id;
let data = CreateAccessRequestParams {
workspace_id,
view_id,


@ -77,7 +77,7 @@ async fn get_user_default_workspace_test() {
let getting_started_document = get_document_collab_from_remote(
&mut test_client,
workspace_id.clone(),
workspace_id,
getting_started_view.id.parse().unwrap(),
)
.await;
@ -99,7 +99,7 @@ async fn get_user_default_workspace_test() {
);
let desktop_guide_document = get_document_collab_from_remote(
&mut test_client,
workspace_id.clone(),
workspace_id,
desktop_guide_view.id.parse().unwrap(),
)
.await;
@ -112,7 +112,7 @@ async fn get_user_default_workspace_test() {
assert_eq!(mobile_guide_view.icon, None);
let mobile_guide_document = get_document_collab_from_remote(
&mut test_client,
workspace_id.clone(),
workspace_id,
mobile_guide_view.id.parse().unwrap(),
)
.await;
@ -125,7 +125,7 @@ async fn get_user_default_workspace_test() {
assert_eq!(web_guide_view.icon, None);
let web_guide_document = get_document_collab_from_remote(
&mut test_client,
workspace_id.clone(),
workspace_id,
web_guide_view.id.parse().unwrap(),
)
.await;


@ -99,7 +99,7 @@ async fn create_new_page_with_database() {
.create_workspace_page_view(
workspace_id,
&CreatePageParams {
parent_view_id: general_space.view_id.clone(),
parent_view_id: general_space.view_id,
layout: ViewLayout::Calendar,
name: Some("New calendar".to_string()),
page_data: None,
@ -113,7 +113,7 @@ async fn create_new_page_with_database() {
.create_workspace_page_view(
workspace_id,
&CreatePageParams {
parent_view_id: general_space.view_id.clone(),
parent_view_id: general_space.view_id,
layout: ViewLayout::Grid,
name: Some("New grid".to_string()),
page_data: None,
@ -127,7 +127,7 @@ async fn create_new_page_with_database() {
.create_workspace_page_view(
workspace_id,
&CreatePageParams {
parent_view_id: general_space.view_id.clone(),
parent_view_id: general_space.view_id,
layout: ViewLayout::Grid,
name: Some("New board".to_string()),
page_data: None,
@ -150,9 +150,9 @@ async fn create_new_page_with_database() {
let views_under_general_space: HashSet<_> =
general_space.children.iter().map(|v| v.view_id).collect();
for view_id in &[
calendar_page.view_id.clone(),
grid_page.view_id.clone(),
board_page.view_id.clone(),
calendar_page.view_id,
grid_page.view_id,
board_page.view_id,
] {
assert!(views_under_general_space.contains(view_id));
c.get_workspace_page_view(workspace_id, view_id)
@ -180,7 +180,7 @@ async fn create_new_document_page() {
.create_workspace_page_view(
workspace_id,
&CreatePageParams {
parent_view_id: general_space.view_id.clone(),
parent_view_id: general_space.view_id,
layout: ViewLayout::Document,
name: Some("New document".to_string()),
page_data: None,
@ -194,7 +194,7 @@ async fn create_new_document_page() {
.create_workspace_page_view(
workspace_id,
&CreatePageParams {
parent_view_id: general_space.view_id.clone(),
parent_view_id: general_space.view_id,
layout: ViewLayout::Document,
name: Some("Message extracted from why is the sky blue".to_string()),
page_data: Some(json!({
@ -319,7 +319,7 @@ async fn create_new_chat_page() {
.create_workspace_page_view(
workspace_id,
&CreatePageParams {
parent_view_id: general_space.view_id.clone(),
parent_view_id: general_space.view_id,
layout: ViewLayout::Chat,
name: Some("New chat".to_string()),
page_data: None,
@ -379,7 +379,7 @@ async fn move_page_to_another_space() {
.children
.iter()
.find(|v| v.name == "To-dos")
.map(|v| v.view_id.clone())
.map(|v| v.view_id)
.unwrap();
let shared_space = &folder_view
.children
@ -427,8 +427,8 @@ async fn move_page_to_trash_then_restore() {
.find(|v| v.name == "General")
.unwrap();
let view_ids_to_be_deleted = [
general_space.children[0].view_id.clone(),
general_space.children[1].view_id.clone(),
general_space.children[0].view_id,
general_space.children[1].view_id,
];
app_client.open_workspace_collab(workspace_id).await;
app_client
@ -735,7 +735,7 @@ async fn update_page() {
.into_iter()
.find(|v| v.name == "General")
.unwrap();
let view_id_to_be_updated = general_space.children[0].view_id.clone();
let view_id_to_be_updated = general_space.children[0].view_id;
web_client
.api_client
.update_workspace_page_view(
@ -927,15 +927,13 @@ async fn publish_page() {
.iter()
.find(|v| v.name == "To-dos")
.unwrap()
.view_id
.clone();
.view_id;
let document_page_id = general_space
.children
.iter()
.find(|v| v.name == "Getting started")
.unwrap()
.view_id
.clone();
.view_id;
let page_to_be_published = vec![database_page_id, document_page_id];
let workspace_id = workspace_id;
for view_id in &page_to_be_published {


@ -1127,8 +1127,8 @@ async fn duplicate_to_workspace_doc_inline_database() {
let collab_resp = client_2
.get_collab(
workspace_id_2.clone(),
workspace_id_2.clone(),
workspace_id_2,
workspace_id_2,
CollabType::Folder,
)
.await
@ -1296,7 +1296,7 @@ async fn duplicate_to_workspace_db_embedded_in_doc() {
.clone();
let doc_collab = client_2
.get_collab_to_collab(
workspace_id_2.clone(),
workspace_id_2,
doc_with_embedded_db.view_id,
CollabType::Folder,
)


@ -25,7 +25,7 @@ async fn get_workpace_folder() {
.get_workspace_folder(
&workspace_id,
Some(1),
Some(folder_view.children[0].view_id.clone()),
Some(folder_view.children[0].view_id),
)
.await
.unwrap();