Mirror of https://github.com/AppFlowy-IO/AppFlowy-Cloud.git, synced 2025-04-19 03:24:42 -04:00
chore: imported workspace should not become the latest visiting workspace (#890)

* chore: imported workspace should not become the latest visiting workspace
* chore: sqlx files
* chore: update test
* chore: fmt
commit 9942e68089 (parent d3e4a6870d)
5 changed files with 121 additions and 24 deletions
.sqlx/query-29279a0a97beb08aea84d588374c7534c28bd9c4da24b1ee20245109f5c33880.json (generated, new file, 16 additions)

@@ -0,0 +1,16 @@
+{
+  "db_name": "PostgreSQL",
+  "query": "\n UPDATE af_workspace_member\n SET updated_at = $3\n WHERE uid = $1\n AND workspace_id = $2;\n ",
+  "describe": {
+    "columns": [],
+    "parameters": {
+      "Left": [
+        "Int8",
+        "Uuid",
+        "Timestamptz"
+      ]
+    },
+    "nullable": []
+  },
+  "hash": "29279a0a97beb08aea84d588374c7534c28bd9c4da24b1ee20245109f5c33880"
+}
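Unescaped, the cached query string above reads as plain SQL with three bind parameters (the member uid, the workspace id, and the new timestamp):

    UPDATE af_workspace_member
    SET updated_at = $3
    WHERE uid = $1
    AND workspace_id = $2;

This file is the sqlx offline-mode cache entry for the new query introduced below; such entries are regenerated with `cargo sqlx prepare`.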
.sqlx/query-a8443c8307a099fbfa3e04232fdc7719c28dc5acac088686e86980544fe4bbc9.json (regenerated for the updated query)

@@ -1,6 +1,6 @@
 {
   "db_name": "PostgreSQL",
-  "query": "\n WITH af_user_row AS (\n SELECT * FROM af_user WHERE uuid = $1\n )\n SELECT\n af_user_row.uid,\n af_user_row.uuid,\n af_user_row.email,\n af_user_row.password,\n af_user_row.name,\n af_user_row.metadata,\n af_user_row.encryption_sign,\n af_user_row.deleted_at,\n af_user_row.updated_at,\n af_user_row.created_at,\n (SELECT workspace_id\n FROM af_workspace_member\n WHERE uid = af_user_row.uid\n ORDER BY updated_at DESC\n LIMIT 1) as latest_workspace_id\n FROM af_user_row\n ",
+  "query": "\n WITH af_user_row AS (\n SELECT * FROM af_user WHERE uuid = $1\n )\n SELECT\n af_user_row.uid,\n af_user_row.uuid,\n af_user_row.email,\n af_user_row.password,\n af_user_row.name,\n af_user_row.metadata,\n af_user_row.encryption_sign,\n af_user_row.deleted_at,\n af_user_row.updated_at,\n af_user_row.created_at,\n (\n SELECT af_workspace_member.workspace_id\n FROM af_workspace_member\n JOIN af_workspace\n ON af_workspace_member.workspace_id = af_workspace.workspace_id\n WHERE af_workspace_member.uid = af_user_row.uid\n AND COALESCE(af_workspace.is_initialized, true) = true\n ORDER BY af_workspace_member.updated_at DESC\n LIMIT 1\n ) AS latest_workspace_id\n FROM af_user_row\n ",
   "describe": {
     "columns": [
       {

@@ -78,5 +78,5 @@
       null
     ]
   },
-  "hash": "a8443c8307a099fbfa3e04232fdc7719c28dc5acac088686e86980544fe4bbc9"
+  "hash": "3c2c94b9ac0a329b92847d7176a7435f894c5ef3b3b11e3e2ae03a8ec454a6d8"
 }
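Unescaped, only the latest_workspace_id subquery changes. The old version picked the member's most recently updated workspace unconditionally; the new one joins af_workspace and skips workspaces whose is_initialized flag is false (i.e. imported workspaces still mid-import), treating a NULL flag as initialized via COALESCE:

    -- before
    (SELECT workspace_id
     FROM af_workspace_member
     WHERE uid = af_user_row.uid
     ORDER BY updated_at DESC
     LIMIT 1) as latest_workspace_id

    -- after
    (
      SELECT af_workspace_member.workspace_id
      FROM af_workspace_member
      JOIN af_workspace
        ON af_workspace_member.workspace_id = af_workspace.workspace_id
      WHERE af_workspace_member.uid = af_user_row.uid
        AND COALESCE(af_workspace.is_initialized, true) = true
      ORDER BY af_workspace_member.updated_at DESC
      LIMIT 1
    ) AS latest_workspace_id

Since .sqlx cache files are named after the query hash, the hash change above also renames this file on disk.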
libs/database/src/workspace.rs

@@ -1,3 +1,4 @@
+use chrono::{DateTime, Utc};
 use database_entity::dto::{
   AFRole, AFWorkspaceInvitation, AFWorkspaceInvitationStatus, AFWorkspaceSettings, GlobalComment,
   PublishInfo, Reaction,

@@ -622,11 +623,16 @@ pub async fn select_user_profile<'a, E: Executor<'a, Database = Postgres>>(
         af_user_row.deleted_at,
         af_user_row.updated_at,
         af_user_row.created_at,
-        (SELECT workspace_id
+        (
+          SELECT af_workspace_member.workspace_id
           FROM af_workspace_member
-          WHERE uid = af_user_row.uid
-          ORDER BY updated_at DESC
-          LIMIT 1) as latest_workspace_id
+          JOIN af_workspace
+            ON af_workspace_member.workspace_id = af_workspace.workspace_id
+          WHERE af_workspace_member.uid = af_user_row.uid
+          AND COALESCE(af_workspace.is_initialized, true) = true
+          ORDER BY af_workspace_member.updated_at DESC
+          LIMIT 1
+        ) AS latest_workspace_id
         FROM af_user_row
         "#,
     user_uuid

@@ -708,6 +714,30 @@ pub async fn update_updated_at_of_workspace<'a, E: Executor<'a, Database = Postgres>>(
   Ok(())
 }
 
+#[inline]
+pub async fn update_updated_at_of_workspace_with_uid<'a, E: Executor<'a, Database = Postgres>>(
+  executor: E,
+  uid: i64,
+  workspace_id: &Uuid,
+  current_timestamp: DateTime<Utc>,
+) -> Result<(), AppError> {
+  sqlx::query!(
+    r#"
+      UPDATE af_workspace_member
+      SET updated_at = $3
+      WHERE uid = $1
+      AND workspace_id = $2;
+    "#,
+    uid,
+    workspace_id,
+    current_timestamp
+  )
+  .execute(executor)
+  .await?;
+
+  Ok(())
+}
+
 /// Returns a list of workspaces that the user is part of.
 /// User may owner or non-owner.
 #[inline]
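Because the new helper is generic over any Postgres Executor, it works both inside an open transaction (as the worker change below does) and directly against a pool. A minimal caller sketch, assuming a PgPool in scope; the function name demote_workspace and its arguments are hypothetical, not part of this commit:

    use app_error::AppError; // assumed crate path for the AppError in the signature above
    use chrono::{DateTime, Utc};
    use sqlx::PgPool;
    use uuid::Uuid;

    // Pin a member's updated_at to the Unix epoch (1970-01-01T00:00:00Z) so this
    // workspace sorts last when latest_workspace_id picks the newest row.
    async fn demote_workspace(pool: &PgPool, uid: i64, workspace_id: &Uuid) -> Result<(), AppError> {
      let epoch: DateTime<Utc> = DateTime::from_timestamp(0, 0).unwrap_or_else(Utc::now);
      update_updated_at_of_workspace_with_uid(pool, uid, workspace_id, epoch).await
    }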
services/appflowy-worker/src/import_worker/worker.rs

@@ -23,7 +23,7 @@ use database::collab::{insert_into_af_collab_bulk_for_user, select_blob_from_af_
 use database::resource_usage::{insert_blob_metadata_bulk, BulkInsertMeta};
 use database::workspace::{
   delete_from_workspace, select_workspace_database_storage_id, update_import_task_status,
-  update_workspace_status,
+  update_updated_at_of_workspace_with_uid, update_workspace_status,
 };
 use database_entity::dto::CollabParams;

@@ -41,6 +41,7 @@ use redis::{AsyncCommands, RedisResult, Value};
 use serde::{Deserialize, Serialize};
 use serde_json::from_str;
 use sqlx::types::chrono;
+use sqlx::types::chrono::{DateTime, Utc};
 use sqlx::{PgPool, Pool, Postgres};
 use std::collections::HashMap;
 use std::env::temp_dir;

@@ -646,6 +647,8 @@ async fn process_unzip_file(
   );
   collab_params_list.push(folder_collab_params);
 
+  let upload_resources = process_resources(resources).await;
+
   // 6. Start a transaction to insert all collabs
   let mut transaction = pg_pool.begin().await.map_err(|err| {
     ImportError::Internal(anyhow!(

@@ -700,7 +703,24 @@ async fn process_unzip_file(
       err
     ))
   })?;
-  let upload_resources = process_resources(resources).await;
+
+  // Set the workspace's updated_at to the earliest possible timestamp, as it is created by an import task
+  // and not actively updated by a user. This ensures that when sorting workspaces by updated_at to find
+  // the most recent, the imported workspace doesn't appear as the most recently visited workspace.
+  let updated_at = DateTime::from_timestamp(0, 0).unwrap_or_else(Utc::now);
+  update_updated_at_of_workspace_with_uid(
+    transaction.deref_mut(),
+    import_task.uid,
+    &workspace_id,
+    updated_at,
+  )
+  .await
+  .map_err(|err| {
+    ImportError::Internal(anyhow!(
+      "Failed to update workspace updated_at when importing data: {:?}",
+      err
+    ))
+  })?;
 
   // insert metadata into database
   let metas = upload_resources

@@ -747,7 +767,7 @@ async fn process_unzip_file(
   .await
   .map_err(|err| ImportError::Internal(anyhow!("Failed to upload files to S3: {:?}", err)))?;
 
-  // 3. delete zip file regardless of success or failure
+  // 8. delete zip file regardless of success or failure
   match fs::remove_dir_all(unzip_dir_path).await {
     Ok(_) => trace!("[Import]: {} deleted unzip file", import_task.workspace_id),
     Err(err) => error!("Failed to delete unzip file: {:?}", err),
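Note the transaction.deref_mut() at the new call site: sqlx's Transaction dereferences to its underlying connection, and a mutable reference to that connection is the executor to use while the transaction is open. A minimal sketch of the pattern, assuming pg_pool, uid, workspace_id, and updated_at are in scope:

    use std::ops::DerefMut;

    let mut transaction = pg_pool.begin().await?;
    update_updated_at_of_workspace_with_uid(transaction.deref_mut(), uid, &workspace_id, updated_at).await?;
    transaction.commit().await?;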
tests/workspace/import_test.rs

@@ -4,10 +4,11 @@ use collab_folder::ViewLayout;
 use shared_entity::dto::import_dto::ImportTaskStatus;
 use std::path::PathBuf;
+use std::time::Duration;
 
 #[tokio::test]
 async fn import_blog_post_test() {
   // Step 1: Import the blog post zip
-  let (client, imported_workspace_id) = import_zip("blog_post.zip").await;
+  let (client, imported_workspace_id) = import_notion_zip_until_complete("blog_post.zip").await;
 
   // Step 2: Fetch the folder and views
   let folder = client.get_folder(&imported_workspace_id).await;

@@ -77,7 +78,7 @@ async fn import_blog_post_test() {
 
 #[tokio::test]
 async fn import_project_and_task_zip_test() {
-  let (client, imported_workspace_id) = import_zip("project&task.zip").await;
+  let (client, imported_workspace_id) = import_notion_zip_until_complete("project&task.zip").await;
   let folder = client.get_folder(&imported_workspace_id).await;
   let workspace_database = client.get_workspace_database(&imported_workspace_id).await;
   let space_views = folder.get_views_belong_to(&imported_workspace_id);

@@ -148,9 +149,35 @@ async fn import_project_and_task_zip_test() {
   }
 }
 
-async fn import_zip(name: &str) -> (TestClient, String) {
+#[tokio::test]
+async fn imported_workspace_do_not_become_latest_visit_workspace_test() {
+  let client = TestClient::new_user().await;
+  let file_path = PathBuf::from("tests/workspace/asset/blog_post.zip".to_string());
+  client.api_client.import_file(&file_path).await.unwrap();
+
+  // When importing a Notion file, a new task is spawned to create a workspace for the imported data.
+  // However, the workspace should remain hidden until the import is completed successfully.
+  let user_workspace = client.get_user_workspace_info().await;
+  let visiting_workspace_id = user_workspace.visiting_workspace.workspace_id;
+  assert_eq!(user_workspace.workspaces.len(), 1);
+  assert_eq!(
+    user_workspace.visiting_workspace.workspace_id,
+    user_workspace.workspaces[0].workspace_id
+  );
+
+  wait_until_import_complete(&client).await;
+
+  // after the workspace was imported, then the workspace should be visible
+  let user_workspace = client.get_user_workspace_info().await;
+  assert_eq!(user_workspace.workspaces.len(), 2);
+  assert_eq!(
+    user_workspace.visiting_workspace.workspace_id,
+    visiting_workspace_id,
+  );
+}
+
+async fn import_notion_zip_until_complete(name: &str) -> (TestClient, String) {
   let client = TestClient::new_user().await;
   let file_path = PathBuf::from(format!("tests/workspace/asset/{name}"));
   client.api_client.import_file(&file_path).await.unwrap();
   let default_workspace_id = client.workspace_id().await;

@@ -164,6 +191,22 @@ async fn import_zip(name: &str) -> (TestClient, String) {
   assert_eq!(tasks.len(), 1);
   assert_eq!(tasks[0].status, ImportTaskStatus::Pending);
 
+  wait_until_import_complete(&client).await;
+
+  // after the import task is completed, the new workspace should be visible
+  let workspaces = client.api_client.get_workspaces().await.unwrap();
+  assert_eq!(workspaces.len(), 2);
+
+  let imported_workspace = workspaces
+    .into_iter()
+    .find(|workspace| workspace.workspace_id.to_string() != default_workspace_id)
+    .expect("Failed to find imported workspace");
+
+  let imported_workspace_id = imported_workspace.workspace_id.to_string();
+  (client, imported_workspace_id)
+}
+
+async fn wait_until_import_complete(client: &TestClient) {
   let mut task_completed = false;
   let max_retries = 12;
   let mut retries = 0;

@@ -182,16 +225,4 @@ async fn import_zip(name: &str) -> (TestClient, String) {
     task_completed,
     "The import task was not completed within the expected time."
   );
-
-  // after the import task is completed, the new workspace should be visible
-  let workspaces = client.api_client.get_workspaces().await.unwrap();
-  assert_eq!(workspaces.len(), 2);
-
-  let imported_workspace = workspaces
-    .into_iter()
-    .find(|workspace| workspace.workspace_id.to_string() != default_workspace_id)
-    .expect("Failed to find imported workspace");
-
-  let imported_workspace_id = imported_workspace.workspace_id.to_string();
-  (client, imported_workspace_id)
 }
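The body of the polling loop inside wait_until_import_complete is unchanged context elided by the two hunks above. A rough sketch of such a loop, assuming the imports already present in this test file, a hypothetical get_import_list() accessor, and a Completed status variant (neither accessor nor variant is confirmed by this diff):

    async fn wait_until_import_complete(client: &TestClient) {
      let mut task_completed = false;
      let max_retries = 12;
      let mut retries = 0;
      while !task_completed && retries < max_retries {
        // Fixed sleep between polls; the actual interval is not shown in the diff.
        tokio::time::sleep(Duration::from_secs(10)).await;
        retries += 1;
        // Hypothetical accessor: the diff only shows that import tasks expose a
        // `status` field compared against ImportTaskStatus variants.
        let tasks = client.api_client.get_import_list().await.unwrap();
        if tasks.iter().all(|task| task.status == ImportTaskStatus::Completed) {
          task_completed = true;
        }
      }
      assert!(
        task_completed,
        "The import task was not completed within the expected time."
      );
    }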