tests for the delete endpoint are now all passing

filipriec
2025-06-25 09:04:58 +02:00
parent 560d8b7234
commit d346670839
3 changed files with 59 additions and 44 deletions
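The fixtures in this diff stop issuing raw SQL and instead go through the service-layer handlers (post_table_definition, post_table_data, delete_table_data). As a rough illustration of how a test might consume the reworked existing_record fixture — the handler and request names come from the diff, while the rstest/tokio attributes and the test name are assumed wiring, not part of this commit:

// Sketch only: a happy-path delete test shaped after the fixtures below.
// `existing_record` and `delete_table_data` are from this diff; the attribute
// wiring and test name are assumptions.
#[rstest]
#[tokio::test]
async fn delete_existing_record_succeeds(
    #[future] existing_record: (PgPool, String, String, i64),
) {
    let (pool, profile_name, table_name, record_id) = existing_record.await;

    let delete_req = DeleteTableDataRequest {
        profile_name,
        table_name,
        record_id,
    };

    // The delete handler takes the pool and the request, mirroring the fixture code.
    delete_table_data(&pool, delete_req)
        .await
        .expect("delete of an existing record should succeed");
}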

View File

@@ -62,7 +62,7 @@ async fn closed_pool(#[future] pool: PgPool) -> PgPool {
 #[fixture]
 async fn existing_profile(#[future] pool: PgPool) -> (PgPool, String, i64) {
     let pool = pool.await;
-    let profile_name = format!("TestProfile_{}", Utc::now().timestamp_nanos_opt().unwrap_or_default());
+    let profile_name = format!("testprofile_{}", Utc::now().timestamp_nanos_opt().unwrap_or_default());
     // FIX: The table is `schemas`, not `profiles`.
     let profile = sqlx::query!(
@@ -79,36 +79,24 @@ async fn existing_profile(#[future] pool: PgPool) -> (PgPool, String, i64) {
 async fn existing_table(
     #[future] existing_profile: (PgPool, String, i64),
 ) -> (PgPool, String, i64, String) {
-    let (pool, profile_name, schema_id) = existing_profile.await; // Renamed for clarity
+    let (pool, profile_name, schema_id) = existing_profile.await;
     let table_name = format!("test_table_{}", Utc::now().timestamp_nanos_opt().unwrap_or_default());
-    let columns = json!([
-        { "name": "id", "type": "BIGSERIAL", "primary_key": true },
-        { "name": "deleted", "type": "BOOLEAN", "default": false }
-    ]);
-    let indexes = json!([]);
-    // FIX: The column is `schema_id`, not `profile_id`.
-    sqlx::query!(
-        "INSERT INTO table_definitions (schema_id, table_name, columns, indexes) VALUES ($1, $2, $3, $4)",
-        schema_id,
-        table_name,
-        columns,
-        indexes
-    )
-    .execute(&pool)
-    .await
-    .unwrap();
-    // Create the physical schema and table
-    sqlx::query(&format!("CREATE SCHEMA IF NOT EXISTS \"{}\"", profile_name))
-        .execute(&pool).await.unwrap();
-    let create_table = format!(
-        r#"CREATE TABLE "{}"."{}" (id BIGSERIAL PRIMARY KEY, deleted BOOLEAN NOT NULL DEFAULT false)"#,
-        profile_name, table_name
-    );
-    sqlx::query(&create_table).execute(&pool).await.unwrap();
+    // Use post_table_definition instead of manual table creation
+    let table_def_request = PostTableDefinitionRequest {
+        profile_name: profile_name.clone(),
+        table_name: table_name.clone(),
+        columns: vec![
+            TableColumnDefinition {
+                name: "test_data".into(),
+                field_type: "text".into(),
+            }
+        ],
+        indexes: vec![],
+        links: vec![],
+    };
+    post_table_definition(&pool, table_def_request).await.unwrap();
     (pool, profile_name, schema_id, table_name)
 }
@@ -117,13 +105,20 @@ async fn existing_record(
     #[future] existing_table: (PgPool, String, i64, String),
 ) -> (PgPool, String, String, i64) {
     let (pool, profile_name, _schema_id, table_name) = existing_table.await;
-    let query = format!(
-        "INSERT INTO \"{}\".\"{}\" (deleted) VALUES (false) RETURNING id",
-        profile_name, table_name
-    );
-    let row = sqlx::query(&query).fetch_one(&pool).await.unwrap();
-    let id: i64 = row.get("id");
-    (pool, profile_name, table_name, id)
+    let mut data = HashMap::new();
+    data.insert("test_data".to_string(), string_to_proto_value("Test Record"));
+    let post_req = PostTableDataRequest {
+        profile_name: profile_name.clone(),
+        table_name: table_name.clone(),
+        data,
+    };
+    let (indexer_tx, _indexer_rx) = mpsc::channel(1);
+    let response = post_table_data(&pool, post_req, &indexer_tx).await.unwrap();
+    (pool, profile_name, table_name, response.inserted_id)
 }
 #[fixture]
@@ -131,13 +126,30 @@ async fn existing_deleted_record(
     #[future] existing_table: (PgPool, String, i64, String),
 ) -> (PgPool, String, String, i64) {
     let (pool, profile_name, _schema_id, table_name) = existing_table.await;
-    let query = format!(
-        "INSERT INTO \"{}\".\"{}\" (deleted) VALUES (true) RETURNING id",
-        profile_name, table_name
-    );
-    let row = sqlx::query(&query).fetch_one(&pool).await.unwrap();
-    let id: i64 = row.get("id");
-    (pool, profile_name, table_name, id)
+    // First create a record
+    let mut data = HashMap::new();
+    data.insert("test_data".to_string(), string_to_proto_value("Test Deleted Record"));
+    let post_req = PostTableDataRequest {
+        profile_name: profile_name.clone(),
+        table_name: table_name.clone(),
+        data,
+    };
+    let (indexer_tx, _indexer_rx) = mpsc::channel(1);
+    let response = post_table_data(&pool, post_req, &indexer_tx).await.unwrap();
+    let record_id = response.inserted_id;
+    // Then delete it
+    let delete_req = DeleteTableDataRequest {
+        profile_name: profile_name.clone(),
+        table_name: table_name.clone(),
+        record_id,
+    };
+    delete_table_data(&pool, delete_req).await.unwrap();
+    (pool, profile_name, table_name, record_id)
 }
 // New fixture for advanced tests

View File

@@ -153,7 +153,10 @@ async fn test_delete_fails_if_physical_table_is_missing(
     // Arrange: Create definitions, then manually drop the physical table to create a state mismatch.
     let context = advanced_delete_context.await;
     let qualified_table = format!("\"{}\".\"{}\"", context.profile_name, context.category_table);
-    sqlx::query(&format!("DROP TABLE {}", qualified_table)).execute(&context.pool).await.unwrap();
+    sqlx::query(&format!("DROP TABLE {} CASCADE", qualified_table))
+        .execute(&context.pool)
+        .await
+        .unwrap();
     // Act: Attempt to delete a record from the logically-defined but physically-absent table.
     let delete_req = DeleteTableDataRequest {

View File

@@ -24,7 +24,7 @@ async fn closed_pool(#[future] pool: PgPool) -> PgPool {
 #[fixture]
 async fn profile(#[future] pool: PgPool) -> (PgPool, String, i64) {
     let pool = pool.await;
-    let profile_name = format!("TestProfile_{}", Utc::now().timestamp_nanos_opt().unwrap_or_default());
+    let profile_name = format!("testprofile_{}", Utc::now().timestamp_nanos_opt().unwrap_or_default());
     let profile = sqlx::query!(
         "INSERT INTO profiles (name) VALUES ($1) RETURNING id",