The tests for the PUT endpoint are now in place and passing, but the implementation is not yet what I would love.
@@ -41,14 +41,17 @@ impl TablesData for TablesDataService {
        Ok(Response::new(response))
    }

    // You will later apply the same pattern to put_table_data...
    async fn put_table_data(
        &self,
        request: Request<PutTableDataRequest>,
    ) -> Result<Response<PutTableDataResponse>, Status> {
        let request = request.into_inner();
        // TODO: Update put_table_data handler to accept and use indexer_tx
        let response = put_table_data(&self.db_pool, request).await?;
        let response = put_table_data(
            &self.db_pool,
            request,
            &self.indexer_tx,
        )
        .await?;
        Ok(Response::new(response))
    }

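// Wiring sketch (assumed, not shown in this diff): the service is expected to own the
// sending half of an mpsc channel whose receiver drives the search indexer. Struct and
// field names below are assumptions based on the handler code above.
//
//     let (indexer_tx, mut indexer_rx) = tokio::sync::mpsc::channel::<IndexCommand>(1024);
//     tokio::spawn(async move {
//         while let Some(cmd) = indexer_rx.recv().await {
//             // apply cmd to the search index
//         }
//     });
//     let service = TablesDataService { db_pool, indexer_tx };
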
@@ -1,26 +1,42 @@
// src/tables_data/handlers/put_table_data.rs

use tonic::Status;
use sqlx::{PgPool, Arguments, Postgres};
use sqlx::{PgPool, Arguments};
use sqlx::postgres::PgArguments;
use chrono::{DateTime, Utc};
use common::proto::multieko2::tables_data::{PutTableDataRequest, PutTableDataResponse};
use crate::shared::schema_qualifier::qualify_table_name_for_data;
use std::collections::HashMap;
use std::sync::Arc;
use prost_types::value::Kind;
use rust_decimal::Decimal;
use std::str::FromStr;

use crate::steel::server::execution::{self, Value};
use crate::steel::server::functions::SteelContext;
use crate::indexer::{IndexCommand, IndexCommandData};
use tokio::sync::mpsc;
use tracing::error;

pub async fn put_table_data(
    db_pool: &PgPool,
    request: PutTableDataRequest,
    indexer_tx: &mpsc::Sender<IndexCommand>,
) -> Result<PutTableDataResponse, Status> {
    let profile_name = request.profile_name;
    let table_name = request.table_name;
    let record_id = request.id;

    // If no data is provided to update, it's an invalid request.
    // An update with no fields is a no-op; we can return success early.
    if request.data.is_empty() {
        return Err(Status::invalid_argument("No fields provided to update."));
        return Ok(PutTableDataResponse {
            success: true,
            message: "No fields to update.".into(),
            updated_id: record_id,
        });
    }

    // Lookup profile
    // --- Start of logic copied and adapted from post_table_data ---

    let schema = sqlx::query!(
        "SELECT id FROM schemas WHERE name = $1",
        profile_name
@@ -31,7 +47,6 @@ pub async fn put_table_data(

    let schema_id = schema.ok_or_else(|| Status::not_found("Profile not found"))?.id;

    // Lookup table_definition
    let table_def = sqlx::query!(
        r#"SELECT id, columns FROM table_definitions
        WHERE schema_id = $1 AND table_name = $2"#,
@@ -44,7 +59,6 @@ pub async fn put_table_data(

    let table_def = table_def.ok_or_else(|| Status::not_found("Table not found"))?;

    // Parse columns from JSON
    let columns_json: Vec<String> = serde_json::from_value(table_def.columns.clone())
        .map_err(|e| Status::internal(format!("Column parsing error: {}", e)))?;

@@ -59,7 +73,6 @@ pub async fn put_table_data(
        columns.push((name, sql_type));
    }

    // Get all foreign key columns for this table (needed for validation)
    let fk_columns = sqlx::query!(
        r#"SELECT ltd.table_name
        FROM table_definition_links tdl
@@ -73,20 +86,85 @@ pub async fn put_table_data(

    let mut system_columns = vec!["deleted".to_string()];
    for fk in fk_columns {
        let base_name = fk.table_name.split_once('_').map_or(fk.table_name.as_str(), |(_, rest)| rest);
        system_columns.push(format!("{}_id", base_name));
        system_columns.push(format!("{}_id", fk.table_name));
    }
    let system_columns_set: std::collections::HashSet<_> = system_columns.iter().map(|s| s.as_str()).collect();
    let user_columns: Vec<&String> = columns.iter().map(|(name, _)| name).collect();

    // Validate input columns
    let system_columns_set: std::collections::HashSet<_> = system_columns.iter().map(|s| s.as_str()).collect();

    let user_columns: Vec<&String> = columns.iter().map(|(name, _)| name).collect();
    for key in request.data.keys() {
        if !system_columns_set.contains(key.as_str()) && !user_columns.contains(&key) {
        if !system_columns_set.contains(key.as_str()) &&
           !user_columns.contains(&&key.to_string()) {
            return Err(Status::invalid_argument(format!("Invalid column: {}", key)));
        }
    }

    // Prepare SQL parameters
    let mut string_data_for_scripts = HashMap::new();
    for (key, proto_value) in &request.data {
        let str_val = match &proto_value.kind {
            Some(Kind::StringValue(s)) => {
                let trimmed = s.trim();
                if trimmed.is_empty() {
                    continue;
                }
                trimmed.to_string()
            },
            Some(Kind::NumberValue(n)) => n.to_string(),
            Some(Kind::BoolValue(b)) => b.to_string(),
            Some(Kind::NullValue(_)) | None => continue,
            Some(Kind::StructValue(_)) | Some(Kind::ListValue(_)) => {
                return Err(Status::invalid_argument(format!("Unsupported type for script validation in column '{}'", key)));
            }
        };
        string_data_for_scripts.insert(key.clone(), str_val);
    }

    let scripts = sqlx::query!(
        "SELECT target_column, script FROM table_scripts WHERE table_definitions_id = $1",
        table_def.id
    )
    .fetch_all(db_pool)
    .await
    .map_err(|e| Status::internal(format!("Failed to fetch scripts: {}", e)))?;

    for script_record in scripts {
        let target_column = script_record.target_column;

        if let Some(user_value) = string_data_for_scripts.get(&target_column) {
            let context = SteelContext {
                current_table: table_name.clone(),
                schema_id,
                schema_name: profile_name.clone(),
                row_data: string_data_for_scripts.clone(),
                db_pool: Arc::new(db_pool.clone()),
            };

            let script_result = execution::execute_script(
                script_record.script,
                "STRINGS",
                Arc::new(db_pool.clone()),
                context,
            )
            .map_err(|e| Status::invalid_argument(
                format!("Script execution failed for '{}': {}", target_column, e)
            ))?;

            let Value::Strings(mut script_output) = script_result else {
                return Err(Status::internal("Script must return string values"));
            };

            let expected_value = script_output.pop()
                .ok_or_else(|| Status::internal("Script returned no values"))?;

            if user_value != &expected_value {
                return Err(Status::invalid_argument(format!(
                    "Validation failed for column '{}': Expected '{}', Got '{}'",
                    target_column, expected_value, user_value
                )));
            }
        }
    }

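    // Example of the validation above (illustrative values, not from the diff): if a table
    // script targets column 'psc' and evaluates to "12345" while the request supplies "99999",
    // the update is rejected with InvalidArgument:
    //   "Validation failed for column 'psc': Expected '12345', Got '99999'"
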
    let mut params = PgArguments::default();
    let mut set_clauses = Vec::new();
    let mut param_idx = 1;
@@ -105,97 +183,177 @@ pub async fn put_table_data(
            .ok_or_else(|| Status::invalid_argument(format!("Column not found: {}", col)))?
        };

        // A provided value cannot be null or empty in a PUT request.
        // To clear a field, it should be set to an empty string "" for text,
        // or a specific value for other types if needed (though typically not done).
        // For now, we reject nulls.
        let kind = proto_value.kind.ok_or_else(|| {
            Status::invalid_argument(format!("Value for column '{}' cannot be empty in a PUT request. To clear a text field, send an empty string.", col))
        })?;
        let kind = match &proto_value.kind {
            None | Some(Kind::NullValue(_)) => {
                match sql_type {
                    "BOOLEAN" => params.add(None::<bool>),
                    "TEXT" => params.add(None::<String>),
                    "TIMESTAMPTZ" => params.add(None::<DateTime<Utc>>),
                    "BIGINT" => params.add(None::<i64>),
                    "INTEGER" => params.add(None::<i32>),
                    s if s.starts_with("NUMERIC") => params.add(None::<Decimal>),
                    _ => return Err(Status::invalid_argument(format!("Unsupported type for null value: {}", sql_type))),
                }.map_err(|e| Status::internal(format!("Failed to add null parameter for {}: {}", col, e)))?;

        match sql_type {
            "TEXT" | "VARCHAR(15)" | "VARCHAR(255)" => {
                if let Kind::StringValue(value) = kind {
                    params.add(value)
                        .map_err(|e| Status::internal(format!("Failed to add text parameter for {}: {}", col, e)))?;
                set_clauses.push(format!("\"{}\" = ${}", col, param_idx));
                param_idx += 1;
                continue;
            }
            Some(k) => k,
        };

        if sql_type == "TEXT" {
            if let Kind::StringValue(value) = kind {
                let trimmed_value = value.trim();

                if trimmed_value.is_empty() {
                    params.add(None::<String>).map_err(|e| Status::internal(format!("Failed to add null parameter for {}: {}", col, e)))?;
                } else {
                    return Err(Status::invalid_argument(format!("Expected string for column '{}'", col)));
                }
            },
            "BOOLEAN" => {
                if let Kind::BoolValue(val) = kind {
                    params.add(val)
                        .map_err(|e| Status::internal(format!("Failed to add boolean parameter for {}: {}", col, e)))?;
                } else {
                    return Err(Status::invalid_argument(format!("Expected boolean for column '{}'", col)));
                }
            },
            "TIMESTAMPTZ" => {
                if let Kind::StringValue(value) = kind {
                    let dt = DateTime::parse_from_rfc3339(&value)
                        .map_err(|_| Status::invalid_argument(format!("Invalid timestamp for {}", col)))?;
                    params.add(dt.with_timezone(&Utc))
                        .map_err(|e| Status::internal(format!("Failed to add timestamp parameter for {}: {}", col, e)))?;
                } else {
                    return Err(Status::invalid_argument(format!("Expected ISO 8601 string for column '{}'", col)));
                }
            },
            "BIGINT" => {
                if let Kind::NumberValue(val) = kind {
                    if val.fract() != 0.0 {
                        return Err(Status::invalid_argument(format!("Expected integer for column '{}', but got a float", col)));
                    if col == "telefon" && trimmed_value.len() > 15 {
                        return Err(Status::internal(format!("Value too long for {}", col)));
                    }
                    params.add(val as i64)
                        .map_err(|e| Status::internal(format!("Failed to add integer parameter for {}: {}", col, e)))?;
                } else {
                    return Err(Status::invalid_argument(format!("Expected number for column '{}'", col)));
                    params.add(trimmed_value).map_err(|e| Status::invalid_argument(format!("Failed to add text parameter for {}: {}", col, e)))?;
                }
            },
            _ => return Err(Status::invalid_argument(format!("Unsupported type {}", sql_type))),
            } else {
                return Err(Status::invalid_argument(format!("Expected string for column '{}'", col)));
            }
        } else if sql_type == "BOOLEAN" {
            if let Kind::BoolValue(val) = kind {
                params.add(val).map_err(|e| Status::invalid_argument(format!("Failed to add boolean parameter for {}: {}", col, e)))?;
            } else {
                return Err(Status::invalid_argument(format!("Expected boolean for column '{}'", col)));
            }
        } else if sql_type == "TIMESTAMPTZ" {
            if let Kind::StringValue(value) = kind {
                let dt = DateTime::parse_from_rfc3339(value).map_err(|_| Status::invalid_argument(format!("Invalid timestamp for {}", col)))?;
                params.add(dt.with_timezone(&Utc)).map_err(|e| Status::invalid_argument(format!("Failed to add timestamp parameter for {}: {}", col, e)))?;
            } else {
                return Err(Status::invalid_argument(format!("Expected ISO 8601 string for column '{}'", col)));
            }
        } else if sql_type == "BIGINT" {
            if let Kind::NumberValue(val) = kind {
                if val.fract() != 0.0 {
                    return Err(Status::invalid_argument(format!("Expected integer for column '{}', but got a float", col)));
                }
                let as_i64 = *val as i64;
                if (as_i64 as f64) != *val {
                    return Err(Status::invalid_argument(format!("Integer value out of range for BIGINT column '{}'", col)));
                }
                params.add(as_i64).map_err(|e| Status::invalid_argument(format!("Failed to add bigint parameter for {}: {}", col, e)))?;
            } else {
                return Err(Status::invalid_argument(format!("Expected number for column '{}'", col)));
            }
        } else if sql_type == "INTEGER" {
            if let Kind::NumberValue(val) = kind {
                if val.fract() != 0.0 {
                    return Err(Status::invalid_argument(format!("Expected integer for column '{}', but got a float", col)));
                }
                let as_i32 = *val as i32;
                if (as_i32 as f64) != *val {
                    return Err(Status::invalid_argument(format!("Integer value out of range for INTEGER column '{}'", col)));
                }
                params.add(as_i32).map_err(|e| Status::invalid_argument(format!("Failed to add integer parameter for {}: {}", col, e)))?;
            } else {
                return Err(Status::invalid_argument(format!("Expected number for column '{}'", col)));
            }
        } else if sql_type.starts_with("NUMERIC") {
            let decimal_val = match kind {
                Kind::StringValue(s) => {
                    let trimmed = s.trim();
                    if trimmed.is_empty() {
                        None
                    } else {
                        Some(Decimal::from_str(trimmed).map_err(|_| {
                            Status::invalid_argument(format!(
                                "Invalid decimal string format for column '{}': {}",
                                col, s
                            ))
                        })?)
                    }
                }
                _ => {
                    return Err(Status::invalid_argument(format!(
                        "Expected a string representation for decimal column '{}', but received a different type.",
                        col
                    )));
                }
            };
            params.add(decimal_val).map_err(|e| {
                Status::invalid_argument(format!(
                    "Failed to add decimal parameter for {}: {}",
                    col, e
                ))
            })?;
        } else {
            return Err(Status::invalid_argument(format!("Unsupported type {}", sql_type)));
        }

        set_clauses.push(format!("\"{}\" = ${}", col, param_idx));
        param_idx += 1;
    }

    params.add(record_id)
        .map_err(|e| Status::internal(format!("Failed to add record_id parameter: {}", e)))?;
    // --- End of copied logic ---

    let qualified_table = qualify_table_name_for_data(
    if set_clauses.is_empty() {
        return Ok(PutTableDataResponse {
            success: true,
            message: "No valid fields to update after processing.".into(),
            updated_id: record_id,
        });
    }

    let qualified_table = crate::shared::schema_qualifier::qualify_table_name_for_data(
        db_pool,
        &profile_name,
        &table_name,
    )
    .await?;

    let set_clause = set_clauses.join(", ");
    let sql = format!(
        "UPDATE {} SET {} WHERE id = ${} AND deleted = FALSE RETURNING id",
        "UPDATE {} SET {} WHERE id = ${} RETURNING id",
        qualified_table,
        set_clause,
        param_idx
    );

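    // For illustration (assumed inputs, not part of this diff): with columns `firma` and
    // `telefon` provided and record_id bound as the last parameter, the generated statement
    // looks roughly like
    //   UPDATE "schema"."table" SET "firma" = $1, "telefon" = $2 WHERE id = $3 RETURNING id
    // (the older variant additionally filtered on AND deleted = FALSE).
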
    let result = sqlx::query_scalar_with::<Postgres, i64, _>(&sql, params)
    params.add(record_id).map_err(|e| Status::internal(format!("Failed to add record_id parameter: {}", e)))?;

    let result = sqlx::query_scalar_with::<_, i64, _>(&sql, params)
        .fetch_optional(db_pool)
        .await;

    match result {
        Ok(Some(updated_id)) => Ok(PutTableDataResponse {
            success: true,
            message: "Data updated successfully".into(),
            updated_id,
        }),
        Ok(None) => Err(Status::not_found("Record not found or already deleted")),
    let updated_id = match result {
        Ok(Some(id)) => id,
        Ok(None) => return Err(Status::not_found("Record not found")),
        Err(e) => {
            if let Some(db_err) = e.as_database_error() {
                if db_err.code() == Some(std::borrow::Cow::Borrowed("42P01")) {
                    return Err(Status::internal(format!(
                        "Table '{}' is defined but does not physically exist in the database as {}",
                        table_name, qualified_table
                if db_err.code() == Some(std::borrow::Cow::Borrowed("22P02")) ||
                   db_err.code() == Some(std::borrow::Cow::Borrowed("22003")) {
                    return Err(Status::invalid_argument(format!(
                        "Numeric field overflow or invalid format. Check precision and scale. Details: {}", db_err.message()
                    )));
                }
            }
            Err(Status::internal(format!("Update failed: {}", e)))
            return Err(Status::internal(format!("Update failed: {}", e)));
        }
    };

    let command = IndexCommand::AddOrUpdate(IndexCommandData {
        table_name: table_name.clone(),
        row_id: updated_id,
    });

    if let Err(e) = indexer_tx.send(command).await {
        error!(
            "CRITICAL: DB update for table '{}' (id: {}) succeeded but failed to queue for indexing: {}. Search index is now inconsistent.",
            table_name, updated_id, e
        );
    }

    Ok(PutTableDataResponse {
        success: true,
        message: "Data updated successfully".into(),
        updated_id,
    })
}

@@ -1,5 +1,5 @@
// tests/mod.rs
// pub mod adresar;
// pub mod tables_data;
pub mod tables_data;
pub mod common;
pub mod table_definition;
// pub mod table_definition;

@@ -1,6 +1,6 @@
// tests/tables_data/mod.rs
pub mod post_table_data_test;
// pub mod put_table_data_test;
// pub mod post_table_data_test;
pub mod put_table_data_test;
// pub mod delete_table_data_test;
// pub mod get_table_data_test;
// pub mod get_table_data_count_test;

@@ -1,254 +1,540 @@
// tests/tables_data/handlers/put_table_data_test.rs

use rstest::{fixture, rstest};
use sqlx::PgPool;
use sqlx::{PgPool, Row};
use std::collections::HashMap;
use common::proto::multieko2::tables_data::{PutTableDataRequest, PutTableDataResponse};
use prost_types::{value::Kind, Value};
use common::proto::multieko2::table_definition::{
    PostTableDefinitionRequest, ColumnDefinition as TableColumnDefinition,
};
use common::proto::multieko2::tables_data::{
    PostTableDataRequest, PutTableDataRequest,
};
use server::table_definition::handlers::post_table_definition;
// The post_table_data handler is used in the "Arrange" step of each test to create initial data.
use server::tables_data::handlers::post_table_data;
// The put_table_data handler is the function we are testing.
use server::tables_data::handlers::put_table_data;
use crate::common::setup_test_db;
use tonic;
use chrono::Utc;
use tokio::sync::mpsc;
use server::indexer::IndexCommand;
use rand::Rng;
use rand::distr::Alphanumeric;

// Fixtures
#[fixture]
async fn pool() -> PgPool {
    setup_test_db().await
// ========= Test Helpers =========

fn generate_unique_id() -> String {
    rand::thread_rng()
        .sample_iter(&Alphanumeric)
        .take(8)
        .map(char::from)
        .collect::<String>()
        .to_lowercase()
}

fn string_to_proto_value(s: &str) -> Value {
    Value {
        kind: Some(Kind::StringValue(s.to_string())),
    }
}

fn bool_to_proto_value(b: bool) -> Value {
    Value {
        kind: Some(Kind::BoolValue(b)),
    }
}

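// Example usage of the helpers above (mirrors the Arrange steps in the tests below;
// values are illustrative):
//     let mut data = HashMap::new();
//     data.insert("firma".to_string(), string_to_proto_value("Original Company"));
//     data.insert("deleted".to_string(), bool_to_proto_value(false));
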
async fn create_adresar_table(
    pool: &PgPool,
    table_name: &str,
    profile_name: &str,
) -> Result<(), tonic::Status> {
    let table_def_request = PostTableDefinitionRequest {
        profile_name: profile_name.into(),
        table_name: table_name.into(),
        columns: vec![
            TableColumnDefinition { name: "firma".into(), field_type: "text".into() },
            TableColumnDefinition { name: "kz".into(), field_type: "text".into() },
            TableColumnDefinition { name: "ulica".into(), field_type: "text".into() },
            TableColumnDefinition { name: "mesto".into(), field_type: "text".into() },
            TableColumnDefinition { name: "telefon".into(), field_type: "text".into() },
        ],
        indexes: vec![],
        links: vec![],
    };
    post_table_definition(pool, table_def_request).await?;
    Ok(())
}

async fn create_test_indexer_channel(
) -> (mpsc::Sender<IndexCommand>, mpsc::Receiver<IndexCommand>) {
    mpsc::channel(100)
}

// Helper to create a record and return its ID for tests
async fn create_initial_record(
    context: &TestContext,
    initial_data: HashMap<String, Value>,
) -> i64 {
    let request = PostTableDataRequest {
        profile_name: context.profile_name.clone(),
        table_name: context.table_name.clone(),
        data: initial_data,
    };
    let response = post_table_data(&context.pool, request, &context.indexer_tx)
        .await
        .expect("Setup: Failed to create initial record");
    response.inserted_id
}

// ========= Fixtures =========

#[derive(Clone)]
struct TestContext {
    pool: PgPool,
    profile_name: String,
    table_name: String,
    indexer_tx: mpsc::Sender<IndexCommand>,
}

#[fixture]
async fn closed_pool(#[future] pool: PgPool) -> PgPool {
    let pool = pool.await;
    pool.close().await;
    pool
async fn test_context() -> TestContext {
    let pool = setup_test_db().await;
    let unique_id = generate_unique_id();
    let profile_name = format!("test_profile_{}", unique_id);
    let table_name = format!("adresar_test_{}", unique_id);
    create_adresar_table(&pool, &table_name, &profile_name)
        .await
        .expect("Failed to create test table");
    let (tx, mut rx) = mpsc::channel(100);
    // Drain receiver to prevent blocking
    tokio::spawn(async move { while rx.recv().await.is_some() {} });
    TestContext { pool, profile_name, table_name, indexer_tx: tx }
}

#[fixture]
async fn existing_record(#[future] pool: PgPool) -> (PgPool, i64) {
    let pool = pool.await;
// ========= Update Tests (Converted from Post Tests) =========

    // Create a test record in the database
    let record = sqlx::query!(
        r#"
        INSERT INTO "2025_adresar" (
            firma, kz, drc, ulica, psc, mesto, stat, banka, ucet,
            skladm, ico, kontakt, telefon, skladu, fax, deleted
        )
        VALUES (
            'Original Company', 'Original KZ', 'Original DRC', 'Original Street',
            '12345', 'Original City', 'Original Country', 'Original Bank',
            'Original Account', 'Original SkladM', 'Original ICO',
            'Original Contact', '+421123456789', 'Original SkladU', 'Original Fax',
            false
        )
        RETURNING id
        "#
    )
    .fetch_one(&pool)
    .await
    .unwrap();
#[rstest]
#[tokio::test]
async fn test_update_table_data_success(#[future] test_context: TestContext) {
    // Arrange
    let context = test_context.await;
    let mut initial_data = HashMap::new();
    initial_data.insert("firma".to_string(), string_to_proto_value("Original Company"));
    initial_data.insert("ulica".to_string(), string_to_proto_value("Original Street"));
    let record_id = create_initial_record(&context, initial_data).await;

    (pool, record.id)
}

#[fixture]
fn valid_request_template() -> HashMap<String, String> {
    let mut map = HashMap::new();
    map.insert("firma".into(), "Updated Company".into());
    map.insert("kz".into(), "Updated KZ".into());
    map.insert("drc".into(), "Updated DRC".into());
    map.insert("ulica".into(), "Updated Street".into());
    map.insert("psc".into(), "67890".into());
    map.insert("mesto".into(), "Updated City".into());
    map.insert("stat".into(), "Updated Country".into());
    map.insert("banka".into(), "Updated Bank".into());
    map.insert("ucet".into(), "987654321".into());
    map.insert("skladm".into(), "Updated SkladM".into());
    map.insert("ico".into(), "87654321".into());
    map.insert("kontakt".into(), "Jane Doe".into());
    map.insert("telefon".into(), "+421987654321".into());
    map.insert("skladu".into(), "Updated SkladU".into());
    map.insert("fax".into(), "+421987654300".into());
    map
}

// Helper to check database state
async fn assert_response_matches(pool: &PgPool, id: i64, response: &PutTableDataResponse) {
    let db_record = sqlx::query!(r#"SELECT * FROM "2025_adresar" WHERE id = $1"#, id)
        .fetch_one(pool)
    // Act
    let mut update_data = HashMap::new();
    update_data.insert("firma".to_string(), string_to_proto_value("Updated Company"));
    let request = PutTableDataRequest {
        profile_name: context.profile_name.clone(),
        table_name: context.table_name.clone(),
        id: record_id,
        data: update_data,
    };
    let response = put_table_data(&context.pool, request, &context.indexer_tx)
        .await
        .unwrap();

    assert_eq!(db_record.firma, "Updated Company");
    assert_eq!(db_record.kz.unwrap_or_default(), "Updated KZ");
    assert_eq!(db_record.drc.unwrap_or_default(), "Updated DRC");
    assert_eq!(db_record.ulica.unwrap_or_default(), "Updated Street");
    assert_eq!(db_record.psc.unwrap_or_default(), "67890");
    assert_eq!(db_record.mesto.unwrap_or_default(), "Updated City");
    assert_eq!(db_record.stat.unwrap_or_default(), "Updated Country");
    assert_eq!(db_record.banka.unwrap_or_default(), "Updated Bank");
    assert_eq!(db_record.ucet.unwrap_or_default(), "987654321");
    assert_eq!(db_record.skladm.unwrap_or_default(), "Updated SkladM");
    assert_eq!(db_record.ico.unwrap_or_default(), "87654321");
    assert_eq!(db_record.kontakt.unwrap_or_default(), "Jane Doe");
    assert_eq!(db_record.telefon.unwrap_or_default(), "+421987654321");
    assert_eq!(db_record.skladu.unwrap_or_default(), "Updated SkladU");
    assert_eq!(db_record.fax.unwrap_or_default(), "+421987654300");
    assert!(!db_record.deleted, "Record should not be deleted");
}

// Tests
#[rstest]
#[tokio::test]
async fn test_update_table_data_success(
    #[future] existing_record: (PgPool, i64),
    valid_request_template: HashMap<String, String>,
) {
    let (pool, id) = existing_record.await;

    let request = PutTableDataRequest {
        profile_name: "default".into(),
        table_name: "2025_adresar".into(),
        id,
        data: valid_request_template,
    };

    let response = put_table_data(&pool, request).await.unwrap();

    // Assert
    assert!(response.success);
    assert_eq!(response.message, "Data updated successfully");
    assert_eq!(response.updated_id, id);
    assert_response_matches(&pool, id, &response).await;
    assert_eq!(response.updated_id, record_id);

    let row = sqlx::query(&format!(
        r#"SELECT firma, ulica FROM "{}"."{}" WHERE id = $1"#,
        context.profile_name, context.table_name
    ))
    .bind(record_id)
    .fetch_one(&context.pool)
    .await
    .unwrap();

    let firma: String = row.get("firma");
    let ulica: String = row.get("ulica");
    assert_eq!(firma, "Updated Company");
    assert_eq!(ulica, "Original Street"); // Should be unchanged
}

#[rstest]
#[tokio::test]
async fn test_update_table_data_whitespace_trimming(
    #[future] existing_record: (PgPool, i64),
    valid_request_template: HashMap<String, String>,
    #[future] test_context: TestContext,
) {
    let (pool, id) = existing_record.await;

    let mut data = valid_request_template;
    data.insert("firma".into(), " Updated Company ".into());
    data.insert("telefon".into(), " +421987654321 ".into());
    // Arrange
    let context = test_context.await;
    let record_id = create_initial_record(
        &context,
        HashMap::from([(
            "firma".to_string(),
            string_to_proto_value("Original"),
        )]),
    )
    .await;

    // Act
    let mut update_data = HashMap::new();
    update_data.insert("firma".to_string(), string_to_proto_value(" Trimmed Co. "));
    update_data.insert("telefon".to_string(), string_to_proto_value(" 12345 "));
    let request = PutTableDataRequest {
        profile_name: "default".into(),
        table_name: "2025_adresar".into(),
        id,
        data,
        profile_name: context.profile_name.clone(),
        table_name: context.table_name.clone(),
        id: record_id,
        data: update_data,
    };

    let response = put_table_data(&pool, request).await.unwrap();

    // Verify trimmed values in response
    assert_eq!(response.message, "Data updated successfully");

    // Verify raw values in database
    let db_record = sqlx::query!(r#"SELECT firma, telefon FROM "2025_adresar" WHERE id = $1"#, id)
        .fetch_one(&pool)
    put_table_data(&context.pool, request, &context.indexer_tx)
        .await
        .unwrap();

    assert_eq!(db_record.firma, "Updated Company"); // Trimmed
    assert_eq!(db_record.telefon.unwrap(), "+421987654321"); // Trimmed
    // Assert
    let row = sqlx::query(&format!(
        r#"SELECT firma, telefon FROM "{}"."{}" WHERE id = $1"#,
        context.profile_name, context.table_name
    ))
    .bind(record_id)
    .fetch_one(&context.pool)
    .await
    .unwrap();
    let firma: String = row.get("firma");
    let telefon: Option<String> = row.get("telefon");
    assert_eq!(firma, "Trimmed Co.");
    assert_eq!(telefon.unwrap(), "12345");
}

#[rstest]
#[tokio::test]
async fn test_update_table_data_empty_required_field(
    #[future] existing_record: (PgPool, i64),
    valid_request_template: HashMap<String, String>,
async fn test_update_field_to_null_with_empty_string(
    #[future] test_context: TestContext,
) {
    let (pool, id) = existing_record.await;

    let mut data = valid_request_template;
    data.insert("firma".into(), "".into());
    // Arrange
    let context = test_context.await;
    let record_id = create_initial_record(
        &context,
        HashMap::from([(
            "telefon".to_string(),
            string_to_proto_value("555-1234"),
        )]),
    )
    .await;

    // Act
    let mut update_data = HashMap::new();
    update_data.insert("telefon".to_string(), string_to_proto_value(" ")); // Update to empty
    let request = PutTableDataRequest {
        profile_name: "default".into(),
        table_name: "2025_adresar".into(),
        id,
        data,
        profile_name: context.profile_name.clone(),
        table_name: context.table_name.clone(),
        id: record_id,
        data: update_data,
    };

    let result = put_table_data(&pool, request).await;
    assert!(result.is_err());
    assert_eq!(result.unwrap_err().code(), tonic::Code::InvalidArgument);
}

#[rstest]
#[tokio::test]
async fn test_update_table_data_nonexistent_id(
    #[future] pool: PgPool,
    valid_request_template: HashMap<String, String>,
) {
    let pool = pool.await;

    let request = PutTableDataRequest {
        profile_name: "default".into(),
        table_name: "2025_adresar".into(),
        id: 9999, // Non-existent ID
        data: valid_request_template,
    };

    let result = put_table_data(&pool, request).await;
    assert!(result.is_err());
    assert_eq!(result.unwrap_err().code(), tonic::Code::NotFound);
}

#[rstest]
#[tokio::test]
async fn test_update_table_data_deleted_record(
    #[future] existing_record: (PgPool, i64),
    valid_request_template: HashMap<String, String>,
) {
    let (pool, id) = existing_record.await;

    // Mark the record as deleted
    sqlx::query!(r#"UPDATE "2025_adresar" SET deleted = true WHERE id = $1"#, id)
        .execute(&pool)
    put_table_data(&context.pool, request, &context.indexer_tx)
        .await
        .unwrap();

    let request = PutTableDataRequest {
        profile_name: "default".into(),
        table_name: "2025_adresar".into(),
        id,
        data: valid_request_template,
    };

    let result = put_table_data(&pool, request).await;
    assert!(result.is_err());
    assert_eq!(result.unwrap_err().code(), tonic::Code::NotFound);
    // Assert
    let telefon: Option<String> =
        sqlx::query_scalar(&format!(
            r#"SELECT telefon FROM "{}"."{}" WHERE id = $1"#,
            context.profile_name, context.table_name
        ))
        .bind(record_id)
        .fetch_one(&context.pool)
        .await
        .unwrap();
    assert!(telefon.is_none());
}

#[rstest]
#[tokio::test]
async fn test_update_table_data_clear_optional_fields(
    #[future] existing_record: (PgPool, i64),
    valid_request_template: HashMap<String, String>,
async fn test_update_telefon_length_limit_error(
    #[future] test_context: TestContext,
) {
    let (pool, id) = existing_record.await;

    let mut data = valid_request_template;
    data.insert("telefon".into(), String::new());
    data.insert("ulica".into(), String::new());
    // Arrange
    let context = test_context.await;
    let record_id = create_initial_record(
        &context,
        HashMap::from([(
            "telefon".to_string(),
            string_to_proto_value("valid-number"),
        )]),
    )
    .await;

    // Act
    let mut update_data = HashMap::new();
    update_data.insert("telefon".to_string(), string_to_proto_value("1".repeat(16).as_str()));
    let request = PutTableDataRequest {
        profile_name: "default".into(),
        table_name: "2025_adresar".into(),
        id,
        data,
        profile_name: context.profile_name.clone(),
        table_name: context.table_name.clone(),
        id: record_id,
        data: update_data,
    };
    let result = put_table_data(&context.pool, request, &context.indexer_tx).await;

    let response = put_table_data(&pool, request).await.unwrap();
    // Assert
    assert!(result.is_err());
    assert_eq!(result.unwrap_err().code(), tonic::Code::Internal);

    // Check database contains NULL for cleared fields
    let db_record = sqlx::query!(r#"SELECT telefon, ulica FROM "2025_adresar" WHERE id = $1"#, id)
        .fetch_one(&pool)
    // Verify original data is untouched
    let telefon: String = sqlx::query_scalar(&format!(
        r#"SELECT telefon FROM "{}"."{}" WHERE id = $1"#,
        context.profile_name, context.table_name
    ))
    .bind(record_id)
    .fetch_one(&context.pool)
    .await
    .unwrap();
    assert_eq!(telefon, "valid-number");
}

#[rstest]
#[tokio::test]
async fn test_update_with_invalid_column_name(#[future] test_context: TestContext) {
    // Arrange
    let context = test_context.await;
    let record_id = create_initial_record(
        &context,
        HashMap::from([(
            "firma".to_string(),
            string_to_proto_value("Original"),
        )]),
    )
    .await;

    // Act
    let mut update_data = HashMap::new();
    update_data.insert("nonexistent_col".to_string(), string_to_proto_value("invalid"));
    let request = PutTableDataRequest {
        profile_name: context.profile_name.clone(),
        table_name: context.table_name.clone(),
        id: record_id,
        data: update_data,
    };
    let result = put_table_data(&context.pool, request, &context.indexer_tx).await;

    // Assert
    assert!(result.is_err());
    let err = result.unwrap_err();
    assert_eq!(err.code(), tonic::Code::InvalidArgument);
    assert!(err.message().contains("Invalid column: nonexistent_col"));
}

#[rstest]
#[tokio::test]
async fn test_update_with_empty_data_request(#[future] test_context: TestContext) {
    // Arrange
    let context = test_context.await;
    let record_id = create_initial_record(
        &context,
        HashMap::from([(
            "firma".to_string(),
            string_to_proto_value("Original"),
        )]),
    )
    .await;

    // Act
    let request = PutTableDataRequest {
        profile_name: context.profile_name.clone(),
        table_name: context.table_name.clone(),
        id: record_id,
        data: HashMap::new(), // Empty data map
    };
    let result = put_table_data(&context.pool, request, &context.indexer_tx).await;

    // Assert: An update with no fields should be a no-op and succeed.
    assert!(result.is_ok());
    let response = result.unwrap();
    assert!(response.success);
    assert_eq!(response.updated_id, record_id);

    // Verify original data is untouched
    let firma: String = sqlx::query_scalar(&format!(
        r#"SELECT firma FROM "{}"."{}" WHERE id = $1"#,
        context.profile_name, context.table_name
    ))
    .bind(record_id)
    .fetch_one(&context.pool)
    .await
    .unwrap();
    assert_eq!(firma, "Original");
}

#[rstest]
#[tokio::test]
async fn test_update_sql_injection_protection(#[future] test_context: TestContext) {
    // Arrange
    let context = test_context.await;
    let injection_attempt = "admin'; UPDATE adresar SET firma='hacked' WHERE '1'='1";
    let record_id = create_initial_record(
        &context,
        HashMap::from([(
            "firma".to_string(),
            string_to_proto_value("Safe Company"),
        )]),
    )
    .await;

    // Act
    let mut update_data = HashMap::new();
    update_data.insert("firma".to_string(), string_to_proto_value(injection_attempt));
    let request = PutTableDataRequest {
        profile_name: context.profile_name.clone(),
        table_name: context.table_name.clone(),
        id: record_id,
        data: update_data,
    };
    put_table_data(&context.pool, request, &context.indexer_tx)
        .await
        .unwrap();

    assert!(db_record.telefon.is_none());
    assert!(db_record.ulica.is_none());
    // Assert
    let firma: String = sqlx::query_scalar(&format!(
        r#"SELECT firma FROM "{}"."{}" WHERE id = $1"#,
        context.profile_name, context.table_name
    ))
    .bind(record_id)
    .fetch_one(&context.pool)
    .await
    .unwrap();
    assert_eq!(firma, injection_attempt); // Should be stored as a literal string
}

#[rstest]
#[tokio::test]
async fn test_update_nonexistent_record_error(#[future] test_context: TestContext) {
    // Arrange
    let context = test_context.await;
    let nonexistent_id = 999999;

    // Act
    let mut update_data = HashMap::new();
    update_data.insert("firma".to_string(), string_to_proto_value("No one to update"));
    let request = PutTableDataRequest {
        profile_name: context.profile_name.clone(),
        table_name: context.table_name.clone(),
        id: nonexistent_id,
        data: update_data,
    };
    let result = put_table_data(&context.pool, request, &context.indexer_tx).await;

    // Assert
    assert!(result.is_err());
    let err = result.unwrap_err();
    assert_eq!(err.code(), tonic::Code::NotFound);
    assert!(err.message().contains("Record not found"));
}

#[rstest]
#[tokio::test]
async fn test_concurrent_updates_different_records(
    #[future] test_context: TestContext,
) {
    // Arrange
    let context = test_context.await;
    let mut record_ids = Vec::new();
    for i in 0..10 {
        let record_id = create_initial_record(
            &context,
            HashMap::from([(
                "firma".to_string(),
                string_to_proto_value(&format!("Concurrent-{}", i)),
            )]),
        )
        .await;
        record_ids.push(record_id);
    }

    // Act
    let mut tasks = Vec::new();
    for (i, record_id) in record_ids.iter().enumerate() {
        let context = context.clone();
        let record_id = *record_id;
        let task = tokio::spawn(async move {
            let mut update_data = HashMap::new();
            update_data.insert(
                "mesto".to_string(),
                string_to_proto_value(&format!("City-{}", i)),
            );
            let request = PutTableDataRequest {
                profile_name: context.profile_name.clone(),
                table_name: context.table_name.clone(),
                id: record_id,
                data: update_data,
            };
            put_table_data(&context.pool, request, &context.indexer_tx).await
        });
        tasks.push(task);
    }
    let results = futures::future::join_all(tasks).await;

    // Assert
    for result in results {
        assert!(result.unwrap().is_ok());
    }

    let count: i64 = sqlx::query_scalar(&format!(
        r#"SELECT COUNT(*) FROM "{}"."{}" WHERE mesto LIKE 'City-%'"#,
        context.profile_name, context.table_name
    ))
    .fetch_one(&context.pool)
    .await
    .unwrap();
    assert_eq!(count, 10);
}

#[rstest]
#[tokio::test]
async fn test_update_boolean_system_column_validation(
    #[future] test_context: TestContext,
) {
    // Arrange
    let context = test_context.await;
    let record_id = create_initial_record(
        &context,
        HashMap::from([(
            "firma".to_string(),
            string_to_proto_value("To be deleted"),
        )]),
    )
    .await;

    // Act: Try to update 'deleted' with a string, which is invalid
    let mut invalid_data = HashMap::new();
    invalid_data.insert("deleted".to_string(), string_to_proto_value("true"));
    let request = PutTableDataRequest {
        profile_name: context.profile_name.clone(),
        table_name: context.table_name.clone(),
        id: record_id,
        data: invalid_data,
    };
    let result = put_table_data(&context.pool, request, &context.indexer_tx).await;

    // Assert: The operation must fail
    assert!(result.is_err());
    let err = result.unwrap_err();
    assert_eq!(err.code(), tonic::Code::InvalidArgument);
    assert!(err.message().contains("Expected boolean for column 'deleted'"));

    // Act: Try to update 'deleted' with a proper boolean
    let mut valid_data = HashMap::new();
    valid_data.insert("deleted".to_string(), bool_to_proto_value(true));
    let request = PutTableDataRequest {
        profile_name: context.profile_name.clone(),
        table_name: context.table_name.clone(),
        id: record_id,
        data: valid_data,
    };
    let result = put_table_data(&context.pool, request, &context.indexer_tx).await;

    // Assert: The operation must succeed
    assert!(result.is_ok());
    let deleted: bool = sqlx::query_scalar(&format!(
        r#"SELECT deleted FROM "{}"."{}" WHERE id = $1"#,
        context.profile_name, context.table_name
    ))
    .bind(record_id)
    .fetch_one(&context.pool)
    .await
    .unwrap();
    assert!(deleted);
}