ordering of the tests for tables data

3  server/tests/tables_data/post/mod.rs  (new file)
@@ -0,0 +1,3 @@
// tests/tables_data/post/mod.rs

pub mod post_table_data_test;
484  server/tests/tables_data/post/post_table_data_test.rs  (new file)
@@ -0,0 +1,484 @@
// tests/tables_data/post/post_table_data_test.rs
use rstest::{fixture, rstest};
use sqlx::PgPool;
use std::collections::HashMap;
use prost_types::Value;
use prost_types::value::Kind;
use common::proto::multieko2::tables_data::{PostTableDataRequest, PostTableDataResponse};
use common::proto::multieko2::table_definition::TableLink;
use common::proto::multieko2::table_definition::{
    PostTableDefinitionRequest, ColumnDefinition as TableColumnDefinition
};
use server::tables_data::handlers::post_table_data;
use server::table_definition::handlers::post_table_definition;
use crate::common::setup_test_db;
use tonic;
use chrono::Utc;
use sqlx::types::chrono::DateTime;
use tokio::sync::mpsc;
use server::indexer::IndexCommand;
use sqlx::Row;
use rand::distr::Alphanumeric;
use rand::Rng;
use rust_decimal::prelude::FromPrimitive;

// Helper function to generate unique identifiers for test isolation
fn generate_unique_id() -> String {
    rand::rng()
        .sample_iter(&Alphanumeric)
        .take(8)
        .map(char::from)
        .collect::<String>()
        .to_lowercase()
}
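
// The suffix is lowercased because it ends up inside the schema and table
// names created for each test (e.g. "adresar_test_<id>"), which avoids
// case-sensitivity surprises in the quoted PostgreSQL identifiers used by
// the assertions.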

// Helper function to convert string to protobuf Value
fn string_to_proto_value(s: String) -> Value {
    Value {
        kind: Some(Kind::StringValue(s)),
    }
}

// Helper function to convert HashMap<String, String> to HashMap<String, Value>
fn convert_to_proto_values(data: HashMap<String, String>) -> HashMap<String, Value> {
    data.into_iter()
        .map(|(k, v)| (k, string_to_proto_value(v)))
        .collect()
}
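
// Example of the mapping these helpers perform:
//   {"firma": "Test Company"}  ->  {"firma": Value { kind: Some(StringValue("Test Company")) }}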

// Create the table definition for the adresar test with a unique name
async fn create_adresar_table(pool: &PgPool, table_name: &str, profile_name: &str) -> Result<(), tonic::Status> {
    let table_def_request = PostTableDefinitionRequest {
        profile_name: profile_name.into(),
        table_name: table_name.into(),
        columns: vec![
            TableColumnDefinition { name: "firma".into(), field_type: "text".into() },
            TableColumnDefinition { name: "kz".into(), field_type: "text".into() },
            TableColumnDefinition { name: "drc".into(), field_type: "text".into() },
            TableColumnDefinition { name: "ulica".into(), field_type: "text".into() },
            TableColumnDefinition { name: "psc".into(), field_type: "text".into() },
            TableColumnDefinition { name: "mesto".into(), field_type: "text".into() },
            TableColumnDefinition { name: "stat".into(), field_type: "text".into() },
            TableColumnDefinition { name: "banka".into(), field_type: "text".into() },
            TableColumnDefinition { name: "ucet".into(), field_type: "text".into() },
            TableColumnDefinition { name: "skladm".into(), field_type: "text".into() },
            TableColumnDefinition { name: "ico".into(), field_type: "text".into() },
            TableColumnDefinition { name: "kontakt".into(), field_type: "text".into() },
            TableColumnDefinition { name: "telefon".into(), field_type: "text".into() },
            TableColumnDefinition { name: "skladu".into(), field_type: "text".into() },
            TableColumnDefinition { name: "fax".into(), field_type: "text".into() },
        ],
        indexes: vec![],
        links: vec![],
    };

    post_table_definition(pool, table_def_request).await?;
    Ok(())
}
|
||||
|
||||
// Test context structure to hold unique identifiers
|
||||
#[derive(Clone)]
|
||||
struct TestContext {
|
||||
pool: PgPool,
|
||||
profile_name: String,
|
||||
table_name: String,
|
||||
indexer_tx: mpsc::Sender<IndexCommand>,
|
||||
}
|
||||
|
||||
// Fixtures
|
||||
#[fixture]
|
||||
async fn test_context() -> TestContext {
|
||||
let pool = setup_test_db().await;
|
||||
let unique_id = generate_unique_id();
|
||||
let profile_name = format!("test_profile_{}", unique_id);
|
||||
let table_name = format!("adresar_test_{}", unique_id);
|
||||
|
||||
// Create the table for this specific test
|
||||
create_adresar_table(&pool, &table_name, &profile_name).await
|
||||
.expect("Failed to create test table");
|
||||
|
||||
let (tx, _rx) = mpsc::channel(100);
|
||||
|
||||
TestContext {
|
||||
pool,
|
||||
profile_name,
|
||||
table_name,
|
||||
indexer_tx: tx,
|
||||
}
|
||||
}
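
// `_rx` is dropped when this fixture returns, so the indexer channel is
// already closed by the time a test runs; the handler is expected to
// tolerate the failed send (see test_indexer_failure_resilience).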
|
||||
|
||||
#[fixture]
|
||||
async fn closed_test_context() -> TestContext {
    let context = test_context().await;
    context.pool.close().await;
    context
}
|
||||
|
||||
#[fixture]
|
||||
fn valid_request() -> HashMap<String, String> {
|
||||
let mut map = HashMap::new();
|
||||
map.insert("firma".into(), "Test Company".into());
|
||||
map.insert("kz".into(), "KZ123".into());
|
||||
map.insert("drc".into(), "DRC456".into());
|
||||
map.insert("ulica".into(), "Test Street".into());
|
||||
map.insert("psc".into(), "12345".into());
|
||||
map.insert("mesto".into(), "Test City".into());
|
||||
map.insert("stat".into(), "Test Country".into());
|
||||
map.insert("banka".into(), "Test Bank".into());
|
||||
map.insert("ucet".into(), "123456789".into());
|
||||
map.insert("skladm".into(), "Warehouse M".into());
|
||||
map.insert("ico".into(), "12345678".into());
|
||||
map.insert("kontakt".into(), "John Doe".into());
|
||||
map.insert("telefon".into(), "+421123456789".into());
|
||||
map.insert("skladu".into(), "Warehouse U".into());
|
||||
map.insert("fax".into(), "+421123456700".into());
|
||||
map
|
||||
}
|
||||
|
||||
#[fixture]
|
||||
fn minimal_request() -> HashMap<String, String> {
|
||||
let mut map = HashMap::new();
|
||||
map.insert("firma".into(), "Required Only".into());
|
||||
map
|
||||
}
|
||||
|
||||
fn create_table_request(context: &TestContext, data: HashMap<String, String>) -> PostTableDataRequest {
|
||||
PostTableDataRequest {
|
||||
profile_name: context.profile_name.clone(),
|
||||
table_name: context.table_name.clone(),
|
||||
data: convert_to_proto_values(data),
|
||||
}
|
||||
}
|
||||
|
||||
async fn assert_table_response(context: &TestContext, response: &PostTableDataResponse, expected: &HashMap<String, String>) {
|
||||
// Use dynamic query since table is created at runtime with unique names
|
||||
let query = format!(
|
||||
r#"SELECT * FROM "{}"."{}" WHERE id = $1"#,
|
||||
context.profile_name, context.table_name
|
||||
);
|
||||
|
||||
let row = sqlx::query(&query)
|
||||
.bind(response.inserted_id)
|
||||
.fetch_one(&context.pool)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Get values from Row dynamically
|
||||
let firma: String = row.get("firma");
|
||||
let deleted: bool = row.get("deleted");
|
||||
|
||||
assert_eq!(firma, expected["firma"]);
|
||||
assert!(!deleted);
|
||||
|
||||
// Check optional fields
|
||||
let check_field = |field: &str, expected_value: &str| {
|
||||
let db_value: Option<String> = row.get(field);
|
||||
assert_eq!(db_value.as_deref().unwrap_or(""), expected_value);
|
||||
};
|
||||
|
||||
check_field("kz", expected.get("kz").unwrap_or(&String::new()));
|
||||
check_field("drc", expected.get("drc").unwrap_or(&String::new()));
|
||||
check_field("ulica", expected.get("ulica").unwrap_or(&String::new()));
|
||||
check_field("psc", expected.get("psc").unwrap_or(&String::new()));
|
||||
check_field("mesto", expected.get("mesto").unwrap_or(&String::new()));
|
||||
check_field("stat", expected.get("stat").unwrap_or(&String::new()));
|
||||
check_field("banka", expected.get("banka").unwrap_or(&String::new()));
|
||||
check_field("ucet", expected.get("ucet").unwrap_or(&String::new()));
|
||||
check_field("skladm", expected.get("skladm").unwrap_or(&String::new()));
|
||||
check_field("ico", expected.get("ico").unwrap_or(&String::new()));
|
||||
check_field("kontakt", expected.get("kontakt").unwrap_or(&String::new()));
|
||||
check_field("telefon", expected.get("telefon").unwrap_or(&String::new()));
|
||||
check_field("skladu", expected.get("skladu").unwrap_or(&String::new()));
|
||||
check_field("fax", expected.get("fax").unwrap_or(&String::new()));
|
||||
|
||||
// Handle timestamp conversion
|
||||
let created_at: Option<chrono::DateTime<Utc>> = row.get("created_at");
|
||||
assert!(created_at.unwrap() <= Utc::now());
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[tokio::test]
|
||||
async fn test_create_table_data_success(
|
||||
#[future] test_context: TestContext,
|
||||
valid_request: HashMap<String, String>,
|
||||
) {
|
||||
let context = test_context.await;
|
||||
let request = create_table_request(&context, valid_request.clone());
|
||||
let response = post_table_data(&context.pool, request, &context.indexer_tx).await.unwrap();
|
||||
|
||||
assert!(response.inserted_id > 0);
|
||||
assert!(response.success);
|
||||
assert_eq!(response.message, "Data inserted successfully");
|
||||
assert_table_response(&context, &response, &valid_request).await;
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[tokio::test]
|
||||
async fn test_create_table_data_whitespace_trimming(
|
||||
#[future] test_context: TestContext,
|
||||
valid_request: HashMap<String, String>,
|
||||
) {
|
||||
let context = test_context.await;
|
||||
let mut request = valid_request;
|
||||
request.insert("firma".into(), " Test Company ".into());
|
||||
request.insert("telefon".into(), " +421123456789 ".into());
|
||||
|
||||
let response = post_table_data(&context.pool, create_table_request(&context, request), &context.indexer_tx).await.unwrap();
|
||||
|
||||
let query = format!(
|
||||
r#"SELECT firma, telefon FROM "{}"."{}" WHERE id = $1"#,
|
||||
context.profile_name, context.table_name
|
||||
);
|
||||
|
||||
let row = sqlx::query(&query)
|
||||
.bind(response.inserted_id)
|
||||
.fetch_one(&context.pool)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let firma: String = row.get("firma");
|
||||
let telefon: Option<String> = row.get("telefon");
|
||||
|
||||
assert_eq!(firma, "Test Company");
|
||||
assert_eq!(telefon.unwrap(), "+421123456789");
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[tokio::test]
|
||||
async fn test_create_table_data_empty_optional_fields(
|
||||
#[future] test_context: TestContext,
|
||||
valid_request: HashMap<String, String>,
|
||||
) {
|
||||
let context = test_context.await;
|
||||
let mut request = valid_request;
|
||||
request.insert("telefon".into(), " ".into());
|
||||
|
||||
let response = post_table_data(&context.pool, create_table_request(&context, request), &context.indexer_tx).await.unwrap();
|
||||
|
||||
let query = format!(
|
||||
r#"SELECT telefon FROM "{}"."{}" WHERE id = $1"#,
|
||||
context.profile_name, context.table_name
|
||||
);
|
||||
|
||||
let telefon: Option<String> = sqlx::query_scalar(&query)
|
||||
.bind(response.inserted_id)
|
||||
.fetch_one(&context.pool)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert!(telefon.is_none());
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[tokio::test]
|
||||
async fn test_create_table_data_minimal_request(
|
||||
#[future] test_context: TestContext,
|
||||
minimal_request: HashMap<String, String>,
|
||||
) {
|
||||
let context = test_context.await;
|
||||
let response = post_table_data(&context.pool, create_table_request(&context, minimal_request.clone()), &context.indexer_tx).await.unwrap();
|
||||
assert!(response.inserted_id > 0);
|
||||
assert_table_response(&context, &response, &minimal_request).await;
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[tokio::test]
|
||||
async fn test_create_table_data_telefon_length_limit(
|
||||
#[future] test_context: TestContext,
|
||||
valid_request: HashMap<String, String>,
|
||||
) {
|
||||
let context = test_context.await;
|
||||
let mut request = valid_request;
|
||||
request.insert("telefon".into(), "1".repeat(16));
|
||||
|
||||
let result = post_table_data(&context.pool, create_table_request(&context, request), &context.indexer_tx).await;
|
||||
assert!(result.is_err());
|
||||
assert_eq!(result.unwrap_err().code(), tonic::Code::Internal);
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[tokio::test]
|
||||
async fn test_create_table_data_special_characters(
|
||||
#[future] test_context: TestContext,
|
||||
valid_request: HashMap<String, String>,
|
||||
) {
|
||||
let context = test_context.await;
|
||||
let mut request = valid_request;
|
||||
request.insert("ulica".into(), "Náměstí 28. října 123/456".into());
|
||||
|
||||
let response = post_table_data(&context.pool, create_table_request(&context, request), &context.indexer_tx).await.unwrap();
|
||||
|
||||
let query = format!(
|
||||
r#"SELECT ulica FROM "{}"."{}" WHERE id = $1"#,
|
||||
context.profile_name, context.table_name
|
||||
);
|
||||
|
||||
let row = sqlx::query(&query)
|
||||
.bind(response.inserted_id)
|
||||
.fetch_one(&context.pool)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let ulica: Option<String> = row.get("ulica");
|
||||
assert_eq!(ulica.unwrap(), "Náměstí 28. října 123/456");
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[tokio::test]
|
||||
async fn test_create_table_data_database_error(
|
||||
#[future] closed_test_context: TestContext,
|
||||
minimal_request: HashMap<String, String>,
|
||||
) {
|
||||
let context = closed_test_context.await;
|
||||
let result = post_table_data(&context.pool, create_table_request(&context, minimal_request), &context.indexer_tx).await;
|
||||
assert!(result.is_err());
|
||||
assert_eq!(result.unwrap_err().code(), tonic::Code::Internal);
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[tokio::test]
|
||||
async fn test_create_table_data_optional_fields_null_vs_empty(
|
||||
#[future] test_context: TestContext,
|
||||
valid_request: HashMap<String, String>,
|
||||
) {
|
||||
let context = test_context.await;
|
||||
let mut request = valid_request;
|
||||
request.insert("telefon".into(), "".into());
|
||||
|
||||
let response = post_table_data(&context.pool, create_table_request(&context, request), &context.indexer_tx).await.unwrap();
|
||||
|
||||
let query = format!(
|
||||
r#"SELECT telefon FROM "{}"."{}" WHERE id = $1"#,
|
||||
context.profile_name, context.table_name
|
||||
);
|
||||
|
||||
let telefon: Option<String> = sqlx::query_scalar(&query)
|
||||
.bind(response.inserted_id)
|
||||
.fetch_one(&context.pool)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert!(telefon.is_none());
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[tokio::test]
|
||||
async fn test_create_table_data_field_length_limits(
|
||||
#[future] test_context: TestContext,
|
||||
valid_request: HashMap<String, String>,
|
||||
) {
|
||||
let context = test_context.await;
|
||||
let mut request = valid_request;
|
||||
request.insert("firma".into(), "a".repeat(255));
|
||||
request.insert("telefon".into(), "1".repeat(15)); // Within limits
|
||||
|
||||
let response = post_table_data(&context.pool, create_table_request(&context, request), &context.indexer_tx).await.unwrap();
|
||||
|
||||
let query = format!(
|
||||
r#"SELECT firma, telefon FROM "{}"."{}" WHERE id = $1"#,
|
||||
context.profile_name, context.table_name
|
||||
);
|
||||
|
||||
let row = sqlx::query(&query)
|
||||
.bind(response.inserted_id)
|
||||
.fetch_one(&context.pool)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let firma: String = row.get("firma");
|
||||
let telefon: Option<String> = row.get("telefon");
|
||||
|
||||
assert_eq!(firma.len(), 255);
|
||||
assert_eq!(telefon.unwrap().len(), 15);
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[tokio::test]
|
||||
async fn test_create_table_data_with_null_values(
|
||||
#[future] test_context: TestContext,
|
||||
) {
|
||||
let context = test_context.await;
|
||||
|
||||
// Create a request with some null values
|
||||
let mut data = HashMap::new();
|
||||
data.insert("firma".into(), string_to_proto_value("Test Company".into()));
|
||||
data.insert("telefon".into(), Value { kind: Some(Kind::NullValue(0)) }); // Explicit null
|
||||
data.insert("ulica".into(), Value { kind: None }); // Another way to represent null
|
||||
|
||||
let request = PostTableDataRequest {
|
||||
profile_name: context.profile_name.clone(),
|
||||
table_name: context.table_name.clone(),
|
||||
data,
|
||||
};
|
||||
|
||||
let response = post_table_data(&context.pool, request, &context.indexer_tx).await.unwrap();
|
||||
|
||||
let query = format!(
|
||||
r#"SELECT firma, telefon, ulica FROM "{}"."{}" WHERE id = $1"#,
|
||||
context.profile_name, context.table_name
|
||||
);
|
||||
|
||||
let row = sqlx::query(&query)
|
||||
.bind(response.inserted_id)
|
||||
.fetch_one(&context.pool)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let firma: String = row.get("firma");
|
||||
let telefon: Option<String> = row.get("telefon");
|
||||
let ulica: Option<String> = row.get("ulica");
|
||||
|
||||
assert_eq!(firma, "Test Company");
|
||||
assert!(telefon.is_none());
|
||||
assert!(ulica.is_none());
|
||||
}
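
// The companion files below are pulled in with `include!`, so they compile
// as part of this module and share its imports, fixtures, and helpers
// rather than building as separate test binaries.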
include!("post_table_data_test2.rs");
include!("post_table_data_test3.rs");
include!("post_table_data_test4.rs");
include!("post_table_data_test5.rs");

484  server/tests/tables_data/post/post_table_data_test2.rs  (new file)
@@ -0,0 +1,484 @@
// tests/tables_data/post/post_table_data_test2.rs

// ========= Additional helper functions for test2 =========

async fn create_test_indexer_channel() -> mpsc::Sender<IndexCommand> {
    let (tx, mut rx) = mpsc::channel(100);

    // Drain indexer messages in the background so the bounded channel never
    // fills up and blocks the handler under test.
    tokio::spawn(async move {
        while rx.recv().await.is_some() {
            // Just consume the messages
        }
    });

    tx
}
|
||||
|
||||
// ========= Extended Data Type Validation Tests =========
|
||||
|
||||
#[rstest]
|
||||
#[tokio::test]
|
||||
async fn test_boolean_system_column_validation(#[future] test_context: TestContext) {
|
||||
let context = test_context.await;
|
||||
let indexer_tx = create_test_indexer_channel().await;
|
||||
|
||||
// Test setting the deleted flag with string (should fail)
|
||||
let mut data = HashMap::new();
|
||||
data.insert("firma".into(), "System Test Company".to_string());
|
||||
data.insert("deleted".into(), "true".to_string()); // String instead of boolean
|
||||
|
||||
let result = post_table_data(&context.pool, create_table_request(&context, data), &indexer_tx).await;
|
||||
assert!(result.is_err());
|
||||
|
||||
if let Err(err) = result {
|
||||
assert_eq!(err.code(), tonic::Code::InvalidArgument);
|
||||
assert!(err.message().contains("Expected boolean for column 'deleted'"));
|
||||
}
|
||||
}
|
||||
|
||||
// ========= String Processing and Edge Cases =========
|
||||
|
||||
#[rstest]
|
||||
#[tokio::test]
|
||||
async fn test_unicode_special_characters_comprehensive(#[future] test_context: TestContext) {
|
||||
let context = test_context.await;
|
||||
let indexer_tx = create_test_indexer_channel().await;
|
||||
|
||||
let special_strings = vec![
|
||||
"José María González", // Accented characters
|
||||
"Москва", // Cyrillic
|
||||
"北京市", // Chinese
|
||||
"🚀 Tech Company 🌟", // Emoji
|
||||
"Line\nBreak\tTab", // Control characters
|
||||
"Quote\"Test'Apostrophe", // Quotes
|
||||
"SQL'; DROP TABLE test; --", // SQL injection attempt
|
||||
"Price: $1,000.50 (50% off!)", // Special symbols
|
||||
];
|
||||
|
||||
for (i, test_string) in special_strings.into_iter().enumerate() {
|
||||
let mut data = HashMap::new();
|
||||
data.insert("firma".into(), test_string.to_string());
|
||||
data.insert("kz".into(), format!("TEST{}", i));
|
||||
|
||||
let response = post_table_data(&context.pool, create_table_request(&context, data), &indexer_tx).await.unwrap();
|
||||
assert!(response.success, "Failed for string: '{}'", test_string);
|
||||
|
||||
// Verify the data was stored correctly
|
||||
let query = format!(
|
||||
r#"SELECT firma FROM "{}"."{}" WHERE id = $1"#,
|
||||
context.profile_name, context.table_name
|
||||
);
|
||||
|
||||
let stored_firma: Option<String> = sqlx::query_scalar::<_, Option<String>>(&query)
|
||||
.bind(response.inserted_id)
|
||||
.fetch_one(&context.pool)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(stored_firma.unwrap(), test_string.trim());
|
||||
}
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[tokio::test]
|
||||
async fn test_field_length_boundaries(#[future] test_context: TestContext) {
|
||||
let context = test_context.await;
|
||||
let indexer_tx = create_test_indexer_channel().await;
|
||||
|
||||
// Test telefon field length validation (should reject >15 chars)
|
||||
let length_test_cases = vec![
|
||||
("1234567890123456", true), // 16 chars - should fail
|
||||
("123456789012345", false), // 15 chars - should pass
|
||||
("", false), // Empty - should pass (becomes NULL)
|
||||
("1", false), // Single char - should pass
|
||||
];
|
||||
|
||||
for (test_string, should_fail) in length_test_cases {
|
||||
let mut data = HashMap::new();
|
||||
data.insert("firma".into(), "Length Test Company".to_string());
|
||||
data.insert("telefon".into(), test_string.to_string());
|
||||
|
||||
let result = post_table_data(&context.pool, create_table_request(&context, data), &indexer_tx).await;
|
||||
|
||||
if should_fail {
|
||||
assert!(result.is_err(), "Should fail for telefon length: {}", test_string.len());
|
||||
if let Err(err) = result {
|
||||
assert_eq!(err.code(), tonic::Code::Internal);
|
||||
assert!(err.message().contains("Value too long for telefon"));
|
||||
}
|
||||
} else {
|
||||
assert!(result.is_ok(), "Should succeed for telefon length: {}", test_string.len());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ========= NULL vs Empty String Handling =========
|
||||
|
||||
#[rstest]
|
||||
#[tokio::test]
|
||||
async fn test_empty_strings_become_null(#[future] test_context: TestContext) {
|
||||
let context = test_context.await;
|
||||
let indexer_tx = create_test_indexer_channel().await;
|
||||
|
||||
let test_cases = vec![
|
||||
("", "empty_string"),
|
||||
(" ", "whitespace_only"),
|
||||
("\t\n", "tabs_newlines"),
|
||||
(" Normal Value ", "padded_value"),
|
||||
];
|
||||
|
||||
for (input, test_name) in test_cases {
|
||||
let mut data = HashMap::new();
|
||||
data.insert("firma".into(), format!("Test {}", test_name));
|
||||
data.insert("ulica".into(), input.to_string());
|
||||
|
||||
let response = post_table_data(&context.pool, create_table_request(&context, data), &indexer_tx).await.unwrap();
|
||||
assert!(response.success, "Failed for test case: {}", test_name);
|
||||
|
||||
// Check what was actually stored
|
||||
let query = format!(
|
||||
r#"SELECT ulica FROM "{}"."{}" WHERE id = $1"#,
|
||||
context.profile_name, context.table_name
|
||||
);
|
||||
|
||||
let stored_ulica: Option<String> = sqlx::query_scalar::<_, Option<String>>(&query)
|
||||
.bind(response.inserted_id)
|
||||
.fetch_one(&context.pool)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let trimmed = input.trim();
|
||||
if trimmed.is_empty() {
|
||||
assert!(stored_ulica.is_none(), "Empty/whitespace string should be NULL for: {}", test_name);
|
||||
} else {
|
||||
assert_eq!(stored_ulica.unwrap(), trimmed, "String should be trimmed for: {}", test_name);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ========= Concurrent Operations Testing =========
|
||||
|
||||
#[rstest]
|
||||
#[tokio::test]
|
||||
async fn test_concurrent_inserts_same_table(#[future] test_context: TestContext) {
|
||||
let context = test_context.await;
|
||||
|
||||
use futures::future::join_all;
|
||||
|
||||
// Create multiple concurrent insert operations
|
||||
let futures = (0..10).map(|i| {
|
||||
let context = context.clone();
|
||||
async move {
|
||||
let indexer_tx = create_test_indexer_channel().await;
|
||||
let mut data = HashMap::new();
|
||||
data.insert("firma".into(), format!("Concurrent Company {}", i));
|
||||
data.insert("kz".into(), format!("CONC{}", i));
|
||||
data.insert("mesto".into(), format!("City {}", i));
|
||||
|
||||
post_table_data(&context.pool, create_table_request(&context, data), &indexer_tx).await
|
||||
}
|
||||
});
|
||||
|
||||
let results = join_all(futures).await;
|
||||
|
||||
// All inserts should succeed
|
||||
for (i, result) in results.into_iter().enumerate() {
|
||||
assert!(result.is_ok(), "Concurrent insert {} should succeed", i);
|
||||
}
|
||||
|
||||
// Verify all records were inserted
|
||||
let query = format!(
|
||||
r#"SELECT COUNT(*) FROM "{}"."{}" WHERE firma LIKE 'Concurrent Company%'"#,
|
||||
context.profile_name, context.table_name
|
||||
);
|
||||
|
||||
let count: i64 = sqlx::query_scalar::<_, i64>(&query)
|
||||
.fetch_one(&context.pool)
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(count, 10);
|
||||
}
|
||||
|
||||
// ========= Error Scenarios =========
|
||||
|
||||
#[rstest]
|
||||
#[tokio::test]
|
||||
async fn test_invalid_column_names(#[future] test_context: TestContext) {
|
||||
let context = test_context.await;
|
||||
let indexer_tx = create_test_indexer_channel().await;
|
||||
|
||||
let mut data = HashMap::new();
|
||||
data.insert("firma".into(), "Valid Company".to_string());
|
||||
data.insert("nonexistent_column".into(), "Invalid".to_string());
|
||||
|
||||
let result = post_table_data(&context.pool, create_table_request(&context, data), &indexer_tx).await;
|
||||
assert!(result.is_err());
|
||||
|
||||
if let Err(err) = result {
|
||||
assert_eq!(err.code(), tonic::Code::InvalidArgument);
|
||||
assert!(err.message().contains("Invalid column: nonexistent_column"));
|
||||
}
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[tokio::test]
|
||||
async fn test_empty_data_request(#[future] test_context: TestContext) {
|
||||
let context = test_context.await;
|
||||
let indexer_tx = create_test_indexer_channel().await;
|
||||
|
||||
// Try to insert completely empty data
|
||||
let data = HashMap::new();
|
||||
|
||||
let result = post_table_data(&context.pool, create_table_request(&context, data), &indexer_tx).await;
|
||||
assert!(result.is_err());
|
||||
|
||||
if let Err(err) = result {
|
||||
assert_eq!(err.code(), tonic::Code::InvalidArgument);
|
||||
assert!(err.message().contains("No valid columns to insert"));
|
||||
}
|
||||
}
|
||||
|
||||
// ========= Performance and Stress Testing =========
|
||||
|
||||
#[rstest]
|
||||
#[tokio::test]
|
||||
async fn test_rapid_sequential_inserts(#[future] test_context: TestContext) {
|
||||
let context = test_context.await;
|
||||
let indexer_tx = create_test_indexer_channel().await;
|
||||
|
||||
let start_time = std::time::Instant::now();
|
||||
|
||||
// Perform rapid sequential inserts
|
||||
for i in 0..50 {
|
||||
let mut data = HashMap::new();
|
||||
data.insert("firma".into(), format!("Rapid Company {}", i));
|
||||
data.insert("kz".into(), format!("RAP{}", i));
|
||||
data.insert("telefon".into(), format!("+421{:09}", i));
|
||||
|
||||
let response = post_table_data(&context.pool, create_table_request(&context, data), &indexer_tx).await.unwrap();
|
||||
assert!(response.success, "Rapid insert {} should succeed", i);
|
||||
}
|
||||
|
||||
let duration = start_time.elapsed();
|
||||
println!("50 rapid inserts took: {:?}", duration);
|
||||
|
||||
// Verify all records were inserted
|
||||
let query = format!(
|
||||
r#"SELECT COUNT(*) FROM "{}"."{}" WHERE firma LIKE 'Rapid Company%'"#,
|
||||
context.profile_name, context.table_name
|
||||
);
|
||||
|
||||
let count: i64 = sqlx::query_scalar::<_, i64>(&query)
|
||||
.fetch_one(&context.pool)
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(count, 50);
|
||||
}
|
||||
|
||||
// ========= SQL Injection Protection =========
|
||||
|
||||
#[rstest]
|
||||
#[tokio::test]
|
||||
async fn test_sql_injection_protection(#[future] test_context: TestContext) {
|
||||
let context = test_context.await;
|
||||
let indexer_tx = create_test_indexer_channel().await;
|
||||
|
||||
let injection_attempts = vec![
|
||||
"'; DROP TABLE users; --",
|
||||
"1; DELETE FROM adresar; --",
|
||||
"admin'; UPDATE adresar SET firma='hacked' WHERE '1'='1",
|
||||
"' OR '1'='1",
|
||||
"'; INSERT INTO adresar (firma) VALUES ('injected'); --",
|
||||
"Robert'); DROP TABLE students; --", // Classic Bobby Tables
|
||||
];
|
||||
|
||||
let injection_count = injection_attempts.len();
|
||||
|
||||
for (i, injection) in injection_attempts.into_iter().enumerate() {
|
||||
let mut data = HashMap::new();
|
||||
data.insert("firma".into(), injection.to_string());
|
||||
data.insert("kz".into(), format!("INJ{}", i));
|
||||
|
||||
// These should all succeed because values are properly parameterized
|
||||
let response = post_table_data(&context.pool, create_table_request(&context, data), &indexer_tx).await.unwrap();
|
||||
assert!(response.success, "SQL injection attempt should be safely handled: {}", injection);
|
||||
|
||||
// Verify the injection attempt was stored as literal text
|
||||
let query = format!(
|
||||
r#"SELECT firma FROM "{}"."{}" WHERE id = $1"#,
|
||||
context.profile_name, context.table_name
|
||||
);
|
||||
|
||||
let stored_firma: String = sqlx::query_scalar::<_, String>(&query)
|
||||
.bind(response.inserted_id)
|
||||
.fetch_one(&context.pool)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(stored_firma, injection);
|
||||
}
|
||||
|
||||
// Verify the table still exists and has the expected number of records
|
||||
let query = format!(
|
||||
r#"SELECT COUNT(*) FROM "{}"."{}" WHERE kz LIKE 'INJ%'"#,
|
||||
context.profile_name, context.table_name
|
||||
);
|
||||
|
||||
let count: i64 = sqlx::query_scalar::<_, i64>(&query)
|
||||
.fetch_one(&context.pool)
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(count, injection_count as i64);
|
||||
}
|
||||
|
||||
// ========= Large Data Testing =========
|
||||
|
||||
#[rstest]
|
||||
#[tokio::test]
|
||||
async fn test_large_text_fields(#[future] test_context: TestContext) {
|
||||
let context = test_context.await;
|
||||
let indexer_tx = create_test_indexer_channel().await;
|
||||
|
||||
// Test various large text sizes (except telefon which has length limits)
|
||||
let sizes = vec![1000, 5000, 10000];
|
||||
|
||||
for size in sizes {
|
||||
let large_text = "A".repeat(size);
|
||||
|
||||
let mut data = HashMap::new();
|
||||
data.insert("firma".into(), large_text.clone());
|
||||
data.insert("ulica".into(), format!("Street with {} chars", size));
|
||||
|
||||
let response = post_table_data(&context.pool, create_table_request(&context, data), &indexer_tx).await.unwrap();
|
||||
assert!(response.success, "Failed for size: {}", size);
|
||||
|
||||
// Verify the large text was stored correctly
|
||||
let query = format!(
|
||||
r#"SELECT firma FROM "{}"."{}" WHERE id = $1"#,
|
||||
context.profile_name, context.table_name
|
||||
);
|
||||
|
||||
let stored_firma: String = sqlx::query_scalar::<_, String>(&query)
|
||||
.bind(response.inserted_id)
|
||||
.fetch_one(&context.pool)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(stored_firma.len(), size);
|
||||
assert_eq!(stored_firma, large_text);
|
||||
}
|
||||
}
|
||||
|
||||
// ========= Indexer Integration Testing =========
|
||||
|
||||
#[rstest]
|
||||
#[tokio::test]
|
||||
async fn test_indexer_command_generation(#[future] test_context: TestContext) {
|
||||
let context = test_context.await;
|
||||
let (indexer_tx, mut indexer_rx) = mpsc::channel(100);
|
||||
|
||||
let mut data = HashMap::new();
|
||||
data.insert("firma".into(), "Indexer Test Company".to_string());
|
||||
data.insert("kz".into(), "IDX123".to_string());
|
||||
|
||||
let response = post_table_data(&context.pool, create_table_request(&context, data), &indexer_tx).await.unwrap();
|
||||
assert!(response.success);
|
||||
|
||||
// Check that indexer command was sent
|
||||
let indexer_command = tokio::time::timeout(
|
||||
tokio::time::Duration::from_millis(100),
|
||||
indexer_rx.recv()
|
||||
).await;
|
||||
|
||||
assert!(indexer_command.is_ok());
|
||||
let command = indexer_command.unwrap().unwrap();
|
||||
|
||||
match command {
|
||||
IndexCommand::AddOrUpdate(data) => {
|
||||
assert_eq!(data.table_name, context.table_name);
|
||||
assert_eq!(data.row_id, response.inserted_id);
|
||||
},
|
||||
IndexCommand::Delete(_) => panic!("Expected AddOrUpdate command, got Delete"),
|
||||
}
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[tokio::test]
|
||||
async fn test_indexer_failure_resilience(#[future] test_context: TestContext) {
|
||||
let context = test_context.await;
|
||||
|
||||
// Create a closed channel to simulate indexer failure
|
||||
let (indexer_tx, indexer_rx) = mpsc::channel(1);
|
||||
drop(indexer_rx); // Close receiver to simulate failure
|
||||
|
||||
let mut data = HashMap::new();
|
||||
data.insert("firma".into(), "Resilience Test Company".to_string());
|
||||
data.insert("kz".into(), "RES123".to_string());
|
||||
|
||||
// Insert should still succeed even if indexer fails
|
||||
let response = post_table_data(&context.pool, create_table_request(&context, data), &indexer_tx).await.unwrap();
|
||||
assert!(response.success);
|
||||
|
||||
// Verify data was inserted despite indexer failure
|
||||
let query = format!(
|
||||
r#"SELECT COUNT(*) FROM "{}"."{}" WHERE kz = 'RES123'"#,
|
||||
context.profile_name, context.table_name
|
||||
);
|
||||
|
||||
let count: i64 = sqlx::query_scalar::<_, i64>(&query)
|
||||
.fetch_one(&context.pool)
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(count, 1);
|
||||
}
|
||||
|
||||
// ========= Profile and Table Validation =========
|
||||
|
||||
#[rstest]
|
||||
#[tokio::test]
|
||||
async fn test_nonexistent_profile_error(#[future] test_context: TestContext) {
|
||||
let context = test_context.await;
|
||||
let indexer_tx = create_test_indexer_channel().await;
|
||||
|
||||
let mut data = HashMap::new();
|
||||
data.insert("firma".into(), "Test Company".to_string());
|
||||
|
||||
let invalid_request = PostTableDataRequest {
|
||||
profile_name: "nonexistent_profile".into(),
|
||||
table_name: context.table_name.clone(),
|
||||
data: convert_to_proto_values(data),
|
||||
};
|
||||
|
||||
let result = post_table_data(&context.pool, invalid_request, &indexer_tx).await;
|
||||
assert!(result.is_err());
|
||||
|
||||
if let Err(err) = result {
|
||||
assert_eq!(err.code(), tonic::Code::NotFound);
|
||||
assert!(err.message().contains("Profile not found"));
|
||||
}
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[tokio::test]
|
||||
async fn test_nonexistent_table_error(#[future] test_context: TestContext) {
|
||||
let context = test_context.await;
|
||||
let indexer_tx = create_test_indexer_channel().await;
|
||||
|
||||
let mut data = HashMap::new();
|
||||
data.insert("firma".into(), "Test Company".to_string());
|
||||
|
||||
let invalid_request = PostTableDataRequest {
|
||||
profile_name: context.profile_name.clone(),
|
||||
table_name: "nonexistent_table".into(),
|
||||
data: convert_to_proto_values(data),
|
||||
};
|
||||
|
||||
let result = post_table_data(&context.pool, invalid_request, &indexer_tx).await;
|
||||
assert!(result.is_err());
|
||||
|
||||
if let Err(err) = result {
|
||||
assert_eq!(err.code(), tonic::Code::NotFound);
|
||||
assert!(err.message().contains("Table not found"));
|
||||
}
|
||||
}
|
||||
847  server/tests/tables_data/post/post_table_data_test3.rs  (new file)
@@ -0,0 +1,847 @@
// tests/tables_data/post/post_table_data_test3.rs

// ========================================================================
// ADDITIONAL HELPER FUNCTIONS FOR TEST3
// ========================================================================

// Helpers to create the different protobuf Value variants
fn create_string_value(s: &str) -> Value {
    Value { kind: Some(Kind::StringValue(s.to_string())) }
}

fn create_number_value(n: f64) -> Value {
    Value { kind: Some(Kind::NumberValue(n)) }
}

fn create_bool_value(b: bool) -> Value {
    Value { kind: Some(Kind::BoolValue(b)) }
}

fn create_null_value() -> Value {
    Value { kind: Some(Kind::NullValue(0)) }
}
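
// Convention exercised by the tests below: text, timestamp and decimal
// columns are sent as StringValue, integer columns as whole-number
// NumberValue, boolean columns as BoolValue, and NullValue (or a missing
// kind) stores SQL NULL.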
|
||||
|
||||
// ========================================================================
|
||||
// FIXTURES AND CONTEXT SETUP FOR ADVANCED TESTS
|
||||
// ========================================================================
|
||||
|
||||
#[derive(Clone)]
|
||||
struct DataTypeTestContext {
|
||||
pool: PgPool,
|
||||
profile_name: String,
|
||||
table_name: String,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
struct ForeignKeyTestContext {
|
||||
pool: PgPool,
|
||||
profile_name: String,
|
||||
category_table: String,
|
||||
product_table: String,
|
||||
order_table: String,
|
||||
}
|
||||
|
||||
// Create a table with various data types for comprehensive testing
|
||||
async fn create_data_type_test_table(pool: &PgPool, table_name: &str, profile_name: &str) -> Result<(), tonic::Status> {
|
||||
let table_def_request = PostTableDefinitionRequest {
|
||||
profile_name: profile_name.into(),
|
||||
table_name: table_name.into(),
|
||||
columns: vec![
|
||||
TableColumnDefinition { name: "my_text".into(), field_type: "text".into() },
|
||||
TableColumnDefinition { name: "my_bool".into(), field_type: "boolean".into() },
|
||||
TableColumnDefinition { name: "my_timestamp".into(), field_type: "timestamp".into() },
|
||||
TableColumnDefinition { name: "my_bigint".into(), field_type: "integer".into() },
|
||||
TableColumnDefinition { name: "my_money".into(), field_type: "decimal(19,4)".into() },
|
||||
TableColumnDefinition { name: "my_date".into(), field_type: "date".into() },
|
||||
TableColumnDefinition { name: "my_decimal".into(), field_type: "decimal(10,2)".into() },
|
||||
],
|
||||
indexes: vec![],
|
||||
links: vec![],
|
||||
};
|
||||
|
||||
post_table_definition(pool, table_def_request).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Create foreign key test tables (category -> product -> order)
|
||||
async fn create_foreign_key_test_tables(pool: &PgPool, profile_name: &str, category_table: &str, product_table: &str, order_table: &str) -> Result<(), tonic::Status> {
|
||||
// Create category table first (no dependencies)
|
||||
let category_def = PostTableDefinitionRequest {
|
||||
profile_name: profile_name.into(),
|
||||
table_name: category_table.into(),
|
||||
columns: vec![
|
||||
TableColumnDefinition { name: "name".into(), field_type: "text".into() },
|
||||
TableColumnDefinition { name: "description".into(), field_type: "text".into() },
|
||||
],
|
||||
indexes: vec![],
|
||||
links: vec![],
|
||||
};
|
||||
post_table_definition(pool, category_def).await?;
|
||||
|
||||
// Create product table with required link to category
|
||||
let product_def = PostTableDefinitionRequest {
|
||||
profile_name: profile_name.into(),
|
||||
table_name: product_table.into(),
|
||||
columns: vec![
|
||||
TableColumnDefinition { name: "name".into(), field_type: "text".into() },
|
||||
TableColumnDefinition { name: "price".into(), field_type: "decimal(10,2)".into() },
|
||||
],
|
||||
indexes: vec![],
|
||||
links: vec![
|
||||
TableLink { linked_table_name: category_table.into(), required: true },
|
||||
],
|
||||
};
|
||||
post_table_definition(pool, product_def).await?;
|
||||
|
||||
// Create order table with required link to product and optional link to category
|
||||
let order_def = PostTableDefinitionRequest {
|
||||
profile_name: profile_name.into(),
|
||||
table_name: order_table.into(),
|
||||
columns: vec![
|
||||
TableColumnDefinition { name: "quantity".into(), field_type: "integer".into() },
|
||||
TableColumnDefinition { name: "notes".into(), field_type: "text".into() },
|
||||
],
|
||||
indexes: vec![],
|
||||
links: vec![
|
||||
TableLink { linked_table_name: product_table.into(), required: true },
|
||||
TableLink { linked_table_name: category_table.into(), required: false }, // Optional link
|
||||
],
|
||||
};
|
||||
post_table_definition(pool, order_def).await?;
|
||||
|
||||
Ok(())
|
||||
}
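
// The links above surface in the child tables as foreign-key columns named
// "<linked_table>_id"; the FK tests below build those column names with
// format!("{}_id", ...).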
|
||||
|
||||
#[fixture]
|
||||
async fn data_type_test_context() -> DataTypeTestContext {
|
||||
let pool = setup_test_db().await;
|
||||
let unique_id = generate_unique_id();
|
||||
let profile_name = format!("dtype_profile_{}", unique_id);
|
||||
let table_name = format!("dtype_table_{}", unique_id);
|
||||
|
||||
create_data_type_test_table(&pool, &table_name, &profile_name).await
|
||||
.expect("Failed to create data type test table");
|
||||
|
||||
DataTypeTestContext { pool, profile_name, table_name }
|
||||
}
|
||||
|
||||
#[fixture]
|
||||
async fn foreign_key_test_context() -> ForeignKeyTestContext {
|
||||
let pool = setup_test_db().await;
|
||||
let unique_id = generate_unique_id();
|
||||
let profile_name = format!("fk_profile_{}", unique_id);
|
||||
let category_table = format!("category_{}", unique_id);
|
||||
let product_table = format!("product_{}", unique_id);
|
||||
let order_table = format!("order_{}", unique_id);
|
||||
|
||||
create_foreign_key_test_tables(&pool, &profile_name, &category_table, &product_table, &order_table).await
|
||||
.expect("Failed to create foreign key test tables");
|
||||
|
||||
ForeignKeyTestContext { pool, profile_name, category_table, product_table, order_table }
|
||||
}
|
||||
|
||||
// ========================================================================
|
||||
// DATA TYPE VALIDATION TESTS
|
||||
// ========================================================================
|
||||
|
||||
|
||||
#[rstest]
|
||||
#[tokio::test]
|
||||
async fn test_correct_data_types_success(#[future] data_type_test_context: DataTypeTestContext) {
|
||||
let context = data_type_test_context.await;
|
||||
let indexer_tx = create_test_indexer_channel().await;
|
||||
let mut data = HashMap::new();
|
||||
data.insert("my_text".into(), create_string_value("Test String"));
|
||||
data.insert("my_bool".into(), create_bool_value(true));
|
||||
data.insert("my_timestamp".into(), create_string_value("2024-01-15T10:30:00Z"));
|
||||
data.insert("my_bigint".into(), create_number_value(42.0));
|
||||
data.insert("my_money".into(), create_string_value("123.45")); // Use string for decimal
|
||||
data.insert("my_decimal".into(), create_string_value("999.99")); // Use string for decimal
|
||||
let request = PostTableDataRequest {
|
||||
profile_name: context.profile_name.clone(),
|
||||
table_name: context.table_name.clone(),
|
||||
data,
|
||||
};
|
||||
let response = post_table_data(&context.pool, request, &indexer_tx).await.unwrap();
|
||||
assert!(response.success);
|
||||
assert!(response.inserted_id > 0);
|
||||
|
||||
// Verify data was stored correctly
|
||||
let query = format!(
|
||||
r#"SELECT my_text, my_bool, my_timestamp, my_bigint FROM "{}"."{}" WHERE id = $1"#,
|
||||
context.profile_name, context.table_name
|
||||
);
|
||||
let row = sqlx::query(&query)
|
||||
.bind(response.inserted_id)
|
||||
.fetch_one(&context.pool)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let stored_text: String = row.get("my_text");
|
||||
let stored_bool: bool = row.get("my_bool");
|
||||
// Change this based on your actual column type in the schema:
|
||||
// If my_bigint is defined as "integer" in table definition, use i32:
|
||||
let stored_bigint: i32 = row.get("my_bigint");
|
||||
// If my_bigint is defined as "biginteger" or "bigint" in table definition, use i64:
|
||||
// let stored_bigint: i64 = row.get("my_bigint");
|
||||
|
||||
assert_eq!(stored_text, "Test String");
|
||||
assert!(stored_bool);
|
||||
assert_eq!(stored_bigint, 42);
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[tokio::test]
|
||||
async fn test_type_mismatch_string_for_boolean(#[future] data_type_test_context: DataTypeTestContext) {
|
||||
let context = data_type_test_context.await;
|
||||
let indexer_tx = create_test_indexer_channel().await;
|
||||
|
||||
let mut data = HashMap::new();
|
||||
data.insert("my_text".into(), create_string_value("Required field"));
|
||||
data.insert("my_bool".into(), create_string_value("true")); // String instead of boolean
|
||||
|
||||
let request = PostTableDataRequest {
|
||||
profile_name: context.profile_name.clone(),
|
||||
table_name: context.table_name.clone(),
|
||||
data,
|
||||
};
|
||||
|
||||
let result = post_table_data(&context.pool, request, &indexer_tx).await;
|
||||
assert!(result.is_err());
|
||||
|
||||
if let Err(err) = result {
|
||||
assert_eq!(err.code(), tonic::Code::InvalidArgument);
|
||||
assert!(err.message().contains("Expected boolean for column 'my_bool'"));
|
||||
}
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[tokio::test]
|
||||
async fn test_type_mismatch_string_for_integer(#[future] data_type_test_context: DataTypeTestContext) {
|
||||
let context = data_type_test_context.await;
|
||||
let indexer_tx = create_test_indexer_channel().await;
|
||||
|
||||
let mut data = HashMap::new();
|
||||
data.insert("my_text".into(), create_string_value("Required field"));
|
||||
data.insert("my_bigint".into(), create_string_value("42")); // String instead of number
|
||||
|
||||
let request = PostTableDataRequest {
|
||||
profile_name: context.profile_name.clone(),
|
||||
table_name: context.table_name.clone(),
|
||||
data,
|
||||
};
|
||||
|
||||
let result = post_table_data(&context.pool, request, &indexer_tx).await;
|
||||
assert!(result.is_err());
|
||||
|
||||
if let Err(err) = result {
|
||||
assert_eq!(err.code(), tonic::Code::InvalidArgument);
|
||||
assert!(err.message().contains("Expected number for column 'my_bigint'"));
|
||||
}
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[tokio::test]
|
||||
async fn test_type_mismatch_number_for_boolean(#[future] data_type_test_context: DataTypeTestContext) {
|
||||
let context = data_type_test_context.await;
|
||||
let indexer_tx = create_test_indexer_channel().await;
|
||||
|
||||
let mut data = HashMap::new();
|
||||
data.insert("my_text".into(), create_string_value("Required field"));
|
||||
data.insert("my_bool".into(), create_number_value(1.0)); // Number instead of boolean
|
||||
|
||||
let request = PostTableDataRequest {
|
||||
profile_name: context.profile_name.clone(),
|
||||
table_name: context.table_name.clone(),
|
||||
data,
|
||||
};
|
||||
|
||||
let result = post_table_data(&context.pool, request, &indexer_tx).await;
|
||||
assert!(result.is_err());
|
||||
|
||||
if let Err(err) = result {
|
||||
assert_eq!(err.code(), tonic::Code::InvalidArgument);
|
||||
assert!(err.message().contains("Expected boolean for column 'my_bool'"));
|
||||
}
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[tokio::test]
|
||||
async fn test_decimal_requires_string_not_number(#[future] data_type_test_context: DataTypeTestContext) {
|
||||
let context = data_type_test_context.await;
|
||||
let indexer_tx = create_test_indexer_channel().await;
|
||||
|
||||
let mut data = HashMap::new();
|
||||
data.insert("my_text".into(), create_string_value("Required field"));
|
||||
data.insert("my_money".into(), create_number_value(123.45)); // Number instead of string for decimal
|
||||
|
||||
let request = PostTableDataRequest {
|
||||
profile_name: context.profile_name.clone(),
|
||||
table_name: context.table_name.clone(),
|
||||
data,
|
||||
};
|
||||
|
||||
let result = post_table_data(&context.pool, request, &indexer_tx).await;
|
||||
assert!(result.is_err());
|
||||
|
||||
if let Err(err) = result {
|
||||
assert_eq!(err.code(), tonic::Code::InvalidArgument);
|
||||
assert!(err.message().contains("Expected a string representation for decimal column 'my_money'"));
|
||||
}
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[tokio::test]
|
||||
async fn test_invalid_timestamp_format(#[future] data_type_test_context: DataTypeTestContext) {
|
||||
let context = data_type_test_context.await;
|
||||
let indexer_tx = create_test_indexer_channel().await;
|
||||
|
||||
let mut data = HashMap::new();
|
||||
data.insert("my_text".into(), create_string_value("Required field"));
|
||||
data.insert("my_timestamp".into(), create_string_value("not-a-date"));
|
||||
|
||||
let request = PostTableDataRequest {
|
||||
profile_name: context.profile_name.clone(),
|
||||
table_name: context.table_name.clone(),
|
||||
data,
|
||||
};
|
||||
|
||||
let result = post_table_data(&context.pool, request, &indexer_tx).await;
|
||||
assert!(result.is_err());
|
||||
|
||||
if let Err(err) = result {
|
||||
assert_eq!(err.code(), tonic::Code::InvalidArgument);
|
||||
assert!(err.message().contains("Invalid timestamp for my_timestamp"));
|
||||
}
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[tokio::test]
|
||||
async fn test_float_for_integer_field(#[future] data_type_test_context: DataTypeTestContext) {
|
||||
let context = data_type_test_context.await;
|
||||
let indexer_tx = create_test_indexer_channel().await;
|
||||
|
||||
let mut data = HashMap::new();
|
||||
data.insert("my_text".into(), create_string_value("Required field"));
|
||||
data.insert("my_bigint".into(), create_number_value(123.45)); // Float for integer field
|
||||
|
||||
let request = PostTableDataRequest {
|
||||
profile_name: context.profile_name.clone(),
|
||||
table_name: context.table_name.clone(),
|
||||
data,
|
||||
};
|
||||
|
||||
let result = post_table_data(&context.pool, request, &indexer_tx).await;
|
||||
assert!(result.is_err());
|
||||
|
||||
if let Err(err) = result {
|
||||
assert_eq!(err.code(), tonic::Code::InvalidArgument);
|
||||
assert!(err.message().contains("Expected integer for column 'my_bigint', but got a float"));
|
||||
}
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[tokio::test]
|
||||
async fn test_valid_timestamp_formats(#[future] data_type_test_context: DataTypeTestContext) {
|
||||
let context = data_type_test_context.await;
|
||||
let indexer_tx = create_test_indexer_channel().await;
|
||||
|
||||
let valid_timestamps = vec![
|
||||
"2024-01-15T10:30:00Z",
|
||||
"2024-01-15T10:30:00+00:00",
|
||||
"2024-01-15T10:30:00.123Z",
|
||||
"2024-12-31T23:59:59Z",
|
||||
"1970-01-01T00:00:00Z", // Unix epoch
|
||||
];
|
||||
|
||||
for (i, timestamp) in valid_timestamps.into_iter().enumerate() {
|
||||
let mut data = HashMap::new();
|
||||
data.insert("my_text".into(), create_string_value(&format!("Test {}", i)));
|
||||
data.insert("my_timestamp".into(), create_string_value(timestamp));
|
||||
|
||||
let request = PostTableDataRequest {
|
||||
profile_name: context.profile_name.clone(),
|
||||
table_name: context.table_name.clone(),
|
||||
data,
|
||||
};
|
||||
|
||||
let result = post_table_data(&context.pool, request, &indexer_tx).await;
|
||||
assert!(result.is_ok(), "Failed for timestamp: {}", timestamp);
|
||||
}
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[tokio::test]
|
||||
async fn test_boundary_integer_values(#[future] data_type_test_context: DataTypeTestContext) {
|
||||
let context = data_type_test_context.await;
|
||||
let indexer_tx = create_test_indexer_channel().await;
|
||||
|
||||
// Use safer boundary values that don't have f64 precision issues
|
||||
let boundary_values = vec![
|
||||
0.0,
|
||||
1.0,
|
||||
-1.0,
|
||||
2147483647.0, // i32::MAX (for INTEGER columns)
|
||||
-2147483648.0, // i32::MIN (for INTEGER columns)
|
||||
];
|
||||
|
||||
for (i, value) in boundary_values.into_iter().enumerate() {
|
||||
let mut data = HashMap::new();
|
||||
data.insert("my_text".into(), create_string_value(&format!("Boundary test {}", i)));
|
||||
data.insert("my_bigint".into(), create_number_value(value));
|
||||
let request = PostTableDataRequest {
|
||||
profile_name: context.profile_name.clone(),
|
||||
table_name: context.table_name.clone(),
|
||||
data,
|
||||
};
|
||||
let result = post_table_data(&context.pool, request, &indexer_tx).await;
|
||||
assert!(result.is_ok(), "Failed for boundary value: {}", value);
|
||||
}
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[tokio::test]
|
||||
async fn test_null_values_for_all_types(#[future] data_type_test_context: DataTypeTestContext) {
|
||||
let context = data_type_test_context.await;
|
||||
let indexer_tx = create_test_indexer_channel().await;
|
||||
|
||||
let mut data = HashMap::new();
|
||||
data.insert("my_text".into(), create_string_value("Required for test"));
|
||||
data.insert("my_bool".into(), create_null_value());
|
||||
data.insert("my_timestamp".into(), create_null_value());
|
||||
data.insert("my_bigint".into(), create_null_value());
|
||||
data.insert("my_money".into(), create_null_value());
|
||||
|
||||
let request = PostTableDataRequest {
|
||||
profile_name: context.profile_name.clone(),
|
||||
table_name: context.table_name.clone(),
|
||||
data,
|
||||
};
|
||||
|
||||
let response = post_table_data(&context.pool, request, &indexer_tx).await.unwrap();
|
||||
assert!(response.success);
|
||||
|
||||
// Verify nulls were stored correctly
|
||||
let query = format!(
|
||||
r#"SELECT my_bool, my_timestamp, my_bigint FROM "{}"."{}" WHERE id = $1"#,
|
||||
context.profile_name, context.table_name
|
||||
);
|
||||
|
||||
let row = sqlx::query(&query)
|
||||
.bind(response.inserted_id)
|
||||
.fetch_one(&context.pool)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let stored_bool: Option<bool> = row.get("my_bool");
|
||||
let stored_timestamp: Option<chrono::DateTime<Utc>> = row.get("my_timestamp");
|
||||
let stored_bigint: Option<i64> = row.get("my_bigint");
|
||||
|
||||
assert!(stored_bool.is_none());
|
||||
assert!(stored_timestamp.is_none());
|
||||
assert!(stored_bigint.is_none());
|
||||
}
|
||||
|
||||
// ========================================================================
|
||||
// FOREIGN KEY CONSTRAINT TESTS
|
||||
// ========================================================================
|
||||
|
||||
#[rstest]
|
||||
#[tokio::test]
|
||||
async fn test_insert_with_valid_foreign_key(#[future] foreign_key_test_context: ForeignKeyTestContext) {
|
||||
let context = foreign_key_test_context.await;
|
||||
let indexer_tx = create_test_indexer_channel().await;
|
||||
|
||||
// First, insert a category
|
||||
let mut category_data = HashMap::new();
|
||||
category_data.insert("name".into(), create_string_value("Electronics"));
|
||||
category_data.insert("description".into(), create_string_value("Electronic devices"));
|
||||
|
||||
let category_request = PostTableDataRequest {
|
||||
profile_name: context.profile_name.clone(),
|
||||
table_name: context.category_table.clone(),
|
||||
data: category_data,
|
||||
};
|
||||
|
||||
let category_response = post_table_data(&context.pool, category_request, &indexer_tx).await.unwrap();
|
||||
let category_id = category_response.inserted_id;
|
||||
|
||||
// Now insert a product with the valid category_id
|
||||
let mut product_data = HashMap::new();
|
||||
product_data.insert("name".into(), create_string_value("Laptop"));
|
||||
product_data.insert("price".into(), create_string_value("999.99")); // Use string for decimal
|
||||
product_data.insert(format!("{}_id", context.category_table), create_number_value(category_id as f64));
|
||||
|
||||
let product_request = PostTableDataRequest {
|
||||
profile_name: context.profile_name.clone(),
|
||||
table_name: context.product_table.clone(),
|
||||
data: product_data,
|
||||
};
|
||||
|
||||
let result = post_table_data(&context.pool, product_request, &indexer_tx).await;
|
||||
assert!(result.is_ok(), "Insert with valid foreign key should succeed");
|
||||
}

#[rstest]
#[tokio::test]
async fn test_insert_with_nonexistent_foreign_key(#[future] foreign_key_test_context: ForeignKeyTestContext) {
    let context = foreign_key_test_context.await;
    let indexer_tx = create_test_indexer_channel().await;

    // Try to insert product with non-existent category_id
    let mut product_data = HashMap::new();
    product_data.insert("name".into(), create_string_value("Laptop"));
    product_data.insert("price".into(), create_string_value("999.99"));
    product_data.insert(format!("{}_id", context.category_table), create_number_value(99999.0)); // Non-existent ID

    let product_request = PostTableDataRequest {
        profile_name: context.profile_name.clone(),
        table_name: context.product_table.clone(),
        data: product_data,
    };

    let result = post_table_data(&context.pool, product_request, &indexer_tx).await;
    assert!(result.is_err(), "Insert with non-existent foreign key should fail");

    if let Err(err) = result {
        assert_eq!(err.code(), tonic::Code::Internal);
        assert!(err.message().contains("Insert failed"));
    }
}

#[rstest]
#[tokio::test]
async fn test_insert_with_null_required_foreign_key(#[future] foreign_key_test_context: ForeignKeyTestContext) {
    let context = foreign_key_test_context.await;
    let indexer_tx = create_test_indexer_channel().await;

    // Try to insert product without category_id (required foreign key)
    let mut product_data = HashMap::new();
    product_data.insert("name".into(), create_string_value("Laptop"));
    product_data.insert("price".into(), create_string_value("999.99"));
    // Intentionally omit category_id

    let product_request = PostTableDataRequest {
        profile_name: context.profile_name.clone(),
        table_name: context.product_table.clone(),
        data: product_data,
    };

    let result = post_table_data(&context.pool, product_request, &indexer_tx).await;
    assert!(result.is_err(), "Insert without required foreign key should fail");

    if let Err(err) = result {
        assert_eq!(err.code(), tonic::Code::Internal);
        assert!(err.message().contains("Insert failed"));
    }
}
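
// Illustrative sketch: the two tests above expect database-level constraint
// violations (missing or dangling foreign keys) to surface as tonic::Code::Internal
// with an "Insert failed" message, while the value-validation tests elsewhere in
// this suite expect tonic::Code::InvalidArgument. That split is consistent with a
// handler that validates values itself before the INSERT and only wraps whatever
// sqlx returns afterwards, roughly as below. This illustrates the assumed mapping;
// it is not the actual handler code.
#[allow(dead_code)]
fn assumed_insert_error_mapping(db_err: sqlx::Error) -> tonic::Status {
    // Anything the database rejects (FK violation, NOT NULL violation, ...)
    // is reported as an internal insert failure.
    tonic::Status::internal(format!("Insert failed: {}", db_err))
}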

#[rstest]
#[tokio::test]
async fn test_insert_with_null_optional_foreign_key(#[future] foreign_key_test_context: ForeignKeyTestContext) {
    let context = foreign_key_test_context.await;
    let indexer_tx = create_test_indexer_channel().await;

    // First create a category and product for the required FK
    let mut category_data = HashMap::new();
    category_data.insert("name".into(), create_string_value("Electronics"));

    let category_request = PostTableDataRequest {
        profile_name: context.profile_name.clone(),
        table_name: context.category_table.clone(),
        data: category_data,
    };

    let category_response = post_table_data(&context.pool, category_request, &indexer_tx).await.unwrap();

    let mut product_data = HashMap::new();
    product_data.insert("name".into(), create_string_value("Laptop"));
    product_data.insert("price".into(), create_string_value("999.99"));
    product_data.insert(format!("{}_id", context.category_table), create_number_value(category_response.inserted_id as f64));

    let product_request = PostTableDataRequest {
        profile_name: context.profile_name.clone(),
        table_name: context.product_table.clone(),
        data: product_data,
    };

    let product_response = post_table_data(&context.pool, product_request, &indexer_tx).await.unwrap();

    // Now insert order with required product_id but without optional category_id
    let mut order_data = HashMap::new();
    order_data.insert("quantity".into(), create_number_value(2.0));
    order_data.insert("notes".into(), create_string_value("Test order"));
    order_data.insert(format!("{}_id", context.product_table), create_number_value(product_response.inserted_id as f64));
    // Intentionally omit optional category_id

    let order_request = PostTableDataRequest {
        profile_name: context.profile_name.clone(),
        table_name: context.order_table.clone(),
        data: order_data,
    };

    let result = post_table_data(&context.pool, order_request, &indexer_tx).await;
    assert!(result.is_ok(), "Insert with NULL optional foreign key should succeed");
}

#[rstest]
#[tokio::test]
async fn test_multiple_foreign_keys_scenario(#[future] foreign_key_test_context: ForeignKeyTestContext) {
    let context = foreign_key_test_context.await;
    let indexer_tx = create_test_indexer_channel().await;

    // Create category
    let mut category_data = HashMap::new();
    category_data.insert("name".into(), create_string_value("Books"));

    let category_request = PostTableDataRequest {
        profile_name: context.profile_name.clone(),
        table_name: context.category_table.clone(),
        data: category_data,
    };

    let category_response = post_table_data(&context.pool, category_request, &indexer_tx).await.unwrap();

    // Create product
    let mut product_data = HashMap::new();
    product_data.insert("name".into(), create_string_value("Programming Book"));
    product_data.insert("price".into(), create_string_value("49.99"));
    product_data.insert(format!("{}_id", context.category_table), create_number_value(category_response.inserted_id as f64));

    let product_request = PostTableDataRequest {
        profile_name: context.profile_name.clone(),
        table_name: context.product_table.clone(),
        data: product_data,
    };

    let product_response = post_table_data(&context.pool, product_request, &indexer_tx).await.unwrap();

    // Create order with both foreign keys
    let mut order_data = HashMap::new();
    order_data.insert("quantity".into(), create_number_value(3.0));
    order_data.insert("notes".into(), create_string_value("Bulk order"));
    order_data.insert(format!("{}_id", context.product_table), create_number_value(product_response.inserted_id as f64));
    order_data.insert(format!("{}_id", context.category_table), create_number_value(category_response.inserted_id as f64));

    let order_request = PostTableDataRequest {
        profile_name: context.profile_name.clone(),
        table_name: context.order_table.clone(),
        data: order_data,
    };

    let result = post_table_data(&context.pool, order_request, &indexer_tx).await;
    assert!(result.is_ok(), "Insert with multiple valid foreign keys should succeed");

    // Verify the data was inserted correctly
    let product_id_col = format!("{}_id", context.product_table);
    let category_id_col = format!("{}_id", context.category_table);

    let query = format!(
        r#"SELECT quantity, "{}", "{}" FROM "{}"."{}" WHERE id = $1"#,
        product_id_col, category_id_col, context.profile_name, context.order_table
    );

    let row = sqlx::query(&query)
        .bind(result.unwrap().inserted_id)
        .fetch_one(&context.pool)
        .await
        .unwrap();

    // quantity is defined as "integer" in the foreign key test context, so read it as i32
    let quantity: i32 = row.get("quantity");
    let stored_product_id: i64 = row.get(product_id_col.as_str());
    let stored_category_id: Option<i64> = row.get(category_id_col.as_str());

    assert_eq!(quantity, 3);
    assert_eq!(stored_product_id, product_response.inserted_id);
    assert_eq!(stored_category_id.unwrap(), category_response.inserted_id);
}

// ========================================================================
// ADDITIONAL EDGE CASE TESTS
// ========================================================================

#[rstest]
#[tokio::test]
async fn test_extremely_large_decimal_numbers(#[future] data_type_test_context: DataTypeTestContext) {
    let context = data_type_test_context.await;
    let indexer_tx = create_test_indexer_channel().await;

    let large_decimals = vec![
        "1000000000.0000",
        "999999999999.99",
        "-999999999999.99",
        "0.0001",
    ];

    for (i, decimal_str) in large_decimals.into_iter().enumerate() {
        let mut data = HashMap::new();
        data.insert("my_text".into(), create_string_value(&format!("Large decimal test {}", i)));
        data.insert("my_money".into(), create_string_value(decimal_str));

        let request = PostTableDataRequest {
            profile_name: context.profile_name.clone(),
            table_name: context.table_name.clone(),
            data,
        };

        let result = post_table_data(&context.pool, request, &indexer_tx).await;
        assert!(result.is_ok(), "Failed for large decimal: {}", decimal_str);
    }
}

#[rstest]
#[tokio::test]
async fn test_boolean_edge_cases(#[future] data_type_test_context: DataTypeTestContext) {
    let context = data_type_test_context.await;
    let indexer_tx = create_test_indexer_channel().await;

    let boolean_values = vec![true, false];

    for (i, bool_val) in boolean_values.into_iter().enumerate() {
        let mut data = HashMap::new();
        data.insert("my_text".into(), create_string_value(&format!("Boolean test {}", i)));
        data.insert("my_bool".into(), create_bool_value(bool_val));

        let request = PostTableDataRequest {
            profile_name: context.profile_name.clone(),
            table_name: context.table_name.clone(),
            data,
        };

        let response = post_table_data(&context.pool, request, &indexer_tx).await.unwrap();

        // Verify boolean was stored correctly
        let query = format!(
            r#"SELECT my_bool FROM "{}"."{}" WHERE id = $1"#,
            context.profile_name, context.table_name
        );

        let stored_bool: bool = sqlx::query_scalar::<_, bool>(&query)
            .bind(response.inserted_id)
            .fetch_one(&context.pool)
            .await
            .unwrap();

        assert_eq!(stored_bool, bool_val);
    }
}

#[rstest]
#[tokio::test]
async fn test_decimal_precision_handling(#[future] data_type_test_context: DataTypeTestContext) {
    let context = data_type_test_context.await;
    let indexer_tx = create_test_indexer_channel().await;

    let decimal_values = vec![
        "0.01",
        "99.99",
        "123.45",
        "999.99",
        "-123.45",
    ];

    for (i, decimal_val) in decimal_values.into_iter().enumerate() {
        let mut data = HashMap::new();
        data.insert("my_text".into(), create_string_value(&format!("Decimal test {}", i)));
        data.insert("my_decimal".into(), create_string_value(decimal_val));

        let request = PostTableDataRequest {
            profile_name: context.profile_name.clone(),
            table_name: context.table_name.clone(),
            data,
        };

        let result = post_table_data(&context.pool, request, &indexer_tx).await;
        assert!(result.is_ok(), "Failed for decimal value: {}", decimal_val);
    }
}

#[rstest]
#[tokio::test]
async fn test_invalid_decimal_string_formats(#[future] data_type_test_context: DataTypeTestContext) {
    let context = data_type_test_context.await;
    let indexer_tx = create_test_indexer_channel().await;

    let invalid_decimals = vec![
        "not-a-number",
        "123.45.67",
        "abc123",
        "",
        " ",
    ];

    for (i, invalid_decimal) in invalid_decimals.into_iter().enumerate() {
        let mut data = HashMap::new();
        data.insert("my_text".into(), create_string_value(&format!("Invalid decimal test {}", i)));
        data.insert("my_decimal".into(), create_string_value(invalid_decimal));

        let request = PostTableDataRequest {
            profile_name: context.profile_name.clone(),
            table_name: context.table_name.clone(),
            data,
        };

        let result = post_table_data(&context.pool, request, &indexer_tx).await;

        if invalid_decimal.trim().is_empty() {
            // Empty strings should be treated as NULL and succeed
            assert!(result.is_ok(), "Empty string should be treated as NULL for: {}", invalid_decimal);
        } else {
            // Invalid decimal strings should fail
            assert!(result.is_err(), "Should fail for invalid decimal: {}", invalid_decimal);
            if let Err(err) = result {
                assert_eq!(err.code(), tonic::Code::InvalidArgument);
                assert!(err.message().contains("Invalid decimal string format"));
            }
        }
    }
}
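
// Illustrative sketch: the branch above encodes the expected server policy for
// decimal columns: a blank or whitespace-only string is treated as NULL, anything
// else must parse as a decimal or the request is rejected with InvalidArgument.
// A minimal version of that decision, assuming rust_decimal is the parser (the
// helper name `parse_decimal_input` is hypothetical, not an existing function):
#[allow(dead_code)]
fn parse_decimal_input(raw: &str) -> Result<Option<rust_decimal::Decimal>, tonic::Status> {
    use std::str::FromStr;
    if raw.trim().is_empty() {
        return Ok(None); // stored as SQL NULL
    }
    rust_decimal::Decimal::from_str(raw.trim())
        .map(Some)
        .map_err(|_| tonic::Status::invalid_argument("Invalid decimal string format"))
}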

#[rstest]
#[tokio::test]
async fn test_mixed_null_and_valid_data(#[future] data_type_test_context: DataTypeTestContext) {
    let context = data_type_test_context.await;
    let indexer_tx = create_test_indexer_channel().await;
    let mut data = HashMap::new();
    data.insert("my_text".into(), create_string_value("Mixed data test"));
    data.insert("my_bool".into(), create_bool_value(true));
    data.insert("my_timestamp".into(), create_null_value());
    data.insert("my_bigint".into(), create_number_value(42.0));
    data.insert("my_money".into(), create_null_value());
    let request = PostTableDataRequest {
        profile_name: context.profile_name.clone(),
        table_name: context.table_name.clone(),
        data,
    };
    let response = post_table_data(&context.pool, request, &indexer_tx).await.unwrap();
    assert!(response.success);

    // Verify mixed null and valid data was stored correctly
    let query = format!(
        r#"SELECT my_text, my_bool, my_timestamp, my_bigint, my_money FROM "{}"."{}" WHERE id = $1"#,
        context.profile_name, context.table_name
    );
    let row = sqlx::query(&query)
        .bind(response.inserted_id)
        .fetch_one(&context.pool)
        .await
        .unwrap();

    let stored_text: String = row.get("my_text");
    let stored_bool: bool = row.get("my_bool");
    let stored_timestamp: Option<DateTime<Utc>> = row.get("my_timestamp");
    // If my_bigint is defined as "integer" in the table definition, read it as i32;
    // if it is defined as "biginteger"/"bigint", use i64 instead:
    // let stored_bigint: i64 = row.get("my_bigint");
    let stored_bigint: i32 = row.get("my_bigint");
    let stored_money: Option<Decimal> = row.get("my_money");

    assert_eq!(stored_text, "Mixed data test");
    assert_eq!(stored_bool, true);
    assert!(stored_timestamp.is_none());
    assert_eq!(stored_bigint, 42);
    assert!(stored_money.is_none());
}
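
// Illustrative sketch: when the declared width of an integer column is not known
// at the call site, sqlx's try_get can probe one width and fall back to the other
// instead of hard-coding i32 or i64 as above. This is an assumption-free reading
// pattern for illustration only; the tests in this commit pin the type explicitly,
// which is the stricter check.
#[allow(dead_code)]
fn read_integer_column(row: &sqlx::postgres::PgRow, column: &str) -> i64 {
    use sqlx::Row;
    row.try_get::<i64, _>(column)
        .unwrap_or_else(|_| row.try_get::<i32, _>(column).expect("integer column") as i64)
}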
264
server/tests/tables_data/post/post_table_data_test4.rs
Normal file
@@ -0,0 +1,264 @@

// tests/tables_data/handlers/post_table_data_test4.rs

use rust_decimal::Decimal;
use rust_decimal_macros::dec;

// Helper to create a protobuf Value from a string
fn proto_string(s: &str) -> Value {
    Value {
        kind: Some(Kind::StringValue(s.to_string())),
    }
}

// Helper to create a protobuf Value from a number
fn proto_number(n: f64) -> Value {
    Value {
        kind: Some(Kind::NumberValue(n)),
    }
}

// Helper to create a protobuf Null Value
fn proto_null() -> Value {
    Value {
        kind: Some(Kind::NullValue(0)),
    }
}

// Helper function to create a table with various decimal types for testing
async fn create_financial_table(
    pool: &PgPool,
    table_name: &str,
    profile_name: &str,
) -> Result<(), tonic::Status> {
    let table_def_request = PostTableDefinitionRequest {
        profile_name: profile_name.into(),
        table_name: table_name.into(),
        columns: vec![
            TableColumnDefinition {
                name: "product_name".into(),
                field_type: "text".into(),
            },
            // Standard money column
            TableColumnDefinition {
                name: "price".into(),
                field_type: "decimal(19, 4)".into(),
            },
            // Column for things like exchange rates or precise factors
            TableColumnDefinition {
                name: "rate".into(),
                field_type: "decimal(10, 5)".into(),
            },
        ],
        indexes: vec![],
        links: vec![],
    };

    post_table_definition(pool, table_def_request).await?;
    Ok(())
}
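
// Illustrative sketch: the "decimal(19, 4)" / "decimal(10, 5)" field types above
// are assumed to map one-to-one onto PostgreSQL NUMERIC(precision, scale) columns,
// which is what the later assertions on rounding and overflow rely on. Roughly,
// the table definition handler is expected to emit DDL along these lines; this is
// an assumption for illustration, not the handler's actual SQL:
#[allow(dead_code)]
fn assumed_financial_table_ddl(profile: &str, table: &str) -> String {
    format!(
        r#"CREATE TABLE "{}"."{}" (
            id BIGSERIAL PRIMARY KEY,
            product_name TEXT,
            price NUMERIC(19, 4),
            rate NUMERIC(10, 5)
        )"#,
        profile, table
    )
}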

// A new test context fixture for our financial table
#[fixture]
async fn decimal_test_context() -> TestContext {
    let pool = setup_test_db().await;
    let unique_id = generate_unique_id();
    let profile_name = format!("decimal_profile_{}", unique_id);
    let table_name = format!("invoices_{}", unique_id);

    create_financial_table(&pool, &table_name, &profile_name)
        .await
        .expect("Failed to create decimal test table");

    let (tx, _rx) = mpsc::channel(100);

    TestContext {
        pool,
        profile_name,
        table_name,
        indexer_tx: tx,
    }
}

// ========= DECIMAL/NUMERIC DATA TYPE TESTS =========

#[rstest]
#[tokio::test]
async fn test_insert_valid_decimal_string(#[future] decimal_test_context: TestContext) {
    let context = decimal_test_context.await;
    let mut data = HashMap::new();
    data.insert("product_name".into(), proto_string("Laptop"));
    data.insert("price".into(), proto_string("1499.99"));
    data.insert("rate".into(), proto_string("-0.12345"));

    let request = PostTableDataRequest {
        profile_name: context.profile_name.clone(),
        table_name: context.table_name.clone(),
        data,
    };

    let response = post_table_data(&context.pool, request, &context.indexer_tx)
        .await
        .unwrap();
    assert!(response.success);

    let query = format!(
        r#"SELECT price, rate FROM "{}"."{}" WHERE id = $1"#,
        context.profile_name, context.table_name
    );
    let row = sqlx::query(&query)
        .bind(response.inserted_id)
        .fetch_one(&context.pool)
        .await
        .unwrap();

    let price: Decimal = row.get("price");
    let rate: Decimal = row.get("rate");

    assert_eq!(price, dec!(1499.99));
    assert_eq!(rate, dec!(-0.12345));
}

#[rstest]
#[tokio::test]
async fn test_insert_decimal_from_number_fails(#[future] decimal_test_context: TestContext) {
    let context = decimal_test_context.await;
    let mut data = HashMap::new();
    data.insert("product_name".into(), proto_string("Mouse"));
    // THIS IS THE INVALID PART: using a number for a decimal field.
    data.insert("price".into(), proto_number(75.50));

    let request = PostTableDataRequest {
        profile_name: context.profile_name.clone(),
        table_name: context.table_name.clone(),
        data,
    };

    // The operation should fail.
    let result = post_table_data(&context.pool, request, &context.indexer_tx).await;
    assert!(result.is_err());

    // Verify the error is correct.
    let status = result.unwrap_err();
    assert_eq!(status.code(), tonic::Code::InvalidArgument);
    assert!(status
        .message()
        .contains("Expected a string representation for decimal column 'price'"));
}
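
// Illustrative sketch: the string-only rule for decimal columns exists because
// protobuf NumberValue is an f64, and most decimal fractions have no exact binary
// representation, so money values could silently drift. A quick demonstration of
// the difference (not part of the test suite, just an aside):
#[allow(dead_code)]
fn float_vs_decimal_drift() {
    use std::str::FromStr;
    // Summing 0.1 ten times in f64 does not give exactly 1.0 ...
    let float_sum: f64 = (0..10).map(|_| 0.1_f64).sum();
    assert_ne!(float_sum, 1.0);
    // ... while the same sum over Decimal parsed from strings is exact.
    let dec_sum = (0..10).fold(Decimal::ZERO, |acc, _| acc + Decimal::from_str("0.1").unwrap());
    assert_eq!(dec_sum, dec!(1.0));
}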

#[rstest]
#[tokio::test]
async fn test_decimal_rounding_behavior(#[future] decimal_test_context: TestContext) {
    let context = decimal_test_context.await;
    let mut data = HashMap::new();
    data.insert("product_name".into(), proto_string("Keyboard"));
    // price is NUMERIC(19, 4), so this should be rounded up by the database
    data.insert("price".into(), proto_string("99.12345"));

    let request = PostTableDataRequest {
        profile_name: context.profile_name.clone(),
        table_name: context.table_name.clone(),
        data,
    };

    let response = post_table_data(&context.pool, request, &context.indexer_tx)
        .await
        .unwrap();
    assert!(response.success);

    let price: Decimal = sqlx::query_scalar(&format!(
        r#"SELECT price FROM "{}"."{}" WHERE id = $1"#,
        context.profile_name, context.table_name
    ))
    .bind(response.inserted_id)
    .fetch_one(&context.pool)
    .await
    .unwrap();

    // PostgreSQL rounds away from zero (0.5 rounds up)
    assert_eq!(price, dec!(99.1235));
}
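
// Illustrative sketch: the expectation above is that NUMERIC(19, 4) keeps 4
// fractional digits and PostgreSQL rounds the dropped digit half away from zero,
// so 99.12345 becomes 99.1235 (the trailing 5 pushes the 4 up). rust_decimal can
// reproduce the same behaviour client-side if ever needed:
#[allow(dead_code)]
fn mirror_pg_numeric_rounding() {
    use rust_decimal::RoundingStrategy;
    let rounded = dec!(99.12345)
        .round_dp_with_strategy(4, RoundingStrategy::MidpointAwayFromZero);
    assert_eq!(rounded, dec!(99.1235));
}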

#[rstest]
#[tokio::test]
async fn test_insert_null_and_empty_string_for_decimal(
    #[future] decimal_test_context: TestContext,
) {
    let context = decimal_test_context.await;
    let mut data = HashMap::new();
    data.insert("product_name".into(), proto_string("Monitor"));
    data.insert("price".into(), proto_string(" ")); // Whitespace-only string should be treated as NULL
    data.insert("rate".into(), proto_null()); // Explicit NULL

    let request = PostTableDataRequest {
        profile_name: context.profile_name.clone(),
        table_name: context.table_name.clone(),
        data,
    };

    let response = post_table_data(&context.pool, request, &context.indexer_tx)
        .await
        .unwrap();
    assert!(response.success);

    let row = sqlx::query(&format!(
        r#"SELECT price, rate FROM "{}"."{}" WHERE id = $1"#,
        context.profile_name, context.table_name
    ))
    .bind(response.inserted_id)
    .fetch_one(&context.pool)
    .await
    .unwrap();

    let price: Option<Decimal> = row.get("price");
    let rate: Option<Decimal> = row.get("rate");

    assert!(price.is_none());
    assert!(rate.is_none());
}

#[rstest]
#[tokio::test]
async fn test_invalid_decimal_string_fails(#[future] decimal_test_context: TestContext) {
    let context = decimal_test_context.await;
    let mut data = HashMap::new();
    data.insert("product_name".into(), proto_string("Bad Data"));
    data.insert("price".into(), proto_string("not-a-number"));

    let request = PostTableDataRequest {
        profile_name: context.profile_name.clone(),
        table_name: context.table_name.clone(),
        data,
    };

    let result = post_table_data(&context.pool, request, &context.indexer_tx).await;
    assert!(result.is_err());
    let status = result.unwrap_err();
    assert_eq!(status.code(), tonic::Code::InvalidArgument);
    assert!(status
        .message()
        .contains("Invalid decimal string format for column 'price'"));
}

#[rstest]
#[tokio::test]
async fn test_decimal_precision_overflow_fails(#[future] decimal_test_context: TestContext) {
    let context = decimal_test_context.await;
    let mut data = HashMap::new();
    data.insert("product_name".into(), proto_string("Too Expensive"));
    // rate is NUMERIC(10, 5), so it allows at most 5 digits before the decimal point.
    // 123456.1 has 6 digits before the decimal point, so it should fail at the database level.
    data.insert("rate".into(), proto_string("123456.1"));

    let request = PostTableDataRequest {
        profile_name: context.profile_name.clone(),
        table_name: context.table_name.clone(),
        data,
    };

    let result = post_table_data(&context.pool, request, &context.indexer_tx).await;
    assert!(result.is_err());
    let status = result.unwrap_err();
    // This error comes from the database itself.
    assert_eq!(status.code(), tonic::Code::InvalidArgument);
    assert!(status.message().contains("Numeric field overflow"));
}
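
// Illustrative sketch: for NUMERIC(precision, scale) the number of digits allowed
// before the decimal point is precision - scale, so NUMERIC(10, 5) tops out just
// below 100000. A client-side pre-check could look like this; it is a sketch only,
// and the test above deliberately lets PostgreSQL raise the overflow instead:
#[allow(dead_code)]
fn fits_numeric(value: Decimal, precision: u32, scale: u32) -> bool {
    // 10^(precision - scale) is the first magnitude that no longer fits.
    let limit = Decimal::from(10i64.pow(precision - scale));
    value.abs() < limit
}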
588
server/tests/tables_data/post/post_table_data_test5.rs
Normal file
@@ -0,0 +1,588 @@

// ========================================================================
// COMPREHENSIVE INTEGER ROBUSTNESS TESTS
// ========================================================================

#[derive(Clone)]
struct IntegerRobustnessTestContext {
    pool: PgPool,
    profile_name: String,
    mixed_integer_table: String,
    bigint_only_table: String,
    integer_only_table: String,
}

// Create tables with different integer type combinations
async fn create_integer_robustness_tables(pool: &PgPool, profile_name: &str) -> Result<IntegerRobustnessTestContext, tonic::Status> {
    let unique_id = generate_unique_id();
    let mixed_table = format!("mixed_int_table_{}", unique_id);
    let bigint_table = format!("bigint_table_{}", unique_id);
    let integer_table = format!("integer_table_{}", unique_id);

    // Table with both INTEGER and BIGINT columns
    let mixed_def = PostTableDefinitionRequest {
        profile_name: profile_name.into(),
        table_name: mixed_table.clone(),
        columns: vec![
            TableColumnDefinition { name: "name".into(), field_type: "text".into() },
            TableColumnDefinition { name: "small_int".into(), field_type: "integer".into() }, // i32
            TableColumnDefinition { name: "big_int".into(), field_type: "biginteger".into() }, // i64
            TableColumnDefinition { name: "another_int".into(), field_type: "int".into() }, // i32 (alias)
            TableColumnDefinition { name: "another_bigint".into(), field_type: "bigint".into() }, // i64 (alias)
        ],
        indexes: vec![],
        links: vec![],
    };
    post_table_definition(pool, mixed_def).await?;

    // Table with only BIGINT columns
    let bigint_def = PostTableDefinitionRequest {
        profile_name: profile_name.into(),
        table_name: bigint_table.clone(),
        columns: vec![
            TableColumnDefinition { name: "name".into(), field_type: "text".into() },
            TableColumnDefinition { name: "value1".into(), field_type: "biginteger".into() },
            TableColumnDefinition { name: "value2".into(), field_type: "bigint".into() },
        ],
        indexes: vec![],
        links: vec![],
    };
    post_table_definition(pool, bigint_def).await?;

    // Table with only INTEGER columns
    let integer_def = PostTableDefinitionRequest {
        profile_name: profile_name.into(),
        table_name: integer_table.clone(),
        columns: vec![
            TableColumnDefinition { name: "name".into(), field_type: "text".into() },
            TableColumnDefinition { name: "value1".into(), field_type: "integer".into() },
            TableColumnDefinition { name: "value2".into(), field_type: "int".into() },
        ],
        indexes: vec![],
        links: vec![],
    };
    post_table_definition(pool, integer_def).await?;

    Ok(IntegerRobustnessTestContext {
        pool: pool.clone(),
        profile_name: profile_name.to_string(),
        mixed_integer_table: mixed_table,
        bigint_only_table: bigint_table,
        integer_only_table: integer_table,
    })
}

#[fixture]
async fn integer_robustness_context() -> IntegerRobustnessTestContext {
    let pool = setup_test_db().await;
    let unique_id = generate_unique_id();
    let profile_name = format!("int_robust_profile_{}", unique_id);

    create_integer_robustness_tables(&pool, &profile_name).await
        .expect("Failed to create integer robustness test tables")
}

// ========================================================================
// BOUNDARY AND OVERFLOW TESTS
// ========================================================================

#[rstest]
#[tokio::test]
async fn test_integer_boundary_values_comprehensive(#[future] integer_robustness_context: IntegerRobustnessTestContext) {
    let context = integer_robustness_context.await;
    let indexer_tx = create_test_indexer_channel().await;

    // Test i32 boundaries on INTEGER columns
    let i32_boundary_tests = vec![
        (2147483647.0, "i32::MAX"),
        (-2147483648.0, "i32::MIN"),
        (2147483646.0, "i32::MAX - 1"),
        (-2147483647.0, "i32::MIN + 1"),
        (0.0, "zero"),
        (1.0, "one"),
        (-1.0, "negative one"),
    ];

    for (value, description) in i32_boundary_tests {
        let mut data = HashMap::new();
        data.insert("name".into(), create_string_value(&format!("i32 test: {}", description)));
        data.insert("value1".into(), create_number_value(value));
        data.insert("value2".into(), create_number_value(value));

        let request = PostTableDataRequest {
            profile_name: context.profile_name.clone(),
            table_name: context.integer_only_table.clone(),
            data,
        };

        let result = post_table_data(&context.pool, request, &indexer_tx).await;
        assert!(result.is_ok(), "Failed for i32 value {}: {}", value, description);

        // Verify correct storage
        let response = result.unwrap();
        let query = format!(
            r#"SELECT value1, value2 FROM "{}"."{}" WHERE id = $1"#,
            context.profile_name, context.integer_only_table
        );
        let row = sqlx::query(&query)
            .bind(response.inserted_id)
            .fetch_one(&context.pool)
            .await
            .unwrap();

        let stored_val1: i32 = row.get("value1");
        let stored_val2: i32 = row.get("value2");
        assert_eq!(stored_val1, value as i32);
        assert_eq!(stored_val2, value as i32);
    }
}
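
// Illustrative sketch: the boundary cases above assume the handler validates an
// incoming NumberValue (an f64) before binding it to an INTEGER column. A minimal
// version of that check follows; `f64_to_i32_checked` is a hypothetical name and
// the real handler may structure the validation differently.
#[allow(dead_code)]
fn f64_to_i32_checked(value: f64) -> Result<i32, tonic::Status> {
    if value.fract() != 0.0 {
        return Err(tonic::Status::invalid_argument(
            "Expected integer for column, but got a float",
        ));
    }
    if value < i32::MIN as f64 || value > i32::MAX as f64 {
        return Err(tonic::Status::invalid_argument(
            "Integer value out of range for INTEGER column",
        ));
    }
    Ok(value as i32)
}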

#[rstest]
#[tokio::test]
async fn test_bigint_boundary_values_comprehensive(#[future] integer_robustness_context: IntegerRobustnessTestContext) {
    let context = integer_robustness_context.await;
    let indexer_tx = create_test_indexer_channel().await;

    // Test i64 boundaries that can be precisely represented in f64
    let i64_boundary_tests = vec![
        (9223372036854774784.0, "Close to i64::MAX (precisely representable)"),
        (-9223372036854774784.0, "Close to i64::MIN (precisely representable)"),
        (4611686018427387904.0, "i64::MAX / 2"),
        (-4611686018427387904.0, "i64::MIN / 2"),
        (2147483647.0, "i32::MAX in i64 column"),
        (-2147483648.0, "i32::MIN in i64 column"),
        (1000000000000.0, "One trillion"),
        (-1000000000000.0, "Negative one trillion"),
    ];

    for (value, description) in i64_boundary_tests {
        let mut data = HashMap::new();
        data.insert("name".into(), create_string_value(&format!("i64 test: {}", description)));
        data.insert("value1".into(), create_number_value(value));
        data.insert("value2".into(), create_number_value(value));

        let request = PostTableDataRequest {
            profile_name: context.profile_name.clone(),
            table_name: context.bigint_only_table.clone(),
            data,
        };

        let result = post_table_data(&context.pool, request, &indexer_tx).await;
        assert!(result.is_ok(), "Failed for i64 value {}: {}", value, description);

        // Verify correct storage
        let response = result.unwrap();
        let query = format!(
            r#"SELECT value1, value2 FROM "{}"."{}" WHERE id = $1"#,
            context.profile_name, context.bigint_only_table
        );
        let row = sqlx::query(&query)
            .bind(response.inserted_id)
            .fetch_one(&context.pool)
            .await
            .unwrap();

        let stored_val1: i64 = row.get("value1");
        let stored_val2: i64 = row.get("value2");
        assert_eq!(stored_val1, value as i64);
        assert_eq!(stored_val2, value as i64);
    }
}
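
// Illustrative aside: above 2^53 an f64 can no longer represent every integer,
// which is why the list above uses 9223372036854774784 (the largest f64 strictly
// below 2^63) instead of i64::MAX itself. A quick way to see both facts:
#[allow(dead_code)]
fn f64_integer_precision_facts() {
    // 2^53 + 1 is the first integer that rounds to a neighbour in f64.
    assert_eq!((2f64.powi(53) + 1.0) as i64, 9007199254740992);
    // The chosen boundary value survives an f64 -> i64 -> f64 round trip.
    let near_max = 9223372036854774784.0_f64;
    assert_eq!(near_max as i64 as f64, near_max);
}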

#[rstest]
#[tokio::test]
async fn test_integer_overflow_rejection_i32(#[future] integer_robustness_context: IntegerRobustnessTestContext) {
    let context = integer_robustness_context.await;
    let indexer_tx = create_test_indexer_channel().await;

    // Values that should be rejected for INTEGER columns
    let overflow_values = vec![
        (2147483648.0, "i32::MAX + 1"),
        (-2147483649.0, "i32::MIN - 1"),
        (3000000000.0, "3 billion"),
        (-3000000000.0, "negative 3 billion"),
        (4294967296.0, "2^32"),
        (9223372036854775807.0, "i64::MAX (should fail on i32)"),
    ];

    for (value, description) in overflow_values {
        let mut data = HashMap::new();
        data.insert("name".into(), create_string_value(&format!("Overflow test: {}", description)));
        data.insert("value1".into(), create_number_value(value));

        let request = PostTableDataRequest {
            profile_name: context.profile_name.clone(),
            table_name: context.integer_only_table.clone(),
            data,
        };

        let result = post_table_data(&context.pool, request, &indexer_tx).await;
        assert!(result.is_err(), "Should have failed for i32 overflow value {}: {}", value, description);

        if let Err(err) = result {
            assert_eq!(err.code(), tonic::Code::InvalidArgument);
            assert!(err.message().contains("Integer value out of range for INTEGER column"));
        }
    }
}

#[rstest]
#[tokio::test]
async fn test_bigint_overflow_rejection_i64(#[future] integer_robustness_context: IntegerRobustnessTestContext) {
    let context = integer_robustness_context.await;
    let indexer_tx = create_test_indexer_channel().await;

    // Values that should be rejected for BIGINT columns.
    // Only values that genuinely do NOT round-trip through i64 are included.
    let overflow_values = vec![
        (f64::INFINITY, "Positive infinity"),
        (f64::NEG_INFINITY, "Negative infinity"),
        (1e20, "Very large number (100,000,000,000,000,000,000)"),
        (-1e20, "Very large negative number"),
        (1e25, "Extremely large number"),
        (-1e25, "Extremely large negative number"),
        (f64::MAX, "f64::MAX"),
        (f64::MIN, "f64::MIN"),
        // Excluded because these values actually round-trip correctly:
        // (9223372036854775808.0, "Just above i64 safe range"),
        // (-9223372036854775808.0, "Just below i64 safe range"),
    ];

    for (value, description) in overflow_values {
        let mut data = HashMap::new();
        data.insert("name".into(), create_string_value(&format!("i64 Overflow test: {}", description)));
        data.insert("value1".into(), create_number_value(value));

        let request = PostTableDataRequest {
            profile_name: context.profile_name.clone(),
            table_name: context.bigint_only_table.clone(),
            data,
        };

        let result = post_table_data(&context.pool, request, &indexer_tx).await;

        assert!(result.is_err(), "Should have failed for i64 overflow value {}: {}", value, description);

        if let Err(err) = result {
            assert_eq!(err.code(), tonic::Code::InvalidArgument);
            // Check for either message format (the new robust check should catch these)
            let message = err.message();
            assert!(
                message.contains("Integer value out of range for BIGINT column") ||
                message.contains("Expected integer for column") ||
                message.contains("but got a float"),
                "Unexpected error message for {}: {}",
                description,
                message
            );
        }
    }
}
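
// Illustrative sketch: the "round-trip" wording above refers to a guard of roughly
// this shape, which accepts an f64 for a BIGINT column only if casting to i64 and
// back loses nothing. It explains why 2^63 (which saturates to i64::MAX and converts
// back to the same f64) is accepted while 1e20 or infinity is not. This is a sketch
// of the assumed check; the real handler may phrase it differently.
#[allow(dead_code)]
fn f64_to_i64_checked(value: f64) -> Result<i64, tonic::Status> {
    if !value.is_finite() || value.fract() != 0.0 {
        return Err(tonic::Status::invalid_argument(
            "Expected integer for column, but got a float",
        ));
    }
    let candidate = value as i64; // saturating cast in Rust
    if candidate as f64 != value {
        return Err(tonic::Status::invalid_argument(
            "Integer value out of range for BIGINT column",
        ));
    }
    Ok(candidate)
}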

#[rstest]
#[tokio::test]
async fn test_bigint_successful_roundtrip_values(#[future] integer_robustness_context: IntegerRobustnessTestContext) {
    let context = integer_robustness_context.await;
    let indexer_tx = create_test_indexer_channel().await;

    // Values that SHOULD successfully round-trip and be accepted
    let successful_values = vec![
        (9223372036854775808.0, "Exactly i64::MAX as f64 (legitimate value)"),
        (-9223372036854775808.0, "Exactly i64::MIN as f64 (legitimate value)"),
        (9223372036854774784.0, "Large but precisely representable in f64"),
        (-9223372036854774784.0, "Large negative but precisely representable in f64"),
        (0.0, "Zero"),
        (1.0, "One"),
        (-1.0, "Negative one"),
        (2147483647.0, "i32::MAX as f64"),
        (-2147483648.0, "i32::MIN as f64"),
    ];

    for (value, description) in successful_values {
        let mut data = HashMap::new();
        data.insert("name".into(), create_string_value(&format!("Successful test: {}", description)));
        data.insert("value1".into(), create_number_value(value));

        let request = PostTableDataRequest {
            profile_name: context.profile_name.clone(),
            table_name: context.bigint_only_table.clone(),
            data,
        };

        let result = post_table_data(&context.pool, request, &indexer_tx).await;
        assert!(result.is_ok(), "Should have succeeded for legitimate i64 value {}: {}", value, description);

        // Verify it was stored correctly
        if let Ok(response) = result {
            let query = format!(
                r#"SELECT value1 FROM "{}"."{}" WHERE id = $1"#,
                context.profile_name, context.bigint_only_table
            );
            let stored_value: i64 = sqlx::query_scalar(&query)
                .bind(response.inserted_id)
                .fetch_one(&context.pool)
                .await
                .unwrap();

            assert_eq!(stored_value, value as i64, "Stored value should match for {}", description);
        }
    }
}

#[rstest]
#[tokio::test]
async fn test_mixed_integer_types_in_same_table(#[future] integer_robustness_context: IntegerRobustnessTestContext) {
    let context = integer_robustness_context.await;
    let indexer_tx = create_test_indexer_channel().await;

    // Test inserting different values into different integer types in the same table
    let test_cases = vec![
        (42.0, 1000000000000.0, "Small i32, large i64"),
        (2147483647.0, 9223372036854774784.0, "i32::MAX, near i64::MAX"),
        (-2147483648.0, -9223372036854774784.0, "i32::MIN, near i64::MIN"),
        (0.0, 0.0, "Both zero"),
        (-1.0, -1.0, "Both negative one"),
    ];

    for (i32_val, i64_val, description) in test_cases {
        let mut data = HashMap::new();
        data.insert("name".into(), create_string_value(&format!("Mixed test: {}", description)));
        data.insert("small_int".into(), create_number_value(i32_val));
        data.insert("big_int".into(), create_number_value(i64_val));
        data.insert("another_int".into(), create_number_value(i32_val));
        data.insert("another_bigint".into(), create_number_value(i64_val));

        let request = PostTableDataRequest {
            profile_name: context.profile_name.clone(),
            table_name: context.mixed_integer_table.clone(),
            data,
        };

        let result = post_table_data(&context.pool, request, &indexer_tx).await;
        assert!(result.is_ok(), "Failed for mixed integer test: {}", description);

        // Verify correct storage with correct types
        let response = result.unwrap();
        let query = format!(
            r#"SELECT small_int, big_int, another_int, another_bigint FROM "{}"."{}" WHERE id = $1"#,
            context.profile_name, context.mixed_integer_table
        );
        let row = sqlx::query(&query)
            .bind(response.inserted_id)
            .fetch_one(&context.pool)
            .await
            .unwrap();

        let stored_small_int: i32 = row.get("small_int");
        let stored_big_int: i64 = row.get("big_int");
        let stored_another_int: i32 = row.get("another_int");
        let stored_another_bigint: i64 = row.get("another_bigint");

        assert_eq!(stored_small_int, i32_val as i32);
        assert_eq!(stored_big_int, i64_val as i64);
        assert_eq!(stored_another_int, i32_val as i32);
        assert_eq!(stored_another_bigint, i64_val as i64);
    }
}

#[rstest]
#[tokio::test]
async fn test_wrong_type_for_mixed_integer_columns(#[future] integer_robustness_context: IntegerRobustnessTestContext) {
    let context = integer_robustness_context.await;
    let indexer_tx = create_test_indexer_channel().await;

    // Try to put i64 values into i32 columns
    let mut data = HashMap::new();
    data.insert("name".into(), create_string_value("Wrong type test"));
    data.insert("small_int".into(), create_number_value(3000000000.0)); // Too big for i32
    data.insert("big_int".into(), create_number_value(42.0)); // This should be fine

    let request = PostTableDataRequest {
        profile_name: context.profile_name.clone(),
        table_name: context.mixed_integer_table.clone(),
        data,
    };

    let result = post_table_data(&context.pool, request, &indexer_tx).await;
    assert!(result.is_err(), "Should fail when putting i64 value in i32 column");

    if let Err(err) = result {
        assert_eq!(err.code(), tonic::Code::InvalidArgument);
        assert!(err.message().contains("Integer value out of range for INTEGER column"));
    }
}

#[rstest]
#[tokio::test]
async fn test_float_precision_edge_cases(#[future] integer_robustness_context: IntegerRobustnessTestContext) {
    let context = integer_robustness_context.await;
    let indexer_tx = create_test_indexer_channel().await;

    // Test values that have fractional parts (should be rejected)
    let fractional_values = vec![
        (42.1, "42.1"),
        (42.9, "42.9"),
        (42.000001, "42.000001"),
        (-42.5, "-42.5"),
        (0.1, "0.1"),
        (2147483646.5, "Near i32::MAX with fraction"),
    ];

    for (value, description) in fractional_values {
        let mut data = HashMap::new();
        data.insert("name".into(), create_string_value(&format!("Float test: {}", description)));
        data.insert("value1".into(), create_number_value(value));

        let request = PostTableDataRequest {
            profile_name: context.profile_name.clone(),
            table_name: context.integer_only_table.clone(),
            data,
        };

        let result = post_table_data(&context.pool, request, &indexer_tx).await;
        assert!(result.is_err(), "Should fail for fractional value {}: {}", value, description);

        if let Err(err) = result {
            assert_eq!(err.code(), tonic::Code::InvalidArgument);
            assert!(err.message().contains("Expected integer for column") && err.message().contains("but got a float"));
        }
    }
}

#[rstest]
#[tokio::test]
async fn test_null_integer_handling_comprehensive(#[future] integer_robustness_context: IntegerRobustnessTestContext) {
    let context = integer_robustness_context.await;
    let indexer_tx = create_test_indexer_channel().await;

    // Test null values in mixed integer table
    let mut data = HashMap::new();
    data.insert("name".into(), create_string_value("Null integer test"));
    data.insert("small_int".into(), create_null_value());
    data.insert("big_int".into(), create_null_value());
    data.insert("another_int".into(), create_number_value(42.0));
    data.insert("another_bigint".into(), create_number_value(1000000000000.0));

    let request = PostTableDataRequest {
        profile_name: context.profile_name.clone(),
        table_name: context.mixed_integer_table.clone(),
        data,
    };

    let result = post_table_data(&context.pool, request, &indexer_tx).await;
    assert!(result.is_ok(), "Should succeed with null integer values");

    // Verify null storage
    let response = result.unwrap();
    let query = format!(
        r#"SELECT small_int, big_int, another_int, another_bigint FROM "{}"."{}" WHERE id = $1"#,
        context.profile_name, context.mixed_integer_table
    );
    let row = sqlx::query(&query)
        .bind(response.inserted_id)
        .fetch_one(&context.pool)
        .await
        .unwrap();

    let stored_small_int: Option<i32> = row.get("small_int");
    let stored_big_int: Option<i64> = row.get("big_int");
    let stored_another_int: i32 = row.get("another_int");
    let stored_another_bigint: i64 = row.get("another_bigint");

    assert!(stored_small_int.is_none());
    assert!(stored_big_int.is_none());
    assert_eq!(stored_another_int, 42);
    assert_eq!(stored_another_bigint, 1000000000000);
}

#[rstest]
#[tokio::test]
async fn test_concurrent_mixed_integer_inserts(#[future] integer_robustness_context: IntegerRobustnessTestContext) {
    let context = integer_robustness_context.await;
    let indexer_tx = create_test_indexer_channel().await;

    // Test concurrent inserts with different integer types
    let tasks: Vec<_> = (0..10).map(|i| {
        let context = context.clone();
        let indexer_tx = indexer_tx.clone();

        tokio::spawn(async move {
            let mut data = HashMap::new();
            data.insert("name".into(), create_string_value(&format!("Concurrent test {}", i)));
            data.insert("small_int".into(), create_number_value((i * 1000) as f64));
            data.insert("big_int".into(), create_number_value((i as i64 * 1000000000000) as f64));
            data.insert("another_int".into(), create_number_value((i * -100) as f64));
            data.insert("another_bigint".into(), create_number_value((i as i64 * -1000000000000) as f64));

            let request = PostTableDataRequest {
                profile_name: context.profile_name.clone(),
                table_name: context.mixed_integer_table.clone(),
                data,
            };

            post_table_data(&context.pool, request, &indexer_tx).await
        })
    }).collect();

    // Wait for all tasks to complete
    let results = futures::future::join_all(tasks).await;

    // All should succeed
    for (i, result) in results.into_iter().enumerate() {
        let task_result = result.expect("Task should not panic");
        assert!(task_result.is_ok(), "Concurrent insert {} should succeed", i);
    }
}
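
// Illustrative aside: the concurrent test relies on two properties — the context
// struct derives Clone, and sqlx::PgPool is a cheap, Arc-backed handle, so each
// spawned task clones a handle onto the same connection pool rather than opening
// new connections. The pattern in isolation (a sketch, not part of the suite):
#[allow(dead_code)]
async fn spawn_with_shared_pool(pool: PgPool) {
    let handles: Vec<_> = (0..4)
        .map(|_| {
            let pool = pool.clone(); // clones the handle, not the connections
            tokio::spawn(async move {
                sqlx::query_scalar::<_, i32>("SELECT 1")
                    .fetch_one(&pool)
                    .await
            })
        })
        .collect();
    for handle in handles {
        handle.await.expect("task panicked").expect("query failed");
    }
}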

// ========================================================================
// PERFORMANCE AND STRESS TESTS
// ========================================================================

#[rstest]
#[tokio::test]
async fn test_rapid_integer_inserts_stress(#[future] integer_robustness_context: IntegerRobustnessTestContext) {
    let context = integer_robustness_context.await;
    let indexer_tx = create_test_indexer_channel().await;

    // Rapid sequential inserts with alternating integer types
    let start = std::time::Instant::now();

    for i in 0..100 {
        let mut data = HashMap::new();
        data.insert("name".into(), create_string_value(&format!("Stress test {}", i)));

        // Alternate between different boundary values
        let small_val = match i % 4 {
            0 => 2147483647.0, // i32::MAX
            1 => -2147483648.0, // i32::MIN
            2 => 0.0,
            _ => (i as f64) * 1000.0,
        };

        let big_val = match i % 4 {
            0 => 9223372036854774784.0, // Near i64::MAX
            1 => -9223372036854774784.0, // Near i64::MIN
            2 => 0.0,
            _ => (i as f64) * 1000000000000.0,
        };

        data.insert("small_int".into(), create_number_value(small_val));
        data.insert("big_int".into(), create_number_value(big_val));
        data.insert("another_int".into(), create_number_value(small_val));
        data.insert("another_bigint".into(), create_number_value(big_val));

        let request = PostTableDataRequest {
            profile_name: context.profile_name.clone(),
            table_name: context.mixed_integer_table.clone(),
            data,
        };

        let result = post_table_data(&context.pool, request, &indexer_tx).await;
        assert!(result.is_ok(), "Rapid insert {} should succeed", i);
    }

    let duration = start.elapsed();
    println!("100 mixed integer inserts took: {:?}", duration);

    // Should complete in reasonable time (adjust threshold as needed)
    assert!(duration.as_secs() < 10, "Stress test took too long: {:?}", duration);
}