Fix serialization and deserialization in the data insertion path

This commit is contained in:
Priec
2025-09-17 09:38:05 +02:00
parent 2435f58256
commit 532977056d
4 changed files with 107 additions and 143 deletions

View File

@@ -5,22 +5,7 @@ use sqlx::{PgPool, Transaction, Postgres};
use serde_json::json;
use common::proto::komp_ac::table_definition::{PostTableDefinitionRequest, TableDefinitionResponse};
use common::proto::komp_ac::table_definition::ColumnDefinition;
use crate::table_definition::models::map_field_type;
// TODO CRITICAL add decimal with optional precision"
const PREDEFINED_FIELD_TYPES: &[(&str, &str)] = &[
("text", "TEXT"),
("string", "TEXT"),
("boolean", "BOOLEAN"),
("timestamp", "TIMESTAMPTZ"),
("timestamptz", "TIMESTAMPTZ"),
("time", "TIMESTAMPTZ"),
("money", "NUMERIC(14, 4)"),
("integer", "INTEGER"),
("int", "INTEGER"),
("biginteger", "BIGINT"),
("bigint", "BIGINT"),
("date", "DATE"),
];
// NEW: Helper function to provide detailed error messages
fn validate_identifier_format(s: &str, identifier_type: &str) -> Result<(), Status> {
@@ -59,116 +44,6 @@ fn validate_identifier_format(s: &str, identifier_type: &str) -> Result<(), Status>
    Ok(())
}
fn validate_decimal_number_format(num_str: &str, param_name: &str) -> Result<(), Status> {
if num_str.is_empty() {
return Err(Status::invalid_argument(format!(
"{} cannot be empty",
param_name
)));
}
// Check for explicit signs
if num_str.starts_with('+') || num_str.starts_with('-') {
return Err(Status::invalid_argument(format!(
"{} cannot have explicit positive or negative signs",
param_name
)));
}
// Check for decimal points
if num_str.contains('.') {
return Err(Status::invalid_argument(format!(
"{} must be a whole number (no decimal points)",
param_name
)));
}
// Check for leading zeros (but allow "0" itself)
if num_str.len() > 1 && num_str.starts_with('0') {
let trimmed = num_str.trim_start_matches('0');
let suggestion = if trimmed.is_empty() { "0" } else { trimmed };
return Err(Status::invalid_argument(format!(
"{} cannot have leading zeros (use '{}' instead of '{}')",
param_name,
suggestion,
num_str
)));
}
// Check that all characters are digits
if !num_str.chars().all(|c| c.is_ascii_digit()) {
return Err(Status::invalid_argument(format!(
"{} contains invalid characters. Only digits 0-9 are allowed",
param_name
)));
}
Ok(())
}
fn map_field_type(field_type: &str) -> Result<String, Status> {
let lower_field_type = field_type.to_lowercase();
// Special handling for "decimal(precision, scale)"
if lower_field_type.starts_with("decimal(") && lower_field_type.ends_with(')') {
// Extract the part inside the parentheses, e.g., "10, 2"
let args = lower_field_type
.strip_prefix("decimal(")
.and_then(|s| s.strip_suffix(')'))
.unwrap_or(""); // Should always succeed due to the checks above
// Split into precision and scale parts
if let Some((p_str, s_str)) = args.split_once(',') {
let precision_str = p_str.trim();
let scale_str = s_str.trim();
// NEW: Validate format BEFORE parsing
validate_decimal_number_format(precision_str, "precision")?;
validate_decimal_number_format(scale_str, "scale")?;
// Parse precision, returning an error if it's not a valid number
let precision = precision_str.parse::<u32>().map_err(|_| {
Status::invalid_argument("Invalid precision in decimal type")
})?;
// Parse scale, returning an error if it's not a valid number
let scale = scale_str.parse::<u32>().map_err(|_| {
Status::invalid_argument("Invalid scale in decimal type")
})?;
// Add validation based on PostgreSQL rules
if precision < 1 {
return Err(Status::invalid_argument("Precision must be at least 1"));
}
if scale > precision {
return Err(Status::invalid_argument(
"Scale cannot be greater than precision",
));
}
// If everything is valid, build and return the NUMERIC type string
return Ok(format!("NUMERIC({}, {})", precision, scale));
} else {
// The format was wrong, e.g., "decimal(10)" or "decimal()"
return Err(Status::invalid_argument(
"Invalid decimal format. Expected: decimal(precision, scale)",
));
}
}
// If not a decimal, fall back to the predefined list
PREDEFINED_FIELD_TYPES
.iter()
.find(|(key, _)| *key == lower_field_type.as_str())
.map(|(_, sql_type)| sql_type.to_string()) // Convert to an owned String
.ok_or_else(|| {
Status::invalid_argument(format!(
"Invalid field type: {}",
field_type
))
})
}
fn is_invalid_table_name(table_name: &str) -> bool {
    table_name.ends_with("_id") ||
    table_name == "id" ||

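The handler above now only imports map_field_type from the new models module instead of defining it locally. As a purely hypothetical illustration of that usage (not taken from this commit; the function name is made up), a DDL column clause could be rendered from a structured definition like this:

// Hypothetical sketch: one DDL column clause built via the shared helper.
// Not part of this diff; shown only to motivate the new import.
use tonic::Status;
use crate::table_definition::models::map_field_type;

fn render_column_clause(name: &str, field_type: &str) -> Result<String, Status> {
    // Quote the identifier and map the logical type (e.g. "money") to its SQL form.
    Ok(format!("\"{}\" {}", name, map_field_type(field_type)?))
}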
View File

@@ -0,0 +1,91 @@
// src/table_definition/models.rs
use tonic::Status;
/// Predefined static field mappings
// TODO CRITICAL add decimal with optional precision
pub const PREDEFINED_FIELD_TYPES: &[(&str, &str)] = &[
("text", "TEXT"),
("string", "TEXT"),
("boolean", "BOOLEAN"),
("timestamp", "TIMESTAMPTZ"),
("timestamptz", "TIMESTAMPTZ"),
("time", "TIMESTAMPTZ"),
("money", "NUMERIC(14, 4)"),
("integer", "INTEGER"),
("int", "INTEGER"),
("biginteger", "BIGINT"),
("bigint", "BIGINT"),
("date", "DATE"),
];
/// reusable decimal number validation
pub fn validate_decimal_number_format(num_str: &str, param_name: &str) -> Result<(), Status> {
if num_str.is_empty() {
return Err(Status::invalid_argument(format!("{} cannot be empty", param_name)));
}
if num_str.starts_with('+') || num_str.starts_with('-') {
return Err(Status::invalid_argument(format!(
"{} cannot have explicit positive/negative signs", param_name
)));
}
if num_str.contains('.') {
return Err(Status::invalid_argument(format!(
"{} must be a whole number (no decimal point)", param_name
)));
}
if num_str.len() > 1 && num_str.starts_with('0') {
let trimmed = num_str.trim_start_matches('0');
let suggestion = if trimmed.is_empty() { "0" } else { trimmed };
return Err(Status::invalid_argument(format!(
"{} cannot have leading zeros (use '{}' instead of '{}')",
param_name, suggestion, num_str
)));
}
if !num_str.chars().all(|c| c.is_ascii_digit()) {
return Err(Status::invalid_argument(format!(
"{} contains invalid characters. Only digits allowed", param_name
)));
}
Ok(())
}
/// reusable field type mapper
pub fn map_field_type(field_type: &str) -> Result<String, Status> {
let lower_field_type = field_type.to_lowercase();
if lower_field_type.starts_with("decimal(") && lower_field_type.ends_with(')') {
let args = lower_field_type.strip_prefix("decimal(").unwrap()
.strip_suffix(')').unwrap();
if let Some((p_str, s_str)) = args.split_once(',') {
let precision_str = p_str.trim();
let scale_str = s_str.trim();
validate_decimal_number_format(precision_str, "precision")?;
validate_decimal_number_format(scale_str, "scale")?;
let precision = precision_str.parse::<u32>()
.map_err(|_| Status::invalid_argument("Invalid precision"))?;
let scale = scale_str.parse::<u32>()
.map_err(|_| Status::invalid_argument("Invalid scale"))?;
if precision < 1 {
return Err(Status::invalid_argument("Precision must be >= 1"));
}
if scale > precision {
return Err(Status::invalid_argument("Scale cannot be > precision"));
}
return Ok(format!("NUMERIC({}, {})", precision, scale));
} else {
return Err(Status::invalid_argument(
"Invalid decimal format. Expected decimal(precision, scale)"
));
}
}
PREDEFINED_FIELD_TYPES
.iter()
.find(|(key, _)| *key == lower_field_type.as_str())
.map(|(_, sql_type)| sql_type.to_string())
.ok_or_else(|| Status::invalid_argument(format!("Invalid field type: {}", field_type)))
}

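A minimal test sketch of how the relocated map_field_type is expected to behave, based only on the code above; it assumes it is dropped into the same crate so the module path introduced in this commit resolves:

#[cfg(test)]
mod map_field_type_sketch {
    use crate::table_definition::models::map_field_type;

    #[test]
    fn maps_predefined_and_decimal_types() {
        // Predefined lookups are case-insensitive.
        assert_eq!(map_field_type("Money").unwrap(), "NUMERIC(14, 4)");
        assert_eq!(map_field_type("bigint").unwrap(), "BIGINT");

        // decimal(precision, scale) is parsed and validated.
        assert_eq!(map_field_type("decimal(10, 2)").unwrap(), "NUMERIC(10, 2)");

        // Rejected: missing scale, leading zeros, scale greater than precision,
        // and types outside the predefined list.
        assert!(map_field_type("decimal(10)").is_err());
        assert!(map_field_type("decimal(010, 2)").is_err());
        assert!(map_field_type("decimal(4, 6)").is_err());
        assert!(map_field_type("uuid").is_err());
    }
}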
View File

@@ -1,9 +1,11 @@
// src/tables_data/handlers/get_table_data.rs
use tonic::Status;
use sqlx::{PgPool, Row};
use std::collections::HashMap;
use common::proto::komp_ac::tables_data::{GetTableDataRequest, GetTableDataResponse};
+use common::proto::komp_ac::table_definition::ColumnDefinition;
use crate::shared::schema_qualifier::qualify_table_name_for_data;
pub async fn get_table_data(
@@ -39,17 +41,13 @@ pub async fn get_table_data(
    let table_def = table_def.ok_or_else(|| Status::not_found("Table not found"))?;
    // Parse user-defined columns from JSON
-   let columns_json: Vec<String> = serde_json::from_value(table_def.columns.clone())
+   let stored_columns: Vec<ColumnDefinition> = serde_json::from_value(table_def.columns.clone())
        .map_err(|e| Status::internal(format!("Column parsing error: {}", e)))?;
+   // Directly extract names, no split(" ") parsing anymore
    let mut user_columns = Vec::new();
-   for col_def in columns_json {
-       let parts: Vec<&str> = col_def.splitn(2, ' ').collect();
-       if parts.len() != 2 {
-           return Err(Status::internal("Invalid column format"));
-       }
-       let name = parts[0].trim_matches('"').to_string();
-       user_columns.push(name);
+   for col_def in stored_columns {
+       user_columns.push(col_def.name.trim().to_string());
    }
    // --- START OF FIX ---

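To make the serialization fix concrete, here is a standalone sketch of the stored-column shapes before and after this commit. The JSON field names for ColumnDefinition (name, field_type) are inferred from the accessors used above; the mirror struct and the sample payloads exist only for illustration.

use serde::Deserialize;

// Stand-in for the generated ColumnDefinition message; field names assumed.
#[derive(Deserialize, Debug)]
struct ColumnDefinitionMirror {
    name: String,
    field_type: String,
}

fn main() -> Result<(), serde_json::Error> {
    // Old storage format: pre-rendered "\"name\" SQL_TYPE" strings, which the
    // handlers had to re-parse with splitn/trim_matches.
    let old: Vec<String> =
        serde_json::from_str(r#"["\"price\" NUMERIC(14, 4)", "\"note\" TEXT"]"#)?;
    assert_eq!(old.len(), 2);

    // New storage format: structured definitions; the SQL type is derived
    // later through map_field_type.
    let new: Vec<ColumnDefinitionMirror> = serde_json::from_str(
        r#"[{"name": "price", "field_type": "money"}, {"name": "note", "field_type": "text"}]"#,
    )?;
    assert_eq!(new[0].field_type, "money");
    let names: Vec<&str> = new.iter().map(|c| c.name.as_str()).collect();
    assert_eq!(names, ["price", "note"]);
    Ok(())
}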
View File

@@ -5,6 +5,8 @@ use sqlx::{PgPool, Arguments};
use sqlx::postgres::PgArguments;
use chrono::{DateTime, Utc};
use common::proto::komp_ac::tables_data::{PostTableDataRequest, PostTableDataResponse};
+use common::proto::komp_ac::table_definition::ColumnDefinition;
+use crate::table_definition::models::map_field_type;
use std::collections::HashMap;
use std::sync::Arc;
use prost_types::value::Kind;
@@ -56,18 +58,16 @@ pub async fn post_table_data(
    let table_def = table_def.ok_or_else(|| Status::not_found("Table not found"))?;
    // Parse column definitions from JSON format
-   let columns_json: Vec<String> = serde_json::from_value(table_def.columns.clone())
+   let stored_columns: Vec<ColumnDefinition> = serde_json::from_value(table_def.columns.clone())
        .map_err(|e| Status::internal(format!("Column parsing error: {}", e)))?;
+   // convert ColumnDefinition -> (name, sql_type) using the same map_field_type logic
    let mut columns = Vec::new();
-   for col_def in columns_json {
-       let parts: Vec<&str> = col_def.splitn(2, ' ').collect();
-       if parts.len() != 2 {
-           return Err(Status::internal("Invalid column format"));
-       }
-       let name = parts[0].trim_matches('"').to_string();
-       let sql_type = parts[1].to_string();
-       columns.push((name, sql_type));
+   for col_def in stored_columns {
+       let col_name = col_def.name.trim().to_string();
+       let sql_type = map_field_type(&col_def.field_type)
+           .map_err(|e| Status::invalid_argument(format!("Invalid type for column '{}': {}", col_name, e)))?;
+       columns.push((col_name, sql_type));
    }
    // Build list of valid system columns (foreign keys and special columns)
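
Distilled from the new post_table_data loop above, a sketch of the conversion as a reusable unit; StoredColumn stands in for the generated ColumnDefinition message, and the function name is illustrative only:

use tonic::Status;
use crate::table_definition::models::map_field_type;

// Stand-in for the generated ColumnDefinition message.
struct StoredColumn {
    name: String,
    field_type: String,
}

fn columns_for_insert(stored: &[StoredColumn]) -> Result<Vec<(String, String)>, Status> {
    let mut columns = Vec::with_capacity(stored.len());
    for col in stored {
        let col_name = col.name.trim().to_string();
        // Reuse the same mapping the table-definition service applies, so the
        // insert path agrees with the DDL that created the table.
        let sql_type = map_field_type(&col.field_type).map_err(|e| {
            Status::invalid_argument(format!("Invalid type for column '{}': {}", col_name, e))
        })?;
        columns.push((col_name, sql_type));
    }
    Ok(columns)
}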