Compare commits


13 Commits

34 changed files with 895 additions and 719 deletions


@@ -12,6 +12,10 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
             ".komp_ac.table_validation.CharacterLimits",
             "#[derive(serde::Serialize, serde::Deserialize)]",
         )
+        .type_attribute(
+            ".komp_ac.table_validation.DisplayMask",
+            "#[derive(serde::Serialize, serde::Deserialize)]",
+        )
         .type_attribute(
             ".komp_ac.table_validation.TableValidationResponse",
             "#[derive(serde::Serialize, serde::Deserialize)]",
@@ -29,6 +33,30 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
             ".komp_ac.table_validation.CountMode",
            "#[derive(serde::Serialize, serde::Deserialize)] #[serde(rename_all = \"SCREAMING_SNAKE_CASE\")]",
         )
+        .type_attribute(
+            ".komp_ac.table_definition.ColumnDefinition",
+            "#[derive(serde::Serialize, serde::Deserialize)]",
+        )
+        .type_attribute(
+            ".komp_ac.table_definition.TableLink",
+            "#[derive(serde::Serialize, serde::Deserialize)]",
+        )
+        .type_attribute(
+            ".komp_ac.table_definition.PostTableDefinitionRequest",
+            "#[derive(serde::Serialize, serde::Deserialize)]",
+        )
+        .type_attribute(
+            ".komp_ac.table_definition.TableDefinitionResponse",
+            "#[derive(serde::Serialize, serde::Deserialize)]",
+        )
+        .type_attribute(
+            ".komp_ac.table_script.PostTableScriptRequest",
+            "#[derive(serde::Serialize, serde::Deserialize)]",
+        )
+        .type_attribute(
+            ".komp_ac.table_script.TableScriptResponse",
+            "#[derive(serde::Serialize, serde::Deserialize)]",
+        )
         .compile_protos(
             &[
                 "proto/common.proto",

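Note: a minimal sketch, not part of the diff. The `type_attribute` calls above ask prost-build to stamp serde derives onto the generated message types, which is what lets later code in this changeset push them through `serde_json`. A round-trip using the generated `ColumnDefinition` from this changeset:

use common::proto::komp_ac::table_definition::ColumnDefinition;

fn main() -> Result<(), serde_json::Error> {
    let col = ColumnDefinition {
        name: "price".to_string(),
        field_type: "decimal(10, 2)".to_string(),
    };
    // With the derives in place this serializes to:
    // {"name":"price","field_type":"decimal(10, 2)"}
    let json = serde_json::to_string(&col)?;
    let back: ColumnDefinition = serde_json::from_str(&json)?;
    assert_eq!(col, back);
    Ok(())
}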

@@ -23,9 +23,10 @@ message FieldValidation {
   CharacterLimits limits = 10;
   // Future expansion:
   // PatternRules pattern = 11;
-  // DisplayMask mask = 12;
+  DisplayMask mask = 3;
   // ExternalValidation external = 13;
   // CustomFormatter formatter = 14;
+  bool required = 4;
 }

 // Character length counting mode
@@ -49,6 +50,13 @@ message CharacterLimits {
   CountMode countMode = 4; // defaults to CHARS if unspecified
 }

+// Mask for pretty display
+message DisplayMask {
+  string pattern = 1;                // e.g., "(###) ###-####" or "####-##-##"
+  string input_char = 2;             // e.g., "#"
+  optional string template_char = 3; // e.g., "_"
+}
+
 // Service to fetch validations for a table
 service TableValidationService {
   rpc GetTableValidation(GetTableValidationRequest)

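Note: the proto pins down DisplayMask only by example, so the rendering rule in this sketch is an assumption: each `input_char` slot in `pattern` consumes one character of the raw value, literal characters are copied through, and `template_char` (if set) fills unconsumed slots.

/// Hypothetical client-side rendering of a DisplayMask (assumed semantics).
fn apply_mask(pattern: &str, input_char: char, template_char: Option<char>, raw: &str) -> String {
    let mut chars = raw.chars();
    let mut out = String::new();
    for p in pattern.chars() {
        if p == input_char {
            match chars.next() {
                Some(c) => out.push(c),
                None => match template_char {
                    Some(t) => out.push(t),
                    None => break, // no filler: stop at the last consumed slot
                },
            }
        } else {
            out.push(p);
        }
    }
    out
}

fn main() {
    // "(###) ###-####" over "5551234567" -> "(555) 123-4567"
    assert_eq!(apply_mask("(###) ###-####", '#', Some('_'), "5551234567"), "(555) 123-4567");
    // Partial input, "_" as template_char -> "(555) 12_-____"
    assert_eq!(apply_mask("(###) ###-####", '#', Some('_'), "55512"), "(555) 12_-____");
}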
Binary file not shown.


@@ -1,4 +1,5 @@
 // This file is @generated by prost-build.
+#[derive(serde::Serialize, serde::Deserialize)]
 #[derive(Clone, PartialEq, ::prost::Message)]
 pub struct TableLink {
     #[prost(string, tag = "1")]
@@ -6,6 +7,7 @@ pub struct TableLink {
     #[prost(bool, tag = "2")]
     pub required: bool,
 }
+#[derive(serde::Serialize, serde::Deserialize)]
 #[derive(Clone, PartialEq, ::prost::Message)]
 pub struct PostTableDefinitionRequest {
     #[prost(string, tag = "1")]
@@ -19,6 +21,7 @@ pub struct PostTableDefinitionRequest {
     #[prost(string, tag = "5")]
     pub profile_name: ::prost::alloc::string::String,
 }
+#[derive(serde::Serialize, serde::Deserialize)]
 #[derive(Clone, PartialEq, ::prost::Message)]
 pub struct ColumnDefinition {
     #[prost(string, tag = "1")]
@@ -26,6 +29,7 @@ pub struct ColumnDefinition {
     #[prost(string, tag = "2")]
     pub field_type: ::prost::alloc::string::String,
 }
+#[derive(serde::Serialize, serde::Deserialize)]
 #[derive(Clone, PartialEq, ::prost::Message)]
 pub struct TableDefinitionResponse {
     #[prost(bool, tag = "1")]


@@ -1,4 +1,5 @@
 // This file is @generated by prost-build.
+#[derive(serde::Serialize, serde::Deserialize)]
 #[derive(Clone, PartialEq, ::prost::Message)]
 pub struct PostTableScriptRequest {
     #[prost(int64, tag = "1")]
@@ -10,6 +11,7 @@ pub struct PostTableScriptRequest {
     #[prost(string, tag = "4")]
     pub description: ::prost::alloc::string::String,
 }
+#[derive(serde::Serialize, serde::Deserialize)]
 #[derive(Clone, PartialEq, ::prost::Message)]
 pub struct TableScriptResponse {
     #[prost(int64, tag = "1")]


@@ -23,14 +23,16 @@ pub struct FieldValidation {
     #[prost(string, tag = "1")]
     pub data_key: ::prost::alloc::string::String,
     /// Current: only CharacterLimits. More rules can be added later.
-    ///
-    /// Future expansion:
-    /// PatternRules pattern = 11;
-    /// DisplayMask mask = 12;
-    /// ExternalValidation external = 13;
-    /// CustomFormatter formatter = 14;
     #[prost(message, optional, tag = "10")]
     pub limits: ::core::option::Option<CharacterLimits>,
+    /// Future expansion:
+    /// PatternRules pattern = 11;
+    #[prost(message, optional, tag = "3")]
+    pub mask: ::core::option::Option<DisplayMask>,
+    /// ExternalValidation external = 13;
+    /// CustomFormatter formatter = 14;
+    #[prost(bool, tag = "4")]
+    pub required: bool,
 }
 /// Character limit validation (Validation 1)
 #[derive(serde::Serialize, serde::Deserialize)]
@@ -49,6 +51,20 @@ pub struct CharacterLimits {
     #[prost(enumeration = "CountMode", tag = "4")]
     pub count_mode: i32,
 }
+/// Mask for pretty display
+#[derive(serde::Serialize, serde::Deserialize)]
+#[derive(Clone, PartialEq, ::prost::Message)]
+pub struct DisplayMask {
+    /// e.g., "(###) ###-####" or "####-##-##"
+    #[prost(string, tag = "1")]
+    pub pattern: ::prost::alloc::string::String,
+    /// e.g., "#"
+    #[prost(string, tag = "2")]
+    pub input_char: ::prost::alloc::string::String,
+    /// e.g., "_"
+    #[prost(string, optional, tag = "3")]
+    pub template_char: ::core::option::Option<::prost::alloc::string::String>,
+}
 #[derive(serde::Serialize, serde::Deserialize)]
 #[derive(Clone, PartialEq, ::prost::Message)]
 pub struct UpdateFieldValidationRequest {


@@ -1,6 +1,5 @@
 // src/server/services/table_validation_service.rs

-use tonic::transport::Server;
 use sqlx::PgPool;
 use common::proto::komp_ac::table_validation::table_validation_service_server::TableValidationServiceServer;


@@ -3,21 +3,20 @@
 use steel::steel_vm::engine::Engine;
 use steel::steel_vm::register_fn::RegisterFn;
 use steel::rvals::SteelVal;
-use super::functions::{SteelContext, convert_row_data_for_steel};
+use super::functions::SteelContext;
 use steel_decimal::registry::FunctionRegistry;
 use sqlx::PgPool;
 use std::sync::Arc;
 use std::collections::HashMap;
 use thiserror::Error;
 use tracing::{debug, error};
+use regex::Regex; // NEW

-/// Represents different types of values that can be returned from Steel script execution.
 #[derive(Debug)]
 pub enum Value {
     Strings(Vec<String>),
 }

-/// Errors that can occur during Steel script execution.
 #[derive(Debug, Error)]
 pub enum ExecutionError {
     #[error("Script execution failed: {0}")]
@@ -28,7 +27,83 @@ pub enum ExecutionError {
     UnsupportedType(String),
 }

-/// Creates a Steel execution context with proper boolean value conversion.
+// NEW: upgrade steel_get_column -> steel_get_column_with_index using FK present in row_data
+fn auto_promote_with_index(
+    script: &str,
+    current_table: &str,
+    row_data: &HashMap<String, String>,
+) -> String {
+    // Matches: (steel_get_column "table" "column")
+    let re = Regex::new(
+        r#"\(\s*steel_get_column\s+"([^"]+)"\s+"([^"]+)"\s*\)"#,
+    )
+    .unwrap();
+
+    re.replace_all(script, |caps: &regex::Captures| {
+        let table = caps.get(1).unwrap().as_str();
+        let column = caps.get(2).unwrap().as_str();
+
+        // Only upgrade cross-table calls, if FK is present in the request data
+        if table != current_table {
+            let fk_key = format!("{}_id", table);
+            if let Some(id_str) = row_data.get(&fk_key) {
+                if let Ok(_) = id_str.parse::<i64>() {
+                    return format!(
+                        r#"(steel_get_column_with_index "{}" {} "{}")"#,
+                        table, id_str, column
+                    );
+                }
+            }
+        }
+
+        // Default: keep original call
+        caps.get(0).unwrap().as_str().to_string()
+    })
+    .into_owned()
+}
+
+use common::proto::komp_ac::table_definition::ColumnDefinition;
+
+// Converts row data boolean values to Steel script format during context initialization.
+pub async fn convert_row_data_for_steel(
+    db_pool: &PgPool,
+    schema_id: i64,
+    table_name: &str,
+    row_data: &mut HashMap<String, String>,
+) -> Result<(), sqlx::Error> {
+    let table_def = sqlx::query!(
+        r#"
+        SELECT columns FROM table_definitions
+        WHERE schema_id = $1 AND table_name = $2
+        "#,
+        schema_id,
+        table_name
+    )
+    .fetch_optional(db_pool)
+    .await?
+    .ok_or_else(|| sqlx::Error::RowNotFound)?;
+
+    // Parse column definitions to identify boolean columns
+    if let Ok(columns) = serde_json::from_value::<Vec<ColumnDefinition>>(table_def.columns) {
+        for col_def in columns {
+            let normalized_type =
+                col_def.field_type.to_uppercase().split('(').next().unwrap().to_string();
+            if normalized_type == "BOOLEAN" || normalized_type == "BOOL" {
+                if let Some(value) = row_data.get_mut(&col_def.name) {
+                    *value = match value.to_lowercase().as_str() {
+                        "true" | "t" | "1" | "yes" | "on" => "#true".to_string(),
+                        "false" | "f" | "0" | "no" | "off" => "#false".to_string(),
+                        _ => value.clone(),
+                    };
+                }
+            }
+        }
+    }
+
+    Ok(())
+}
+
 pub async fn create_steel_context_with_boolean_conversion(
     current_table: String,
     schema_id: i64,
@@ -36,7 +111,6 @@ pub async fn create_steel_context_with_boolean_conversion(
     mut row_data: HashMap<String, String>,
     db_pool: Arc<PgPool>,
 ) -> Result<SteelContext, ExecutionError> {
-    // Convert boolean values in row_data to Steel format
     convert_row_data_for_steel(&db_pool, schema_id, &current_table, &mut row_data)
         .await
         .map_err(|e| {
@@ -53,7 +127,6 @@ pub async fn create_steel_context_with_boolean_conversion(
     })
 }

-/// Executes a Steel script with database context and type-safe result processing.
 pub async fn execute_script(
     script: String,
     target_type: &str,
@@ -65,42 +138,40 @@ pub async fn execute_script(
 ) -> Result<Value, ExecutionError> {
     let mut vm = Engine::new();

-    // Create execution context with proper boolean value conversion
+    // Upgrade to with_index based on FK presence in the posted data
+    let script = auto_promote_with_index(&script, &current_table, &row_data);
+
     let context = create_steel_context_with_boolean_conversion(
-        current_table,
+        current_table.clone(),
         schema_id,
         schema_name,
-        row_data,
+        row_data.clone(),
         db_pool.clone(),
-    ).await?;
+    )
+    .await?;
     let context = Arc::new(context);

-    // Register database access functions
     register_steel_functions(&mut vm, context.clone());
-    // Register decimal math operations
     register_decimal_math_functions(&mut vm);

-    // Register row data as variables in the Steel VM for get-var access
     let mut define_script = String::new();
     for (key, value) in &context.row_data {
-        // Register only bare variable names for get-var access
         define_script.push_str(&format!("(define {} \"{}\")\n", key, value));
     }

-    // Execute variable definitions if any exist
     if !define_script.is_empty() {
         vm.compile_and_run_raw_program(define_script)
-            .map_err(|e| ExecutionError::RuntimeError(format!("Failed to register variables: {}", e)))?;
+            .map_err(|e| ExecutionError::RuntimeError(format!(
+                "Failed to register variables: {}",
+                e
+            )))?;
     }

-    // Also register variables using the decimal registry as backup method
     FunctionRegistry::register_variables(&mut vm, context.row_data.clone());

-    // Execute the main script
-    let results = vm.compile_and_run_raw_program(script.clone())
+    let results = vm
+        .compile_and_run_raw_program(script.clone())
         .map_err(|e| {
             error!("Steel script execution failed: {}", e);
             error!("Script was: {}", script);
@@ -108,22 +179,22 @@ pub async fn execute_script(
             ExecutionError::RuntimeError(e.to_string())
         })?;

-    // Convert results to the requested target type
     match target_type {
         "STRINGS" => process_string_results(results),
-        _ => Err(ExecutionError::UnsupportedType(target_type.into()))
+        _ => Err(ExecutionError::UnsupportedType(target_type.into())),
     }
 }

-/// Registers Steel functions for database access within the VM context.
 fn register_steel_functions(vm: &mut Engine, context: Arc<SteelContext>) {
     debug!("Registering Steel functions with context");

-    // Register column access function for current and related tables
     vm.register_fn("steel_get_column", {
         let ctx = context.clone();
         move |table: String, column: String| {
-            debug!("steel_get_column called with table: '{}', column: '{}'", table, column);
+            debug!(
+                "steel_get_column called with table: '{}', column: '{}'",
+                table, column
+            );
             ctx.steel_get_column(&table, &column)
                 .map_err(|e| {
                     error!("steel_get_column failed: {:?}", e);
@@ -132,11 +203,13 @@ fn register_steel_functions(vm: &mut Engine, context: Arc<SteelContext>) {
         }
     });

-    // Register indexed column access for comma-separated values
     vm.register_fn("steel_get_column_with_index", {
         let ctx = context.clone();
         move |table: String, index: i64, column: String| {
-            debug!("steel_get_column_with_index called with table: '{}', index: {}, column: '{}'", table, index, column);
+            debug!(
+                "steel_get_column_with_index called with table: '{}', index: {}, column: '{}'",
+                table, index, column
+            );
             ctx.steel_get_column_with_index(&table, index, &column)
                 .map_err(|e| {
                     error!("steel_get_column_with_index failed: {:?}", e);
@@ -145,13 +218,11 @@ fn register_steel_functions(vm: &mut Engine, context: Arc<SteelContext>) {
         }
     });

-    // Register safe SQL query execution
     vm.register_fn("steel_query_sql", {
         let ctx = context.clone();
         move |query: String| {
             debug!("steel_query_sql called with query: '{}'", query);
-            ctx.steel_query_sql(&query)
-                .map_err(|e| {
+            ctx.steel_query_sql(&query).map_err(|e| {
                 error!("steel_query_sql failed: {:?}", e);
                 e.to_string()
             })
@@ -159,13 +230,11 @@ fn register_steel_functions(vm: &mut Engine, context: Arc<SteelContext>) {
     });
 }

-/// Registers decimal mathematics functions in the Steel VM.
 fn register_decimal_math_functions(vm: &mut Engine) {
     debug!("Registering decimal math functions");
     FunctionRegistry::register_all(vm);
 }

-/// Processes Steel script results into string format for consistent output.
 fn process_string_results(results: Vec<SteelVal>) -> Result<Value, ExecutionError> {
     let mut strings = Vec::new();

@@ -178,7 +247,7 @@ fn process_string_results(results: Vec<SteelVal>) -> Result<Value, ExecutionErro
         _ => {
             error!("Unexpected result type: {:?}", result);
             return Err(ExecutionError::TypeConversionError(
-                format!("Expected string-convertible type, got {:?}", result)
+                format!("Expected string-convertible type, got {:?}", result),
             ));
         }
     };

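Note: a quick illustration (not in the commit, table names hypothetical) of what `auto_promote_with_index` does to a script before execution when the posted row carries a matching `<table>_id` key:

use std::collections::HashMap;

fn main() {
    let mut row_data = HashMap::new();
    row_data.insert("suppliers_id".to_string(), "7".to_string());

    // Cross-table call + "suppliers_id" = "7" in row_data, so the call is rewritten:
    let script = r#"(steel_get_column "suppliers" "name")"#;
    let promoted = auto_promote_with_index(script, "orders", &row_data);
    assert_eq!(promoted, r#"(steel_get_column_with_index "suppliers" 7 "name")"#);

    // Same-table calls (and calls without a usable FK) are left untouched.
    let same = auto_promote_with_index(r#"(steel_get_column "orders" "total")"#, "orders", &row_data);
    assert_eq!(same, r#"(steel_get_column "orders" "total")"#);
}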

@@ -1,5 +1,6 @@
 // src/steel/server/functions.rs
+use common::proto::komp_ac::table_definition::ColumnDefinition;
 use steel::rvals::SteelVal;
 use sqlx::PgPool;
 use std::collections::HashMap;
@@ -21,10 +22,8 @@ pub enum FunctionError {
     ProhibitedTypeAccess(String),
 }

-/// Data types that Steel scripts are prohibited from accessing for security reasons
 const PROHIBITED_TYPES: &[&str] = &["BIGINT", "DATE", "TIMESTAMPTZ"];

-/// Execution context for Steel scripts with database access capabilities.
 #[derive(Clone)]
 pub struct SteelContext {
     pub current_table: String,
@@ -35,26 +34,11 @@ pub struct SteelContext {
 }

 impl SteelContext {
-    /// Resolves a base table name to its full qualified name in the current schema.
-    /// Used for foreign key relationship traversal in Steel scripts.
-    pub async fn get_related_table_name(&self, base_name: &str) -> Result<String, FunctionError> {
-        let table_def = sqlx::query!(
-            r#"SELECT table_name FROM table_definitions
-               WHERE schema_id = $1 AND table_name LIKE $2"#,
-            self.schema_id,
-            format!("%_{}", base_name)
-        )
-        .fetch_optional(&*self.db_pool)
-        .await
-        .map_err(|e| FunctionError::DatabaseError(e.to_string()))?
-        .ok_or_else(|| FunctionError::TableNotFound(base_name.to_string()))?;
-
-        Ok(table_def.table_name)
-    }
-
-    /// Retrieves the SQL data type for a specific column in a table.
-    /// Parses the JSON column definitions to find type information.
-    async fn get_column_type(&self, table_name: &str, column_name: &str) -> Result<String, FunctionError> {
+    async fn get_column_type(
+        &self,
+        table_name: &str,
+        column_name: &str,
+    ) -> Result<String, FunctionError> {
         let table_def = sqlx::query!(
             r#"SELECT columns FROM table_definitions
                WHERE schema_id = $1 AND table_name = $2"#,
@@ -66,49 +50,43 @@ impl SteelContext {
         .map_err(|e| FunctionError::DatabaseError(e.to_string()))?
         .ok_or_else(|| FunctionError::TableNotFound(table_name.to_string()))?;

-        let columns: Vec<String> = serde_json::from_value(table_def.columns)
-            .map_err(|e| FunctionError::DatabaseError(format!("Invalid column data: {}", e)))?;
+        let columns: Vec<ColumnDefinition> = serde_json::from_value(table_def.columns)
+            .map_err(|e| FunctionError::DatabaseError(format!(
+                "Invalid column data: {}",
+                e
+            )))?;

-        // Parse column definitions to find the requested column type
-        for column_def in columns {
-            let mut parts = column_def.split_whitespace();
-            if let (Some(name), Some(data_type)) = (parts.next(), parts.next()) {
-                let column_name_clean = name.trim_matches('"');
-                if column_name_clean == column_name {
-                    return Ok(data_type.to_string());
-                }
-            }
-        }
+        for col_def in columns {
+            if col_def.name == column_name {
+                return Ok(col_def.field_type.to_uppercase());
+            }
+        }

         Err(FunctionError::ColumnNotFound(format!(
             "Column '{}' not found in table '{}'",
-            column_name,
-            table_name
+            column_name, table_name
         )))
     }

-    /// Converts database values to Steel script format based on column type.
-    /// Currently handles boolean conversion to Steel's #true/#false syntax.
     fn convert_value_to_steel_format(&self, value: &str, column_type: &str) -> String {
         let normalized_type = normalize_data_type(column_type);
         match normalized_type.as_str() {
-            "BOOLEAN" | "BOOL" => {
-                // Convert database boolean representations to Steel boolean syntax
-                match value.to_lowercase().as_str() {
+            "BOOLEAN" | "BOOL" => match value.to_lowercase().as_str() {
                 "true" | "t" | "1" | "yes" | "on" => "#true".to_string(),
                 "false" | "f" | "0" | "no" | "off" => "#false".to_string(),
-                    _ => value.to_string(), // Return as-is if not a recognized boolean
-                }
-            }
+                _ => value.to_string(),
+            },
             "INTEGER" => value.to_string(),
-            _ => value.to_string(), // Return as-is for other types
+            _ => value.to_string(),
         }
     }

-    /// Validates that a column type is allowed for Steel script access.
-    /// Returns the column type if validation passes, error if prohibited.
-    async fn validate_column_type_and_get_type(&self, table_name: &str, column_name: &str) -> Result<String, FunctionError> {
+    async fn validate_column_type_and_get_type(
+        &self,
+        table_name: &str,
+        column_name: &str,
+    ) -> Result<String, FunctionError> {
         let column_type = self.get_column_type(table_name, column_name).await?;

         if is_prohibited_type(&column_type) {
@@ -124,15 +102,13 @@ impl SteelContext {
         Ok(column_type)
     }

-    /// Retrieves column value from current table or related tables via foreign keys.
-    ///
-    /// # Behavior
-    /// - Current table: Returns value directly from row_data with type conversion
-    /// - Related table: Follows foreign key relationship and queries database
-    /// - All accesses are subject to prohibited type validation
-    pub fn steel_get_column(&self, table: &str, column: &str) -> Result<SteelVal, SteelVal> {
+    pub fn steel_get_column(
+        &self,
+        table: &str,
+        column: &str,
+    ) -> Result<SteelVal, SteelVal> {
         if table == self.current_table {
-            // Access current table data with type validation
+            // current table
             let column_type = tokio::task::block_in_place(|| {
                 let handle = tokio::runtime::Handle::current();
                 handle.block_on(async {
@@ -145,70 +121,112 @@ impl SteelContext {
                 Err(e) => return Err(SteelVal::StringV(e.to_string().into())),
             };

-            return self.row_data.get(column)
+            return self
+                .row_data
+                .get(column)
                 .map(|v| {
-                    let converted_value = self.convert_value_to_steel_format(v, &column_type);
-                    SteelVal::StringV(converted_value.into())
+                    let converted =
+                        self.convert_value_to_steel_format(v, &column_type);
+                    SteelVal::StringV(converted.into())
                 })
-                .ok_or_else(|| SteelVal::StringV(format!("Column {} not found", column).into()));
+                .ok_or_else(|| {
+                    SteelVal::StringV(
+                        format!("Column {} not found", column).into(),
+                    )
+                });
         }

-        // Access related table via foreign key relationship
-        let base_name = table.split_once('_')
-            .map(|(_, rest)| rest)
-            .unwrap_or(table);
-        let fk_column = format!("{}_id", base_name);
-        let fk_value = self.row_data.get(&fk_column)
-            .ok_or_else(|| SteelVal::StringV(format!("Foreign key {} not found", fk_column).into()))?;
-
+        // Cross-table via FK: use exact table name FK convention: "<table>_id"
         let result = tokio::task::block_in_place(|| {
             let handle = tokio::runtime::Handle::current();
             handle.block_on(async {
-                let actual_table = self.get_related_table_name(base_name).await
-                    .map_err(|e| SteelVal::StringV(e.to_string().into()))?;
+                let fk_column = format!("{}_id", table);
+                let fk_value = self
+                    .row_data
+                    .get(&fk_column)
+                    .ok_or_else(|| {
+                        FunctionError::ForeignKeyNotFound(format!(
+                            "Foreign key column '{}' not found on '{}'",
+                            fk_column, self.current_table
+                        ))
+                    })?;

-                // Validate column type and get type information
-                let column_type = self.validate_column_type_and_get_type(&actual_table, column).await
-                    .map_err(|e| SteelVal::StringV(e.to_string().into()))?;
+                let column_type =
+                    self.validate_column_type_and_get_type(table, column)
+                        .await?;

-                // Query the related table for the column value
-                let raw_value = sqlx::query_scalar::<_, String>(
-                    &format!("SELECT {} FROM \"{}\".\"{}\" WHERE id = $1", column, self.schema_name, actual_table)
+                let raw_value = sqlx::query_scalar::<_, String>(&format!(
+                    "SELECT \"{}\" FROM \"{}\".\"{}\" WHERE id = $1",
+                    column, self.schema_name, table
+                ))
+                .bind(
+                    fk_value
+                        .parse::<i64>()
+                        .map_err(|_| {
+                            FunctionError::DatabaseError(
+                                "Invalid foreign key format".into(),
+                            )
+                        })?,
                 )
-                .bind(fk_value.parse::<i64>().map_err(|_|
-                    SteelVal::StringV("Invalid foreign key format".into()))?)
                 .fetch_one(&*self.db_pool)
                 .await
-                .map_err(|e| SteelVal::StringV(e.to_string().into()))?;
+                .map_err(|e| FunctionError::DatabaseError(e.to_string()))?;

-                // Convert to appropriate Steel format
-                let converted_value = self.convert_value_to_steel_format(&raw_value, &column_type);
-                Ok(converted_value)
+                let converted =
+                    self.convert_value_to_steel_format(&raw_value, &column_type);
+                Ok::<String, FunctionError>(converted)
             })
         });

-        result.map(|v| SteelVal::StringV(v.into()))
+        match result {
+            Ok(v) => Ok(SteelVal::StringV(v.into())),
+            Err(e) => Err(SteelVal::StringV(e.to_string().into())),
+        }
     }

-    /// Retrieves a specific indexed element from a comma-separated column value.
-    /// Useful for accessing elements from array-like string representations.
     pub fn steel_get_column_with_index(
         &self,
         table: &str,
         index: i64,
-        column: &str
+        column: &str,
     ) -> Result<SteelVal, SteelVal> {
-        // Get the full value with proper type conversion
-        let value = self.steel_get_column(table, column)?;
+        // Cross-table: interpret 'index' as the row id to fetch directly
+        if table != self.current_table {
+            let result = tokio::task::block_in_place(|| {
+                let handle = tokio::runtime::Handle::current();
+                handle.block_on(async {
+                    let column_type =
+                        self.validate_column_type_and_get_type(table, column)
+                            .await?;
+
+                    let raw_value = sqlx::query_scalar::<_, String>(&format!(
+                        "SELECT \"{}\" FROM \"{}\".\"{}\" WHERE id = $1",
+                        column, self.schema_name, table
+                    ))
+                    .bind(index)
+                    .fetch_one(&*self.db_pool)
+                    .await
+                    .map_err(|e| FunctionError::DatabaseError(e.to_string()))?;
+
+                    let converted = self
+                        .convert_value_to_steel_format(&raw_value, &column_type);
+                    Ok::<String, FunctionError>(converted)
+                })
+            });
+
+            return match result {
+                Ok(v) => Ok(SteelVal::StringV(v.into())),
+                Err(e) => Err(SteelVal::StringV(e.to_string().into())),
+            };
+        }
+
+        // Current table: existing behavior (index in comma-separated string)
+        let value = self.steel_get_column(table, column)?;

         if let SteelVal::StringV(s) = value {
             let parts: Vec<_> = s.split(',').collect();
             if let Some(part) = parts.get(index as usize) {
-                let trimmed_part = part.trim();
+                let trimmed = part.trim();

-                // Apply type conversion to the indexed part based on original column type
                 let column_type = tokio::task::block_in_place(|| {
                     let handle = tokio::runtime::Handle::current();
                     handle.block_on(async {
@@ -218,40 +236,35 @@ impl SteelContext {
                 match column_type {
                     Ok(ct) => {
-                        let converted_part = self.convert_value_to_steel_format(trimmed_part, &ct);
-                        Ok(SteelVal::StringV(converted_part.into()))
-                    }
-                    Err(_) => {
-                        // If type cannot be determined, return value as-is
-                        Ok(SteelVal::StringV(trimmed_part.into()))
+                        let converted =
+                            self.convert_value_to_steel_format(trimmed, &ct);
+                        Ok(SteelVal::StringV(converted.into()))
                     }
+                    Err(_) => Ok(SteelVal::StringV(trimmed.into())),
                 }
             } else {
                 Err(SteelVal::StringV("Index out of bounds".into()))
             }
         } else {
-            Err(SteelVal::StringV("Expected comma-separated string".into()))
+            Err(SteelVal::StringV(
+                "Expected comma-separated string".into(),
+            ))
         }
     }

-    /// Executes read-only SQL queries from Steel scripts with safety restrictions.
-    ///
-    /// # Security Features
-    /// - Only SELECT, SHOW, and EXPLAIN queries allowed
-    /// - Prohibited column type access validation
-    /// - Returns first column of all rows as comma-separated string
     pub fn steel_query_sql(&self, query: &str) -> Result<SteelVal, SteelVal> {
         if !is_read_only_query(query) {
-            return Err(SteelVal::StringV(
-                "Only SELECT queries are allowed".into()
-            ));
+            return Err(SteelVal::StringV("Only SELECT queries are allowed".into()));
         }

         if contains_prohibited_column_access(query) {
-            return Err(SteelVal::StringV(format!(
+            return Err(SteelVal::StringV(
+                format!(
                 "SQL query may access prohibited column types. Steel scripts cannot access columns of type: {}",
                 PROHIBITED_TYPES.join(", ")
-            ).into()));
+                )
+                .into(),
+            ));
         }

         let pool = self.db_pool.clone();
@@ -266,7 +279,8 @@ impl SteelContext {
         let mut results = Vec::new();
         for row in rows {
-            let val: String = row.try_get(0)
+            let val: String = row
+                .try_get(0)
                 .map_err(|e| SteelVal::StringV(e.to_string().into()))?;
             results.push(val);
         }
@@ -279,85 +293,30 @@ impl SteelContext {
     }
 }

-/// Checks if a data type is prohibited for Steel script access.
 fn is_prohibited_type(data_type: &str) -> bool {
     let normalized_type = normalize_data_type(data_type);
-    PROHIBITED_TYPES.iter().any(|&prohibited| normalized_type.starts_with(prohibited))
+    PROHIBITED_TYPES
+        .iter()
+        .any(|&prohibited| normalized_type.starts_with(prohibited))
 }

-/// Normalizes data type strings for consistent comparison.
-/// Handles variations like NUMERIC(10,2) by extracting base type.
 fn normalize_data_type(data_type: &str) -> String {
-    data_type.to_uppercase()
-        .split('(') // Remove precision/scale from NUMERIC(x,y)
+    data_type
+        .to_uppercase()
+        .split('(')
         .next()
         .unwrap_or(data_type)
         .trim()
         .to_string()
 }

-/// Performs basic heuristic check for prohibited column type access in SQL queries.
-/// Looks for common patterns that might indicate access to restricted types.
 fn contains_prohibited_column_access(query: &str) -> bool {
     let query_upper = query.to_uppercase();
-
-    let patterns = [
-        "EXTRACT(",   // Common with DATE/TIMESTAMPTZ
-        "DATE_PART(", // Common with DATE/TIMESTAMPTZ
-        "::DATE",
-        "::TIMESTAMPTZ",
-        "::BIGINT",
-    ];
-
-    patterns.iter().any(|pattern| query_upper.contains(pattern))
+    let patterns = ["EXTRACT(", "DATE_PART(", "::DATE", "::TIMESTAMPTZ", "::BIGINT"];
+    patterns.iter().any(|p| query_upper.contains(p))
 }

-/// Validates that a query is read-only and safe for Steel script execution.
 fn is_read_only_query(query: &str) -> bool {
     let query = query.trim_start().to_uppercase();
-    query.starts_with("SELECT") ||
-    query.starts_with("SHOW") ||
-    query.starts_with("EXPLAIN")
-}
-
-/// Converts row data boolean values to Steel script format during context initialization.
-pub async fn convert_row_data_for_steel(
-    db_pool: &PgPool,
-    schema_id: i64,
-    table_name: &str,
-    row_data: &mut HashMap<String, String>,
-) -> Result<(), sqlx::Error> {
-    let table_def = sqlx::query!(
-        r#"SELECT columns FROM table_definitions
-           WHERE schema_id = $1 AND table_name = $2"#,
-        schema_id,
-        table_name
-    )
-    .fetch_optional(db_pool)
-    .await?
-    .ok_or_else(|| sqlx::Error::RowNotFound)?;
-
-    // Parse column definitions to identify boolean columns for conversion
-    if let Ok(columns) = serde_json::from_value::<Vec<String>>(table_def.columns) {
-        for column_def in columns {
-            let mut parts = column_def.split_whitespace();
-            if let (Some(name), Some(data_type)) = (parts.next(), parts.next()) {
-                let column_name = name.trim_matches('"');
-                let normalized_type = normalize_data_type(data_type);
-                if normalized_type == "BOOLEAN" || normalized_type == "BOOL" {
-                    if let Some(value) = row_data.get_mut(column_name) {
-                        // Convert boolean value to Steel format
-                        *value = match value.to_lowercase().as_str() {
-                            "true" | "t" | "1" | "yes" | "on" => "#true".to_string(),
-                            "false" | "f" | "0" | "no" | "off" => "#false".to_string(),
-                            _ => value.clone(), // Keep original if not recognized
-                        };
-                    }
-                }
-            }
-        }
-    }
-
-    Ok(())
+    query.starts_with("SELECT") || query.starts_with("SHOW") || query.starts_with("EXPLAIN")
 }

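Note: a sketch of the convention change in this file, with hypothetical names. The old path stripped a prefix from the script's table argument and resolved the real table via a `LIKE '%_<base>'` lookup; now the script must name the table exactly, and the foreign key is expected in the posted row under `<table>_id`. The resolution logic, distilled:

use std::collections::HashMap;

fn fk_lookup(current_table: &str, table: &str, row_data: &HashMap<String, String>) -> Result<i64, String> {
    // New convention: the FK column is named exactly "<table>_id".
    let fk_column = format!("{}_id", table);
    row_data
        .get(&fk_column)
        .ok_or_else(|| format!("Foreign key column '{}' not found on '{}'", fk_column, current_table))?
        .parse::<i64>()
        .map_err(|_| "Invalid foreign key format".to_string())
}

fn main() {
    let mut row = HashMap::new();
    row.insert("suppliers_id".to_string(), "7".to_string());
    // Row id 7 is then used in: SELECT "name" FROM "<schema>"."suppliers" WHERE id = $1
    assert_eq!(fk_lookup("orders", "suppliers", &row), Ok(7));
}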

@@ -2,24 +2,9 @@
 use tonic::Status;
 use sqlx::{PgPool, Transaction, Postgres};
-use serde_json::json;
 use common::proto::komp_ac::table_definition::{PostTableDefinitionRequest, TableDefinitionResponse};
+use common::proto::komp_ac::table_definition::ColumnDefinition;
+use crate::table_definition::models::map_field_type;

-// TODO CRITICAL add decimal with optional precision"
-const PREDEFINED_FIELD_TYPES: &[(&str, &str)] = &[
-    ("text", "TEXT"),
-    ("string", "TEXT"),
-    ("boolean", "BOOLEAN"),
-    ("timestamp", "TIMESTAMPTZ"),
-    ("timestamptz", "TIMESTAMPTZ"),
-    ("time", "TIMESTAMPTZ"),
-    ("money", "NUMERIC(14, 4)"),
-    ("integer", "INTEGER"),
-    ("int", "INTEGER"),
-    ("biginteger", "BIGINT"),
-    ("bigint", "BIGINT"),
-    ("date", "DATE"),
-];

 // NEW: Helper function to provide detailed error messages
 fn validate_identifier_format(s: &str, identifier_type: &str) -> Result<(), Status> {
@@ -58,116 +43,6 @@ fn validate_identifier_format(s: &str, identifier_type: &str) -> Result<(), Stat
     Ok(())
 }

-fn validate_decimal_number_format(num_str: &str, param_name: &str) -> Result<(), Status> {
-    if num_str.is_empty() {
-        return Err(Status::invalid_argument(format!(
-            "{} cannot be empty",
-            param_name
-        )));
-    }
-
-    // Check for explicit signs
-    if num_str.starts_with('+') || num_str.starts_with('-') {
-        return Err(Status::invalid_argument(format!(
-            "{} cannot have explicit positive or negative signs",
-            param_name
-        )));
-    }
-
-    // Check for decimal points
-    if num_str.contains('.') {
-        return Err(Status::invalid_argument(format!(
-            "{} must be a whole number (no decimal points)",
-            param_name
-        )));
-    }
-
-    // Check for leading zeros (but allow "0" itself)
-    if num_str.len() > 1 && num_str.starts_with('0') {
-        let trimmed = num_str.trim_start_matches('0');
-        let suggestion = if trimmed.is_empty() { "0" } else { trimmed };
-        return Err(Status::invalid_argument(format!(
-            "{} cannot have leading zeros (use '{}' instead of '{}')",
-            param_name,
-            suggestion,
-            num_str
-        )));
-    }
-
-    // Check that all characters are digits
-    if !num_str.chars().all(|c| c.is_ascii_digit()) {
-        return Err(Status::invalid_argument(format!(
-            "{} contains invalid characters. Only digits 0-9 are allowed",
-            param_name
-        )));
-    }
-
-    Ok(())
-}
-
-fn map_field_type(field_type: &str) -> Result<String, Status> {
-    let lower_field_type = field_type.to_lowercase();
-
-    // Special handling for "decimal(precision, scale)"
-    if lower_field_type.starts_with("decimal(") && lower_field_type.ends_with(')') {
-        // Extract the part inside the parentheses, e.g., "10, 2"
-        let args = lower_field_type
-            .strip_prefix("decimal(")
-            .and_then(|s| s.strip_suffix(')'))
-            .unwrap_or(""); // Should always succeed due to the checks above
-
-        // Split into precision and scale parts
-        if let Some((p_str, s_str)) = args.split_once(',') {
-            let precision_str = p_str.trim();
-            let scale_str = s_str.trim();
-
-            // NEW: Validate format BEFORE parsing
-            validate_decimal_number_format(precision_str, "precision")?;
-            validate_decimal_number_format(scale_str, "scale")?;
-
-            // Parse precision, returning an error if it's not a valid number
-            let precision = precision_str.parse::<u32>().map_err(|_| {
-                Status::invalid_argument("Invalid precision in decimal type")
-            })?;
-
-            // Parse scale, returning an error if it's not a valid number
-            let scale = scale_str.parse::<u32>().map_err(|_| {
-                Status::invalid_argument("Invalid scale in decimal type")
-            })?;
-
-            // Add validation based on PostgreSQL rules
-            if precision < 1 {
-                return Err(Status::invalid_argument("Precision must be at least 1"));
-            }
-            if scale > precision {
-                return Err(Status::invalid_argument(
-                    "Scale cannot be greater than precision",
-                ));
-            }
-
-            // If everything is valid, build and return the NUMERIC type string
-            return Ok(format!("NUMERIC({}, {})", precision, scale));
-        } else {
-            // The format was wrong, e.g., "decimal(10)" or "decimal()"
-            return Err(Status::invalid_argument(
-                "Invalid decimal format. Expected: decimal(precision, scale)",
-            ));
-        }
-    }
-
-    // If not a decimal, fall back to the predefined list
-    PREDEFINED_FIELD_TYPES
-        .iter()
-        .find(|(key, _)| *key == lower_field_type.as_str())
-        .map(|(_, sql_type)| sql_type.to_string()) // Convert to an owned String
-        .ok_or_else(|| {
-            Status::invalid_argument(format!(
-                "Invalid field type: {}",
-                field_type
-            ))
-        })
-}
-
 fn is_invalid_table_name(table_name: &str) -> bool {
     table_name.ends_with("_id") ||
     table_name == "id" ||
@@ -299,7 +174,9 @@ async fn execute_table_definition(
         links.push((linked_id, link.required));
     }

-    let mut columns = Vec::new();
+    let mut stored_columns = Vec::new();
+    let mut sql_columns = Vec::new();
+
     for col_def in request.columns.drain(..) {
         let col_name = col_def.name.trim().to_string();
         validate_identifier_format(&col_name, "Column name")?;
@@ -312,21 +189,33 @@ async fn execute_table_definition(
         }

         let sql_type = map_field_type(&col_def.field_type)?;
-        columns.push(format!("\"{}\" {}", col_name, sql_type));
+        sql_columns.push(format!("\"{}\" {}", col_name, sql_type));
+
+        // push the proto type (serde serializable)
+        stored_columns.push(ColumnDefinition {
+            name: col_name,
+            field_type: col_def.field_type,
+        });
     }

+    // Indexes
+    let mut stored_indexes = Vec::new();
     let mut indexes = Vec::new();
     for idx in request.indexes.drain(..) {
         let idx_name = idx.trim().to_string();
         validate_identifier_format(&idx_name, "Index name")?;
-        if !columns.iter().any(|c| c.starts_with(&format!("\"{}\"", idx_name))) {
-            return Err(Status::invalid_argument(format!("Index column '{}' not found", idx_name)));
+        if !sql_columns.iter().any(|c| c.starts_with(&format!("\"{}\"", idx_name))) {
+            return Err(Status::invalid_argument(format!(
+                "Index column '{}' not found", idx_name
+            )));
         }
+        stored_indexes.push(idx_name.clone());
         indexes.push(idx_name);
     }

-    let (create_sql, index_sql) = generate_table_sql(tx, &profile_name, &table_name, &columns, &indexes, &links).await?;
+    let (create_sql, index_sql) = generate_table_sql(tx, &profile_name, &table_name, &sql_columns, &indexes, &links).await?;

     // Use schema_id instead of profile_id
     let table_def = sqlx::query!(
@@ -336,8 +225,8 @@ async fn execute_table_definition(
         RETURNING id"#,
         schema.id,
         &table_name,
-        json!(columns),
-        json!(indexes)
+        serde_json::to_value(&stored_columns).unwrap(),
+        serde_json::to_value(&stored_indexes).unwrap()
     )
     .fetch_one(&mut **tx)
     .await
@@ -351,7 +240,7 @@ async fn execute_table_definition(
         Status::internal(format!("Database error: {}", e))
     })?;

-    for col_def in &columns {
+    for col_def in &sql_columns {
         // Column string looks like "\"name\" TYPE", split out identifier
         let col_name = col_def.split_whitespace().next().unwrap_or("");
         let clean_col = col_name.trim_matches('"');

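Note: a minimal sketch of the storage change above. Previously the handler stored the raw SQL fragments (`json!(columns)` produced strings like `"\"price\" NUMERIC(10, 2)"`), while `stored_columns` now serializes the proto `ColumnDefinition` structs, so readers no longer split strings on whitespace. With a hypothetical column:

use common::proto::komp_ac::table_definition::ColumnDefinition;
use serde_json::json;

fn main() {
    // Old shape: a JSON array of "\"name\" SQL_TYPE" strings.
    let old = json!(["\"price\" NUMERIC(10, 2)"]);

    // New shape: structured objects that keep the user-facing field type.
    let new = serde_json::to_value(vec![ColumnDefinition {
        name: "price".to_string(),
        field_type: "decimal(10, 2)".to_string(),
    }])
    .unwrap();

    println!("{old} -> {new}");
    // ["\"price\" NUMERIC(10, 2)"] -> [{"name":"price","field_type":"decimal(10, 2)"}]
}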

@@ -2,3 +2,6 @@
 pub mod models;
 pub mod handlers;
+pub mod repo;
+
+pub use repo::*;


@@ -0,0 +1,91 @@
+// src/table_definition/models.rs
+use tonic::Status;
+
+/// Predefined static field mappings
+// TODO CRITICAL add decimal with optional precision"
+pub const PREDEFINED_FIELD_TYPES: &[(&str, &str)] = &[
+    ("text", "TEXT"),
+    ("string", "TEXT"),
+    ("boolean", "BOOLEAN"),
+    ("timestamp", "TIMESTAMPTZ"),
+    ("timestamptz", "TIMESTAMPTZ"),
+    ("time", "TIMESTAMPTZ"),
+    ("money", "NUMERIC(14, 4)"),
+    ("integer", "INTEGER"),
+    ("int", "INTEGER"),
+    ("biginteger", "BIGINT"),
+    ("bigint", "BIGINT"),
+    ("date", "DATE"),
+];
+
+/// reusable decimal number validation
+pub fn validate_decimal_number_format(num_str: &str, param_name: &str) -> Result<(), Status> {
+    if num_str.is_empty() {
+        return Err(Status::invalid_argument(format!("{} cannot be empty", param_name)));
+    }
+    if num_str.starts_with('+') || num_str.starts_with('-') {
+        return Err(Status::invalid_argument(format!(
+            "{} cannot have explicit positive/negative signs", param_name
+        )));
+    }
+    if num_str.contains('.') {
+        return Err(Status::invalid_argument(format!(
+            "{} must be a whole number (no decimal point)", param_name
+        )));
+    }
+    if num_str.len() > 1 && num_str.starts_with('0') {
+        let trimmed = num_str.trim_start_matches('0');
+        let suggestion = if trimmed.is_empty() { "0" } else { trimmed };
+        return Err(Status::invalid_argument(format!(
+            "{} cannot have leading zeros (use '{}' instead of '{}')",
+            param_name, suggestion, num_str
+        )));
+    }
+    if !num_str.chars().all(|c| c.is_ascii_digit()) {
+        return Err(Status::invalid_argument(format!(
+            "{} contains invalid characters. Only digits allowed", param_name
+        )));
+    }
+    Ok(())
+}
+
+/// reusable field type mapper
+pub fn map_field_type(field_type: &str) -> Result<String, Status> {
+    let lower_field_type = field_type.to_lowercase();
+
+    if lower_field_type.starts_with("decimal(") && lower_field_type.ends_with(')') {
+        let args = lower_field_type.strip_prefix("decimal(").unwrap()
+            .strip_suffix(')').unwrap();
+
+        if let Some((p_str, s_str)) = args.split_once(',') {
+            let precision_str = p_str.trim();
+            let scale_str = s_str.trim();
+
+            validate_decimal_number_format(precision_str, "precision")?;
+            validate_decimal_number_format(scale_str, "scale")?;
+
+            let precision = precision_str.parse::<u32>()
+                .map_err(|_| Status::invalid_argument("Invalid precision"))?;
+            let scale = scale_str.parse::<u32>()
+                .map_err(|_| Status::invalid_argument("Invalid scale"))?;
+
+            if precision < 1 {
+                return Err(Status::invalid_argument("Precision must be >= 1"));
+            }
+            if scale > precision {
+                return Err(Status::invalid_argument("Scale cannot be > precision"));
+            }
+
+            return Ok(format!("NUMERIC({}, {})", precision, scale));
+        } else {
+            return Err(Status::invalid_argument(
+                "Invalid decimal format. Expected decimal(precision, scale)"
+            ));
+        }
+    }
+
+    PREDEFINED_FIELD_TYPES
+        .iter()
+        .find(|(key, _)| *key == lower_field_type.as_str())
+        .map(|(_, sql_type)| sql_type.to_string())
+        .ok_or_else(|| Status::invalid_argument(format!("Invalid field type: {}", field_type)))
+}

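Note: a few illustrative calls against the relocated `map_field_type`, exercising the decimal rules above (sketch, not in the commit):

use crate::table_definition::models::map_field_type;

#[test]
fn field_type_mapping_examples() {
    assert_eq!(map_field_type("money").unwrap(), "NUMERIC(14, 4)");
    assert_eq!(map_field_type("decimal(10, 2)").unwrap(), "NUMERIC(10, 2)");
    assert!(map_field_type("decimal(2, 10)").is_err());  // scale > precision
    assert!(map_field_type("decimal(010, 2)").is_err()); // leading zeros rejected
    assert!(map_field_type("decimal(10)").is_err());     // missing scale
    assert!(map_field_type("uuid").is_err());            // not in PREDEFINED_FIELD_TYPES
}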

@@ -0,0 +1,33 @@
+// src/table_definition/repo.rs
+use common::proto::komp_ac::table_definition::ColumnDefinition;
+use sqlx::PgPool;
+
+pub struct TableDefRow {
+    pub id: i64,
+    pub table_name: String,
+    pub columns: Vec<ColumnDefinition>,
+    pub indexes: Vec<String>,
+}
+
+pub async fn get_table_definition(
+    db: &PgPool,
+    id: i64,
+) -> Result<TableDefRow, anyhow::Error> {
+    let rec = sqlx::query!(
+        r#"
+        SELECT id, table_name, columns, indexes
+        FROM table_definitions
+        WHERE id = $1
+        "#,
+        id
+    )
+    .fetch_one(db)
+    .await?;
+
+    Ok(TableDefRow {
+        id: rec.id,
+        table_name: rec.table_name,
+        columns: serde_json::from_value(rec.columns)?, // 🔑
+        indexes: serde_json::from_value(rec.indexes)?,
+    })
+}


@@ -2,7 +2,8 @@
 use std::collections::HashMap;
 use tonic::Status;
-use serde_json::{json, Value};
+use serde::{Deserialize, Serialize};
+use serde_json::Value;

 /// Represents the state of a node during dependency graph traversal.
 #[derive(Clone, Copy, PartialEq)]
@@ -40,18 +41,38 @@ impl DependencyType {
             DependencyType::SqlQuery { .. } => "sql_query",
         }
     }
+}
+
+/// Strongly-typed JSON for script_dependencies.context_info
+/// Using untagged so JSON stays minimal (no "type" field), and we can still
+/// deserialize it into a proper enum.
+#[derive(Debug, Clone, Serialize, Deserialize)]
+#[serde(untagged)]
+pub enum ScriptDependencyContext {
+    ColumnAccess { column: String },
+    IndexedAccess { column: String, index: i64 },
+    SqlQuery { query_fragment: String },
+}

-    /// Generates context JSON for database storage.
-    pub fn context_json(&self) -> Value {
+impl DependencyType {
+    /// Convert this dependency into its JSON context struct.
+    pub fn to_context(&self) -> ScriptDependencyContext {
         match self {
             DependencyType::ColumnAccess { column } => {
-                json!({ "column": column })
+                ScriptDependencyContext::ColumnAccess {
+                    column: column.clone(),
+                }
             }
             DependencyType::IndexedAccess { column, index } => {
-                json!({ "column": column, "index": index })
+                ScriptDependencyContext::IndexedAccess {
+                    column: column.clone(),
+                    index: *index,
+                }
             }
             DependencyType::SqlQuery { query_fragment } => {
-                json!({ "query_fragment": query_fragment })
+                ScriptDependencyContext::SqlQuery {
+                    query_fragment: query_fragment.clone(),
+                }
             }
         }
     }
@@ -554,7 +575,7 @@ impl DependencyAnalyzer {
             table_id,
             target_id,
             dep.dependency_type.as_str(),
-            dep.dependency_type.context_json()
+            serde_json::to_value(dep.dependency_type.to_context()).unwrap()
         )
         .execute(&mut **tx)
         .await

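Note: one serde caveat worth flagging on `#[serde(untagged)]` (editor's observation, not from the diff): untagged variants are matched in declaration order, and struct deserialization ignores unknown keys by default, so a stored `{"column": ..., "index": ...}` payload deserializes as `ColumnAccess` (silently dropping `index`) because that variant is declared first. This matters for `get_dependencies_for_script` in the new repo module, which reads `context_info` back. A minimal reproduction:

use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
#[serde(untagged)]
enum Ctx {
    ColumnAccess { column: String },
    IndexedAccess { column: String, index: i64 },
}

fn main() {
    let json = serde_json::to_string(&Ctx::IndexedAccess {
        column: "total".into(),
        index: 2,
    })
    .unwrap();
    // Stored as {"column":"total","index":2} -- minimal, no "type" field.

    // Read back: ColumnAccess is tried first, the extra "index" key is
    // ignored, and the round-trip loses the index.
    let back: Ctx = serde_json::from_str(&json).unwrap();
    println!("{:?}", back); // ColumnAccess { column: "total" }
    // Declaring IndexedAccess before ColumnAccess (or using
    // deny_unknown_fields) would preserve the round-trip.
}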

@@ -4,6 +4,7 @@
 use tonic::Status;
 use sqlx::{PgPool, Error as SqlxError};
 use common::proto::komp_ac::table_script::{PostTableScriptRequest, TableScriptResponse};
+use common::proto::komp_ac::table_definition::ColumnDefinition;
 use serde_json::Value;
 use steel_decimal::SteelDecimal;
 use regex::Regex;
@@ -303,16 +304,12 @@ async fn validate_math_operations_column_types(
     let mut table_column_types: HashMap<String, HashMap<String, String>> = HashMap::new();

     for table_def in table_definitions {
-        let columns: Vec<String> = serde_json::from_value(table_def.columns)
+        let columns: Vec<ColumnDefinition> = serde_json::from_value(table_def.columns)
             .map_err(|e| Status::internal(format!("Invalid column data for table '{}': {}", table_def.table_name, e)))?;

         let mut column_types = HashMap::new();
-        for column_def in columns {
-            let mut parts = column_def.split_whitespace();
-            if let (Some(name), Some(data_type)) = (parts.next(), parts.next()) {
-                let column_name = name.trim_matches('"');
-                column_types.insert(column_name.to_string(), data_type.to_string());
-            }
+        for col_def in columns {
+            column_types.insert(col_def.name.clone(), col_def.field_type.clone());
         }
         table_column_types.insert(table_def.table_name, column_types);
     }
@@ -363,25 +360,13 @@ fn validate_target_column(
     }

     // Parse the columns JSON into a vector of strings
-    let columns: Vec<String> = serde_json::from_value(table_columns.clone())
+    let columns: Vec<ColumnDefinition> = serde_json::from_value(table_columns.clone())
         .map_err(|e| format!("Invalid column data: {}", e))?;

-    // Extract column names and types
-    let column_info: Vec<(&str, &str)> = columns
+    let column_type = columns
         .iter()
-        .filter_map(|c| {
-            let mut parts = c.split_whitespace();
-            let name = parts.next()?.trim_matches('"');
-            let data_type = parts.next()?;
-            Some((name, data_type))
-        })
-        .collect();
-
-    // Find the target column and return its type
-    let column_type = column_info
-        .iter()
-        .find(|(name, _)| *name == target)
-        .map(|(_, dt)| dt.to_string())
+        .find(|c| c.name == target)
+        .map(|c| c.field_type.clone())
         .ok_or_else(|| format!("Target column '{}' not defined in table '{}'", target, table_name))?;

     // Check if the target column type is prohibited
@@ -509,34 +494,21 @@ async fn validate_script_column_references(
 /// Validate that a referenced column doesn't have a prohibited type
 fn validate_referenced_column_type(table_name: &str, column_name: &str, table_columns: &Value) -> Result<(), String> {
     // Parse the columns JSON into a vector of strings
-    let columns: Vec<String> = serde_json::from_value(table_columns.clone())
+    let columns: Vec<ColumnDefinition> = serde_json::from_value(table_columns.clone())
         .map_err(|e| format!("Invalid column data for table '{}': {}", table_name, e))?;

-    // Extract column names and types
-    let column_info: Vec<(&str, &str)> = columns
-        .iter()
-        .filter_map(|c| {
-            let mut parts = c.split_whitespace();
-            let name = parts.next()?.trim_matches('"');
-            let data_type = parts.next()?;
-            Some((name, data_type))
-        })
-        .collect();
-
-    // Find the referenced column and check its type
-    if let Some((_, column_type)) = column_info.iter().find(|(name, _)| *name == column_name) {
-        if is_prohibited_type(column_type) {
+    if let Some(col_def) = columns.iter().find(|c| c.name == column_name) {
+        if is_prohibited_type(&col_def.field_type) {
             return Err(format!(
                 "Script references column '{}' in table '{}' which has prohibited type '{}'. Steel scripts cannot access columns of type: {}",
                 column_name,
                 table_name,
-                column_type,
+                col_def.field_type,
                 PROHIBITED_TYPES.join(", ")
             ));
         }

-        // Log info for boolean columns
-        let normalized_type = normalize_data_type(column_type);
+        let normalized_type = normalize_data_type(&col_def.field_type);
         if normalized_type == "BOOLEAN" || normalized_type == "BOOL" {
             println!("Info: Script references boolean column '{}' in table '{}'. Values will be converted to Steel format (#true/#false)", column_name, table_name);
         }


@@ -1,4 +1,7 @@
 // src/table_script/mod.rs

 pub mod handlers;
+pub mod repo;
+
 pub use handlers::*;
+pub use repo::*;


@@ -0,0 +1,49 @@
+// src/table_script/repo.rs
+use anyhow::Result;
+use sqlx::PgPool;
+
+use crate::table_script::handlers::dependency_analyzer::ScriptDependencyContext;
+
+#[derive(Debug, Clone)]
+pub struct ScriptDependencyRecord {
+    pub script_id: i64,
+    pub source_table_id: i64,
+    pub target_table_id: i64,
+    pub dependency_type: String,
+    pub context: Option<ScriptDependencyContext>,
+}
+
+pub async fn get_dependencies_for_script(
+    db: &PgPool,
+    script_id: i64,
+) -> Result<Vec<ScriptDependencyRecord>> {
+    let rows = sqlx::query!(
+        r#"
+        SELECT script_id, source_table_id, target_table_id, dependency_type, context_info
+        FROM script_dependencies
+        WHERE script_id = $1
+        ORDER BY source_table_id, target_table_id
+        "#,
+        script_id
+    )
+    .fetch_all(db)
+    .await?;
+
+    let mut out = Vec::new();
+    for r in rows {
+        let context = match r.context_info {
+            Some(value) => Some(serde_json::from_value::<ScriptDependencyContext>(value)?),
+            None => None,
+        };
+
+        out.push(ScriptDependencyRecord {
+            script_id: r.script_id,
+            source_table_id: r.source_table_id,
+            target_table_id: r.target_table_id,
+            dependency_type: r.dependency_type,
+            context,
+        });
+    }
+
+    Ok(out)
+}


@@ -40,12 +40,16 @@ impl TableValidationService for TableValidationSvc {
                     // Set the data_key from the database row
                     fv.data_key = r.data_key;

-                    // Skip if limits are all zero
-                    if let Some(lims) = &fv.limits {
-                        if lims.min == 0 && lims.max == 0 && lims.warn_at.is_none() {
+                    // Keep entries that have either meaningful limits or a mask
+                    let has_meaningful_limits = fv
+                        .limits
+                        .as_ref()
+                        .map_or(false, |l| l.min > 0 || l.max > 0 || l.warn_at.is_some());
+                    let has_mask = fv.mask.is_some();
+
+                    if !has_meaningful_limits && !has_mask {
                         continue;
                     }
-                    }

                     fields_out.push(fv);
                 }
                 Err(e) => {


@@ -1,9 +1,11 @@
 // src/tables_data/handlers/get_table_data.rs
 use tonic::Status;
 use sqlx::{PgPool, Row};
 use std::collections::HashMap;
 use common::proto::komp_ac::tables_data::{GetTableDataRequest, GetTableDataResponse};
+use common::proto::komp_ac::table_definition::ColumnDefinition;

 use crate::shared::schema_qualifier::qualify_table_name_for_data;

 pub async fn get_table_data(

@@ -39,17 +41,13 @@ pub async fn get_table_data(
     let table_def = table_def.ok_or_else(|| Status::not_found("Table not found"))?;

     // Parse user-defined columns from JSON
-    let columns_json: Vec<String> = serde_json::from_value(table_def.columns.clone())
+    let stored_columns: Vec<ColumnDefinition> = serde_json::from_value(table_def.columns.clone())
         .map_err(|e| Status::internal(format!("Column parsing error: {}", e)))?;

+    // Directly extract names, no split(" ") parsing anymore
     let mut user_columns = Vec::new();
-    for col_def in columns_json {
-        let parts: Vec<&str> = col_def.splitn(2, ' ').collect();
-        if parts.len() != 2 {
-            return Err(Status::internal("Invalid column format"));
-        }
-        let name = parts[0].trim_matches('"').to_string();
-        user_columns.push(name);
+    for col_def in stored_columns {
+        user_columns.push(col_def.name.trim().to_string());
     }

     // --- START OF FIX ---
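
With this change, the `columns` JSON stored in `table_definitions` holds structured objects rather than `"\"name\" TYPE"` strings. A quick round-trip check using a local mirror of the generated struct (illustrative only; the fixture format matches the tests later in this diff):

    use serde::Deserialize;

    // Local mirror of the prost-generated ColumnDefinition, for illustration.
    #[derive(Deserialize)]
    struct ColumnDefinition { name: String, field_type: String }

    fn main() {
        let stored = serde_json::json!([
            { "name": "age", "field_type": "integer" },
            { "name": "email", "field_type": "text" }
        ]);
        let cols: Vec<ColumnDefinition> = serde_json::from_value(stored).unwrap();
        assert_eq!(cols[0].name, "age");
        assert_eq!(cols[1].field_type, "text");
    }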

View File

@@ -5,6 +5,8 @@ use sqlx::{PgPool, Arguments};
 use sqlx::postgres::PgArguments;
 use chrono::{DateTime, Utc};
 use common::proto::komp_ac::tables_data::{PostTableDataRequest, PostTableDataResponse};
+use common::proto::komp_ac::table_definition::ColumnDefinition;
+use crate::table_definition::models::map_field_type;
 use std::collections::HashMap;
 use std::sync::Arc;
 use prost_types::value::Kind;

@@ -56,18 +58,16 @@ pub async fn post_table_data(
     let table_def = table_def.ok_or_else(|| Status::not_found("Table not found"))?;

     // Parse column definitions from JSON format
-    let columns_json: Vec<String> = serde_json::from_value(table_def.columns.clone())
+    let stored_columns: Vec<ColumnDefinition> = serde_json::from_value(table_def.columns.clone())
         .map_err(|e| Status::internal(format!("Column parsing error: {}", e)))?;

+    // convert ColumnDefinition -> (name, sql_type) using the same map_field_type logic
     let mut columns = Vec::new();
-    for col_def in columns_json {
-        let parts: Vec<&str> = col_def.splitn(2, ' ').collect();
-        if parts.len() != 2 {
-            return Err(Status::internal("Invalid column format"));
-        }
-        let name = parts[0].trim_matches('"').to_string();
-        let sql_type = parts[1].to_string();
-        columns.push((name, sql_type));
+    for col_def in stored_columns {
+        let col_name = col_def.name.trim().to_string();
+        let sql_type = map_field_type(&col_def.field_type)
+            .map_err(|e| Status::invalid_argument(format!("Invalid type for column '{}': {}", col_name, e)))?;
+        columns.push((col_name, sql_type));
     }

     // Build list of valid system columns (foreign keys and special columns)
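
`map_field_type` appears to be the same mapping used when the table definition is created, which keeps data writes consistent with the generated DDL. A sketch of the assumed shape (the real function lives in `table_definition::models`; the mappings below are assumptions, not the crate's actual code):

    // Sketch only: maps user-facing field types to SQL types, rejecting unknowns.
    fn map_field_type(field_type: &str) -> Result<String, String> {
        let ft = field_type.trim().to_lowercase();
        match ft.as_str() {
            "text" => Ok("TEXT".to_string()),
            "integer" => Ok("INTEGER".to_string()),
            "boolean" => Ok("BOOLEAN".to_string()),
            // The test fixtures pass NUMERIC(p, s) types through verbatim.
            _ if ft.starts_with("numeric") => Ok(field_type.trim().to_uppercase()),
            other => Err(format!("unsupported field type '{}'", other)),
        }
    }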

View File

@@ -5,6 +5,7 @@ use sqlx::{PgPool, Arguments, Row};
 use sqlx::postgres::PgArguments;
 use chrono::{DateTime, Utc};
 use common::proto::komp_ac::tables_data::{PutTableDataRequest, PutTableDataResponse};
+use common::proto::komp_ac::table_definition::ColumnDefinition;
 use std::sync::Arc;
 use prost_types::value::Kind;

@@ -14,6 +15,7 @@ use std::collections::HashMap;
 use crate::steel::server::execution::{self, Value};
 use crate::indexer::{IndexCommand, IndexCommandData};
+use crate::table_definition::models::map_field_type;
 use tokio::sync::mpsc;
 use tracing::error;

@@ -56,19 +58,20 @@ pub async fn put_table_data(
         .map_err(|e| Status::internal(format!("Table lookup error: {}", e)))?
         .ok_or_else(|| Status::not_found("Table not found"))?;

-    // Parse column definitions from JSON format
-    let columns_json: Vec<String> = serde_json::from_value(table_def.columns.clone())
+    // Parse column definitions from JSON format (now ColumnDefinition objects)
+    let stored_columns: Vec<ColumnDefinition> = serde_json::from_value(table_def.columns.clone())
         .map_err(|e| Status::internal(format!("Column parsing error: {}", e)))?;

+    // Convert ColumnDefinition → (name, sql_type)
     let mut columns = Vec::new();
-    for col_def in columns_json {
-        let parts: Vec<&str> = col_def.splitn(2, ' ').collect();
-        if parts.len() != 2 {
-            return Err(Status::internal("Invalid column format"));
-        }
-        let name = parts[0].trim_matches('"').to_string();
-        let sql_type = parts[1].to_string();
-        columns.push((name, sql_type));
+    for col_def in stored_columns {
+        let col_name = col_def.name.trim().to_string();
+        let sql_type = map_field_type(&col_def.field_type)
+            .map_err(|e| Status::invalid_argument(format!(
+                "Invalid type for column '{}': {}",
+                col_name, e
+            )))?;
+        columns.push((col_name, sql_type));
     }

     // Build list of valid system columns (foreign keys and special columns)

View File

@@ -3,8 +3,8 @@
 use crate::common::setup_isolated_db;
 use server::table_script::handlers::post_table_script::post_table_script; // Fixed import
 use common::proto::komp_ac::table_script::PostTableScriptRequest;
+use common::proto::komp_ac::table_definition::ColumnDefinition;
 use rstest::*;
-use serde_json::json;
 use sqlx::PgPool;

 /// Helper function to create a test table with specified columns
@@ -12,15 +12,10 @@ async fn create_test_table(
     pool: &PgPool,
     schema_id: i64,
     table_name: &str,
-    columns: Vec<(&str, &str)>,
+    columns: Vec<ColumnDefinition>,
 ) -> i64 {
-    let column_definitions: Vec<String> = columns
-        .iter()
-        .map(|(name, type_def)| format!("\"{}\" {}", name, type_def))
-        .collect();
-
-    let columns_json = json!(column_definitions);
-    let indexes_json = json!([]);
+    let columns_json = serde_json::to_value(columns).unwrap();
+    let indexes_json = serde_json::json!([]);

     sqlx::query_scalar!(
         r#"INSERT INTO table_definitions (schema_id, table_name, columns, indexes)
@@ -115,22 +110,17 @@ async fn test_comprehensive_error_scenarios(
     let schema_id = get_default_schema_id(&pool).await;

     // Create comprehensive error test table
-    let columns = vec![
-        // Valid types
-        ("valid_numeric", "NUMERIC(10, 2)"),
-        ("valid_integer", "INTEGER"),
-
-        // Invalid for math operations
-        ("text_col", "TEXT"),
-        ("boolean_col", "BOOLEAN"),
-        ("bigint_col", "BIGINT"),
-        ("date_col", "DATE"),
-        ("timestamp_col", "TIMESTAMPTZ"),
-
-        // Invalid target types
-        ("bigint_target", "BIGINT"),
-        ("date_target", "DATE"),
-        ("timestamp_target", "TIMESTAMPTZ"),
-    ];
+    let columns: Vec<ColumnDefinition> = vec![
+        ColumnDefinition { name: "valid_numeric".into(), field_type: "NUMERIC(10, 2)".into() },
+        ColumnDefinition { name: "valid_integer".into(), field_type: "INTEGER".into() },
+        ColumnDefinition { name: "text_col".into(), field_type: "TEXT".into() },
+        ColumnDefinition { name: "boolean_col".into(), field_type: "BOOLEAN".into() },
+        ColumnDefinition { name: "bigint_col".into(), field_type: "BIGINT".into() },
+        ColumnDefinition { name: "date_col".into(), field_type: "DATE".into() },
+        ColumnDefinition { name: "timestamp_col".into(), field_type: "TIMESTAMPTZ".into() },
+        ColumnDefinition { name: "bigint_target".into(), field_type: "BIGINT".into() },
+        ColumnDefinition { name: "date_target".into(), field_type: "DATE".into() },
+        ColumnDefinition { name: "timestamp_target".into(), field_type: "TIMESTAMPTZ".into() },
+    ];

     let table_id = create_test_table(&pool, schema_id, "error_table", columns).await;
@@ -169,7 +159,9 @@ async fn test_malformed_script_scenarios(
     let pool = setup_isolated_db().await;
     let schema_id = get_default_schema_id(&pool).await;

-    let columns = vec![("result", "NUMERIC(10, 2)")];
+    let columns: Vec<ColumnDefinition> = vec![
+        ColumnDefinition { name: "result".into(), field_type: "NUMERIC(10, 2)".into() }
+    ];
     let table_id = create_test_table(&pool, schema_id, "malformed_test", columns).await;

     let request = PostTableScriptRequest {
@@ -194,7 +186,9 @@ async fn test_advanced_validation_scenarios(
     let pool = setup_isolated_db().await;
     let schema_id = get_default_schema_id(&pool).await;

-    let columns = vec![("result", "NUMERIC(10, 2)")];
+    let columns: Vec<ColumnDefinition> = vec![
+        ColumnDefinition { name: "result".into(), field_type: "NUMERIC(10, 2)".into() }
+    ];
     let table_id = create_test_table(&pool, schema_id, "advanced_test", columns).await;

     let request = PostTableScriptRequest {
@@ -236,16 +230,16 @@ async fn test_dependency_cycle_detection() {
     let schema_id = get_default_schema_id(&pool).await;

     // Create table_b first
-    let table_b_columns = vec![
-        ("value_b", "NUMERIC(10, 2)"),
-        ("result_b", "NUMERIC(10, 2)"),
-    ];
+    let table_b_columns: Vec<ColumnDefinition> = vec![
+        ColumnDefinition { name: "value_b".into(), field_type: "NUMERIC(10, 2)".into() },
+        ColumnDefinition { name: "result_b".into(), field_type: "NUMERIC(10, 2)".into() },
+    ];
     let table_b_id = create_test_table(&pool, schema_id, "table_b", table_b_columns).await;

     // Create table_a
-    let table_a_columns = vec![
-        ("value_a", "NUMERIC(10, 2)"),
-        ("result_a", "NUMERIC(10, 2)"),
-    ];
+    let table_a_columns: Vec<ColumnDefinition> = vec![
+        ColumnDefinition { name: "value_a".into(), field_type: "NUMERIC(10, 2)".into() },
+        ColumnDefinition { name: "result_a".into(), field_type: "NUMERIC(10, 2)".into() },
+    ];
     let table_a_id = create_test_table(&pool, schema_id, "table_a", table_a_columns).await;
@@ -305,7 +299,9 @@ async fn test_edge_case_identifiers(
     let pool = setup_isolated_db().await;
     let schema_id = get_default_schema_id(&pool).await;

-    let columns = vec![("result", "NUMERIC(10, 2)")];
+    let columns: Vec<ColumnDefinition> = vec![
+        ColumnDefinition { name: "result".into(), field_type: "NUMERIC(10, 2)".into() }
+    ];
     let table_id = create_test_table(&pool, schema_id, "identifier_test", columns).await;

     // Test with edge case identifier in script
@@ -342,7 +338,9 @@ async fn test_sql_injection_prevention() {
     let pool = setup_isolated_db().await;
     let schema_id = get_default_schema_id(&pool).await;

-    let columns = vec![("result", "NUMERIC(10, 2)")];
+    let columns: Vec<ColumnDefinition> = vec![
+        ColumnDefinition { name: "result".into(), field_type: "NUMERIC(10, 2)".into() }
+    ];
     let table_id = create_test_table(&pool, schema_id, "injection_test", columns).await;

     // Attempt SQL injection through script content
@@ -388,9 +386,9 @@ async fn test_performance_with_deeply_nested_expressions() {
     let pool = setup_isolated_db().await;
     let schema_id = get_default_schema_id(&pool).await;

-    let columns = vec![
-        ("x", "NUMERIC(15, 8)"),
-        ("performance_result", "NUMERIC(25, 12)"),
-    ];
+    let columns: Vec<ColumnDefinition> = vec![
+        ColumnDefinition { name: "x".into(), field_type: "NUMERIC(15, 8)".into() },
+        ColumnDefinition { name: "performance_result".into(), field_type: "NUMERIC(25, 12)".into() },
+    ];

     let table_id = create_test_table(&pool, schema_id, "performance_test", columns).await;
@@ -437,11 +435,11 @@ async fn test_concurrent_script_creation() {
     let pool = setup_isolated_db().await;
     let schema_id = get_default_schema_id(&pool).await;

-    let columns = vec![
-        ("value", "NUMERIC(10, 2)"),
-        ("result1", "NUMERIC(10, 2)"),
-        ("result2", "NUMERIC(10, 2)"),
-        ("result3", "NUMERIC(10, 2)"),
-    ];
+    let columns: Vec<ColumnDefinition> = vec![
+        ColumnDefinition { name: "value".into(), field_type: "NUMERIC(10, 2)".into() },
+        ColumnDefinition { name: "result1".into(), field_type: "NUMERIC(10, 2)".into() },
+        ColumnDefinition { name: "result2".into(), field_type: "NUMERIC(10, 2)".into() },
+        ColumnDefinition { name: "result3".into(), field_type: "NUMERIC(10, 2)".into() },
+    ];

     let table_id = create_test_table(&pool, schema_id, "concurrent_test", columns).await;
@@ -500,9 +498,10 @@ async fn test_error_message_localization_and_clarity() {
     let pool = setup_isolated_db().await;
     let schema_id = get_default_schema_id(&pool).await;

-    let columns = vec![
-        ("text_col", "TEXT"),
-        ("result", "NUMERIC(10, 2)"),
-    ];
+    let columns: Vec<ColumnDefinition> = vec![
+        ColumnDefinition { name: "text_col".into(), field_type: "TEXT".into() },
+        ColumnDefinition { name: "result".into(), field_type: "NUMERIC(10, 2)".into() },
+    ];

     let table_id = create_test_table(&pool, schema_id, "error_clarity_test", columns).await;

View File

@@ -3,8 +3,8 @@
 use crate::common::setup_isolated_db;
 use server::table_script::handlers::post_table_script::post_table_script; // Fixed import
 use common::proto::komp_ac::table_script::PostTableScriptRequest;
+use common::proto::komp_ac::table_definition::ColumnDefinition;
 use rstest::*;
-use serde_json::json;
 use sqlx::PgPool;

 /// Helper function to create a test table with specified columns
@@ -12,15 +12,10 @@ async fn create_test_table(
     pool: &PgPool,
     schema_id: i64,
     table_name: &str,
-    columns: Vec<(&str, &str)>,
+    columns: Vec<ColumnDefinition>,
 ) -> i64 {
-    let column_definitions: Vec<String> = columns
-        .iter()
-        .map(|(name, type_def)| format!("\"{}\" {}", name, type_def))
-        .collect();
-
-    let columns_json = json!(column_definitions);
-    let indexes_json = json!([]);
+    let columns_json = serde_json::to_value(columns).unwrap();
+    let indexes_json = serde_json::json!([]);

     sqlx::query_scalar!(
         r#"INSERT INTO table_definitions (schema_id, table_name, columns, indexes)
@@ -97,7 +92,9 @@ async fn test_steel_decimal_literal_operations(
     let pool = setup_isolated_db().await;
     let schema_id = get_default_schema_id(&pool).await;

-    let columns = vec![("result", "NUMERIC(30, 15)")];
+    let columns: Vec<ColumnDefinition> = vec![
+        ColumnDefinition { name: "result".to_string(), field_type: "NUMERIC(30, 15)".to_string() }
+    ];
     let table_id = create_test_table(&pool, schema_id, "literal_test", columns).await;

     let script = format!(r#"({} "{}" "{}")"#, operation, value1, value2);
@@ -133,9 +130,9 @@ async fn test_steel_decimal_column_operations(
     let pool = setup_isolated_db().await;
     let schema_id = get_default_schema_id(&pool).await;

-    let columns = vec![
-        ("test_value", column_type),
-        ("result", "NUMERIC(30, 15)"),
-    ];
+    let columns: Vec<ColumnDefinition> = vec![
+        ColumnDefinition { name: "test_value".to_string(), field_type: column_type.to_string() },
+        ColumnDefinition { name: "result".to_string(), field_type: "NUMERIC(30, 15)".to_string() },
+    ];

     let table_id = create_test_table(&pool, schema_id, "column_test", columns).await;
@@ -179,12 +176,12 @@ async fn test_complex_financial_calculation(
     let schema_id = get_default_schema_id(&pool).await;

     // Create a realistic financial calculation table
-    let columns = vec![
-        ("principal", "NUMERIC(16, 2)"),          // Principal amount
-        ("annual_rate", "NUMERIC(6, 5)"),         // Interest rate
-        ("years", "INTEGER"),                     // Time period
-        ("compounding_periods", "INTEGER"),       // Compounding frequency
-        ("compound_interest", "NUMERIC(20, 8)"),  // Result
-    ];
+    let columns: Vec<ColumnDefinition> = vec![
+        ColumnDefinition { name: "principal".to_string(), field_type: "NUMERIC(16, 2)".to_string() },          // Principal amount
+        ColumnDefinition { name: "annual_rate".to_string(), field_type: "NUMERIC(6, 5)".to_string() },         // Interest rate
+        ColumnDefinition { name: "years".to_string(), field_type: "INTEGER".to_string() },                     // Time period
+        ColumnDefinition { name: "compounding_periods".to_string(), field_type: "INTEGER".to_string() },       // Compounding frequency
+        ColumnDefinition { name: "compound_interest".to_string(), field_type: "NUMERIC(20, 8)".to_string() },  // Result
+    ];

     let table_id = create_test_table(&pool, schema_id, "financial_calc", columns).await;
@@ -217,11 +214,11 @@ async fn test_scientific_precision_calculations() {
     let pool = setup_isolated_db().await;
     let schema_id = get_default_schema_id(&pool).await;

-    let columns = vec![
-        ("measurement_a", "NUMERIC(25, 15)"),
-        ("measurement_b", "NUMERIC(25, 15)"),
-        ("coefficient", "NUMERIC(10, 8)"),
-        ("scientific_result", "NUMERIC(30, 18)"),
-    ];
+    let columns: Vec<ColumnDefinition> = vec![
+        ColumnDefinition { name: "measurement_a".to_string(), field_type: "NUMERIC(25, 15)".to_string() },
+        ColumnDefinition { name: "measurement_b".to_string(), field_type: "NUMERIC(25, 15)".to_string() },
+        ColumnDefinition { name: "coefficient".to_string(), field_type: "NUMERIC(10, 8)".to_string() },
+        ColumnDefinition { name: "scientific_result".to_string(), field_type: "NUMERIC(30, 18)".to_string() },
+    ];

     let table_id = create_test_table(&pool, schema_id, "scientific_data", columns).await;
@@ -259,9 +256,9 @@ async fn test_precision_boundary_conditions(
     let pool = setup_isolated_db().await;
     let schema_id = get_default_schema_id(&pool).await;

-    let columns = vec![
-        ("boundary_value", numeric_type),
-        ("result", "NUMERIC(30, 15)"),
-    ];
+    let columns: Vec<ColumnDefinition> = vec![
+        ColumnDefinition { name: "boundary_value".to_string(), field_type: numeric_type.to_string() },
+        ColumnDefinition { name: "result".to_string(), field_type: "NUMERIC(30, 15)".to_string() },
+    ];

     let table_id = create_test_table(&pool, schema_id, "boundary_test", columns).await;
@@ -284,11 +281,11 @@ async fn test_mixed_integer_and_numeric_operations() {
     let pool = setup_isolated_db().await;
     let schema_id = get_default_schema_id(&pool).await;

-    let columns = vec![
-        ("integer_quantity", "INTEGER"),
-        ("numeric_price", "NUMERIC(10, 4)"),
-        ("numeric_tax_rate", "NUMERIC(5, 4)"),
-        ("total_with_tax", "NUMERIC(15, 4)"),
-    ];
+    let columns: Vec<ColumnDefinition> = vec![
+        ColumnDefinition { name: "integer_quantity".to_string(), field_type: "INTEGER".to_string() },
+        ColumnDefinition { name: "numeric_price".to_string(), field_type: "NUMERIC(10, 4)".to_string() },
+        ColumnDefinition { name: "numeric_tax_rate".to_string(), field_type: "NUMERIC(5, 4)".to_string() },
+        ColumnDefinition { name: "total_with_tax".to_string(), field_type: "NUMERIC(15, 4)".to_string() },
+    ];

     let table_id = create_test_table(&pool, schema_id, "mixed_types_calc", columns).await;
@@ -325,9 +322,9 @@ async fn test_mathematical_edge_cases(
     let pool = setup_isolated_db().await;
     let schema_id = get_default_schema_id(&pool).await;

-    let columns = vec![
-        ("test_value", "NUMERIC(15, 6)"),
-        ("result", "NUMERIC(20, 8)"),
-    ];
+    let columns: Vec<ColumnDefinition> = vec![
+        ColumnDefinition { name: "test_value".to_string(), field_type: "NUMERIC(15, 6)".to_string() },
+        ColumnDefinition { name: "result".to_string(), field_type: "NUMERIC(20, 8)".to_string() },
+    ];

     let table_id = create_test_table(&pool, schema_id, "edge_case_test", columns).await;
@@ -381,10 +378,10 @@ async fn test_comparison_operations_with_valid_types() {
     let pool = setup_isolated_db().await;
     let schema_id = get_default_schema_id(&pool).await;

-    let columns = vec![
-        ("value_a", "NUMERIC(10, 2)"),
-        ("value_b", "INTEGER"),
-        ("comparison_result", "BOOLEAN"),
-    ];
+    let columns: Vec<ColumnDefinition> = vec![
+        ColumnDefinition { name: "value_a".to_string(), field_type: "NUMERIC(10, 2)".to_string() },
+        ColumnDefinition { name: "value_b".to_string(), field_type: "INTEGER".to_string() },
+        ColumnDefinition { name: "comparison_result".to_string(), field_type: "BOOLEAN".to_string() },
+    ];

     let table_id = create_test_table(&pool, schema_id, "comparison_test", columns).await;
@@ -419,11 +416,11 @@ async fn test_nested_mathematical_expressions() {
     let pool = setup_isolated_db().await;
     let schema_id = get_default_schema_id(&pool).await;

-    let columns = vec![
-        ("x", "NUMERIC(15, 8)"),
-        ("y", "NUMERIC(15, 8)"),
-        ("z", "INTEGER"),
-        ("nested_result", "NUMERIC(25, 12)"),
-    ];
+    let columns: Vec<ColumnDefinition> = vec![
+        ColumnDefinition { name: "x".to_string(), field_type: "NUMERIC(15, 8)".to_string() },
+        ColumnDefinition { name: "y".to_string(), field_type: "NUMERIC(15, 8)".to_string() },
+        ColumnDefinition { name: "z".to_string(), field_type: "INTEGER".to_string() },
+        ColumnDefinition { name: "nested_result".to_string(), field_type: "NUMERIC(25, 12)".to_string() },
+    ];

     let table_id = create_test_table(&pool, schema_id, "nested_calc", columns).await;

View File

@@ -3,7 +3,7 @@
 use crate::common::setup_isolated_db;
 use server::table_script::handlers::post_table_script::post_table_script;
 use common::proto::komp_ac::table_script::{PostTableScriptRequest, TableScriptResponse};
-use serde_json::json;
+use common::proto::komp_ac::table_definition::ColumnDefinition;
 use sqlx::PgPool;

 /// Test utilities for table script integration testing - moved to top level for shared access
@@ -26,14 +26,9 @@ impl TableScriptTestHelper {
         }
     }

-    pub async fn create_table_with_types(&self, table_name: &str, column_definitions: Vec<(&str, &str)>) -> i64 {
-        let columns: Vec<String> = column_definitions
-            .iter()
-            .map(|(name, type_def)| format!("\"{}\" {}", name, type_def))
-            .collect();
-
-        let columns_json = json!(columns);
-        let indexes_json = json!([]);
+    pub async fn create_table_with_types(&self, table_name: &str, column_definitions: Vec<ColumnDefinition>) -> i64 {
+        let columns_json = serde_json::to_value(column_definitions).unwrap();
+        let indexes_json = serde_json::json!([]);

         sqlx::query_scalar!(
             r#"INSERT INTO table_definitions (schema_id, table_name, columns, indexes)
@@ -73,24 +68,24 @@ mod integration_tests {
             "comprehensive_table",
             vec![
                 // Supported types for math operations
-                ("integer_col", "INTEGER"),
-                ("numeric_basic", "NUMERIC(10, 2)"),
-                ("numeric_high_precision", "NUMERIC(28, 15)"),
-                ("numeric_currency", "NUMERIC(14, 4)"),
+                ColumnDefinition { name: "integer_col".to_string(), field_type: "INTEGER".to_string() },
+                ColumnDefinition { name: "numeric_basic".to_string(), field_type: "NUMERIC(10, 2)".to_string() },
+                ColumnDefinition { name: "numeric_high_precision".to_string(), field_type: "NUMERIC(28, 15)".to_string() },
+                ColumnDefinition { name: "numeric_currency".to_string(), field_type: "NUMERIC(14, 4)".to_string() },

                 // Supported but not for math operations
-                ("text_col", "TEXT"),
-                ("boolean_col", "BOOLEAN"),
+                ColumnDefinition { name: "text_col".to_string(), field_type: "TEXT".to_string() },
+                ColumnDefinition { name: "boolean_col".to_string(), field_type: "BOOLEAN".to_string() },

                 // Prohibited types entirely
-                ("bigint_col", "BIGINT"),
-                ("date_col", "DATE"),
-                ("timestamp_col", "TIMESTAMPTZ"),
+                ColumnDefinition { name: "bigint_col".to_string(), field_type: "BIGINT".to_string() },
+                ColumnDefinition { name: "date_col".to_string(), field_type: "DATE".to_string() },
+                ColumnDefinition { name: "timestamp_col".to_string(), field_type: "TIMESTAMPTZ".to_string() },

                 // Result columns of various types
-                ("result_integer", "INTEGER"),
-                ("result_numeric", "NUMERIC(15, 5)"),
-                ("result_text", "TEXT"),
+                ColumnDefinition { name: "result_integer".to_string(), field_type: "INTEGER".to_string() },
+                ColumnDefinition { name: "result_numeric".to_string(), field_type: "NUMERIC(15, 5)".to_string() },
+                ColumnDefinition { name: "result_text".to_string(), field_type: "TEXT".to_string() },
             ]
         ).await;
@@ -150,13 +145,13 @@ mod integration_tests {
         let table_id = helper.create_table_with_types(
             "precision_table",
             vec![
-                ("low_precision", "NUMERIC(5, 2)"),      // e.g., 999.99
-                ("medium_precision", "NUMERIC(10, 4)"),  // e.g., 999999.9999
-                ("high_precision", "NUMERIC(28, 15)"),   // Maximum PostgreSQL precision
-                ("currency", "NUMERIC(14, 4)"),          // Standard currency precision
-                ("percentage", "NUMERIC(5, 4)"),         // e.g., 0.9999 (99.99%)
-                ("integer_val", "INTEGER"),
-                ("result", "NUMERIC(30, 15)"),
+                ColumnDefinition { name: "low_precision".to_string(), field_type: "NUMERIC(5, 2)".to_string() },      // e.g., 999.99
+                ColumnDefinition { name: "medium_precision".to_string(), field_type: "NUMERIC(10, 4)".to_string() },  // e.g., 999999.9999
+                ColumnDefinition { name: "high_precision".to_string(), field_type: "NUMERIC(28, 15)".to_string() },   // Maximum PostgreSQL precision
+                ColumnDefinition { name: "currency".to_string(), field_type: "NUMERIC(14, 4)".to_string() },          // Standard currency precision
+                ColumnDefinition { name: "percentage".to_string(), field_type: "NUMERIC(5, 4)".to_string() },         // e.g., 0.9999 (99.99%)
+                ColumnDefinition { name: "integer_val".to_string(), field_type: "INTEGER".to_string() },
+                ColumnDefinition { name: "result".to_string(), field_type: "NUMERIC(30, 15)".to_string() },
             ]
         ).await;
@@ -202,12 +197,12 @@ mod integration_tests {
         let table_id = helper.create_table_with_types(
             "financial_instruments",
             vec![
-                ("principal", "NUMERIC(16, 2)"),          // Principal amount
-                ("annual_rate", "NUMERIC(6, 5)"),         // Interest rate (e.g., 0.05250)
-                ("years", "INTEGER"),                     // Time period
-                ("compounding_periods", "INTEGER"),       // Compounding frequency
-                ("fees", "NUMERIC(10, 2)"),               // Transaction fees
-                ("compound_interest", "NUMERIC(20, 8)"),  // Result column
+                ColumnDefinition { name: "principal".to_string(), field_type: "NUMERIC(16, 2)".to_string() },          // Principal amount
+                ColumnDefinition { name: "annual_rate".to_string(), field_type: "NUMERIC(6, 5)".to_string() },         // Interest rate (e.g., 0.05250)
+                ColumnDefinition { name: "years".to_string(), field_type: "INTEGER".to_string() },                     // Time period
+                ColumnDefinition { name: "compounding_periods".to_string(), field_type: "INTEGER".to_string() },       // Compounding frequency
+                ColumnDefinition { name: "fees".to_string(), field_type: "NUMERIC(10, 2)".to_string() },               // Transaction fees
+                ColumnDefinition { name: "compound_interest".to_string(), field_type: "NUMERIC(20, 8)".to_string() },  // Result column
             ]
         ).await;
@@ -237,9 +232,9 @@ mod integration_tests {
        let table_id = helper.create_table_with_types(
             "scientific_data",
             vec![
-                ("large_number", "NUMERIC(30, 10)"),
-                ("small_number", "NUMERIC(30, 20)"),
-                ("result", "NUMERIC(35, 25)"),
+                ColumnDefinition { name: "large_number".to_string(), field_type: "NUMERIC(30, 10)".to_string() },
+                ColumnDefinition { name: "small_number".to_string(), field_type: "NUMERIC(30, 20)".to_string() },
+                ColumnDefinition { name: "result".to_string(), field_type: "NUMERIC(35, 25)".to_string() },
             ]
         ).await;
@@ -265,8 +260,8 @@ mod integration_tests {
         let table_a_id = helper.create_table_with_types(
             "table_a",
             vec![
-                ("value_a", "NUMERIC(10, 2)"),
-                ("result_a", "NUMERIC(10, 2)"),
+                ColumnDefinition { name: "value_a".to_string(), field_type: "NUMERIC(10, 2)".to_string() },
+                ColumnDefinition { name: "result_a".to_string(), field_type: "NUMERIC(10, 2)".to_string() },
             ]
         ).await;
         println!("Created table_a with ID: {}", table_a_id);
@@ -274,8 +269,8 @@ mod integration_tests {
         let table_b_id = helper.create_table_with_types(
             "table_b",
             vec![
-                ("value_b", "NUMERIC(10, 2)"),
-                ("result_b", "NUMERIC(10, 2)"),
+                ColumnDefinition { name: "value_b".to_string(), field_type: "NUMERIC(10, 2)".to_string() },
+                ColumnDefinition { name: "result_b".to_string(), field_type: "NUMERIC(10, 2)".to_string() },
             ]
         ).await;
         println!("Created table_b with ID: {}", table_b_id);
@@ -354,10 +349,10 @@ mod integration_tests {
         let table_id = helper.create_table_with_types(
             "error_test_table",
             vec![
-                ("text_field", "TEXT"),
-                ("numeric_field", "NUMERIC(10, 2)"),
-                ("boolean_field", "BOOLEAN"),
-                ("bigint_field", "BIGINT"),
+                ColumnDefinition { name: "text_field".to_string(), field_type: "TEXT".to_string() },
+                ColumnDefinition { name: "numeric_field".to_string(), field_type: "NUMERIC(10, 2)".to_string() },
+                ColumnDefinition { name: "boolean_field".to_string(), field_type: "BOOLEAN".to_string() },
+                ColumnDefinition { name: "bigint_field".to_string(), field_type: "BIGINT".to_string() },
             ]
         ).await;
@@ -417,11 +412,11 @@ mod integration_tests {
         let table_id = helper.create_table_with_types(
             "performance_table",
             vec![
-                ("x", "NUMERIC(15, 8)"),
-                ("y", "NUMERIC(15, 8)"),
-                ("z", "NUMERIC(15, 8)"),
-                ("w", "NUMERIC(15, 8)"),
-                ("complex_result", "NUMERIC(25, 12)"),
+                ColumnDefinition { name: "x".to_string(), field_type: "NUMERIC(15, 8)".to_string() },
+                ColumnDefinition { name: "y".to_string(), field_type: "NUMERIC(15, 8)".to_string() },
+                ColumnDefinition { name: "z".to_string(), field_type: "NUMERIC(15, 8)".to_string() },
+                ColumnDefinition { name: "w".to_string(), field_type: "NUMERIC(15, 8)".to_string() },
+                ColumnDefinition { name: "complex_result".to_string(), field_type: "NUMERIC(25, 12)".to_string() },
             ]
         ).await;
@@ -456,11 +451,11 @@ mod integration_tests {
         let table_id = helper.create_table_with_types(
             "boundary_table",
             vec![
-                ("min_numeric", "NUMERIC(1, 0)"),        // Minimum: single digit, no decimal
-                ("max_numeric", "NUMERIC(1000, 999)"),   // Maximum PostgreSQL allows
-                ("zero_scale", "NUMERIC(10, 0)"),        // Integer-like numeric
-                ("max_scale", "NUMERIC(28, 28)"),        // Maximum scale
-                ("result", "NUMERIC(1000, 999)"),
+                ColumnDefinition { name: "min_numeric".to_string(), field_type: "NUMERIC(1, 0)".to_string() },        // Minimum: single digit, no decimal
+                ColumnDefinition { name: "max_numeric".to_string(), field_type: "NUMERIC(1000, 999)".to_string() },   // Maximum PostgreSQL allows
+                ColumnDefinition { name: "zero_scale".to_string(), field_type: "NUMERIC(10, 0)".to_string() },        // Integer-like numeric
+                ColumnDefinition { name: "max_scale".to_string(), field_type: "NUMERIC(28, 28)".to_string() },        // Maximum scale
+                ColumnDefinition { name: "result".to_string(), field_type: "NUMERIC(1000, 999)".to_string() },
             ]
         ).await;
@@ -495,10 +490,10 @@ mod steel_decimal_integration_tests {
         let table_id = helper.create_table_with_types(
             "test_execution_table",
             vec![
-                ("amount", "NUMERIC(10, 2)"),
-                ("quantity", "INTEGER"),
-                ("tax_rate", "NUMERIC(5, 4)"),
-                ("result", "NUMERIC(15, 4)"),  // Add a result column
+                ColumnDefinition { name: "amount".to_string(), field_type: "NUMERIC(10, 2)".to_string() },
+                ColumnDefinition { name: "quantity".to_string(), field_type: "INTEGER".to_string() },
+                ColumnDefinition { name: "tax_rate".to_string(), field_type: "NUMERIC(5, 4)".to_string() },
+                ColumnDefinition { name: "result".to_string(), field_type: "NUMERIC(15, 4)".to_string() },  // Add a result column
             ]
         ).await;
         println!("Created test table with ID: {}", table_id);
@@ -575,9 +570,9 @@ mod steel_decimal_integration_tests {
         let table_id = helper.create_table_with_types(
             "precision_test_table",
             vec![
-                ("precise_value", "NUMERIC(20, 12)"),
-                ("multiplier", "NUMERIC(20, 12)"),
-                ("result", "NUMERIC(25, 15)"),  // Add result column
+                ColumnDefinition { name: "precise_value".to_string(), field_type: "NUMERIC(20, 12)".to_string() },
+                ColumnDefinition { name: "multiplier".to_string(), field_type: "NUMERIC(20, 12)".to_string() },
+                ColumnDefinition { name: "result".to_string(), field_type: "NUMERIC(25, 15)".to_string() },  // Add result column
             ]
         ).await;
         println!("Created precision test table with ID: {}", table_id);

View File

@@ -3,7 +3,7 @@
 use crate::common::setup_isolated_db;
 use server::table_script::handlers::post_table_script::post_table_script;
 use common::proto::komp_ac::table_script::PostTableScriptRequest;
-use serde_json::json;
+use common::proto::komp_ac::table_definition::ColumnDefinition;
 use sqlx::PgPool;

 /// Helper function to create a test table with specified columns
@@ -11,15 +11,10 @@ async fn create_test_table(
     pool: &PgPool,
     schema_id: i64,
     table_name: &str,
-    columns: Vec<(&str, &str)>,
+    columns: Vec<ColumnDefinition>,
 ) -> i64 {
-    let column_definitions: Vec<String> = columns
-        .iter()
-        .map(|(name, type_def)| format!("\"{}\" {}", name, type_def))
-        .collect();
-
-    let columns_json = json!(column_definitions);
-    let indexes_json = json!([]);
+    let columns_json = serde_json::to_value(columns).unwrap();
+    let indexes_json = serde_json::json!([]);

     sqlx::query_scalar!(
         r#"INSERT INTO table_definitions (schema_id, table_name, columns, indexes)
@@ -67,7 +62,10 @@ async fn test_reject_bigint_target_column() {
         &pool,
         schema_id,
         "bigint_table",
-        vec![("name", "TEXT"), ("big_number", "BIGINT")]
+        vec![
+            ColumnDefinition { name: "name".to_string(), field_type: "TEXT".to_string() },
+            ColumnDefinition { name: "big_number".to_string(), field_type: "BIGINT".to_string() }
+        ]
     ).await;

     let request = PostTableScriptRequest {
@@ -99,7 +97,10 @@ async fn test_reject_date_target_column() {
         &pool,
         schema_id,
         "date_table",
-        vec![("name", "TEXT"), ("event_date", "DATE")]
+        vec![
+            ColumnDefinition { name: "name".to_string(), field_type: "TEXT".to_string() },
+            ColumnDefinition { name: "event_date".to_string(), field_type: "DATE".to_string() }
+        ]
     ).await;

     let request = PostTableScriptRequest {
@@ -131,7 +132,10 @@ async fn test_reject_timestamptz_target_column() {
         &pool,
         schema_id,
         "timestamp_table",
-        vec![("name", "TEXT"), ("created_time", "TIMESTAMPTZ")]
+        vec![
+            ColumnDefinition { name: "name".to_string(), field_type: "TEXT".to_string() },
+            ColumnDefinition { name: "created_time".to_string(), field_type: "TIMESTAMPTZ".to_string() }
+        ]
     ).await;

     let request = PostTableScriptRequest {
@@ -164,9 +168,9 @@ async fn test_reject_text_in_mathematical_operations() {
         schema_id,
         "text_math_table",
         vec![
-            ("description", "TEXT"),
-            ("amount", "NUMERIC(10, 2)"),
-            ("result", "NUMERIC(10, 2)")
+            ColumnDefinition { name: "description".to_string(), field_type: "TEXT".to_string() },
+            ColumnDefinition { name: "amount".to_string(), field_type: "NUMERIC(10, 2)".to_string() },
+            ColumnDefinition { name: "result".to_string(), field_type: "NUMERIC(10, 2)".to_string() }
         ]
     ).await;
@@ -202,9 +206,9 @@ async fn test_reject_boolean_in_mathematical_operations() {
         schema_id,
         "boolean_math_table",
         vec![
-            ("is_active", "BOOLEAN"),
-            ("amount", "NUMERIC(10, 2)"),
-            ("result", "NUMERIC(10, 2)")
+            ColumnDefinition { name: "is_active".to_string(), field_type: "BOOLEAN".to_string() },
+            ColumnDefinition { name: "amount".to_string(), field_type: "NUMERIC(10, 2)".to_string() },
+            ColumnDefinition { name: "result".to_string(), field_type: "NUMERIC(10, 2)".to_string() }
         ]
     ).await;
@@ -240,8 +244,8 @@ async fn test_reject_bigint_in_mathematical_operations() {
         schema_id,
         "bigint_math_table",
         vec![
-            ("big_value", "BIGINT"),
-            ("result", "NUMERIC(10, 2)")
+            ColumnDefinition { name: "big_value".to_string(), field_type: "BIGINT".to_string() },
+            ColumnDefinition { name: "result".to_string(), field_type: "NUMERIC(10, 2)".to_string() }
         ]
     ).await;
@@ -277,10 +281,10 @@ async fn test_allow_valid_script_with_allowed_types() {
         schema_id,
         "allowed_types_table",
         vec![
-            ("name", "TEXT"),
-            ("count", "INTEGER"),
-            ("amount", "NUMERIC(10, 2)"),
-            ("computed_value", "TEXT")
+            ColumnDefinition { name: "name".to_string(), field_type: "TEXT".to_string() },
+            ColumnDefinition { name: "count".to_string(), field_type: "INTEGER".to_string() },
+            ColumnDefinition { name: "amount".to_string(), field_type: "NUMERIC(10, 2)".to_string() },
+            ColumnDefinition { name: "computed_value".to_string(), field_type: "TEXT".to_string() }
         ]
     ).await;
@@ -312,9 +316,9 @@ async fn test_allow_integer_and_numeric_in_math_operations() {
         schema_id,
         "math_allowed_table",
         vec![
-            ("quantity", "INTEGER"),
-            ("price", "NUMERIC(10, 2)"),
-            ("total", "NUMERIC(12, 2)")
+            ColumnDefinition { name: "quantity".to_string(), field_type: "INTEGER".to_string() },
+            ColumnDefinition { name: "price".to_string(), field_type: "NUMERIC(10, 2)".to_string() },
+            ColumnDefinition { name: "total".to_string(), field_type: "NUMERIC(12, 2)".to_string() }
         ]
     ).await;
@@ -363,14 +367,19 @@ async fn test_script_without_table_links_should_fail() {
         &pool,
         schema_id,
         "table_a",
-        vec![("value_a", "INTEGER"), ("result", "INTEGER")]
+        vec![
+            ColumnDefinition { name: "value_a".to_string(), field_type: "INTEGER".to_string() },
+            ColumnDefinition { name: "result".to_string(), field_type: "INTEGER".to_string() }
+        ]
     ).await;

     let _table_b_id = create_test_table(
         &pool,
         schema_id,
         "table_b",
-        vec![("value_b", "INTEGER")]
+        vec![
+            ColumnDefinition { name: "value_b".to_string(), field_type: "INTEGER".to_string() }
+        ]
     ).await;

     // DON'T create a link between the tables
@@ -404,14 +413,19 @@ async fn test_script_with_table_links_should_succeed() {
         &pool,
         schema_id,
         "linked_table_a",
-        vec![("value_a", "INTEGER"), ("result", "INTEGER")]
+        vec![
+            ColumnDefinition { name: "value_a".to_string(), field_type: "INTEGER".to_string() },
+            ColumnDefinition { name: "result".to_string(), field_type: "INTEGER".to_string() }
+        ]
     ).await;

     let table_b_id = create_test_table(
         &pool,
         schema_id,
         "linked_table_b",
-        vec![("value_b", "INTEGER")]
+        vec![
+            ColumnDefinition { name: "value_b".to_string(), field_type: "INTEGER".to_string() }
+        ]
     ).await;

     // Create a link between the tables (table_a can access table_b)

View File

@@ -3,8 +3,8 @@
 use crate::common::setup_isolated_db;
 use server::table_script::handlers::post_table_script::post_table_script;
 use common::proto::komp_ac::table_script::PostTableScriptRequest;
+use common::proto::komp_ac::table_definition::ColumnDefinition;
 use rstest::*;
-use serde_json::json;
 use sqlx::PgPool;

 /// Test fixture for allowed mathematical types
@@ -76,15 +76,10 @@ async fn create_test_table(
     pool: &PgPool,
     schema_id: i64,
     table_name: &str,
-    columns: Vec<(&str, &str)>,
+    columns: Vec<ColumnDefinition>,
 ) -> i64 {
-    let column_definitions: Vec<String> = columns
-        .iter()
-        .map(|(name, type_def)| format!("\"{}\" {}", name, type_def))
-        .collect();
-
-    let columns_json = json!(column_definitions);
-    let indexes_json = json!([]);
+    let columns_json = serde_json::to_value(columns).unwrap();
+    let indexes_json = serde_json::json!([]);

     sqlx::query_scalar!(
         r#"INSERT INTO table_definitions (schema_id, table_name, columns, indexes)
@@ -123,8 +118,17 @@ async fn test_allowed_types_in_math_operations(
     let schema_id = get_default_schema_id(&pool).await;

     // Create table with all allowed mathematical types plus a result column
-    let mut columns = allowed_math_types.clone();
-    columns.push(("result", "NUMERIC(30, 15)"));
+    let mut columns: Vec<ColumnDefinition> = allowed_math_types
+        .iter()
+        .map(|(name, field_type)| ColumnDefinition {
+            name: name.to_string(),
+            field_type: field_type.to_string(),
+        })
+        .collect();
+    columns.push(ColumnDefinition {
+        name: "result".to_string(),
+        field_type: "NUMERIC(30, 15)".to_string(),
+    });

     let table_id = create_test_table(&pool, schema_id, "math_test_table", columns).await;
@@ -172,8 +176,17 @@ async fn test_prohibited_types_in_math_operations(
     let schema_id = get_default_schema_id(&pool).await;

     // Create table with prohibited types plus a valid result column
-    let mut columns = prohibited_math_types.clone();
-    columns.push(("result", "NUMERIC(15, 6)"));
+    let mut columns: Vec<ColumnDefinition> = prohibited_math_types
+        .iter()
+        .map(|(name, field_type)| ColumnDefinition {
+            name: name.to_string(),
+            field_type: field_type.to_string(),
+        })
+        .collect();
+    columns.push(ColumnDefinition {
+        name: "result".to_string(),
+        field_type: "NUMERIC(15, 6)".to_string(),
+    });

     let table_id = create_test_table(&pool, schema_id, "prohibited_math_table", columns).await;
@@ -225,8 +238,17 @@ async fn test_prohibited_target_column_types(
     let schema_id = get_default_schema_id(&pool).await;

     // Create table with prohibited target types plus some valid source columns
-    let mut columns = prohibited_target_types.clone();
-    columns.push(("amount", "NUMERIC(10, 2)"));
+    let mut columns: Vec<ColumnDefinition> = prohibited_target_types
+        .iter()
+        .map(|(name, field_type)| ColumnDefinition {
+            name: name.to_string(),
+            field_type: field_type.to_string(),
+        })
+        .collect();
+    columns.push(ColumnDefinition {
+        name: "amount".to_string(),
+        field_type: "NUMERIC(10, 2)".to_string(),
+    });

     let table_id = create_test_table(&pool, schema_id, "prohibited_target_table", columns).await;
@@ -261,7 +283,12 @@ async fn test_system_column_restrictions(#[case] target_column: &str, #[case] de
     let pool = setup_isolated_db().await;
     let schema_id = get_default_schema_id(&pool).await;

-    let columns = vec![("amount", "NUMERIC(10, 2)")];
+    let columns: Vec<ColumnDefinition> = vec![
+        ColumnDefinition {
+            name: "amount".to_string(),
+            field_type: "NUMERIC(10, 2)".to_string(),
+        }
+    ];
     let table_id = create_test_table(&pool, schema_id, "system_test_table", columns).await;

     let script = r#"(+ "10" "20")"#;
@@ -290,22 +317,22 @@ async fn test_comprehensive_type_matrix() {
     let schema_id = get_default_schema_id(&pool).await;

     // Create comprehensive table with all type categories
-    let all_columns = vec![
+    let all_columns: Vec<ColumnDefinition> = vec![
         // Allowed math types
-        ("integer_col", "INTEGER"),
-        ("numeric_col", "NUMERIC(10, 2)"),
-        ("high_precision", "NUMERIC(28, 15)"),
+        ColumnDefinition { name: "integer_col".to_string(), field_type: "INTEGER".to_string() },
+        ColumnDefinition { name: "numeric_col".to_string(), field_type: "NUMERIC(10, 2)".to_string() },
+        ColumnDefinition { name: "high_precision".to_string(), field_type: "NUMERIC(28, 15)".to_string() },

         // Prohibited math types
-        ("text_col", "TEXT"),
-        ("boolean_col", "BOOLEAN"),
-        ("bigint_col", "BIGINT"),
-        ("date_col", "DATE"),
-        ("timestamp_col", "TIMESTAMPTZ"),
+        ColumnDefinition { name: "text_col".to_string(), field_type: "TEXT".to_string() },
+        ColumnDefinition { name: "boolean_col".to_string(), field_type: "BOOLEAN".to_string() },
+        ColumnDefinition { name: "bigint_col".to_string(), field_type: "BIGINT".to_string() },
+        ColumnDefinition { name: "date_col".to_string(), field_type: "DATE".to_string() },
+        ColumnDefinition { name: "timestamp_col".to_string(), field_type: "TIMESTAMPTZ".to_string() },

         // Result columns
-        ("result_numeric", "NUMERIC(20, 8)"),
-        ("result_text", "TEXT"),
+        ColumnDefinition { name: "result_numeric".to_string(), field_type: "NUMERIC(20, 8)".to_string() },
+        ColumnDefinition { name: "result_text".to_string(), field_type: "TEXT".to_string() },
     ];

     let table_id = create_test_table(&pool, schema_id, "comprehensive_table", all_columns).await;
@@ -361,11 +388,11 @@ async fn test_complex_mathematical_expressions() {
     let pool = setup_isolated_db().await;
     let schema_id = get_default_schema_id(&pool).await;

-    let columns = vec![
-        ("principal", "NUMERIC(16, 2)"),
-        ("rate", "NUMERIC(6, 5)"),
-        ("years", "INTEGER"),
-        ("compound_result", "NUMERIC(20, 8)"),
-    ];
+    let columns: Vec<ColumnDefinition> = vec![
+        ColumnDefinition { name: "principal".to_string(), field_type: "NUMERIC(16, 2)".to_string() },
+        ColumnDefinition { name: "rate".to_string(), field_type: "NUMERIC(6, 5)".to_string() },
+        ColumnDefinition { name: "years".to_string(), field_type: "INTEGER".to_string() },
+        ColumnDefinition { name: "compound_result".to_string(), field_type: "NUMERIC(20, 8)".to_string() },
+    ];
     let table_id = create_test_table(&pool, schema_id, "financial_table", columns).await;
@@ -395,9 +422,9 @@ async fn test_nonexistent_column_reference() {
     let pool = setup_isolated_db().await;
     let schema_id = get_default_schema_id(&pool).await;

-    let columns = vec![
-        ("amount", "NUMERIC(10, 2)"),
-        ("result", "NUMERIC(10, 2)"),
-    ];
+    let columns: Vec<ColumnDefinition> = vec![
+        ColumnDefinition { name: "amount".to_string(), field_type: "NUMERIC(10, 2)".to_string() },
+        ColumnDefinition { name: "result".to_string(), field_type: "NUMERIC(10, 2)".to_string() },
+    ];
     let table_id = create_test_table(&pool, schema_id, "simple_table", columns).await;
@@ -427,9 +454,9 @@ async fn test_nonexistent_table_reference() {
     let pool = setup_isolated_db().await;
     let schema_id = get_default_schema_id(&pool).await;

-    let columns = vec![
-        ("amount", "NUMERIC(10, 2)"),
-        ("result", "NUMERIC(10, 2)"),
-    ];
+    let columns: Vec<ColumnDefinition> = vec![
+        ColumnDefinition { name: "amount".to_string(), field_type: "NUMERIC(10, 2)".to_string() },
+        ColumnDefinition { name: "result".to_string(), field_type: "NUMERIC(10, 2)".to_string() },
+    ];
     let table_id = create_test_table(&pool, schema_id, "existing_table", columns).await;

View File

@@ -5,7 +5,6 @@ use sqlx::{PgPool, Row};
 use std::collections::HashMap;
 use std::sync::Arc;
 use tokio::sync::{mpsc, Mutex};
-use serde_json::json;
 use chrono::Utc;
 use futures::future::join_all;
 use prost_types::{value::Kind, Value};
@@ -17,7 +16,7 @@ use common::proto::komp_ac::table_definition::{
     PostTableDefinitionRequest, ColumnDefinition as TableColumnDefinition, TableLink,
 };
 use common::proto::komp_ac::tables_data::{
-    DeleteTableDataRequest, DeleteTableDataResponse, PostTableDataRequest, PutTableDataRequest,
+    DeleteTableDataRequest, PostTableDataRequest, PutTableDataRequest,
 };
 use server::indexer::IndexCommand;
 use server::table_definition::handlers::post_table_definition;

View File

@@ -8,7 +8,6 @@ use server::table_definition::handlers::post_table_definition;
 use server::tables_data::handlers::get_table_data_by_position;
 use crate::common::setup_test_db;
 use chrono::Utc;
-use serde_json::json;

 #[fixture]
 async fn pool() -> PgPool {

View File

@@ -5,14 +5,12 @@ use common::proto::komp_ac::tables_data::GetTableDataRequest;
 use crate::common::setup_test_db;
 use sqlx::{PgPool, Row};
 use tonic;
-use chrono::{DateTime, Utc};
+use chrono::Utc;
 use serde_json::json;
 use std::collections::HashMap;
 use futures::future::join_all;
 use rand::distr::Alphanumeric;
 use rand::Rng;
-use rust_decimal::Decimal;
-use rust_decimal_macros::dec;
 use server::table_definition::handlers::post_table_definition;
 use server::tables_data::handlers::post_table_data;
 use common::proto::komp_ac::table_definition::{
@@ -22,7 +20,6 @@ use common::proto::komp_ac::tables_data::PostTableDataRequest;
 use prost_types::Value;
 use prost_types::value::Kind;
 use tokio::sync::mpsc;
-use server::indexer::IndexCommand;

 #[fixture]
 async fn pool() -> PgPool {
@@ -67,10 +64,10 @@ async fn table_definition(#[future] schema: (PgPool, String, i64)) -> (PgPool, S
     // Define columns and indexes for the table
     let columns = json!([
-        "\"name\" TEXT",
-        "\"age\" INTEGER",
-        "\"email\" TEXT",
-        "\"is_active\" BOOLEAN"
+        { "name": "name", "field_type": "text" },
+        { "name": "age", "field_type": "integer" },
+        { "name": "email", "field_type": "text" },
+        { "name": "is_active", "field_type": "boolean" }
     ]);
     let indexes = json!([]);

View File

@@ -795,7 +795,7 @@ async fn test_retrieve_from_nonexistent_schema() {
 #[rstest]
 #[tokio::test]
 async fn test_retrieve_with_database_connection_error() {
-    let mut closed_pool = setup_test_db().await;
+    let closed_pool = setup_test_db().await;
     closed_pool.close().await;

     let request = GetTableDataRequest {

View File

@@ -20,7 +20,6 @@ use server::indexer::IndexCommand;
use sqlx::Row; use sqlx::Row;
use rand::distr::Alphanumeric; use rand::distr::Alphanumeric;
use rand::Rng; use rand::Rng;
use rust_decimal::prelude::FromPrimitive;
// Helper function to generate unique identifiers for test isolation // Helper function to generate unique identifiers for test isolation
fn generate_unique_id() -> String { fn generate_unique_id() -> String {
@@ -154,7 +153,7 @@ async fn test_context() -> TestContext {
#[fixture] #[fixture]
async fn closed_test_context() -> TestContext { async fn closed_test_context() -> TestContext {
let mut context = test_context().await; let context = test_context().await;
context.pool.close().await; context.pool.close().await;
context context
} }
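
Dropping mut here matches sqlx's API: Pool::close takes &self, so an immutable binding suffices. A minimal sketch:

use sqlx::PgPool;

async fn close_pool(pool: PgPool) {
    // close(&self) needs only a shared reference, hence no `mut` binding.
    pool.close().await;
    assert!(pool.is_closed());
}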

View File

@@ -91,7 +91,7 @@ async fn create_initial_record(
// Set different initial values based on the test case to satisfy validation scripts // Set different initial values based on the test case to satisfy validation scripts
match (profile_name, table_name) { match (profile_name, table_name) {
("test_put_complex", "order") => { ("test_put_complex", "order") => {
// For complex formula: (+ (* @price @quantity) (* (* @price @quantity) 0.08)) // For complex formula: (+ (* $price $quantity) (* (* $price $quantity) 0.08))
// With price=10.00, quantity=1: (10*1) + (10*1*0.08) = 10 + 0.8 = 10.8 // With price=10.00, quantity=1: (10*1) + (10*1*0.08) = 10 + 0.8 = 10.8
data.insert("price".to_string(), ProtoValue { kind: Some(Kind::StringValue("10.00".to_string())) }); data.insert("price".to_string(), ProtoValue { kind: Some(Kind::StringValue("10.00".to_string())) });
data.insert("quantity".to_string(), ProtoValue { kind: Some(Kind::NumberValue(1.0)) }); data.insert("quantity".to_string(), ProtoValue { kind: Some(Kind::NumberValue(1.0)) });
@@ -99,7 +99,7 @@ async fn create_initial_record(
data.insert("percentage".to_string(), ProtoValue { kind: Some(Kind::StringValue("100.00".to_string())) }); data.insert("percentage".to_string(), ProtoValue { kind: Some(Kind::StringValue("100.00".to_string())) });
}, },
("test_put_division", "calculation") => { ("test_put_division", "calculation") => {
// For division: (/ @total @price) // For division: (/ $total $price)
// With total=10.00, price=10.00: 10/10 = 1 // With total=10.00, price=10.00: 10/10 = 1
data.insert("price".to_string(), ProtoValue { kind: Some(Kind::StringValue("10.00".to_string())) }); data.insert("price".to_string(), ProtoValue { kind: Some(Kind::StringValue("10.00".to_string())) });
data.insert("quantity".to_string(), ProtoValue { kind: Some(Kind::NumberValue(1.0)) }); data.insert("quantity".to_string(), ProtoValue { kind: Some(Kind::NumberValue(1.0)) });
@@ -142,7 +142,7 @@ async fn test_put_basic_arithmetic_validation_success(pool: PgPool) {
let script_request = PostTableScriptRequest { let script_request = PostTableScriptRequest {
table_definition_id: table_def_id, table_definition_id: table_def_id,
target_column: "total".to_string(), target_column: "total".to_string(),
script: "(* @price @quantity)".to_string(), script: "(* $price $quantity)".to_string(),
description: "Total = Price × Quantity".to_string(), description: "Total = Price × Quantity".to_string(),
}; };
post_table_script(&pool, script_request).await.unwrap(); post_table_script(&pool, script_request).await.unwrap();
@@ -180,7 +180,7 @@ async fn test_put_basic_arithmetic_validation_failure(pool: PgPool) {
let script_request = PostTableScriptRequest { let script_request = PostTableScriptRequest {
table_definition_id: table_def_id, table_definition_id: table_def_id,
target_column: "total".to_string(), target_column: "total".to_string(),
script: "(* @price @quantity)".to_string(), script: "(* $price $quantity)".to_string(),
description: "Total = Price × Quantity".to_string(), description: "Total = Price × Quantity".to_string(),
}; };
post_table_script(&pool, script_request).await.unwrap(); post_table_script(&pool, script_request).await.unwrap();
@@ -224,7 +224,7 @@ async fn test_put_complex_formula_validation(pool: PgPool) {
let script_request = PostTableScriptRequest { let script_request = PostTableScriptRequest {
table_definition_id: table_def_id, table_definition_id: table_def_id,
target_column: "total".to_string(), target_column: "total".to_string(),
script: "(+ (* @price @quantity) (* (* @price @quantity) 0.08))".to_string(), script: "(+ (* $price $quantity) (* (* $price $quantity) 0.08))".to_string(),
description: "Total with 8% tax".to_string(), description: "Total with 8% tax".to_string(),
}; };
post_table_script(&pool, script_request).await.unwrap(); post_table_script(&pool, script_request).await.unwrap();
@@ -261,7 +261,7 @@ async fn test_put_division_with_precision(pool: PgPool) {
let script_request = PostTableScriptRequest { let script_request = PostTableScriptRequest {
table_definition_id: table_def_id, table_definition_id: table_def_id,
target_column: "percentage".to_string(), target_column: "percentage".to_string(),
script: "(/ @total @price)".to_string(), script: "(/ $total $price)".to_string(),
description: "Percentage = Total / Price".to_string(), description: "Percentage = Total / Price".to_string(),
}; };
post_table_script(&pool, script_request).await.unwrap(); post_table_script(&pool, script_request).await.unwrap();
@@ -326,7 +326,7 @@ async fn test_put_advanced_math_functions(pool: PgPool) {
let sqrt_script = PostTableScriptRequest { let sqrt_script = PostTableScriptRequest {
table_definition_id: table_row.id, table_definition_id: table_row.id,
target_column: "square_root".to_string(), target_column: "square_root".to_string(),
script: "(sqrt @input)".to_string(), script: "(sqrt $input)".to_string(),
description: "Square root validation".to_string(), description: "Square root validation".to_string(),
}; };
post_table_script(&pool, sqrt_script).await.unwrap(); post_table_script(&pool, sqrt_script).await.unwrap();
@@ -334,7 +334,7 @@ async fn test_put_advanced_math_functions(pool: PgPool) {
let power_script = PostTableScriptRequest { let power_script = PostTableScriptRequest {
table_definition_id: table_row.id, table_definition_id: table_row.id,
target_column: "power_result".to_string(), target_column: "power_result".to_string(),
script: "(^ @input 2.0)".to_string(), script: "(^ $input 2.0)".to_string(),
description: "Power function validation".to_string(), description: "Power function validation".to_string(),
}; };
post_table_script(&pool, power_script).await.unwrap(); post_table_script(&pool, power_script).await.unwrap();
@@ -389,7 +389,7 @@ async fn test_put_financial_calculations(pool: PgPool) {
let compound_script = PostTableScriptRequest { let compound_script = PostTableScriptRequest {
table_definition_id: table_row.id, table_definition_id: table_row.id,
target_column: "compound_result".to_string(), target_column: "compound_result".to_string(),
script: "(* @principal (^ (+ 1.0 @rate) @time))".to_string(), script: "(* $principal (^ (+ 1.0 $rate) $time))".to_string(),
description: "Compound interest calculation".to_string(), description: "Compound interest calculation".to_string(),
}; };
post_table_script(&pool, compound_script).await.unwrap(); post_table_script(&pool, compound_script).await.unwrap();
@@ -397,7 +397,7 @@ async fn test_put_financial_calculations(pool: PgPool) {
let percentage_script = PostTableScriptRequest { let percentage_script = PostTableScriptRequest {
table_definition_id: table_row.id, table_definition_id: table_row.id,
target_column: "percentage_result".to_string(), target_column: "percentage_result".to_string(),
script: "(* @principal @rate)".to_string(), script: "(* $principal $rate)".to_string(),
description: "Percentage calculation".to_string(), description: "Percentage calculation".to_string(),
}; };
post_table_script(&pool, percentage_script).await.unwrap(); post_table_script(&pool, percentage_script).await.unwrap();
@@ -441,15 +441,13 @@ async fn test_put_partial_update_with_validation(pool: PgPool) {
let script_request = PostTableScriptRequest { let script_request = PostTableScriptRequest {
table_definition_id: table_def_id, table_definition_id: table_def_id,
target_column: "total".to_string(), target_column: "total".to_string(),
script: "(* @price @quantity)".to_string(), script: r#"( * (get-var "price") (get-var "quantity") )"#.to_string(),
description: "Total = Price × Quantity".to_string(), description: "Total = Price × Quantity".to_string(),
}; };
post_table_script(&pool, script_request).await.unwrap(); post_table_script(&pool, script_request).await.unwrap();
let record_id = create_initial_record(&pool, "test_put_partial", "invoice", &indexer_tx).await; let record_id = create_initial_record(&pool, "test_put_partial", "invoice", &indexer_tx).await;
// Partial update: only update quantity. The script detects this would change total
// from 10.00 to 50.00 and requires the user to include 'total' in the update.
let mut update_data = HashMap::new(); let mut update_data = HashMap::new();
update_data.insert("quantity".to_string(), ProtoValue { update_data.insert("quantity".to_string(), ProtoValue {
kind: Some(Kind::NumberValue(5.0)), kind: Some(Kind::NumberValue(5.0)),
@@ -462,16 +460,16 @@ async fn test_put_partial_update_with_validation(pool: PgPool) {
data: update_data, data: update_data,
}; };
// This should fail because script would change total value // This should fail because script would change total value (Case B: implicit change detection)
let result = put_table_data(&pool, put_request, &indexer_tx).await; let result = put_table_data(&pool, put_request, &indexer_tx).await;
assert!(result.is_err()); assert!(result.is_err());
let error = result.unwrap_err(); let error = result.unwrap_err();
assert_eq!(error.code(), tonic::Code::FailedPrecondition); assert_eq!(error.code(), tonic::Code::FailedPrecondition);
assert!(error.message().contains("Script for column 'total' was triggered")); let msg = error.message();
assert!(error.message().contains("from '10.00' to '50.00'")); assert!(msg.contains("Script for column 'total' was triggered"));
assert!(msg.contains("from '10.00' to '50.00'"));
assert!(msg.contains("include 'total' in your update request")); // Full change detection msg
// Now, test a partial update that SHOULD fail validation.
// We update quantity and provide an incorrect total.
let mut failing_update_data = HashMap::new(); let mut failing_update_data = HashMap::new();
failing_update_data.insert("quantity".to_string(), ProtoValue { failing_update_data.insert("quantity".to_string(), ProtoValue {
kind: Some(Kind::NumberValue(3.0)), kind: Some(Kind::NumberValue(3.0)),
@@ -491,8 +489,9 @@ async fn test_put_partial_update_with_validation(pool: PgPool) {
assert!(result.is_err()); assert!(result.is_err());
let error = result.unwrap_err(); let error = result.unwrap_err();
assert_eq!(error.code(), tonic::Code::InvalidArgument); assert_eq!(error.code(), tonic::Code::InvalidArgument);
assert!(error.message().contains("Script calculated '30.00'")); let msg = error.message();
assert!(error.message().contains("but user provided '99.99'")); assert!(msg.contains("Script calculated '30.00'"));
assert!(msg.contains("but user provided '99.99'"));
} }
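
The two failure modes exercised here (FailedPrecondition when a triggered script would change a column the caller omitted, InvalidArgument when the caller supplies a value that disagrees with the script) suggest a comparison step shaped like the sketch below. This is one plausible reconstruction for illustration, not the server's actual implementation; all names are hypothetical.

use rust_decimal::Decimal;

enum ScriptCheck {
    Ok,
    // The script would change a column the caller did not include.
    ImplicitChange { column: String, from: Decimal, to: Decimal },
    // The caller included the column but with a value differing from the script's result.
    Mismatch { column: String, calculated: Decimal, provided: Decimal },
}

fn check_script_result(
    column: &str,
    stored: Decimal,
    calculated: Decimal,
    provided: Option<Decimal>,
) -> ScriptCheck {
    match provided {
        None if calculated != stored => ScriptCheck::ImplicitChange {
            column: column.to_string(), from: stored, to: calculated,
        },
        Some(p) if p != calculated => ScriptCheck::Mismatch {
            column: column.to_string(), calculated, provided: p,
        },
        _ => ScriptCheck::Ok,
    }
}

Mapping ImplicitChange to tonic::Code::FailedPrecondition and Mismatch to tonic::Code::InvalidArgument would reproduce the two assertions above.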
#[sqlx::test] #[sqlx::test]
@@ -553,7 +552,7 @@ async fn test_put_steel_script_error_handling(pool: PgPool) {
let script_request = PostTableScriptRequest { let script_request = PostTableScriptRequest {
table_definition_id: table_def_id, table_definition_id: table_def_id,
target_column: "total".to_string(), target_column: "total".to_string(),
script: "(/ @price 0.0)".to_string(), script: "(/ $price 0.0)".to_string(),
description: "Error test".to_string(), description: "Error test".to_string(),
}; };
post_table_script(&pool, script_request).await.unwrap(); post_table_script(&pool, script_request).await.unwrap();
@@ -623,7 +622,7 @@ async fn test_decimal_precision_behavior(pool: PgPool) {
let script_request = PostTableScriptRequest { let script_request = PostTableScriptRequest {
table_definition_id: table_row.id, table_definition_id: table_row.id,
target_column: "result".to_string(), target_column: "result".to_string(),
script: "(/ @dividend @divisor)".to_string(), script: "(/ $dividend $divisor)".to_string(),
description: "Division test for precision".to_string(), description: "Division test for precision".to_string(),
}; };
post_table_script(&pool, script_request).await.unwrap(); post_table_script(&pool, script_request).await.unwrap();
@@ -816,7 +815,7 @@ async fn test_put_complex_formula_validation_via_handlers(pool: PgPool) {
"test_put_complex_handlers", "test_put_complex_handlers",
"order", "order",
"total", "total",
"(+ (* @price @quantity) (* (* @price @quantity) 0.08))", // Total with 8% tax "(+ (* $price $quantity) (* (* $price $quantity) 0.08))", // Total with 8% tax
) )
.await .await
.expect("Failed to add validation script"); .expect("Failed to add validation script");
@@ -891,7 +890,7 @@ async fn test_put_basic_arithmetic_validation_via_handlers(pool: PgPool) {
"test_put_arithmetic_handlers", "test_put_arithmetic_handlers",
"invoice", "invoice",
"total", "total",
"(* @price @quantity)", // Simple: Total = Price × Quantity "(* $price $quantity)", // Simple: Total = Price × Quantity
) )
.await .await
.expect("Failed to add validation script"); .expect("Failed to add validation script");
@@ -955,7 +954,7 @@ async fn test_put_arithmetic_validation_failure_via_handlers(pool: PgPool) {
"test_put_arithmetic_fail_handlers", "test_put_arithmetic_fail_handlers",
"invoice", "invoice",
"total", "total",
"(* @price @quantity)", "(* $price $quantity)",
) )
.await .await
.expect("Failed to add validation script"); .expect("Failed to add validation script");

View File

@@ -5,7 +5,7 @@
// ======================================================================== // ========================================================================
// Additional imports needed for these tests // Additional imports needed for these tests
use chrono::{DateTime, Utc}; use chrono::Utc;
use rust_decimal::Decimal; use rust_decimal::Decimal;
use std::str::FromStr; use std::str::FromStr;

View File

@@ -8,7 +8,7 @@
// This is needed for the database error test. // This is needed for the database error test.
#[fixture] #[fixture]
async fn closed_test_context() -> TestContext { async fn closed_test_context() -> TestContext {
let mut context = test_context().await; let context = test_context().await;
context.pool.close().await; context.pool.close().await;
context context
} }