// src/tables_data/handlers/put_table_data.rs

use tonic::Status;
use sqlx::{PgPool, Arguments};
use sqlx::postgres::PgArguments;
use chrono::{DateTime, Utc};
use common::proto::multieko2::tables_data::{PutTableDataRequest, PutTableDataResponse};
use std::collections::HashMap;
use std::sync::Arc;
use prost_types::value::Kind;
use rust_decimal::Decimal;
use std::str::FromStr;

use crate::steel::server::execution::{self, Value};
use crate::steel::server::functions::SteelContext;
use crate::indexer::{IndexCommand, IndexCommandData};
use tokio::sync::mpsc;
use tracing::error;
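
/// Handles a PUT for a dynamically defined table: validates the supplied
/// fields against the table definition and any Steel validation scripts,
/// builds a typed, parameterized UPDATE, and queues the row for re-indexing.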
pub async fn put_table_data(
    db_pool: &PgPool,
    request: PutTableDataRequest,
    indexer_tx: &mpsc::Sender<IndexCommand>,
) -> Result<PutTableDataResponse, Status> {
    let profile_name = request.profile_name;
    let table_name = request.table_name;
    let record_id = request.id;

    // An update with no fields is a no-op; we can return success early.
    if request.data.is_empty() {
        return Ok(PutTableDataResponse {
            success: true,
            message: "No fields to update.".into(),
            updated_id: record_id,
        });
    }

    // --- Start of logic copied and adapted from post_table_data ---

    let schema = sqlx::query!(
        "SELECT id FROM schemas WHERE name = $1",
        profile_name
    )
    .fetch_optional(db_pool)
    .await
    .map_err(|e| Status::internal(format!("Profile lookup error: {}", e)))?;

    let schema_id = schema.ok_or_else(|| Status::not_found("Profile not found"))?.id;

    let table_def = sqlx::query!(
        r#"SELECT id, columns FROM table_definitions
           WHERE schema_id = $1 AND table_name = $2"#,
        schema_id,
        table_name
    )
    .fetch_optional(db_pool)
    .await
    .map_err(|e| Status::internal(format!("Table lookup error: {}", e)))?;

    let table_def = table_def.ok_or_else(|| Status::not_found("Table not found"))?;

    let columns_json: Vec<String> = serde_json::from_value(table_def.columns.clone())
        .map_err(|e| Status::internal(format!("Column parsing error: {}", e)))?;
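
    // Each stored column definition is a `"name" SQL_TYPE` string; split it
    // into a (name, sql_type) pair for validation and parameter binding below.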
    let mut columns = Vec::new();
    for col_def in columns_json {
        let parts: Vec<&str> = col_def.splitn(2, ' ').collect();
        if parts.len() != 2 {
            return Err(Status::internal("Invalid column format"));
        }
        let name = parts[0].trim_matches('"').to_string();
        let sql_type = parts[1].to_string();
        columns.push((name, sql_type));
    }
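
    // Besides the user-defined columns, the soft-delete flag and the generated
    // `<linked_table>_id` foreign-key columns may be updated directly.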
    let fk_columns = sqlx::query!(
        r#"SELECT ltd.table_name
           FROM table_definition_links tdl
           JOIN table_definitions ltd ON tdl.linked_table_id = ltd.id
           WHERE tdl.source_table_id = $1"#,
        table_def.id
    )
    .fetch_all(db_pool)
    .await
    .map_err(|e| Status::internal(format!("Foreign key lookup error: {}", e)))?;

    let mut system_columns = vec!["deleted".to_string()];
    for fk in fk_columns {
        system_columns.push(format!("{}_id", fk.table_name));
    }

    let system_columns_set: std::collections::HashSet<_> =
        system_columns.iter().map(|s| s.as_str()).collect();

    let user_columns: Vec<&String> = columns.iter().map(|(name, _)| name).collect();
    for key in request.data.keys() {
        if !system_columns_set.contains(key.as_str()) && !user_columns.contains(&key) {
            return Err(Status::invalid_argument(format!("Invalid column: {}", key)));
        }
    }
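
    // Build a string view of the incoming values for Steel script validation;
    // null and empty values are skipped.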
    let mut string_data_for_scripts = HashMap::new();
    for (key, proto_value) in &request.data {
        let str_val = match &proto_value.kind {
            Some(Kind::StringValue(s)) => {
                let trimmed = s.trim();
                if trimmed.is_empty() {
                    continue;
                }
                trimmed.to_string()
            },
            Some(Kind::NumberValue(n)) => n.to_string(),
            Some(Kind::BoolValue(b)) => b.to_string(),
            Some(Kind::NullValue(_)) | None => continue,
            Some(Kind::StructValue(_)) | Some(Kind::ListValue(_)) => {
                return Err(Status::invalid_argument(format!("Unsupported type for script validation in column '{}'", key)));
            }
        };
        string_data_for_scripts.insert(key.clone(), str_val);
    }
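
    // Every Steel script attached to this table must reproduce the value the
    // client supplied for its target column, otherwise the update is rejected.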
    let scripts = sqlx::query!(
        "SELECT target_column, script FROM table_scripts WHERE table_definitions_id = $1",
        table_def.id
    )
    .fetch_all(db_pool)
    .await
    .map_err(|e| Status::internal(format!("Failed to fetch scripts: {}", e)))?;

    for script_record in scripts {
        let target_column = script_record.target_column;

        if let Some(user_value) = string_data_for_scripts.get(&target_column) {
            let context = SteelContext {
                current_table: table_name.clone(),
                schema_id,
                schema_name: profile_name.clone(),
                row_data: string_data_for_scripts.clone(),
                db_pool: Arc::new(db_pool.clone()),
            };

            let script_result = execution::execute_script(
                script_record.script,
                "STRINGS",
                Arc::new(db_pool.clone()),
                context,
            )
            .map_err(|e| Status::invalid_argument(
                format!("Script execution failed for '{}': {}", target_column, e)
            ))?;

            let Value::Strings(mut script_output) = script_result else {
                return Err(Status::internal("Script must return string values"));
            };

            let expected_value = script_output.pop()
                .ok_or_else(|| Status::internal("Script returned no values"))?;

            if user_value != &expected_value {
                return Err(Status::invalid_argument(format!(
                    "Validation failed for column '{}': Expected '{}', Got '{}'",
                    target_column, expected_value, user_value
                )));
            }
        }
    }
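
    // Build the SET clause, binding each value with the SQL type declared for
    // its column; nulls and type mismatches are handled per type below.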
    let mut params = PgArguments::default();
    let mut set_clauses = Vec::new();
    let mut param_idx = 1;

    for (col, proto_value) in request.data {
        let sql_type = if system_columns_set.contains(col.as_str()) {
            match col.as_str() {
                "deleted" => "BOOLEAN",
                _ if col.ends_with("_id") => "BIGINT",
                _ => return Err(Status::invalid_argument("Invalid system column")),
            }
        } else {
            columns.iter()
                .find(|(name, _)| name == &col)
                .map(|(_, sql_type)| sql_type.as_str())
                .ok_or_else(|| Status::invalid_argument(format!("Column not found: {}", col)))?
        };

        let kind = match &proto_value.kind {
            None | Some(Kind::NullValue(_)) => {
                match sql_type {
                    "BOOLEAN" => params.add(None::<bool>),
                    "TEXT" => params.add(None::<String>),
                    "TIMESTAMPTZ" => params.add(None::<DateTime<Utc>>),
                    "BIGINT" => params.add(None::<i64>),
                    "INTEGER" => params.add(None::<i32>),
                    s if s.starts_with("NUMERIC") => params.add(None::<Decimal>),
                    _ => return Err(Status::invalid_argument(format!("Unsupported type for null value: {}", sql_type))),
                }.map_err(|e| Status::internal(format!("Failed to add null parameter for {}: {}", col, e)))?;

                set_clauses.push(format!("\"{}\" = ${}", col, param_idx));
                param_idx += 1;
                continue;
            }
            Some(k) => k,
        };

        if sql_type == "TEXT" {
            if let Kind::StringValue(value) = kind {
                let trimmed_value = value.trim();

                if trimmed_value.is_empty() {
                    params.add(None::<String>).map_err(|e| Status::internal(format!("Failed to add null parameter for {}: {}", col, e)))?;
                } else {
                    if col == "telefon" && trimmed_value.len() > 15 {
                        return Err(Status::internal(format!("Value too long for {}", col)));
                    }
                    params.add(trimmed_value).map_err(|e| Status::invalid_argument(format!("Failed to add text parameter for {}: {}", col, e)))?;
                }
            } else {
                return Err(Status::invalid_argument(format!("Expected string for column '{}'", col)));
            }
        } else if sql_type == "BOOLEAN" {
            if let Kind::BoolValue(val) = kind {
                params.add(val).map_err(|e| Status::invalid_argument(format!("Failed to add boolean parameter for {}: {}", col, e)))?;
            } else {
                return Err(Status::invalid_argument(format!("Expected boolean for column '{}'", col)));
            }
        } else if sql_type == "TIMESTAMPTZ" {
            if let Kind::StringValue(value) = kind {
                let dt = DateTime::parse_from_rfc3339(value).map_err(|_| Status::invalid_argument(format!("Invalid timestamp for {}", col)))?;
                params.add(dt.with_timezone(&Utc)).map_err(|e| Status::invalid_argument(format!("Failed to add timestamp parameter for {}: {}", col, e)))?;
            } else {
                return Err(Status::invalid_argument(format!("Expected ISO 8601 string for column '{}'", col)));
            }
        } else if sql_type == "BIGINT" {
            if let Kind::NumberValue(val) = kind {
                if val.fract() != 0.0 {
                    return Err(Status::invalid_argument(format!("Expected integer for column '{}', but got a float", col)));
                }
                let as_i64 = *val as i64;
                if (as_i64 as f64) != *val {
                    return Err(Status::invalid_argument(format!("Integer value out of range for BIGINT column '{}'", col)));
                }
                params.add(as_i64).map_err(|e| Status::invalid_argument(format!("Failed to add bigint parameter for {}: {}", col, e)))?;
            } else {
                return Err(Status::invalid_argument(format!("Expected number for column '{}'", col)));
            }
        } else if sql_type == "INTEGER" {
            if let Kind::NumberValue(val) = kind {
                if val.fract() != 0.0 {
                    return Err(Status::invalid_argument(format!("Expected integer for column '{}', but got a float", col)));
                }
                let as_i32 = *val as i32;
                if (as_i32 as f64) != *val {
                    return Err(Status::invalid_argument(format!("Integer value out of range for INTEGER column '{}'", col)));
                }
                params.add(as_i32).map_err(|e| Status::invalid_argument(format!("Failed to add integer parameter for {}: {}", col, e)))?;
            } else {
                return Err(Status::invalid_argument(format!("Expected number for column '{}'", col)));
            }
        } else if sql_type.starts_with("NUMERIC") {
            let decimal_val = match kind {
                Kind::StringValue(s) => {
                    let trimmed = s.trim();
                    if trimmed.is_empty() {
                        None
                    } else {
                        Some(Decimal::from_str(trimmed).map_err(|_| {
                            Status::invalid_argument(format!(
                                "Invalid decimal string format for column '{}': {}",
                                col, s
                            ))
                        })?)
                    }
                }
                _ => {
                    return Err(Status::invalid_argument(format!(
                        "Expected a string representation for decimal column '{}', but received a different type.",
                        col
                    )));
                }
            };
            params.add(decimal_val).map_err(|e| {
                Status::invalid_argument(format!(
                    "Failed to add decimal parameter for {}: {}",
                    col, e
                ))
            })?;
        } else {
            return Err(Status::invalid_argument(format!("Unsupported type {}", sql_type)));
        }

        set_clauses.push(format!("\"{}\" = ${}", col, param_idx));
        param_idx += 1;
    }

    // --- End of copied logic ---

    if set_clauses.is_empty() {
        return Ok(PutTableDataResponse {
            success: true,
            message: "No valid fields to update after processing.".into(),
            updated_id: record_id,
        });
    }
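
    // Qualify the table with its schema, then run a single UPDATE that returns
    // the id of the affected row.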
    let qualified_table = crate::shared::schema_qualifier::qualify_table_name_for_data(
        db_pool,
        &profile_name,
        &table_name,
    )
    .await?;

    let set_clause = set_clauses.join(", ");
    let sql = format!(
        "UPDATE {} SET {} WHERE id = ${} RETURNING id",
        qualified_table,
        set_clause,
        param_idx
    );

    params.add(record_id).map_err(|e| Status::internal(format!("Failed to add record_id parameter: {}", e)))?;

    let result = sqlx::query_scalar_with::<_, i64, _>(&sql, params)
        .fetch_optional(db_pool)
        .await;
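
    // No returned row means the id does not exist. PostgreSQL error codes
    // 22P02 (invalid text representation) and 22003 (numeric value out of
    // range) are surfaced to the client as invalid_argument.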
    let updated_id = match result {
        Ok(Some(id)) => id,
        Ok(None) => return Err(Status::not_found("Record not found")),
        Err(e) => {
            if let Some(db_err) = e.as_database_error() {
                if db_err.code() == Some(std::borrow::Cow::Borrowed("22P02")) ||
                   db_err.code() == Some(std::borrow::Cow::Borrowed("22003")) {
                    return Err(Status::invalid_argument(format!(
                        "Numeric field overflow or invalid format. Check precision and scale. Details: {}", db_err.message()
                    )));
                }
            }
            return Err(Status::internal(format!("Update failed: {}", e)));
        }
    };
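
    // Queue the updated row for re-indexing; an indexing failure is logged but
    // does not fail the request, since the database update already succeeded.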
    let command = IndexCommand::AddOrUpdate(IndexCommandData {
        table_name: table_name.clone(),
        row_id: updated_id,
    });

    if let Err(e) = indexer_tx.send(command).await {
        error!(
            "CRITICAL: DB update for table '{}' (id: {}) succeeded but failed to queue for indexing: {}. Search index is now inconsistent.",
            table_name, updated_id, e
        );
    }

    Ok(PutTableDataResponse {
        success: true,
        message: "Data updated successfully".into(),
        updated_id,
    })
}