Compare commits
16 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
ed786f087c | ||
|
|
8e22ea05ff | ||
|
|
8414657224 | ||
|
|
e25213ed1b | ||
|
|
4843b0778c | ||
|
|
f5fae98c69 | ||
|
|
6faf0a4a31 | ||
|
|
011fafc0ff | ||
|
|
8ebe74484c | ||
|
|
3eb9523103 | ||
|
|
3dfa922b9e | ||
|
|
248d54a30f | ||
|
|
b30fef4ccd | ||
|
|
a9c4527318 | ||
|
|
c31f08d5b8 | ||
|
|
9e0fa9ddb1 |
4
Cargo.lock
generated
4
Cargo.lock
generated
@@ -449,6 +449,7 @@ dependencies = [
|
|||||||
"dotenvy",
|
"dotenvy",
|
||||||
"lazy_static",
|
"lazy_static",
|
||||||
"prost",
|
"prost",
|
||||||
|
"prost-types",
|
||||||
"ratatui",
|
"ratatui",
|
||||||
"serde",
|
"serde",
|
||||||
"serde_json",
|
"serde_json",
|
||||||
@@ -487,6 +488,7 @@ name = "common"
|
|||||||
version = "0.3.13"
|
version = "0.3.13"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"prost",
|
"prost",
|
||||||
|
"prost-types",
|
||||||
"serde",
|
"serde",
|
||||||
"tantivy",
|
"tantivy",
|
||||||
"tonic",
|
"tonic",
|
||||||
@@ -2843,6 +2845,8 @@ dependencies = [
|
|||||||
"jsonwebtoken",
|
"jsonwebtoken",
|
||||||
"lazy_static",
|
"lazy_static",
|
||||||
"prost",
|
"prost",
|
||||||
|
"prost-types",
|
||||||
|
"rand 0.9.1",
|
||||||
"regex",
|
"regex",
|
||||||
"rstest",
|
"rstest",
|
||||||
"rust-stemmers",
|
"rust-stemmers",
|
||||||
|
|||||||
@@ -24,6 +24,7 @@ tokio = { version = "1.44.2", features = ["full"] }
|
|||||||
tonic = "0.13.0"
|
tonic = "0.13.0"
|
||||||
prost = "0.13.5"
|
prost = "0.13.5"
|
||||||
async-trait = "0.1.88"
|
async-trait = "0.1.88"
|
||||||
|
prost-types = "0.13.0"
|
||||||
|
|
||||||
# Data Handling & Serialization
|
# Data Handling & Serialization
|
||||||
serde = { version = "1.0.219", features = ["derive"] }
|
serde = { version = "1.0.219", features = ["derive"] }
|
||||||
|
|||||||
@@ -9,6 +9,7 @@ anyhow = "1.0.98"
|
|||||||
async-trait = "0.1.88"
|
async-trait = "0.1.88"
|
||||||
common = { path = "../common" }
|
common = { path = "../common" }
|
||||||
|
|
||||||
|
prost-types = { workspace = true }
|
||||||
crossterm = "0.28.1"
|
crossterm = "0.28.1"
|
||||||
dirs = "6.0.0"
|
dirs = "6.0.0"
|
||||||
dotenvy = "0.15.7"
|
dotenvy = "0.15.7"
|
||||||
|
|||||||
@@ -1,30 +1,18 @@
|
|||||||
// src/components/common/autocomplete.rs
|
// src/components/common/autocomplete.rs
|
||||||
|
|
||||||
use common::proto::multieko2::search::search_response::Hit;
|
|
||||||
use crate::config::colors::themes::Theme;
|
use crate::config::colors::themes::Theme;
|
||||||
|
use crate::state::pages::form::FormState;
|
||||||
|
use common::proto::multieko2::search::search_response::Hit;
|
||||||
use ratatui::{
|
use ratatui::{
|
||||||
layout::Rect,
|
layout::Rect,
|
||||||
style::{Color, Modifier, Style},
|
style::{Color, Modifier, Style},
|
||||||
widgets::{Block, List, ListItem, ListState},
|
widgets::{Block, List, ListItem, ListState},
|
||||||
Frame,
|
Frame,
|
||||||
};
|
};
|
||||||
use std::collections::HashMap;
|
|
||||||
use unicode_width::UnicodeWidthStr;
|
use unicode_width::UnicodeWidthStr;
|
||||||
|
|
||||||
/// Converts a serde_json::Value into a displayable String.
|
|
||||||
/// Handles String, Number, and Bool variants. Returns an empty string for Null and others.
|
|
||||||
fn json_value_to_string(value: &serde_json::Value) -> String {
|
|
||||||
match value {
|
|
||||||
serde_json::Value::String(s) => s.clone(),
|
|
||||||
serde_json::Value::Number(n) => n.to_string(),
|
|
||||||
serde_json::Value::Bool(b) => b.to_string(),
|
|
||||||
// Return an empty string for Null, Array, or Object so we can filter them out.
|
|
||||||
_ => String::new(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Renders an opaque dropdown list for simple string-based suggestions.
|
/// Renders an opaque dropdown list for simple string-based suggestions.
|
||||||
/// This function remains unchanged.
|
/// THIS IS THE RESTORED FUNCTION.
|
||||||
pub fn render_autocomplete_dropdown(
|
pub fn render_autocomplete_dropdown(
|
||||||
f: &mut Frame,
|
f: &mut Frame,
|
||||||
input_rect: Rect,
|
input_rect: Rect,
|
||||||
@@ -84,22 +72,22 @@ pub fn render_autocomplete_dropdown(
|
|||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
let list = List::new(items);
|
let list = List::new(items);
|
||||||
let mut profile_list_state = ListState::default();
|
let mut list_state = ListState::default();
|
||||||
profile_list_state.select(selected_index);
|
list_state.select(selected_index);
|
||||||
|
|
||||||
f.render_stateful_widget(list, dropdown_area, &mut profile_list_state);
|
f.render_stateful_widget(list, dropdown_area, &mut list_state);
|
||||||
}
|
}
|
||||||
|
|
||||||
// --- MODIFIED FUNCTION FOR RICH SUGGESTIONS ---
|
|
||||||
/// Renders an opaque dropdown list for rich `Hit`-based suggestions.
|
/// Renders an opaque dropdown list for rich `Hit`-based suggestions.
|
||||||
/// Displays the value of the first meaningful column, followed by the Hit ID.
|
/// RENAMED from render_rich_autocomplete_dropdown
|
||||||
pub fn render_rich_autocomplete_dropdown(
|
pub fn render_hit_autocomplete_dropdown(
|
||||||
f: &mut Frame,
|
f: &mut Frame,
|
||||||
input_rect: Rect,
|
input_rect: Rect,
|
||||||
frame_area: Rect,
|
frame_area: Rect,
|
||||||
theme: &Theme,
|
theme: &Theme,
|
||||||
suggestions: &[Hit],
|
suggestions: &[Hit],
|
||||||
selected_index: Option<usize>,
|
selected_index: Option<usize>,
|
||||||
|
form_state: &FormState,
|
||||||
) {
|
) {
|
||||||
if suggestions.is_empty() {
|
if suggestions.is_empty() {
|
||||||
return;
|
return;
|
||||||
@@ -107,50 +95,9 @@ pub fn render_rich_autocomplete_dropdown(
|
|||||||
|
|
||||||
let display_names: Vec<String> = suggestions
|
let display_names: Vec<String> = suggestions
|
||||||
.iter()
|
.iter()
|
||||||
.map(|hit| {
|
.map(|hit| form_state.get_display_name_for_hit(hit))
|
||||||
// Use serde_json::Value to handle mixed types (string, null, etc.)
|
|
||||||
if let Ok(content_map) =
|
|
||||||
serde_json::from_str::<HashMap<String, serde_json::Value>>(
|
|
||||||
&hit.content_json,
|
|
||||||
)
|
|
||||||
{
|
|
||||||
// Define keys to ignore for a cleaner display
|
|
||||||
const IGNORED_KEYS: &[&str] = &["id", "deleted", "created_at"];
|
|
||||||
|
|
||||||
// Get keys, filter out ignored ones, and sort for consistency
|
|
||||||
let mut keys: Vec<_> = content_map
|
|
||||||
.keys()
|
|
||||||
.filter(|k| !IGNORED_KEYS.contains(&k.as_str()))
|
|
||||||
.cloned()
|
|
||||||
.collect();
|
|
||||||
keys.sort();
|
|
||||||
|
|
||||||
// Get only the first non-empty value from the sorted keys
|
|
||||||
let values: Vec<_> = keys
|
|
||||||
.iter()
|
|
||||||
.map(|key| {
|
|
||||||
content_map
|
|
||||||
.get(key)
|
|
||||||
.map(json_value_to_string)
|
|
||||||
.unwrap_or_default()
|
|
||||||
})
|
|
||||||
.filter(|s| !s.is_empty()) // Filter out null/empty values
|
|
||||||
.take(1) // Changed from take(2) to take(1)
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
let display_part = values.first().cloned().unwrap_or_default(); // Get the first value
|
|
||||||
if display_part.is_empty() {
|
|
||||||
format!("ID: {}", hit.id)
|
|
||||||
} else {
|
|
||||||
format!("{} | ID: {}", display_part, hit.id) // ID at the end
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
format!("ID: {} (parse error)", hit.id)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
// --- Calculate Dropdown Size & Position ---
|
|
||||||
let max_suggestion_width =
|
let max_suggestion_width =
|
||||||
display_names.iter().map(|s| s.width()).max().unwrap_or(0) as u16;
|
display_names.iter().map(|s| s.width()).max().unwrap_or(0) as u16;
|
||||||
let horizontal_padding: u16 = 2;
|
let horizontal_padding: u16 = 2;
|
||||||
@@ -164,7 +111,6 @@ pub fn render_rich_autocomplete_dropdown(
|
|||||||
height: dropdown_height,
|
height: dropdown_height,
|
||||||
};
|
};
|
||||||
|
|
||||||
// --- Clamping Logic ---
|
|
||||||
if dropdown_area.bottom() > frame_area.height {
|
if dropdown_area.bottom() > frame_area.height {
|
||||||
dropdown_area.y = input_rect.y.saturating_sub(dropdown_height);
|
dropdown_area.y = input_rect.y.saturating_sub(dropdown_height);
|
||||||
}
|
}
|
||||||
@@ -174,7 +120,6 @@ pub fn render_rich_autocomplete_dropdown(
|
|||||||
dropdown_area.x = dropdown_area.x.max(0);
|
dropdown_area.x = dropdown_area.x.max(0);
|
||||||
dropdown_area.y = dropdown_area.y.max(0);
|
dropdown_area.y = dropdown_area.y.max(0);
|
||||||
|
|
||||||
// --- Rendering Logic ---
|
|
||||||
let background_block =
|
let background_block =
|
||||||
Block::default().style(Style::default().bg(Color::DarkGray));
|
Block::default().style(Style::default().bg(Color::DarkGray));
|
||||||
f.render_widget(background_block, dropdown_area);
|
f.render_widget(background_block, dropdown_area);
|
||||||
|
|||||||
@@ -78,25 +78,25 @@ pub fn render_form(
|
|||||||
|
|
||||||
// --- NEW: RENDER AUTOCOMPLETE ---
|
// --- NEW: RENDER AUTOCOMPLETE ---
|
||||||
if form_state.autocomplete_active {
|
if form_state.autocomplete_active {
|
||||||
// Use the Rect of the active field that render_canvas found for us.
|
|
||||||
if let Some(active_rect) = active_field_rect {
|
if let Some(active_rect) = active_field_rect {
|
||||||
let selected_index = form_state.get_selected_suggestion_index();
|
let selected_index = form_state.get_selected_suggestion_index();
|
||||||
|
|
||||||
// THE DECIDER LOGIC:
|
|
||||||
// 1. Check for rich suggestions first.
|
|
||||||
if let Some(rich_suggestions) = form_state.get_rich_suggestions() {
|
if let Some(rich_suggestions) = form_state.get_rich_suggestions() {
|
||||||
if !rich_suggestions.is_empty() {
|
if !rich_suggestions.is_empty() {
|
||||||
autocomplete::render_rich_autocomplete_dropdown(
|
// CHANGE THIS to call the renamed function
|
||||||
|
autocomplete::render_hit_autocomplete_dropdown(
|
||||||
f,
|
f,
|
||||||
active_rect,
|
active_rect,
|
||||||
f.area(), // Use f.area() for clamping, not f.size()
|
f.area(),
|
||||||
theme,
|
theme,
|
||||||
rich_suggestions,
|
rich_suggestions,
|
||||||
selected_index,
|
selected_index,
|
||||||
|
form_state,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// 2. Fallback to simple suggestions if rich ones aren't available.
|
// The fallback to simple suggestions is now correctly handled
|
||||||
|
// because the original render_autocomplete_dropdown exists again.
|
||||||
else if let Some(simple_suggestions) = form_state.get_suggestions() {
|
else if let Some(simple_suggestions) = form_state.get_suggestions() {
|
||||||
if !simple_suggestions.is_empty() {
|
if !simple_suggestions.is_empty() {
|
||||||
autocomplete::render_autocomplete_dropdown(
|
autocomplete::render_autocomplete_dropdown(
|
||||||
@@ -112,3 +112,4 @@ pub fn render_form(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -1,16 +1,16 @@
|
|||||||
// src/components/handlers/canvas.rs
|
// src/components/handlers/canvas.rs
|
||||||
|
|
||||||
use ratatui::{
|
use ratatui::{
|
||||||
widgets::{Paragraph, Block, Borders},
|
layout::{Alignment, Constraint, Direction, Layout, Rect},
|
||||||
layout::{Layout, Constraint, Direction, Rect},
|
style::{Modifier, Style},
|
||||||
style::{Style, Modifier},
|
|
||||||
text::{Line, Span},
|
text::{Line, Span},
|
||||||
|
widgets::{Block, Borders, Paragraph},
|
||||||
Frame,
|
Frame,
|
||||||
prelude::Alignment,
|
|
||||||
};
|
};
|
||||||
use crate::config::colors::themes::Theme;
|
use crate::config::colors::themes::Theme;
|
||||||
|
use crate::state::app::highlight::HighlightState;
|
||||||
use crate::state::pages::canvas_state::CanvasState;
|
use crate::state::pages::canvas_state::CanvasState;
|
||||||
use crate::state::app::highlight::HighlightState; // Ensure correct import path
|
use std::cmp::{max, min};
|
||||||
use std::cmp::{min, max};
|
|
||||||
|
|
||||||
pub fn render_canvas(
|
pub fn render_canvas(
|
||||||
f: &mut Frame,
|
f: &mut Frame,
|
||||||
@@ -21,9 +21,8 @@ pub fn render_canvas(
|
|||||||
inputs: &[&String],
|
inputs: &[&String],
|
||||||
theme: &Theme,
|
theme: &Theme,
|
||||||
is_edit_mode: bool,
|
is_edit_mode: bool,
|
||||||
highlight_state: &HighlightState, // Using the enum state
|
highlight_state: &HighlightState,
|
||||||
) -> Option<Rect> {
|
) -> Option<Rect> {
|
||||||
// ... (setup code remains the same) ...
|
|
||||||
let columns = Layout::default()
|
let columns = Layout::default()
|
||||||
.direction(Direction::Horizontal)
|
.direction(Direction::Horizontal)
|
||||||
.constraints([Constraint::Percentage(30), Constraint::Percentage(70)])
|
.constraints([Constraint::Percentage(30), Constraint::Percentage(70)])
|
||||||
@@ -58,46 +57,47 @@ pub fn render_canvas(
|
|||||||
|
|
||||||
let mut active_field_input_rect = None;
|
let mut active_field_input_rect = None;
|
||||||
|
|
||||||
// Render labels
|
|
||||||
for (i, field) in fields.iter().enumerate() {
|
for (i, field) in fields.iter().enumerate() {
|
||||||
let label = Paragraph::new(Line::from(Span::styled(
|
let label = Paragraph::new(Line::from(Span::styled(
|
||||||
format!("{}:", field),
|
format!("{}:", field),
|
||||||
Style::default().fg(theme.fg)),
|
Style::default().fg(theme.fg),
|
||||||
));
|
)));
|
||||||
f.render_widget(label, Rect {
|
f.render_widget(
|
||||||
x: columns[0].x,
|
label,
|
||||||
y: input_block.y + 1 + i as u16,
|
Rect {
|
||||||
width: columns[0].width,
|
x: columns[0].x,
|
||||||
height: 1,
|
y: input_block.y + 1 + i as u16,
|
||||||
});
|
width: columns[0].width,
|
||||||
|
height: 1,
|
||||||
|
},
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
for (i, _input) in inputs.iter().enumerate() {
|
||||||
// Render inputs and cursor
|
|
||||||
for (i, input) in inputs.iter().enumerate() {
|
|
||||||
let is_active = i == *current_field_idx;
|
let is_active = i == *current_field_idx;
|
||||||
let current_cursor_pos = form_state.current_cursor_pos();
|
let current_cursor_pos = form_state.current_cursor_pos();
|
||||||
let text = input.as_str();
|
|
||||||
let text_len = text.chars().count();
|
|
||||||
|
|
||||||
|
// Use the trait method to get display value
|
||||||
|
let text = form_state.get_display_value_for_field(i);
|
||||||
|
let text_len = text.chars().count();
|
||||||
let line: Line;
|
let line: Line;
|
||||||
|
|
||||||
// --- Use match on the highlight_state enum ---
|
|
||||||
match highlight_state {
|
match highlight_state {
|
||||||
HighlightState::Off => {
|
HighlightState::Off => {
|
||||||
// Not in highlight mode, render normally
|
|
||||||
line = Line::from(Span::styled(
|
line = Line::from(Span::styled(
|
||||||
text,
|
text,
|
||||||
if is_active { Style::default().fg(theme.highlight) } else { Style::default().fg(theme.fg) }
|
if is_active {
|
||||||
|
Style::default().fg(theme.highlight)
|
||||||
|
} else {
|
||||||
|
Style::default().fg(theme.fg)
|
||||||
|
},
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
HighlightState::Characterwise { anchor } => {
|
HighlightState::Characterwise { anchor } => {
|
||||||
// --- Character-wise Highlight Logic ---
|
|
||||||
let (anchor_field, anchor_char) = *anchor;
|
let (anchor_field, anchor_char) = *anchor;
|
||||||
let start_field = min(anchor_field, *current_field_idx);
|
let start_field = min(anchor_field, *current_field_idx);
|
||||||
let end_field = max(anchor_field, *current_field_idx);
|
let end_field = max(anchor_field, *current_field_idx);
|
||||||
|
|
||||||
// Use start_char and end_char consistently
|
|
||||||
let (start_char, end_char) = if anchor_field == *current_field_idx {
|
let (start_char, end_char) = if anchor_field == *current_field_idx {
|
||||||
(min(anchor_char, current_cursor_pos), max(anchor_char, current_cursor_pos))
|
(min(anchor_char, current_cursor_pos), max(anchor_char, current_cursor_pos))
|
||||||
} else if anchor_field < *current_field_idx {
|
} else if anchor_field < *current_field_idx {
|
||||||
@@ -111,24 +111,20 @@ pub fn render_canvas(
|
|||||||
let normal_style_outside = Style::default().fg(theme.fg);
|
let normal_style_outside = Style::default().fg(theme.fg);
|
||||||
|
|
||||||
if i >= start_field && i <= end_field {
|
if i >= start_field && i <= end_field {
|
||||||
// This line is within the character-wise highlight range
|
if start_field == end_field {
|
||||||
if start_field == end_field { // Case 1: Single Line Highlight
|
|
||||||
// Use start_char and end_char here
|
|
||||||
let clamped_start = start_char.min(text_len);
|
let clamped_start = start_char.min(text_len);
|
||||||
let clamped_end = end_char.min(text_len); // Use text_len for slicing logic
|
let clamped_end = end_char.min(text_len);
|
||||||
|
|
||||||
let before: String = text.chars().take(clamped_start).collect();
|
let before: String = text.chars().take(clamped_start).collect();
|
||||||
let highlighted: String = text.chars().skip(clamped_start).take(clamped_end.saturating_sub(clamped_start) + 1).collect();
|
let highlighted: String = text.chars().skip(clamped_start).take(clamped_end.saturating_sub(clamped_start) + 1).collect();
|
||||||
// Define 'after' here
|
|
||||||
let after: String = text.chars().skip(clamped_end + 1).collect();
|
let after: String = text.chars().skip(clamped_end + 1).collect();
|
||||||
|
|
||||||
line = Line::from(vec![
|
line = Line::from(vec![
|
||||||
Span::styled(before, normal_style_in_highlight),
|
Span::styled(before, normal_style_in_highlight),
|
||||||
Span::styled(highlighted, highlight_style),
|
Span::styled(highlighted, highlight_style),
|
||||||
Span::styled(after, normal_style_in_highlight), // Use defined 'after'
|
Span::styled(after, normal_style_in_highlight),
|
||||||
]);
|
]);
|
||||||
} else if i == start_field { // Case 2: Multi-Line Highlight - Start Line
|
} else if i == start_field {
|
||||||
// Use start_char here
|
|
||||||
let safe_start = start_char.min(text_len);
|
let safe_start = start_char.min(text_len);
|
||||||
let before: String = text.chars().take(safe_start).collect();
|
let before: String = text.chars().take(safe_start).collect();
|
||||||
let highlighted: String = text.chars().skip(safe_start).collect();
|
let highlighted: String = text.chars().skip(safe_start).collect();
|
||||||
@@ -136,8 +132,7 @@ pub fn render_canvas(
|
|||||||
Span::styled(before, normal_style_in_highlight),
|
Span::styled(before, normal_style_in_highlight),
|
||||||
Span::styled(highlighted, highlight_style),
|
Span::styled(highlighted, highlight_style),
|
||||||
]);
|
]);
|
||||||
} else if i == end_field { // Case 3: Multi-Line Highlight - End Line (Corrected index)
|
} else if i == end_field {
|
||||||
// Use end_char here
|
|
||||||
let safe_end_inclusive = if text_len > 0 { end_char.min(text_len - 1) } else { 0 };
|
let safe_end_inclusive = if text_len > 0 { end_char.min(text_len - 1) } else { 0 };
|
||||||
let highlighted: String = text.chars().take(safe_end_inclusive + 1).collect();
|
let highlighted: String = text.chars().take(safe_end_inclusive + 1).collect();
|
||||||
let after: String = text.chars().skip(safe_end_inclusive + 1).collect();
|
let after: String = text.chars().skip(safe_end_inclusive + 1).collect();
|
||||||
@@ -145,19 +140,17 @@ pub fn render_canvas(
|
|||||||
Span::styled(highlighted, highlight_style),
|
Span::styled(highlighted, highlight_style),
|
||||||
Span::styled(after, normal_style_in_highlight),
|
Span::styled(after, normal_style_in_highlight),
|
||||||
]);
|
]);
|
||||||
} else { // Case 4: Multi-Line Highlight - Middle Line (Corrected index)
|
} else {
|
||||||
line = Line::from(Span::styled(text, highlight_style)); // Highlight whole line
|
line = Line::from(Span::styled(text, highlight_style));
|
||||||
}
|
}
|
||||||
} else { // Case 5: Line Outside Character-wise Highlight Range
|
} else {
|
||||||
line = Line::from(Span::styled(
|
line = Line::from(Span::styled(
|
||||||
text,
|
text,
|
||||||
// Use normal styling (active or inactive)
|
|
||||||
if is_active { normal_style_in_highlight } else { normal_style_outside }
|
if is_active { normal_style_in_highlight } else { normal_style_outside }
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
HighlightState::Linewise { anchor_line } => {
|
HighlightState::Linewise { anchor_line } => {
|
||||||
// --- Linewise Highlight Logic ---
|
|
||||||
let start_field = min(*anchor_line, *current_field_idx);
|
let start_field = min(*anchor_line, *current_field_idx);
|
||||||
let end_field = max(*anchor_line, *current_field_idx);
|
let end_field = max(*anchor_line, *current_field_idx);
|
||||||
let highlight_style = Style::default().fg(theme.highlight).bg(theme.highlight_bg).add_modifier(Modifier::BOLD);
|
let highlight_style = Style::default().fg(theme.highlight).bg(theme.highlight_bg).add_modifier(Modifier::BOLD);
|
||||||
@@ -165,25 +158,31 @@ pub fn render_canvas(
|
|||||||
let normal_style_outside = Style::default().fg(theme.fg);
|
let normal_style_outside = Style::default().fg(theme.fg);
|
||||||
|
|
||||||
if i >= start_field && i <= end_field {
|
if i >= start_field && i <= end_field {
|
||||||
// Highlight the entire line
|
|
||||||
line = Line::from(Span::styled(text, highlight_style));
|
line = Line::from(Span::styled(text, highlight_style));
|
||||||
} else {
|
} else {
|
||||||
// Line outside linewise highlight range
|
|
||||||
line = Line::from(Span::styled(
|
line = Line::from(Span::styled(
|
||||||
text,
|
text,
|
||||||
// Use normal styling (active or inactive)
|
|
||||||
if is_active { normal_style_in_highlight } else { normal_style_outside }
|
if is_active { normal_style_in_highlight } else { normal_style_outside }
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} // End match highlight_state
|
}
|
||||||
|
|
||||||
let input_display = Paragraph::new(line).alignment(Alignment::Left);
|
let input_display = Paragraph::new(line).alignment(Alignment::Left);
|
||||||
f.render_widget(input_display, input_rows[i]);
|
f.render_widget(input_display, input_rows[i]);
|
||||||
|
|
||||||
if is_active {
|
if is_active {
|
||||||
active_field_input_rect = Some(input_rows[i]);
|
active_field_input_rect = Some(input_rows[i]);
|
||||||
let cursor_x = input_rows[i].x + form_state.current_cursor_pos() as u16;
|
|
||||||
|
// --- CORRECTED CURSOR POSITIONING LOGIC ---
|
||||||
|
// Use the new generic trait method to check for an override.
|
||||||
|
let cursor_x = if form_state.has_display_override(i) {
|
||||||
|
// If an override exists, place the cursor at the end.
|
||||||
|
input_rows[i].x + text.chars().count() as u16
|
||||||
|
} else {
|
||||||
|
// Otherwise, use the real cursor position.
|
||||||
|
input_rows[i].x + form_state.current_cursor_pos() as u16
|
||||||
|
};
|
||||||
let cursor_y = input_rows[i].y;
|
let cursor_y = input_rows[i].y;
|
||||||
f.set_cursor_position((cursor_x, cursor_y));
|
f.set_cursor_position((cursor_x, cursor_y));
|
||||||
}
|
}
|
||||||
@@ -191,4 +190,3 @@ pub fn render_canvas(
|
|||||||
|
|
||||||
active_field_input_rect
|
active_field_input_rect
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ use crate::services::grpc_client::GrpcClient;
|
|||||||
use crate::state::pages::canvas_state::CanvasState;
|
use crate::state::pages::canvas_state::CanvasState;
|
||||||
use crate::state::pages::form::FormState;
|
use crate::state::pages::form::FormState;
|
||||||
use crate::state::pages::auth::RegisterState;
|
use crate::state::pages::auth::RegisterState;
|
||||||
|
use crate::state::app::state::AppState;
|
||||||
use crate::tui::functions::common::form::{revert, save};
|
use crate::tui::functions::common::form::{revert, save};
|
||||||
use crossterm::event::{KeyCode, KeyEvent};
|
use crossterm::event::{KeyCode, KeyEvent};
|
||||||
use std::any::Any;
|
use std::any::Any;
|
||||||
@@ -13,6 +14,7 @@ pub async fn execute_common_action<S: CanvasState + Any>(
|
|||||||
action: &str,
|
action: &str,
|
||||||
state: &mut S,
|
state: &mut S,
|
||||||
grpc_client: &mut GrpcClient,
|
grpc_client: &mut GrpcClient,
|
||||||
|
app_state: &AppState,
|
||||||
current_position: &mut u64,
|
current_position: &mut u64,
|
||||||
total_count: u64,
|
total_count: u64,
|
||||||
) -> Result<String> {
|
) -> Result<String> {
|
||||||
@@ -27,6 +29,7 @@ pub async fn execute_common_action<S: CanvasState + Any>(
|
|||||||
match action {
|
match action {
|
||||||
"save" => {
|
"save" => {
|
||||||
let outcome = save(
|
let outcome = save(
|
||||||
|
app_state,
|
||||||
form_state,
|
form_state,
|
||||||
grpc_client,
|
grpc_client,
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -3,6 +3,7 @@
|
|||||||
use crate::services::grpc_client::GrpcClient;
|
use crate::services::grpc_client::GrpcClient;
|
||||||
use crate::state::pages::canvas_state::CanvasState;
|
use crate::state::pages::canvas_state::CanvasState;
|
||||||
use crate::state::pages::form::FormState;
|
use crate::state::pages::form::FormState;
|
||||||
|
use crate::state::app::state::AppState;
|
||||||
use crate::tui::functions::common::form::{revert, save};
|
use crate::tui::functions::common::form::{revert, save};
|
||||||
use crate::tui::functions::common::form::SaveOutcome;
|
use crate::tui::functions::common::form::SaveOutcome;
|
||||||
use crate::modes::handlers::event::EventOutcome;
|
use crate::modes::handlers::event::EventOutcome;
|
||||||
@@ -14,6 +15,7 @@ pub async fn execute_common_action<S: CanvasState + Any>(
|
|||||||
action: &str,
|
action: &str,
|
||||||
state: &mut S,
|
state: &mut S,
|
||||||
grpc_client: &mut GrpcClient,
|
grpc_client: &mut GrpcClient,
|
||||||
|
app_state: &AppState,
|
||||||
) -> Result<EventOutcome> {
|
) -> Result<EventOutcome> {
|
||||||
match action {
|
match action {
|
||||||
"save" | "revert" => {
|
"save" | "revert" => {
|
||||||
@@ -26,6 +28,7 @@ pub async fn execute_common_action<S: CanvasState + Any>(
|
|||||||
match action {
|
match action {
|
||||||
"save" => {
|
"save" => {
|
||||||
let save_result = save(
|
let save_result = save(
|
||||||
|
app_state,
|
||||||
form_state,
|
form_state,
|
||||||
grpc_client,
|
grpc_client,
|
||||||
).await;
|
).await;
|
||||||
|
|||||||
@@ -32,6 +32,7 @@ pub async fn handle_core_action(
|
|||||||
Ok(EventOutcome::Ok(message))
|
Ok(EventOutcome::Ok(message))
|
||||||
} else {
|
} else {
|
||||||
let save_outcome = form_save(
|
let save_outcome = form_save(
|
||||||
|
app_state,
|
||||||
form_state,
|
form_state,
|
||||||
grpc_client,
|
grpc_client,
|
||||||
).await.context("Register save action failed")?;
|
).await.context("Register save action failed")?;
|
||||||
@@ -52,6 +53,7 @@ pub async fn handle_core_action(
|
|||||||
login_save(auth_state, login_state, auth_client, app_state).await.context("Login save n quit action failed")?
|
login_save(auth_state, login_state, auth_client, app_state).await.context("Login save n quit action failed")?
|
||||||
} else {
|
} else {
|
||||||
let save_outcome = form_save(
|
let save_outcome = form_save(
|
||||||
|
app_state,
|
||||||
form_state,
|
form_state,
|
||||||
grpc_client,
|
grpc_client,
|
||||||
).await?;
|
).await?;
|
||||||
|
|||||||
@@ -132,13 +132,24 @@ pub async fn handle_edit_event(
|
|||||||
.get(selected_idx)
|
.get(selected_idx)
|
||||||
.cloned()
|
.cloned()
|
||||||
{
|
{
|
||||||
|
// --- THIS IS THE CORE LOGIC CHANGE ---
|
||||||
|
|
||||||
|
// 1. Get the friendly display name for the UI
|
||||||
|
let display_name =
|
||||||
|
form_state.get_display_name_for_hit(&selection);
|
||||||
|
|
||||||
|
// 2. Store the REAL ID in the form's values
|
||||||
let current_input =
|
let current_input =
|
||||||
form_state.get_current_input_mut();
|
form_state.get_current_input_mut();
|
||||||
*current_input = selection.id.to_string();
|
*current_input = selection.id.to_string();
|
||||||
let new_cursor_pos = current_input.len();
|
|
||||||
form_state.set_current_cursor_pos(new_cursor_pos);
|
// 3. Set the persistent display override in the map
|
||||||
// FIX: Access ideal_cursor_column through event_handler
|
form_state.link_display_map.insert(
|
||||||
event_handler.ideal_cursor_column = new_cursor_pos;
|
form_state.current_field,
|
||||||
|
display_name,
|
||||||
|
);
|
||||||
|
|
||||||
|
// 4. Finalize state
|
||||||
form_state.deactivate_autocomplete();
|
form_state.deactivate_autocomplete();
|
||||||
form_state.set_has_unsaved_changes(true);
|
form_state.set_has_unsaved_changes(true);
|
||||||
return Ok(EditEventOutcome::Message(
|
return Ok(EditEventOutcome::Message(
|
||||||
|
|||||||
@@ -15,7 +15,7 @@ use anyhow::Result;
|
|||||||
pub async fn handle_command_event(
|
pub async fn handle_command_event(
|
||||||
key: KeyEvent,
|
key: KeyEvent,
|
||||||
config: &Config,
|
config: &Config,
|
||||||
app_state: &AppState,
|
app_state: &mut AppState,
|
||||||
login_state: &LoginState,
|
login_state: &LoginState,
|
||||||
register_state: &RegisterState,
|
register_state: &RegisterState,
|
||||||
form_state: &mut FormState,
|
form_state: &mut FormState,
|
||||||
@@ -74,7 +74,7 @@ pub async fn handle_command_event(
|
|||||||
async fn process_command(
|
async fn process_command(
|
||||||
config: &Config,
|
config: &Config,
|
||||||
form_state: &mut FormState,
|
form_state: &mut FormState,
|
||||||
app_state: &AppState,
|
app_state: &mut AppState,
|
||||||
login_state: &LoginState,
|
login_state: &LoginState,
|
||||||
register_state: &RegisterState,
|
register_state: &RegisterState,
|
||||||
command_input: &mut String,
|
command_input: &mut String,
|
||||||
@@ -117,6 +117,7 @@ async fn process_command(
|
|||||||
},
|
},
|
||||||
"save" => {
|
"save" => {
|
||||||
let outcome = save(
|
let outcome = save(
|
||||||
|
app_state,
|
||||||
form_state,
|
form_state,
|
||||||
grpc_client,
|
grpc_client,
|
||||||
).await?;
|
).await?;
|
||||||
|
|||||||
@@ -1,7 +1,6 @@
|
|||||||
// src/services/grpc_client.rs
|
// src/services/grpc_client.rs
|
||||||
|
|
||||||
use tonic::transport::Channel;
|
use common::proto::multieko2::common::Empty;
|
||||||
use common::proto::multieko2::common::{CountResponse, Empty};
|
|
||||||
use common::proto::multieko2::table_structure::table_structure_service_client::TableStructureServiceClient;
|
use common::proto::multieko2::table_structure::table_structure_service_client::TableStructureServiceClient;
|
||||||
use common::proto::multieko2::table_structure::{GetTableStructureRequest, TableStructureResponse};
|
use common::proto::multieko2::table_structure::{GetTableStructureRequest, TableStructureResponse};
|
||||||
use common::proto::multieko2::table_definition::{
|
use common::proto::multieko2::table_definition::{
|
||||||
@@ -23,8 +22,10 @@ use common::proto::multieko2::tables_data::{
|
|||||||
use common::proto::multieko2::search::{
|
use common::proto::multieko2::search::{
|
||||||
searcher_client::SearcherClient, SearchRequest, SearchResponse,
|
searcher_client::SearcherClient, SearchRequest, SearchResponse,
|
||||||
};
|
};
|
||||||
use anyhow::{Context, Result}; // Added Context
|
use anyhow::{Context, Result};
|
||||||
use std::collections::HashMap; // NEW
|
use std::collections::HashMap;
|
||||||
|
use tonic::transport::Channel;
|
||||||
|
use prost_types::Value;
|
||||||
|
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub struct GrpcClient {
|
pub struct GrpcClient {
|
||||||
@@ -48,7 +49,6 @@ impl GrpcClient {
|
|||||||
TableDefinitionClient::new(channel.clone());
|
TableDefinitionClient::new(channel.clone());
|
||||||
let table_script_client = TableScriptClient::new(channel.clone());
|
let table_script_client = TableScriptClient::new(channel.clone());
|
||||||
let tables_data_client = TablesDataClient::new(channel.clone());
|
let tables_data_client = TablesDataClient::new(channel.clone());
|
||||||
// NEW: Instantiate the search client
|
|
||||||
let search_client = SearcherClient::new(channel.clone());
|
let search_client = SearcherClient::new(channel.clone());
|
||||||
|
|
||||||
Ok(Self {
|
Ok(Self {
|
||||||
@@ -56,7 +56,7 @@ impl GrpcClient {
|
|||||||
table_definition_client,
|
table_definition_client,
|
||||||
table_script_client,
|
table_script_client,
|
||||||
tables_data_client,
|
tables_data_client,
|
||||||
search_client, // NEW
|
search_client,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -135,7 +135,7 @@ impl GrpcClient {
|
|||||||
Ok(response.into_inner().count as u64)
|
Ok(response.into_inner().count as u64)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn get_table_data_by_position(
|
pub async fn get_table_data_by_position(
|
||||||
&mut self,
|
&mut self,
|
||||||
profile_name: String,
|
profile_name: String,
|
||||||
table_name: String,
|
table_name: String,
|
||||||
@@ -159,12 +159,14 @@ impl GrpcClient {
|
|||||||
&mut self,
|
&mut self,
|
||||||
profile_name: String,
|
profile_name: String,
|
||||||
table_name: String,
|
table_name: String,
|
||||||
data: HashMap<String, String>,
|
// CHANGE THIS: Accept the pre-converted data
|
||||||
|
data: HashMap<String, Value>,
|
||||||
) -> Result<PostTableDataResponse> {
|
) -> Result<PostTableDataResponse> {
|
||||||
|
// The conversion logic is now gone from here.
|
||||||
let grpc_request = PostTableDataRequest {
|
let grpc_request = PostTableDataRequest {
|
||||||
profile_name,
|
profile_name,
|
||||||
table_name,
|
table_name,
|
||||||
data,
|
data, // This is now the correct type
|
||||||
};
|
};
|
||||||
let request = tonic::Request::new(grpc_request);
|
let request = tonic::Request::new(grpc_request);
|
||||||
let response = self
|
let response = self
|
||||||
@@ -180,13 +182,15 @@ impl GrpcClient {
|
|||||||
profile_name: String,
|
profile_name: String,
|
||||||
table_name: String,
|
table_name: String,
|
||||||
id: i64,
|
id: i64,
|
||||||
data: HashMap<String, String>,
|
// CHANGE THIS: Accept the pre-converted data
|
||||||
|
data: HashMap<String, Value>,
|
||||||
) -> Result<PutTableDataResponse> {
|
) -> Result<PutTableDataResponse> {
|
||||||
|
// The conversion logic is now gone from here.
|
||||||
let grpc_request = PutTableDataRequest {
|
let grpc_request = PutTableDataRequest {
|
||||||
profile_name,
|
profile_name,
|
||||||
table_name,
|
table_name,
|
||||||
id,
|
id,
|
||||||
data,
|
data, // This is now the correct type
|
||||||
};
|
};
|
||||||
let request = tonic::Request::new(grpc_request);
|
let request = tonic::Request::new(grpc_request);
|
||||||
let response = self
|
let response = self
|
||||||
|
|||||||
@@ -1,16 +1,100 @@
|
|||||||
// src/services/ui_service.rs
|
// src/services/ui_service.rs
|
||||||
|
|
||||||
use crate::services::grpc_client::GrpcClient;
|
use crate::services::grpc_client::GrpcClient;
|
||||||
use crate::state::pages::form::FormState;
|
|
||||||
use crate::tui::functions::common::form::SaveOutcome;
|
|
||||||
use crate::state::pages::add_logic::AddLogicState;
|
|
||||||
use crate::state::app::state::AppState;
|
use crate::state::app::state::AppState;
|
||||||
|
use crate::state::pages::add_logic::AddLogicState;
|
||||||
|
use crate::state::pages::form::{FieldDefinition, FormState};
|
||||||
|
use crate::tui::functions::common::form::SaveOutcome;
|
||||||
use crate::utils::columns::filter_user_columns;
|
use crate::utils::columns::filter_user_columns;
|
||||||
use anyhow::{Context, Result};
|
use anyhow::{anyhow, Context, Result};
|
||||||
|
use std::sync::Arc;
|
||||||
|
|
||||||
pub struct UiService;
|
pub struct UiService;
|
||||||
|
|
||||||
impl UiService {
|
impl UiService {
|
||||||
|
pub async fn load_table_view(
|
||||||
|
grpc_client: &mut GrpcClient,
|
||||||
|
app_state: &mut AppState,
|
||||||
|
profile_name: &str,
|
||||||
|
table_name: &str,
|
||||||
|
) -> Result<FormState> {
|
||||||
|
// 1. & 2. Fetch and Cache Schema - UNCHANGED
|
||||||
|
let table_structure = grpc_client
|
||||||
|
.get_table_structure(profile_name.to_string(), table_name.to_string())
|
||||||
|
.await
|
||||||
|
.context(format!(
|
||||||
|
"Failed to get table structure for {}.{}",
|
||||||
|
profile_name, table_name
|
||||||
|
))?;
|
||||||
|
let cache_key = format!("{}.{}", profile_name, table_name);
|
||||||
|
app_state
|
||||||
|
.schema_cache
|
||||||
|
.insert(cache_key, Arc::new(table_structure.clone()));
|
||||||
|
tracing::info!("Schema for '{}.{}' cached.", profile_name, table_name);
|
||||||
|
|
||||||
|
// --- START: FINAL, SIMPLIFIED, CORRECT LOGIC ---
|
||||||
|
|
||||||
|
// 3a. Create definitions for REGULAR fields first.
|
||||||
|
let mut fields: Vec<FieldDefinition> = table_structure
|
||||||
|
.columns
|
||||||
|
.iter()
|
||||||
|
.filter(|col| {
|
||||||
|
!col.is_primary_key
|
||||||
|
&& col.name != "deleted"
|
||||||
|
&& col.name != "created_at"
|
||||||
|
&& !col.name.ends_with("_id") // Filter out ALL potential links
|
||||||
|
})
|
||||||
|
.map(|col| FieldDefinition {
|
||||||
|
display_name: col.name.clone(),
|
||||||
|
data_key: col.name.clone(),
|
||||||
|
is_link: false,
|
||||||
|
link_target_table: None,
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
// 3b. Now, find and APPEND definitions for LINK fields based on the `_id` convention.
|
||||||
|
let link_fields: Vec<FieldDefinition> = table_structure
|
||||||
|
.columns
|
||||||
|
.iter()
|
||||||
|
.filter(|col| col.name.ends_with("_id")) // Find all foreign key columns
|
||||||
|
.map(|col| {
|
||||||
|
// The table we link to is derived from the column name.
|
||||||
|
// e.g., "test_diacritics_id" -> "test_diacritics"
|
||||||
|
let target_table_base = col
|
||||||
|
.name
|
||||||
|
.strip_suffix("_id")
|
||||||
|
.unwrap_or(&col.name);
|
||||||
|
|
||||||
|
// Find the full table name from the profile tree for display.
|
||||||
|
// e.g., "test_diacritics" -> "2025_test_diacritics"
|
||||||
|
let full_target_table_name = app_state
|
||||||
|
.profile_tree
|
||||||
|
.profiles
|
||||||
|
.iter()
|
||||||
|
.find(|p| p.name == profile_name)
|
||||||
|
.and_then(|p| p.tables.iter().find(|t| t.name.ends_with(target_table_base)))
|
||||||
|
.map_or(target_table_base.to_string(), |t| t.name.clone());
|
||||||
|
|
||||||
|
FieldDefinition {
|
||||||
|
display_name: full_target_table_name.clone(),
|
||||||
|
data_key: col.name.clone(), // The actual FK column name
|
||||||
|
is_link: true,
|
||||||
|
link_target_table: Some(full_target_table_name),
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
fields.extend(link_fields); // Append the link fields to the end
|
||||||
|
|
||||||
|
// --- END: FINAL, SIMPLIFIED, CORRECT LOGIC ---
|
||||||
|
|
||||||
|
Ok(FormState::new(
|
||||||
|
profile_name.to_string(),
|
||||||
|
table_name.to_string(),
|
||||||
|
fields,
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
pub async fn initialize_add_logic_table_data(
|
pub async fn initialize_add_logic_table_data(
|
||||||
grpc_client: &mut GrpcClient,
|
grpc_client: &mut GrpcClient,
|
||||||
add_logic_state: &mut AddLogicState,
|
add_logic_state: &mut AddLogicState,
|
||||||
@@ -92,6 +176,7 @@ impl UiService {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// REFACTOR THIS FUNCTION
|
||||||
pub async fn initialize_app_state_and_form(
|
pub async fn initialize_app_state_and_form(
|
||||||
grpc_client: &mut GrpcClient,
|
grpc_client: &mut GrpcClient,
|
||||||
app_state: &mut AppState,
|
app_state: &mut AppState,
|
||||||
@@ -102,7 +187,6 @@ impl UiService {
|
|||||||
.context("Failed to get profile tree")?;
|
.context("Failed to get profile tree")?;
|
||||||
app_state.profile_tree = profile_tree;
|
app_state.profile_tree = profile_tree;
|
||||||
|
|
||||||
// Determine initial table to load (e.g., first table of first profile, or a default)
|
|
||||||
let initial_profile_name = app_state
|
let initial_profile_name = app_state
|
||||||
.profile_tree
|
.profile_tree
|
||||||
.profiles
|
.profiles
|
||||||
@@ -115,33 +199,26 @@ impl UiService {
|
|||||||
.profiles
|
.profiles
|
||||||
.first()
|
.first()
|
||||||
.and_then(|p| p.tables.first().map(|t| t.name.clone()))
|
.and_then(|p| p.tables.first().map(|t| t.name.clone()))
|
||||||
.unwrap_or_else(|| "2025_company_data1".to_string()); // Fallback if no tables
|
.unwrap_or_else(|| "2025_company_data1".to_string());
|
||||||
|
|
||||||
app_state.set_current_view_table(
|
app_state.set_current_view_table(
|
||||||
initial_profile_name.clone(),
|
initial_profile_name.clone(),
|
||||||
initial_table_name.clone(),
|
initial_table_name.clone(),
|
||||||
);
|
);
|
||||||
|
|
||||||
let table_structure = grpc_client
|
// NOW, just call our new central function. This avoids code duplication.
|
||||||
.get_table_structure(
|
let form_state = Self::load_table_view(
|
||||||
initial_profile_name.clone(),
|
grpc_client,
|
||||||
initial_table_name.clone(),
|
app_state,
|
||||||
)
|
&initial_profile_name,
|
||||||
.await
|
&initial_table_name,
|
||||||
.context(format!(
|
)
|
||||||
"Failed to get initial table structure for {}.{}",
|
.await?;
|
||||||
initial_profile_name, initial_table_name
|
|
||||||
))?;
|
|
||||||
|
|
||||||
let column_names: Vec<String> = table_structure
|
// The field names for the UI are derived from the new form_state
|
||||||
.columns
|
let field_names = form_state.fields.iter().map(|f| f.display_name.clone()).collect();
|
||||||
.iter()
|
|
||||||
.map(|col| col.name.clone())
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
let filtered_columns = filter_user_columns(column_names);
|
Ok((initial_profile_name, initial_table_name, field_names))
|
||||||
|
|
||||||
Ok((initial_profile_name, initial_table_name, filtered_columns))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn fetch_and_set_table_count(
|
pub async fn fetch_and_set_table_count(
|
||||||
|
|||||||
@@ -1,15 +1,19 @@
|
|||||||
// src/state/app/state.rs
|
// src/state/app/state.rs
|
||||||
|
|
||||||
use std::env;
|
|
||||||
use common::proto::multieko2::table_definition::ProfileTreeResponse;
|
|
||||||
use crate::modes::handlers::mode_manager::AppMode;
|
|
||||||
use crate::ui::handlers::context::DialogPurpose;
|
|
||||||
use crate::state::app::search::SearchState; // ADDED
|
|
||||||
use anyhow::Result;
|
use anyhow::Result;
|
||||||
|
use common::proto::multieko2::table_definition::ProfileTreeResponse;
|
||||||
|
// NEW: Import the types we need for the cache
|
||||||
|
use common::proto::multieko2::table_structure::TableStructureResponse;
|
||||||
|
use crate::modes::handlers::mode_manager::AppMode;
|
||||||
|
use crate::state::app::search::SearchState;
|
||||||
|
use crate::ui::handlers::context::DialogPurpose;
|
||||||
|
use std::collections::HashMap;
|
||||||
|
use std::env;
|
||||||
|
use std::sync::Arc;
|
||||||
#[cfg(feature = "ui-debug")]
|
#[cfg(feature = "ui-debug")]
|
||||||
use std::time::Instant;
|
use std::time::Instant;
|
||||||
|
|
||||||
// --- YOUR EXISTING DIALOGSTATE IS UNTOUCHED ---
|
// --- DialogState and UiState are unchanged ---
|
||||||
pub struct DialogState {
|
pub struct DialogState {
|
||||||
pub dialog_show: bool,
|
pub dialog_show: bool,
|
||||||
pub dialog_title: String,
|
pub dialog_title: String,
|
||||||
@@ -30,7 +34,7 @@ pub struct UiState {
|
|||||||
pub show_form: bool,
|
pub show_form: bool,
|
||||||
pub show_login: bool,
|
pub show_login: bool,
|
||||||
pub show_register: bool,
|
pub show_register: bool,
|
||||||
pub show_search_palette: bool, // ADDED
|
pub show_search_palette: bool,
|
||||||
pub focus_outside_canvas: bool,
|
pub focus_outside_canvas: bool,
|
||||||
pub dialog: DialogState,
|
pub dialog: DialogState,
|
||||||
}
|
}
|
||||||
@@ -52,10 +56,12 @@ pub struct AppState {
|
|||||||
pub current_view_profile_name: Option<String>,
|
pub current_view_profile_name: Option<String>,
|
||||||
pub current_view_table_name: Option<String>,
|
pub current_view_table_name: Option<String>,
|
||||||
|
|
||||||
|
// NEW: The "Rulebook" cache. We use Arc for efficient sharing.
|
||||||
|
pub schema_cache: HashMap<String, Arc<TableStructureResponse>>,
|
||||||
|
|
||||||
pub focused_button_index: usize,
|
pub focused_button_index: usize,
|
||||||
pub pending_table_structure_fetch: Option<(String, String)>,
|
pub pending_table_structure_fetch: Option<(String, String)>,
|
||||||
|
|
||||||
// ADDED: State for the search palette
|
|
||||||
pub search_state: Option<SearchState>,
|
pub search_state: Option<SearchState>,
|
||||||
|
|
||||||
// UI preferences
|
// UI preferences
|
||||||
@@ -67,9 +73,7 @@ pub struct AppState {
|
|||||||
|
|
||||||
impl AppState {
|
impl AppState {
|
||||||
pub fn new() -> Result<Self> {
|
pub fn new() -> Result<Self> {
|
||||||
let current_dir = env::current_dir()?
|
let current_dir = env::current_dir()?.to_string_lossy().to_string();
|
||||||
.to_string_lossy()
|
|
||||||
.to_string();
|
|
||||||
Ok(AppState {
|
Ok(AppState {
|
||||||
current_dir,
|
current_dir,
|
||||||
profile_tree: ProfileTreeResponse::default(),
|
profile_tree: ProfileTreeResponse::default(),
|
||||||
@@ -77,9 +81,10 @@ impl AppState {
|
|||||||
current_view_profile_name: None,
|
current_view_profile_name: None,
|
||||||
current_view_table_name: None,
|
current_view_table_name: None,
|
||||||
current_mode: AppMode::General,
|
current_mode: AppMode::General,
|
||||||
|
schema_cache: HashMap::new(), // NEW: Initialize the cache
|
||||||
focused_button_index: 0,
|
focused_button_index: 0,
|
||||||
pending_table_structure_fetch: None,
|
pending_table_structure_fetch: None,
|
||||||
search_state: None, // ADDED
|
search_state: None,
|
||||||
ui: UiState::default(),
|
ui: UiState::default(),
|
||||||
|
|
||||||
#[cfg(feature = "ui-debug")]
|
#[cfg(feature = "ui-debug")]
|
||||||
|
|||||||
@@ -3,6 +3,7 @@
|
|||||||
use common::proto::multieko2::search::search_response::Hit;
|
use common::proto::multieko2::search::search_response::Hit;
|
||||||
|
|
||||||
pub trait CanvasState {
|
pub trait CanvasState {
|
||||||
|
// --- Existing methods (unchanged) ---
|
||||||
fn current_field(&self) -> usize;
|
fn current_field(&self) -> usize;
|
||||||
fn current_cursor_pos(&self) -> usize;
|
fn current_cursor_pos(&self) -> usize;
|
||||||
fn has_unsaved_changes(&self) -> bool;
|
fn has_unsaved_changes(&self) -> bool;
|
||||||
@@ -10,15 +11,22 @@ pub trait CanvasState {
|
|||||||
fn get_current_input(&self) -> &str;
|
fn get_current_input(&self) -> &str;
|
||||||
fn get_current_input_mut(&mut self) -> &mut String;
|
fn get_current_input_mut(&mut self) -> &mut String;
|
||||||
fn fields(&self) -> Vec<&str>;
|
fn fields(&self) -> Vec<&str>;
|
||||||
|
|
||||||
fn set_current_field(&mut self, index: usize);
|
fn set_current_field(&mut self, index: usize);
|
||||||
fn set_current_cursor_pos(&mut self, pos: usize);
|
fn set_current_cursor_pos(&mut self, pos: usize);
|
||||||
fn set_has_unsaved_changes(&mut self, changed: bool);
|
fn set_has_unsaved_changes(&mut self, changed: bool);
|
||||||
|
|
||||||
// --- Autocomplete Support ---
|
|
||||||
fn get_suggestions(&self) -> Option<&[String]>;
|
fn get_suggestions(&self) -> Option<&[String]>;
|
||||||
fn get_selected_suggestion_index(&self) -> Option<usize>;
|
fn get_selected_suggestion_index(&self) -> Option<usize>;
|
||||||
fn get_rich_suggestions(&self) -> Option<&[Hit]> {
|
fn get_rich_suggestions(&self) -> Option<&[Hit]> {
|
||||||
None
|
None
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn get_display_value_for_field(&self, index: usize) -> &str {
|
||||||
|
self.inputs()
|
||||||
|
.get(index)
|
||||||
|
.map(|s| s.as_str())
|
||||||
|
.unwrap_or("")
|
||||||
|
}
|
||||||
|
fn has_display_override(&self, _index: usize) -> bool {
|
||||||
|
false
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -3,18 +3,26 @@
|
|||||||
use crate::config::colors::themes::Theme;
|
use crate::config::colors::themes::Theme;
|
||||||
use crate::state::app::highlight::HighlightState;
|
use crate::state::app::highlight::HighlightState;
|
||||||
use crate::state::pages::canvas_state::CanvasState;
|
use crate::state::pages::canvas_state::CanvasState;
|
||||||
use common::proto::multieko2::search::search_response::Hit; // Import Hit
|
use common::proto::multieko2::search::search_response::Hit;
|
||||||
use ratatui::layout::Rect;
|
use ratatui::layout::Rect;
|
||||||
use ratatui::Frame;
|
use ratatui::Frame;
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
|
|
||||||
// A struct to bridge the display name (label) to the data key from the server.
|
fn json_value_to_string(value: &serde_json::Value) -> String {
|
||||||
|
match value {
|
||||||
|
serde_json::Value::String(s) => s.clone(),
|
||||||
|
serde_json::Value::Number(n) => n.to_string(),
|
||||||
|
serde_json::Value::Bool(b) => b.to_string(),
|
||||||
|
_ => String::new(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub struct FieldDefinition {
|
pub struct FieldDefinition {
|
||||||
pub display_name: String,
|
pub display_name: String,
|
||||||
pub data_key: String,
|
pub data_key: String,
|
||||||
pub is_link: bool,
|
pub is_link: bool,
|
||||||
pub link_target_table: Option<String>,
|
pub link_target_table: Option<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
@@ -29,12 +37,11 @@ pub struct FormState {
|
|||||||
pub current_field: usize,
|
pub current_field: usize,
|
||||||
pub has_unsaved_changes: bool,
|
pub has_unsaved_changes: bool,
|
||||||
pub current_cursor_pos: usize,
|
pub current_cursor_pos: usize,
|
||||||
|
|
||||||
// --- MODIFIED AUTOCOMPLETE STATE ---
|
|
||||||
pub autocomplete_active: bool,
|
pub autocomplete_active: bool,
|
||||||
pub autocomplete_suggestions: Vec<Hit>, // Changed to use the Hit struct
|
pub autocomplete_suggestions: Vec<Hit>,
|
||||||
pub selected_suggestion_index: Option<usize>,
|
pub selected_suggestion_index: Option<usize>,
|
||||||
pub autocomplete_loading: bool, // To show a loading indicator
|
pub autocomplete_loading: bool,
|
||||||
|
pub link_display_map: HashMap<usize, String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl FormState {
|
impl FormState {
|
||||||
@@ -55,11 +62,48 @@ impl FormState {
|
|||||||
current_field: 0,
|
current_field: 0,
|
||||||
has_unsaved_changes: false,
|
has_unsaved_changes: false,
|
||||||
current_cursor_pos: 0,
|
current_cursor_pos: 0,
|
||||||
// --- INITIALIZE NEW STATE ---
|
|
||||||
autocomplete_active: false,
|
autocomplete_active: false,
|
||||||
autocomplete_suggestions: Vec::new(),
|
autocomplete_suggestions: Vec::new(),
|
||||||
selected_suggestion_index: None,
|
selected_suggestion_index: None,
|
||||||
autocomplete_loading: false, // Initialize loading state
|
autocomplete_loading: false,
|
||||||
|
link_display_map: HashMap::new(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_display_name_for_hit(&self, hit: &Hit) -> String {
|
||||||
|
if let Ok(content_map) =
|
||||||
|
serde_json::from_str::<HashMap<String, serde_json::Value>>(
|
||||||
|
&hit.content_json,
|
||||||
|
)
|
||||||
|
{
|
||||||
|
const IGNORED_KEYS: &[&str] = &["id", "deleted", "created_at"];
|
||||||
|
let mut keys: Vec<_> = content_map
|
||||||
|
.keys()
|
||||||
|
.filter(|k| !IGNORED_KEYS.contains(&k.as_str()))
|
||||||
|
.cloned()
|
||||||
|
.collect();
|
||||||
|
keys.sort();
|
||||||
|
|
||||||
|
let values: Vec<_> = keys
|
||||||
|
.iter()
|
||||||
|
.map(|key| {
|
||||||
|
content_map
|
||||||
|
.get(key)
|
||||||
|
.map(json_value_to_string)
|
||||||
|
.unwrap_or_default()
|
||||||
|
})
|
||||||
|
.filter(|s| !s.is_empty())
|
||||||
|
.take(1)
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
let display_part = values.first().cloned().unwrap_or_default();
|
||||||
|
if display_part.is_empty() {
|
||||||
|
format!("ID: {}", hit.id)
|
||||||
|
} else {
|
||||||
|
format!("{} | ID: {}", display_part, hit.id)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
format!("ID: {} (parse error)", hit.id)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -78,7 +122,7 @@ impl FormState {
|
|||||||
crate::components::form::form::render_form(
|
crate::components::form::form::render_form(
|
||||||
f,
|
f,
|
||||||
area,
|
area,
|
||||||
self, // <--- This now correctly passes the concrete &FormState
|
self,
|
||||||
&fields_str_slice,
|
&fields_str_slice,
|
||||||
&self.current_field,
|
&self.current_field,
|
||||||
&values_str_slice,
|
&values_str_slice,
|
||||||
@@ -102,7 +146,8 @@ impl FormState {
|
|||||||
} else {
|
} else {
|
||||||
self.current_position = 1;
|
self.current_position = 1;
|
||||||
}
|
}
|
||||||
self.deactivate_autocomplete(); // Deactivate on reset
|
self.deactivate_autocomplete();
|
||||||
|
self.link_display_map.clear();
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn get_current_input(&self) -> &str {
|
pub fn get_current_input(&self) -> &str {
|
||||||
@@ -113,6 +158,7 @@ impl FormState {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn get_current_input_mut(&mut self) -> &mut String {
|
pub fn get_current_input_mut(&mut self) -> &mut String {
|
||||||
|
self.link_display_map.remove(&self.current_field);
|
||||||
self.values
|
self.values
|
||||||
.get_mut(self.current_field)
|
.get_mut(self.current_field)
|
||||||
.expect("Invalid current_field index")
|
.expect("Invalid current_field index")
|
||||||
@@ -159,11 +205,10 @@ impl FormState {
|
|||||||
self.has_unsaved_changes = false;
|
self.has_unsaved_changes = false;
|
||||||
self.current_field = 0;
|
self.current_field = 0;
|
||||||
self.current_cursor_pos = 0;
|
self.current_cursor_pos = 0;
|
||||||
self.deactivate_autocomplete(); // Deactivate on update
|
self.deactivate_autocomplete();
|
||||||
|
self.link_display_map.clear();
|
||||||
}
|
}
|
||||||
|
|
||||||
// --- NEW HELPER METHOD ---
|
|
||||||
/// Deactivates autocomplete and clears its state.
|
|
||||||
pub fn deactivate_autocomplete(&mut self) {
|
pub fn deactivate_autocomplete(&mut self) {
|
||||||
self.autocomplete_active = false;
|
self.autocomplete_active = false;
|
||||||
self.autocomplete_suggestions.clear();
|
self.autocomplete_suggestions.clear();
|
||||||
@@ -176,58 +221,42 @@ impl CanvasState for FormState {
|
|||||||
fn current_field(&self) -> usize {
|
fn current_field(&self) -> usize {
|
||||||
self.current_field
|
self.current_field
|
||||||
}
|
}
|
||||||
|
|
||||||
fn current_cursor_pos(&self) -> usize {
|
fn current_cursor_pos(&self) -> usize {
|
||||||
self.current_cursor_pos
|
self.current_cursor_pos
|
||||||
}
|
}
|
||||||
|
|
||||||
fn has_unsaved_changes(&self) -> bool {
|
fn has_unsaved_changes(&self) -> bool {
|
||||||
self.has_unsaved_changes
|
self.has_unsaved_changes
|
||||||
}
|
}
|
||||||
|
|
||||||
fn inputs(&self) -> Vec<&String> {
|
fn inputs(&self) -> Vec<&String> {
|
||||||
self.values.iter().collect()
|
self.values.iter().collect()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_current_input(&self) -> &str {
|
fn get_current_input(&self) -> &str {
|
||||||
FormState::get_current_input(self)
|
FormState::get_current_input(self)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_current_input_mut(&mut self) -> &mut String {
|
fn get_current_input_mut(&mut self) -> &mut String {
|
||||||
FormState::get_current_input_mut(self)
|
FormState::get_current_input_mut(self)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn fields(&self) -> Vec<&str> {
|
fn fields(&self) -> Vec<&str> {
|
||||||
self.fields
|
self.fields
|
||||||
.iter()
|
.iter()
|
||||||
.map(|f| f.display_name.as_str())
|
.map(|f| f.display_name.as_str())
|
||||||
.collect()
|
.collect()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn set_current_field(&mut self, index: usize) {
|
fn set_current_field(&mut self, index: usize) {
|
||||||
if index < self.fields.len() {
|
if index < self.fields.len() {
|
||||||
self.current_field = index;
|
self.current_field = index;
|
||||||
}
|
}
|
||||||
// Deactivate autocomplete when changing fields
|
|
||||||
self.deactivate_autocomplete();
|
self.deactivate_autocomplete();
|
||||||
}
|
}
|
||||||
|
|
||||||
fn set_current_cursor_pos(&mut self, pos: usize) {
|
fn set_current_cursor_pos(&mut self, pos: usize) {
|
||||||
self.current_cursor_pos = pos;
|
self.current_cursor_pos = pos;
|
||||||
}
|
}
|
||||||
|
|
||||||
fn set_has_unsaved_changes(&mut self, changed: bool) {
|
fn set_has_unsaved_changes(&mut self, changed: bool) {
|
||||||
self.has_unsaved_changes = changed;
|
self.has_unsaved_changes = changed;
|
||||||
}
|
}
|
||||||
|
|
||||||
// --- MODIFIED: Implement autocomplete trait methods ---
|
|
||||||
|
|
||||||
/// Returns None because this state uses rich suggestions.
|
|
||||||
fn get_suggestions(&self) -> Option<&[String]> {
|
fn get_suggestions(&self) -> Option<&[String]> {
|
||||||
None
|
None
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns rich suggestions.
|
|
||||||
fn get_rich_suggestions(&self) -> Option<&[Hit]> {
|
fn get_rich_suggestions(&self) -> Option<&[Hit]> {
|
||||||
if self.autocomplete_active {
|
if self.autocomplete_active {
|
||||||
Some(&self.autocomplete_suggestions)
|
Some(&self.autocomplete_suggestions)
|
||||||
@@ -235,7 +264,6 @@ impl CanvasState for FormState {
|
|||||||
None
|
None
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_selected_suggestion_index(&self) -> Option<usize> {
|
fn get_selected_suggestion_index(&self) -> Option<usize> {
|
||||||
if self.autocomplete_active {
|
if self.autocomplete_active {
|
||||||
self.selected_suggestion_index
|
self.selected_suggestion_index
|
||||||
@@ -243,4 +271,19 @@ impl CanvasState for FormState {
|
|||||||
None
|
None
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn get_display_value_for_field(&self, index: usize) -> &str {
|
||||||
|
if let Some(display_text) = self.link_display_map.get(&index) {
|
||||||
|
return display_text.as_str();
|
||||||
|
}
|
||||||
|
self.inputs()
|
||||||
|
.get(index)
|
||||||
|
.map(|s| s.as_str())
|
||||||
|
.unwrap_or("")
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- IMPLEMENT THE NEW TRAIT METHOD ---
|
||||||
|
fn has_display_override(&self, index: usize) -> bool {
|
||||||
|
self.link_display_map.contains_key(&index)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,19 +1,22 @@
|
|||||||
// src/tui/functions/common/form.rs
|
// src/tui/functions/common/form.rs
|
||||||
|
|
||||||
use crate::services::grpc_client::GrpcClient;
|
use crate::services::grpc_client::GrpcClient;
|
||||||
|
use crate::state::app::state::AppState; // NEW: Import AppState
|
||||||
use crate::state::pages::form::FormState;
|
use crate::state::pages::form::FormState;
|
||||||
use anyhow::{Context, Result}; // Added Context
|
use crate::utils::data_converter; // NEW: Import our translator
|
||||||
use std::collections::HashMap; // NEW
|
use anyhow::{anyhow, Context, Result};
|
||||||
|
use std::collections::HashMap;
|
||||||
|
|
||||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||||
pub enum SaveOutcome {
|
pub enum SaveOutcome {
|
||||||
NoChange,
|
NoChange,
|
||||||
UpdatedExisting,
|
UpdatedExisting,
|
||||||
CreatedNew(i64), // Keep the ID
|
CreatedNew(i64),
|
||||||
}
|
}
|
||||||
|
|
||||||
// MODIFIED save function
|
// MODIFIED save function signature and logic
|
||||||
pub async fn save(
|
pub async fn save(
|
||||||
|
app_state: &AppState, // NEW: Pass in AppState
|
||||||
form_state: &mut FormState,
|
form_state: &mut FormState,
|
||||||
grpc_client: &mut GrpcClient,
|
grpc_client: &mut GrpcClient,
|
||||||
) -> Result<SaveOutcome> {
|
) -> Result<SaveOutcome> {
|
||||||
@@ -21,42 +24,64 @@ pub async fn save(
|
|||||||
return Ok(SaveOutcome::NoChange);
|
return Ok(SaveOutcome::NoChange);
|
||||||
}
|
}
|
||||||
|
|
||||||
let data_map: HashMap<String, String> = form_state.fields.iter()
|
// --- NEW: VALIDATION & CONVERSION STEP ---
|
||||||
|
let cache_key =
|
||||||
|
format!("{}.{}", form_state.profile_name, form_state.table_name);
|
||||||
|
let schema = match app_state.schema_cache.get(&cache_key) {
|
||||||
|
Some(s) => s,
|
||||||
|
None => {
|
||||||
|
return Err(anyhow!(
|
||||||
|
"Schema for table '{}' not found in cache. Cannot save.",
|
||||||
|
form_state.table_name
|
||||||
|
));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let data_map: HashMap<String, String> = form_state
|
||||||
|
.fields
|
||||||
|
.iter()
|
||||||
.zip(form_state.values.iter())
|
.zip(form_state.values.iter())
|
||||||
.map(|(field_def, value)| (field_def.data_key.clone(), value.clone()))
|
.map(|(field_def, value)| (field_def.data_key.clone(), value.clone()))
|
||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
|
// Use our new translator. It returns a user-friendly error on failure.
|
||||||
|
let converted_data =
|
||||||
|
match data_converter::convert_and_validate_data(&data_map, schema) {
|
||||||
|
Ok(data) => data,
|
||||||
|
Err(user_error) => return Err(anyhow!(user_error)),
|
||||||
|
};
|
||||||
|
// --- END OF NEW STEP ---
|
||||||
|
|
||||||
let outcome: SaveOutcome;
|
let outcome: SaveOutcome;
|
||||||
|
let is_new_entry = form_state.id == 0
|
||||||
let is_new_entry = form_state.id == 0 || (form_state.total_count > 0 && form_state.current_position > form_state.total_count) || (form_state.total_count == 0 && form_state.current_position == 1) ;
|
|| (form_state.total_count > 0
|
||||||
|
&& form_state.current_position > form_state.total_count)
|
||||||
|
|| (form_state.total_count == 0 && form_state.current_position == 1);
|
||||||
|
|
||||||
if is_new_entry {
|
if is_new_entry {
|
||||||
let response = grpc_client
|
let response = grpc_client
|
||||||
.post_table_data(
|
.post_table_data(
|
||||||
form_state.profile_name.clone(),
|
form_state.profile_name.clone(),
|
||||||
form_state.table_name.clone(),
|
form_state.table_name.clone(),
|
||||||
data_map,
|
converted_data, // Use the validated & converted data
|
||||||
)
|
)
|
||||||
.await
|
.await
|
||||||
.context("Failed to post new table data")?;
|
.context("Failed to post new table data")?;
|
||||||
|
|
||||||
if response.success {
|
if response.success {
|
||||||
form_state.id = response.inserted_id;
|
form_state.id = response.inserted_id;
|
||||||
// After creating a new entry, total_count increases, and current_position becomes this new total_count
|
|
||||||
form_state.total_count += 1;
|
form_state.total_count += 1;
|
||||||
form_state.current_position = form_state.total_count;
|
form_state.current_position = form_state.total_count;
|
||||||
outcome = SaveOutcome::CreatedNew(response.inserted_id);
|
outcome = SaveOutcome::CreatedNew(response.inserted_id);
|
||||||
} else {
|
} else {
|
||||||
return Err(anyhow::anyhow!(
|
return Err(anyhow!(
|
||||||
"Server failed to insert data: {}",
|
"Server failed to insert data: {}",
|
||||||
response.message
|
response.message
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
// This assumes form_state.id is valid for an existing record
|
|
||||||
if form_state.id == 0 {
|
if form_state.id == 0 {
|
||||||
return Err(anyhow::anyhow!(
|
return Err(anyhow!(
|
||||||
"Cannot update record: ID is 0, but not classified as new entry."
|
"Cannot update record: ID is 0, but not classified as new entry."
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
@@ -65,7 +90,7 @@ pub async fn save(
|
|||||||
form_state.profile_name.clone(),
|
form_state.profile_name.clone(),
|
||||||
form_state.table_name.clone(),
|
form_state.table_name.clone(),
|
||||||
form_state.id,
|
form_state.id,
|
||||||
data_map,
|
converted_data, // Use the validated & converted data
|
||||||
)
|
)
|
||||||
.await
|
.await
|
||||||
.context("Failed to put (update) table data")?;
|
.context("Failed to put (update) table data")?;
|
||||||
@@ -73,7 +98,7 @@ pub async fn save(
|
|||||||
if response.success {
|
if response.success {
|
||||||
outcome = SaveOutcome::UpdatedExisting;
|
outcome = SaveOutcome::UpdatedExisting;
|
||||||
} else {
|
} else {
|
||||||
return Err(anyhow::anyhow!(
|
return Err(anyhow!(
|
||||||
"Server failed to update data: {}",
|
"Server failed to update data: {}",
|
||||||
response.message
|
response.message
|
||||||
));
|
));
|
||||||
|
|||||||
@@ -350,123 +350,91 @@ pub async fn run_ui() -> Result<()> {
|
|||||||
let current_view_profile = app_state.current_view_profile_name.clone();
|
let current_view_profile = app_state.current_view_profile_name.clone();
|
||||||
let current_view_table = app_state.current_view_table_name.clone();
|
let current_view_table = app_state.current_view_table_name.clone();
|
||||||
|
|
||||||
|
// This condition correctly detects a table switch.
|
||||||
if prev_view_profile_name != current_view_profile
|
if prev_view_profile_name != current_view_profile
|
||||||
|| prev_view_table_name != current_view_table
|
|| prev_view_table_name != current_view_table
|
||||||
{
|
{
|
||||||
if let (Some(prof_name), Some(tbl_name)) =
|
if let (Some(prof_name), Some(tbl_name)) =
|
||||||
(current_view_profile.as_ref(), current_view_table.as_ref())
|
(current_view_profile.as_ref(), current_view_table.as_ref())
|
||||||
{
|
{
|
||||||
|
// --- START OF REFACTORED LOGIC ---
|
||||||
app_state.show_loading_dialog(
|
app_state.show_loading_dialog(
|
||||||
"Loading Table",
|
"Loading Table",
|
||||||
&format!("Fetching data for {}.{}...", prof_name, tbl_name),
|
&format!("Fetching data for {}.{}...", prof_name, tbl_name),
|
||||||
);
|
);
|
||||||
needs_redraw = true;
|
needs_redraw = true;
|
||||||
|
|
||||||
match grpc_client
|
// 1. Call our new, central function. It handles fetching AND caching.
|
||||||
.get_table_structure(prof_name.clone(), tbl_name.clone())
|
match UiService::load_table_view(
|
||||||
.await
|
&mut grpc_client,
|
||||||
|
&mut app_state,
|
||||||
|
prof_name,
|
||||||
|
tbl_name,
|
||||||
|
)
|
||||||
|
.await
|
||||||
{
|
{
|
||||||
Ok(structure_response) => {
|
Ok(mut new_form_state) => {
|
||||||
// --- START OF MODIFIED LOGIC ---
|
// 2. The function succeeded, we have a new FormState.
|
||||||
let all_columns: Vec<String> = structure_response
|
// Now, fetch its data.
|
||||||
.columns
|
|
||||||
.iter()
|
|
||||||
.map(|c| c.name.clone())
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
let mut field_definitions: Vec<FieldDefinition> =
|
|
||||||
filter_user_columns(all_columns)
|
|
||||||
.into_iter()
|
|
||||||
.filter(|col_name| !col_name.ends_with("_id"))
|
|
||||||
.map(|col_name| FieldDefinition {
|
|
||||||
display_name: col_name.clone(),
|
|
||||||
data_key: col_name,
|
|
||||||
is_link: false,
|
|
||||||
link_target_table: None, // Regular fields have no target
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
let linked_tables: Vec<String> = app_state
|
|
||||||
.profile_tree
|
|
||||||
.profiles
|
|
||||||
.iter()
|
|
||||||
.find(|p| p.name == *prof_name)
|
|
||||||
.and_then(|profile| {
|
|
||||||
profile.tables.iter().find(|t| t.name == *tbl_name)
|
|
||||||
})
|
|
||||||
.map_or(vec![], |table| table.depends_on.clone());
|
|
||||||
|
|
||||||
for linked_table_name in linked_tables {
|
|
||||||
let base_name = linked_table_name
|
|
||||||
.split_once('_')
|
|
||||||
.map_or(linked_table_name.as_str(), |(_, rest)| rest);
|
|
||||||
let data_key = format!("{}_id", base_name);
|
|
||||||
let display_name = linked_table_name.clone(); // Clone for use below
|
|
||||||
|
|
||||||
field_definitions.push(FieldDefinition {
|
|
||||||
display_name,
|
|
||||||
data_key,
|
|
||||||
is_link: true,
|
|
||||||
// --- POPULATE THE NEW FIELD ---
|
|
||||||
link_target_table: Some(linked_table_name),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
// --- END OF MODIFIED LOGIC ---
|
|
||||||
|
|
||||||
form_state = FormState::new(
|
|
||||||
prof_name.clone(),
|
|
||||||
tbl_name.clone(),
|
|
||||||
field_definitions, // This now contains the complete definitions
|
|
||||||
);
|
|
||||||
|
|
||||||
if let Err(e) = UiService::fetch_and_set_table_count(
|
if let Err(e) = UiService::fetch_and_set_table_count(
|
||||||
&mut grpc_client,
|
&mut grpc_client,
|
||||||
&mut form_state,
|
&mut new_form_state,
|
||||||
)
|
)
|
||||||
.await
|
.await
|
||||||
{
|
{
|
||||||
|
// Handle count fetching error
|
||||||
app_state.update_dialog_content(
|
app_state.update_dialog_content(
|
||||||
&format!("Error fetching count: {}", e),
|
&format!("Error fetching count: {}", e),
|
||||||
vec!["OK".to_string()],
|
vec!["OK".to_string()],
|
||||||
DialogPurpose::LoginFailed,
|
DialogPurpose::LoginFailed, // Or a more appropriate purpose
|
||||||
);
|
);
|
||||||
} else if form_state.total_count > 0 {
|
} else if new_form_state.total_count > 0 {
|
||||||
|
// If there are records, load the first/last one
|
||||||
if let Err(e) = UiService::load_table_data_by_position(
|
if let Err(e) = UiService::load_table_data_by_position(
|
||||||
&mut grpc_client,
|
&mut grpc_client,
|
||||||
&mut form_state,
|
&mut new_form_state,
|
||||||
)
|
)
|
||||||
.await
|
.await
|
||||||
{
|
{
|
||||||
|
// Handle data loading error
|
||||||
app_state.update_dialog_content(
|
app_state.update_dialog_content(
|
||||||
&format!("Error loading data: {}", e),
|
&format!("Error loading data: {}", e),
|
||||||
vec!["OK".to_string()],
|
vec!["OK".to_string()],
|
||||||
DialogPurpose::LoginFailed,
|
DialogPurpose::LoginFailed, // Or a more appropriate purpose
|
||||||
);
|
);
|
||||||
} else {
|
} else {
|
||||||
|
// Success! Hide the loading dialog.
|
||||||
app_state.hide_dialog();
|
app_state.hide_dialog();
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
form_state.reset_to_empty();
|
// No records, so just reset to an empty form.
|
||||||
|
new_form_state.reset_to_empty();
|
||||||
app_state.hide_dialog();
|
app_state.hide_dialog();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// 3. CRITICAL: Replace the old form_state with the new one.
|
||||||
|
form_state = new_form_state;
|
||||||
|
|
||||||
|
// 4. Update our tracking variables.
|
||||||
prev_view_profile_name = current_view_profile;
|
prev_view_profile_name = current_view_profile;
|
||||||
prev_view_table_name = current_view_table;
|
prev_view_table_name = current_view_table;
|
||||||
table_just_switched = true;
|
table_just_switched = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
|
// This handles errors from load_table_view (e.g., schema fetch failed)
|
||||||
app_state.update_dialog_content(
|
app_state.update_dialog_content(
|
||||||
&format!("Error fetching table structure: {}", e),
|
&format!("Error loading table: {}", e),
|
||||||
vec!["OK".to_string()],
|
vec!["OK".to_string()],
|
||||||
DialogPurpose::LoginFailed,
|
DialogPurpose::LoginFailed, // Or a more appropriate purpose
|
||||||
);
|
);
|
||||||
|
// Revert the view change in app_state to avoid a loop
|
||||||
app_state.current_view_profile_name =
|
app_state.current_view_profile_name =
|
||||||
prev_view_profile_name.clone();
|
prev_view_profile_name.clone();
|
||||||
app_state.current_view_table_name =
|
app_state.current_view_table_name =
|
||||||
prev_view_table_name.clone();
|
prev_view_table_name.clone();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
// --- END OF REFACTORED LOGIC ---
|
||||||
}
|
}
|
||||||
needs_redraw = true;
|
needs_redraw = true;
|
||||||
}
|
}
|
||||||
|
|||||||
50
client/src/utils/data_converter.rs
Normal file
50
client/src/utils/data_converter.rs
Normal file
@@ -0,0 +1,50 @@
|
|||||||
|
// src/utils/data_converter.rs
|
||||||
|
|
||||||
|
use common::proto::multieko2::table_structure::TableStructureResponse;
|
||||||
|
use prost_types::{value::Kind, NullValue, Value};
|
||||||
|
use std::collections::HashMap;
|
||||||
|
|
||||||
|
pub fn convert_and_validate_data(
|
||||||
|
data: &HashMap<String, String>,
|
||||||
|
schema: &TableStructureResponse,
|
||||||
|
) -> Result<HashMap<String, Value>, String> {
|
||||||
|
let type_map: HashMap<_, _> = schema
|
||||||
|
.columns
|
||||||
|
.iter()
|
||||||
|
.map(|col| (col.name.as_str(), col.data_type.as_str()))
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
data.iter()
|
||||||
|
.map(|(key, str_value)| {
|
||||||
|
let expected_type = type_map.get(key.as_str()).unwrap_or(&"TEXT");
|
||||||
|
|
||||||
|
let kind = if str_value.is_empty() {
|
||||||
|
// TODO: Use the correct enum variant
|
||||||
|
Kind::NullValue(NullValue::NullValue.into())
|
||||||
|
} else {
|
||||||
|
// Attempt to parse the string based on the expected type
|
||||||
|
match *expected_type {
|
||||||
|
"BOOL" => match str_value.to_lowercase().parse::<bool>() {
|
||||||
|
Ok(v) => Kind::BoolValue(v),
|
||||||
|
Err(_) => return Err(format!("Invalid boolean for '{}': must be 'true' or 'false'", key)),
|
||||||
|
},
|
||||||
|
"INT8" | "INT4" | "INT2" | "SERIAL" | "BIGSERIAL" => {
|
||||||
|
match str_value.parse::<f64>() {
|
||||||
|
Ok(v) => Kind::NumberValue(v),
|
||||||
|
Err(_) => return Err(format!("Invalid number for '{}': must be a whole number", key)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
"NUMERIC" | "FLOAT4" | "FLOAT8" => match str_value.parse::<f64>() {
|
||||||
|
Ok(v) => Kind::NumberValue(v),
|
||||||
|
Err(_) => return Err(format!("Invalid decimal for '{}': must be a number", key)),
|
||||||
|
},
|
||||||
|
"TIMESTAMPTZ" | "DATE" | "TIME" | "TEXT" | "VARCHAR" | "UUID" => {
|
||||||
|
Kind::StringValue(str_value.clone())
|
||||||
|
}
|
||||||
|
_ => Kind::StringValue(str_value.clone()),
|
||||||
|
}
|
||||||
|
};
|
||||||
|
Ok((key.clone(), Value { kind: Some(kind) }))
|
||||||
|
})
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
@@ -2,5 +2,8 @@
|
|||||||
|
|
||||||
pub mod columns;
|
pub mod columns;
|
||||||
pub mod debug_logger;
|
pub mod debug_logger;
|
||||||
|
pub mod data_converter;
|
||||||
|
|
||||||
pub use columns::*;
|
pub use columns::*;
|
||||||
pub use debug_logger::*;
|
pub use debug_logger::*;
|
||||||
|
pub use data_converter::*;
|
||||||
|
|||||||
@@ -5,6 +5,8 @@ edition.workspace = true
|
|||||||
license.workspace = true
|
license.workspace = true
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
|
prost-types = { workspace = true }
|
||||||
|
|
||||||
tonic = "0.13.0"
|
tonic = "0.13.0"
|
||||||
prost = "0.13.5"
|
prost = "0.13.5"
|
||||||
serde = { version = "1.0.219", features = ["derive"] }
|
serde = { version = "1.0.219", features = ["derive"] }
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ syntax = "proto3";
|
|||||||
package multieko2.tables_data;
|
package multieko2.tables_data;
|
||||||
|
|
||||||
import "common.proto";
|
import "common.proto";
|
||||||
|
import "google/protobuf/struct.proto";
|
||||||
|
|
||||||
service TablesData {
|
service TablesData {
|
||||||
rpc PostTableData (PostTableDataRequest) returns (PostTableDataResponse);
|
rpc PostTableData (PostTableDataRequest) returns (PostTableDataResponse);
|
||||||
@@ -16,7 +17,7 @@ service TablesData {
|
|||||||
message PostTableDataRequest {
|
message PostTableDataRequest {
|
||||||
string profile_name = 1;
|
string profile_name = 1;
|
||||||
string table_name = 2;
|
string table_name = 2;
|
||||||
map<string, string> data = 3;
|
map<string, google.protobuf.Value> data = 3;
|
||||||
}
|
}
|
||||||
|
|
||||||
message PostTableDataResponse {
|
message PostTableDataResponse {
|
||||||
@@ -29,7 +30,7 @@ message PutTableDataRequest {
|
|||||||
string profile_name = 1;
|
string profile_name = 1;
|
||||||
string table_name = 2;
|
string table_name = 2;
|
||||||
int64 id = 3;
|
int64 id = 3;
|
||||||
map<string, string> data = 4;
|
map<string, google.protobuf.Value> data = 4;
|
||||||
}
|
}
|
||||||
|
|
||||||
message PutTableDataResponse {
|
message PutTableDataResponse {
|
||||||
|
|||||||
Binary file not shown.
@@ -5,10 +5,10 @@ pub struct PostTableDataRequest {
|
|||||||
pub profile_name: ::prost::alloc::string::String,
|
pub profile_name: ::prost::alloc::string::String,
|
||||||
#[prost(string, tag = "2")]
|
#[prost(string, tag = "2")]
|
||||||
pub table_name: ::prost::alloc::string::String,
|
pub table_name: ::prost::alloc::string::String,
|
||||||
#[prost(map = "string, string", tag = "3")]
|
#[prost(map = "string, message", tag = "3")]
|
||||||
pub data: ::std::collections::HashMap<
|
pub data: ::std::collections::HashMap<
|
||||||
::prost::alloc::string::String,
|
::prost::alloc::string::String,
|
||||||
::prost::alloc::string::String,
|
::prost_types::Value,
|
||||||
>,
|
>,
|
||||||
}
|
}
|
||||||
#[derive(Clone, PartialEq, ::prost::Message)]
|
#[derive(Clone, PartialEq, ::prost::Message)]
|
||||||
@@ -28,10 +28,10 @@ pub struct PutTableDataRequest {
|
|||||||
pub table_name: ::prost::alloc::string::String,
|
pub table_name: ::prost::alloc::string::String,
|
||||||
#[prost(int64, tag = "3")]
|
#[prost(int64, tag = "3")]
|
||||||
pub id: i64,
|
pub id: i64,
|
||||||
#[prost(map = "string, string", tag = "4")]
|
#[prost(map = "string, message", tag = "4")]
|
||||||
pub data: ::std::collections::HashMap<
|
pub data: ::std::collections::HashMap<
|
||||||
::prost::alloc::string::String,
|
::prost::alloc::string::String,
|
||||||
::prost::alloc::string::String,
|
::prost_types::Value,
|
||||||
>,
|
>,
|
||||||
}
|
}
|
||||||
#[derive(Clone, PartialEq, ::prost::Message)]
|
#[derive(Clone, PartialEq, ::prost::Message)]
|
||||||
|
|||||||
@@ -10,6 +10,7 @@ search = { path = "../search" }
|
|||||||
|
|
||||||
anyhow = { workspace = true }
|
anyhow = { workspace = true }
|
||||||
tantivy = { workspace = true }
|
tantivy = { workspace = true }
|
||||||
|
prost-types = { workspace = true }
|
||||||
chrono = { version = "0.4.40", features = ["serde"] }
|
chrono = { version = "0.4.40", features = ["serde"] }
|
||||||
dotenvy = "0.15.7"
|
dotenvy = "0.15.7"
|
||||||
prost = "0.13.5"
|
prost = "0.13.5"
|
||||||
@@ -41,3 +42,4 @@ path = "src/lib.rs"
|
|||||||
tokio = { version = "1.44", features = ["full", "test-util"] }
|
tokio = { version = "1.44", features = ["full", "test-util"] }
|
||||||
rstest = "0.25.0"
|
rstest = "0.25.0"
|
||||||
lazy_static = "1.5.0"
|
lazy_static = "1.5.0"
|
||||||
|
rand = "0.9.1"
|
||||||
|
|||||||
13
server/Makefile
Normal file
13
server/Makefile
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
# Makefile
|
||||||
|
|
||||||
|
test: reset_db run_tests
|
||||||
|
|
||||||
|
reset_db:
|
||||||
|
@echo "Resetting test database..."
|
||||||
|
@./scripts/reset_test_db.sh
|
||||||
|
|
||||||
|
run_tests:
|
||||||
|
@echo "Running tests..."
|
||||||
|
@cargo test
|
||||||
|
|
||||||
|
.PHONY: test
|
||||||
@@ -1,4 +1,6 @@
|
|||||||
-- Main table definitions
|
-- Main table definitions
|
||||||
|
CREATE SCHEMA IF NOT EXISTS gen;
|
||||||
|
|
||||||
CREATE TABLE table_definitions (
|
CREATE TABLE table_definitions (
|
||||||
id BIGSERIAL PRIMARY KEY,
|
id BIGSERIAL PRIMARY KEY,
|
||||||
deleted BOOLEAN NOT NULL DEFAULT FALSE,
|
deleted BOOLEAN NOT NULL DEFAULT FALSE,
|
||||||
|
|||||||
@@ -1,3 +0,0 @@
|
|||||||
-- Add migration script here
|
|
||||||
|
|
||||||
CREATE SCHEMA IF NOT EXISTS gen;
|
|
||||||
9
server/scripts/reset_test_db.sh
Executable file
9
server/scripts/reset_test_db.sh
Executable file
@@ -0,0 +1,9 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
# scripts/reset_test_db.sh
|
||||||
|
|
||||||
|
DATABASE_URL=${TEST_DATABASE_URL:-"postgres://multi_psql_dev:3@localhost:5432/multi_rust_test"}
|
||||||
|
|
||||||
|
echo "Reset db script"
|
||||||
|
yes | sqlx database drop --database-url "$DATABASE_URL"
|
||||||
|
sqlx database create --database-url "$DATABASE_URL"
|
||||||
|
echo "Test database reset complete."
|
||||||
@@ -1,34 +1,51 @@
|
|||||||
// src/shared/schema_qualifier.rs
|
// src/shared/schema_qualifier.rs
|
||||||
|
use sqlx::PgPool;
|
||||||
use tonic::Status;
|
use tonic::Status;
|
||||||
|
|
||||||
/// Qualifies table names with the appropriate schema
|
// TODO in the future, remove database query on every request and implement caching for scalable
|
||||||
///
|
// solution with many data and requests
|
||||||
|
|
||||||
|
/// Qualifies a table name by checking for its existence in the table_definitions table.
|
||||||
|
/// This is the robust, "source of truth" approach.
|
||||||
|
///
|
||||||
/// Rules:
|
/// Rules:
|
||||||
/// - Tables created via PostTableDefinition (dynamically created tables) are in 'gen' schema
|
/// - If a table is found in `table_definitions`, it is qualified with the 'gen' schema.
|
||||||
/// - System tables (like users, profiles) remain in 'public' schema
|
/// - Otherwise, it is assumed to be a system table in the 'public' schema.
|
||||||
pub fn qualify_table_name(table_name: &str) -> String {
|
pub async fn qualify_table_name(
|
||||||
// Check if table matches the pattern of dynamically created tables (e.g., 2025_something)
|
db_pool: &PgPool,
|
||||||
if table_name.starts_with(|c: char| c.is_ascii_digit()) && table_name.contains('_') {
|
profile_name: &str,
|
||||||
format!("gen.\"{}\"", table_name)
|
table_name: &str,
|
||||||
|
) -> Result<String, Status> {
|
||||||
|
// Check if a definition exists for this table in the given profile.
|
||||||
|
let definition_exists = sqlx::query!(
|
||||||
|
r#"SELECT EXISTS (
|
||||||
|
SELECT 1 FROM table_definitions td
|
||||||
|
JOIN profiles p ON td.profile_id = p.id
|
||||||
|
WHERE p.name = $1 AND td.table_name = $2
|
||||||
|
)"#,
|
||||||
|
profile_name,
|
||||||
|
table_name
|
||||||
|
)
|
||||||
|
.fetch_one(db_pool)
|
||||||
|
.await
|
||||||
|
.map_err(|e| Status::internal(format!("Schema lookup failed: {}", e)))?
|
||||||
|
.exists
|
||||||
|
.unwrap_or(false);
|
||||||
|
|
||||||
|
if definition_exists {
|
||||||
|
// It's a user-defined table, so it lives in 'gen'.
|
||||||
|
Ok(format!("gen.\"{}\"", table_name))
|
||||||
} else {
|
} else {
|
||||||
format!("\"{}\"", table_name)
|
// It's not a user-defined table, so it must be a system table in 'public'.
|
||||||
|
Ok(format!("\"{}\"", table_name))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Qualifies table names for data operations
|
/// Qualifies table names for data operations
|
||||||
pub fn qualify_table_name_for_data(table_name: &str) -> Result<String, Status> {
|
pub async fn qualify_table_name_for_data(
|
||||||
Ok(qualify_table_name(table_name))
|
db_pool: &PgPool,
|
||||||
}
|
profile_name: &str,
|
||||||
|
table_name: &str,
|
||||||
#[cfg(test)]
|
) -> Result<String, Status> {
|
||||||
mod tests {
|
qualify_table_name(db_pool, profile_name, table_name).await
|
||||||
use super::*;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_qualify_table_name() {
|
|
||||||
assert_eq!(qualify_table_name("2025_test_schema3"), "gen.\"2025_test_schema3\"");
|
|
||||||
assert_eq!(qualify_table_name("users"), "\"users\"");
|
|
||||||
assert_eq!(qualify_table_name("profiles"), "\"profiles\"");
|
|
||||||
assert_eq!(qualify_table_name("adresar"), "\"adresar\"");
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,19 +1,21 @@
|
|||||||
|
// src/table_definition/handlers/post_table_definition.rs
|
||||||
|
|
||||||
use tonic::Status;
|
use tonic::Status;
|
||||||
use sqlx::{PgPool, Transaction, Postgres};
|
use sqlx::{PgPool, Transaction, Postgres};
|
||||||
use serde_json::json;
|
use serde_json::json;
|
||||||
use time::OffsetDateTime;
|
|
||||||
use common::proto::multieko2::table_definition::{PostTableDefinitionRequest, TableDefinitionResponse};
|
use common::proto::multieko2::table_definition::{PostTableDefinitionRequest, TableDefinitionResponse};
|
||||||
|
|
||||||
const GENERATED_SCHEMA_NAME: &str = "gen";
|
const GENERATED_SCHEMA_NAME: &str = "gen";
|
||||||
|
|
||||||
const PREDEFINED_FIELD_TYPES: &[(&str, &str)] = &[
|
const PREDEFINED_FIELD_TYPES: &[(&str, &str)] = &[
|
||||||
("text", "TEXT"),
|
("text", "TEXT"),
|
||||||
("psc", "TEXT"),
|
("string", "TEXT"),
|
||||||
("phone", "VARCHAR(15)"),
|
|
||||||
("address", "TEXT"),
|
|
||||||
("email", "VARCHAR(255)"),
|
|
||||||
("boolean", "BOOLEAN"),
|
("boolean", "BOOLEAN"),
|
||||||
("timestamp", "TIMESTAMPTZ"),
|
("timestamp", "TIMESTAMPTZ"),
|
||||||
|
("time", "TIMESTAMPTZ"),
|
||||||
|
("money", "NUMERIC(14, 4)"),
|
||||||
|
("integer", "INTEGER"),
|
||||||
|
("date", "DATE"),
|
||||||
];
|
];
|
||||||
|
|
||||||
fn is_valid_identifier(s: &str) -> bool {
|
fn is_valid_identifier(s: &str) -> bool {
|
||||||
@@ -24,11 +26,9 @@ fn is_valid_identifier(s: &str) -> bool {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn sanitize_table_name(s: &str) -> String {
|
fn sanitize_table_name(s: &str) -> String {
|
||||||
let year = OffsetDateTime::now_utc().year();
|
s.replace(|c: char| !c.is_ascii_alphanumeric() && c != '_', "")
|
||||||
let cleaned = s.replace(|c: char| !c.is_ascii_alphanumeric() && c != '_', "")
|
|
||||||
.trim()
|
.trim()
|
||||||
.to_lowercase();
|
.to_lowercase()
|
||||||
format!("{}_{}", year, cleaned)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn sanitize_identifier(s: &str) -> String {
|
fn sanitize_identifier(s: &str) -> String {
|
||||||
@@ -37,12 +37,60 @@ fn sanitize_identifier(s: &str) -> String {
|
|||||||
.to_lowercase()
|
.to_lowercase()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn map_field_type(field_type: &str) -> Result<&str, Status> {
|
fn map_field_type(field_type: &str) -> Result<String, Status> {
|
||||||
|
let lower_field_type = field_type.to_lowercase();
|
||||||
|
|
||||||
|
// Special handling for "decimal(precision, scale)"
|
||||||
|
if lower_field_type.starts_with("decimal(") && lower_field_type.ends_with(')') {
|
||||||
|
// Extract the part inside the parentheses, e.g., "10, 2"
|
||||||
|
let args = lower_field_type
|
||||||
|
.strip_prefix("decimal(")
|
||||||
|
.and_then(|s| s.strip_suffix(')'))
|
||||||
|
.unwrap_or(""); // Should always succeed due to the checks above
|
||||||
|
|
||||||
|
// Split into precision and scale parts
|
||||||
|
if let Some((p_str, s_str)) = args.split_once(',') {
|
||||||
|
// Parse precision, returning an error if it's not a valid number
|
||||||
|
let precision = p_str.trim().parse::<u32>().map_err(|_| {
|
||||||
|
Status::invalid_argument("Invalid precision in decimal type")
|
||||||
|
})?;
|
||||||
|
|
||||||
|
// Parse scale, returning an error if it's not a valid number
|
||||||
|
let scale = s_str.trim().parse::<u32>().map_err(|_| {
|
||||||
|
Status::invalid_argument("Invalid scale in decimal type")
|
||||||
|
})?;
|
||||||
|
|
||||||
|
// Add validation based on PostgreSQL rules
|
||||||
|
if precision < 1 {
|
||||||
|
return Err(Status::invalid_argument("Precision must be at least 1"));
|
||||||
|
}
|
||||||
|
if scale > precision {
|
||||||
|
return Err(Status::invalid_argument(
|
||||||
|
"Scale cannot be greater than precision",
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
// If everything is valid, build and return the NUMERIC type string
|
||||||
|
return Ok(format!("NUMERIC({}, {})", precision, scale));
|
||||||
|
} else {
|
||||||
|
// The format was wrong, e.g., "decimal(10)" or "decimal()"
|
||||||
|
return Err(Status::invalid_argument(
|
||||||
|
"Invalid decimal format. Expected: decimal(precision, scale)",
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// If not a decimal, fall back to the predefined list
|
||||||
PREDEFINED_FIELD_TYPES
|
PREDEFINED_FIELD_TYPES
|
||||||
.iter()
|
.iter()
|
||||||
.find(|(key, _)| *key == field_type.to_lowercase().as_str())
|
.find(|(key, _)| *key == lower_field_type.as_str())
|
||||||
.map(|(_, sql_type)| *sql_type)
|
.map(|(_, sql_type)| sql_type.to_string()) // Convert to an owned String
|
||||||
.ok_or_else(|| Status::invalid_argument(format!("Invalid field type: {}", field_type)))
|
.ok_or_else(|| {
|
||||||
|
Status::invalid_argument(format!(
|
||||||
|
"Invalid field type: {}",
|
||||||
|
field_type
|
||||||
|
))
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
fn is_invalid_table_name(table_name: &str) -> bool {
|
fn is_invalid_table_name(table_name: &str) -> bool {
|
||||||
@@ -56,7 +104,21 @@ pub async fn post_table_definition(
|
|||||||
db_pool: &PgPool,
|
db_pool: &PgPool,
|
||||||
request: PostTableDefinitionRequest,
|
request: PostTableDefinitionRequest,
|
||||||
) -> Result<TableDefinitionResponse, Status> {
|
) -> Result<TableDefinitionResponse, Status> {
|
||||||
|
if request.profile_name.trim().is_empty() {
|
||||||
|
return Err(Status::invalid_argument("Profile name cannot be empty"));
|
||||||
|
}
|
||||||
|
|
||||||
|
const MAX_IDENTIFIER_LENGTH: usize = 63;
|
||||||
|
|
||||||
let base_name = sanitize_table_name(&request.table_name);
|
let base_name = sanitize_table_name(&request.table_name);
|
||||||
|
if base_name.len() > MAX_IDENTIFIER_LENGTH {
|
||||||
|
return Err(Status::invalid_argument(format!(
|
||||||
|
"Identifier '{}' exceeds the {} character limit.",
|
||||||
|
base_name,
|
||||||
|
MAX_IDENTIFIER_LENGTH
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
|
||||||
let user_part_cleaned = request.table_name
|
let user_part_cleaned = request.table_name
|
||||||
.replace(|c: char| !c.is_ascii_alphanumeric() && c != '_', "")
|
.replace(|c: char| !c.is_ascii_alphanumeric() && c != '_', "")
|
||||||
.trim_matches('_')
|
.trim_matches('_')
|
||||||
@@ -131,6 +193,9 @@ async fn execute_table_definition(
|
|||||||
if !is_valid_identifier(&col_def.name) {
|
if !is_valid_identifier(&col_def.name) {
|
||||||
return Err(Status::invalid_argument("Invalid column name"));
|
return Err(Status::invalid_argument("Invalid column name"));
|
||||||
}
|
}
|
||||||
|
if col_name.ends_with("_id") || col_name == "id" || col_name == "deleted" || col_name == "created_at" {
|
||||||
|
return Err(Status::invalid_argument("Invalid column name"));
|
||||||
|
}
|
||||||
let sql_type = map_field_type(&col_def.field_type)?;
|
let sql_type = map_field_type(&col_def.field_type)?;
|
||||||
columns.push(format!("\"{}\" {}", col_name, sql_type));
|
columns.push(format!("\"{}\" {}", col_name, sql_type));
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -38,7 +38,12 @@ pub async fn delete_table_data(
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Qualify table name with schema
|
// Qualify table name with schema
|
||||||
let qualified_table = qualify_table_name_for_data(&request.table_name)?;
|
let qualified_table = qualify_table_name_for_data(
|
||||||
|
db_pool,
|
||||||
|
&request.profile_name,
|
||||||
|
&request.table_name,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
// Perform soft delete using qualified table name
|
// Perform soft delete using qualified table name
|
||||||
let query = format!(
|
let query = format!(
|
||||||
|
|||||||
@@ -88,7 +88,12 @@ pub async fn get_table_data(
|
|||||||
// --- END OF FIX ---
|
// --- END OF FIX ---
|
||||||
|
|
||||||
// Qualify table name with schema
|
// Qualify table name with schema
|
||||||
let qualified_table = qualify_table_name_for_data(&table_name)?;
|
let qualified_table = qualify_table_name_for_data(
|
||||||
|
db_pool,
|
||||||
|
&profile_name,
|
||||||
|
&table_name,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
let sql = format!(
|
let sql = format!(
|
||||||
"SELECT {} FROM {} WHERE id = $1 AND deleted = false",
|
"SELECT {} FROM {} WHERE id = $1 AND deleted = false",
|
||||||
|
|||||||
@@ -45,7 +45,12 @@ pub async fn get_table_data_by_position(
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Qualify table name with schema
|
// Qualify table name with schema
|
||||||
let qualified_table = qualify_table_name_for_data(&table_name)?;
|
let qualified_table = qualify_table_name_for_data(
|
||||||
|
db_pool,
|
||||||
|
&profile_name,
|
||||||
|
&table_name,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
let id_result = sqlx::query_scalar(
|
let id_result = sqlx::query_scalar(
|
||||||
&format!(
|
&format!(
|
||||||
|
|||||||
@@ -47,7 +47,12 @@ pub async fn get_table_data_count(
|
|||||||
}
|
}
|
||||||
|
|
||||||
// 2. QUALIFY THE TABLE NAME using the imported function
|
// 2. QUALIFY THE TABLE NAME using the imported function
|
||||||
let qualified_table_name = qualify_table_name_for_data(&request.table_name)?;
|
let qualified_table = qualify_table_name_for_data(
|
||||||
|
db_pool,
|
||||||
|
&request.profile_name,
|
||||||
|
&request.table_name,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
// 3. USE THE QUALIFIED NAME in the SQL query
|
// 3. USE THE QUALIFIED NAME in the SQL query
|
||||||
let query_sql = format!(
|
let query_sql = format!(
|
||||||
@@ -56,7 +61,7 @@ pub async fn get_table_data_count(
|
|||||||
FROM {}
|
FROM {}
|
||||||
WHERE deleted = FALSE
|
WHERE deleted = FALSE
|
||||||
"#,
|
"#,
|
||||||
qualified_table_name // Use the schema-qualified name here
|
qualified_table
|
||||||
);
|
);
|
||||||
|
|
||||||
// The rest of the logic remains largely the same, but error messages can be more specific.
|
// The rest of the logic remains largely the same, but error messages can be more specific.
|
||||||
@@ -81,14 +86,14 @@ pub async fn get_table_data_count(
|
|||||||
// even though it was defined in table_definitions. This is an inconsistency.
|
// even though it was defined in table_definitions. This is an inconsistency.
|
||||||
return Err(Status::internal(format!(
|
return Err(Status::internal(format!(
|
||||||
"Table '{}' is defined but does not physically exist in the database as {}.",
|
"Table '{}' is defined but does not physically exist in the database as {}.",
|
||||||
request.table_name, qualified_table_name
|
request.table_name, qualified_table
|
||||||
)));
|
)));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// For other errors, provide a general message.
|
// For other errors, provide a general message.
|
||||||
Err(Status::internal(format!(
|
Err(Status::internal(format!(
|
||||||
"Count query failed for table {}: {}",
|
"Count query failed for table {}: {}",
|
||||||
qualified_table_name, e
|
qualified_table, e
|
||||||
)))
|
)))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -7,17 +7,15 @@ use chrono::{DateTime, Utc};
|
|||||||
use common::proto::multieko2::tables_data::{PostTableDataRequest, PostTableDataResponse};
|
use common::proto::multieko2::tables_data::{PostTableDataRequest, PostTableDataResponse};
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
use crate::shared::schema_qualifier::qualify_table_name_for_data;
|
use prost_types::value::Kind;
|
||||||
|
|
||||||
use crate::steel::server::execution::{self, Value};
|
use crate::steel::server::execution::{self, Value};
|
||||||
use crate::steel::server::functions::SteelContext;
|
use crate::steel::server::functions::SteelContext;
|
||||||
|
|
||||||
// Add these imports
|
|
||||||
use crate::indexer::{IndexCommand, IndexCommandData};
|
use crate::indexer::{IndexCommand, IndexCommandData};
|
||||||
use tokio::sync::mpsc;
|
use tokio::sync::mpsc;
|
||||||
use tracing::error;
|
use tracing::error;
|
||||||
|
|
||||||
// MODIFIED: Function signature now accepts the indexer sender
|
|
||||||
pub async fn post_table_data(
|
pub async fn post_table_data(
|
||||||
db_pool: &PgPool,
|
db_pool: &PgPool,
|
||||||
request: PostTableDataRequest,
|
request: PostTableDataRequest,
|
||||||
@@ -25,11 +23,6 @@ pub async fn post_table_data(
|
|||||||
) -> Result<PostTableDataResponse, Status> {
|
) -> Result<PostTableDataResponse, Status> {
|
||||||
let profile_name = request.profile_name;
|
let profile_name = request.profile_name;
|
||||||
let table_name = request.table_name;
|
let table_name = request.table_name;
|
||||||
let mut data = HashMap::new();
|
|
||||||
|
|
||||||
for (key, value) in request.data {
|
|
||||||
data.insert(key, value.trim().to_string());
|
|
||||||
}
|
|
||||||
|
|
||||||
// Lookup profile
|
// Lookup profile
|
||||||
let profile = sqlx::query!(
|
let profile = sqlx::query!(
|
||||||
@@ -94,13 +87,32 @@ pub async fn post_table_data(
|
|||||||
|
|
||||||
// Validate all data columns
|
// Validate all data columns
|
||||||
let user_columns: Vec<&String> = columns.iter().map(|(name, _)| name).collect();
|
let user_columns: Vec<&String> = columns.iter().map(|(name, _)| name).collect();
|
||||||
for key in data.keys() {
|
for key in request.data.keys() {
|
||||||
if !system_columns_set.contains(key.as_str()) &&
|
if !system_columns_set.contains(key.as_str()) &&
|
||||||
!user_columns.contains(&&key.to_string()) {
|
!user_columns.contains(&&key.to_string()) {
|
||||||
return Err(Status::invalid_argument(format!("Invalid column: {}", key)));
|
return Err(Status::invalid_argument(format!("Invalid column: {}", key)));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ========================================================================
|
||||||
|
// FIX #1: SCRIPT VALIDATION LOOP
|
||||||
|
// This loop now correctly handles JSON `null` (which becomes `None`).
|
||||||
|
// ========================================================================
|
||||||
|
let mut string_data_for_scripts = HashMap::new();
|
||||||
|
for (key, proto_value) in &request.data {
|
||||||
|
let str_val = match &proto_value.kind {
|
||||||
|
Some(Kind::StringValue(s)) => s.clone(),
|
||||||
|
Some(Kind::NumberValue(n)) => n.to_string(),
|
||||||
|
Some(Kind::BoolValue(b)) => b.to_string(),
|
||||||
|
// This now correctly skips both protobuf `NULL` and JSON `null`.
|
||||||
|
Some(Kind::NullValue(_)) | None => continue,
|
||||||
|
Some(Kind::StructValue(_)) | Some(Kind::ListValue(_)) => {
|
||||||
|
return Err(Status::invalid_argument(format!("Unsupported type for script validation in column '{}'", key)));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
string_data_for_scripts.insert(key.clone(), str_val);
|
||||||
|
}
|
||||||
|
|
||||||
// Validate Steel scripts
|
// Validate Steel scripts
|
||||||
let scripts = sqlx::query!(
|
let scripts = sqlx::query!(
|
||||||
"SELECT target_column, script FROM table_scripts WHERE table_definitions_id = $1",
|
"SELECT target_column, script FROM table_scripts WHERE table_definitions_id = $1",
|
||||||
@@ -113,21 +125,18 @@ pub async fn post_table_data(
|
|||||||
for script_record in scripts {
|
for script_record in scripts {
|
||||||
let target_column = script_record.target_column;
|
let target_column = script_record.target_column;
|
||||||
|
|
||||||
// Ensure target column exists in submitted data
|
let user_value = string_data_for_scripts.get(&target_column)
|
||||||
let user_value = data.get(&target_column)
|
|
||||||
.ok_or_else(|| Status::invalid_argument(
|
.ok_or_else(|| Status::invalid_argument(
|
||||||
format!("Script target column '{}' is required", target_column)
|
format!("Script target column '{}' is required", target_column)
|
||||||
))?;
|
))?;
|
||||||
|
|
||||||
// Create execution context
|
|
||||||
let context = SteelContext {
|
let context = SteelContext {
|
||||||
current_table: table_name.clone(), // Keep base name for scripts
|
current_table: table_name.clone(),
|
||||||
profile_id,
|
profile_id,
|
||||||
row_data: data.clone(),
|
row_data: string_data_for_scripts.clone(),
|
||||||
db_pool: Arc::new(db_pool.clone()),
|
db_pool: Arc::new(db_pool.clone()),
|
||||||
};
|
};
|
||||||
|
|
||||||
// Execute validation script
|
|
||||||
let script_result = execution::execute_script(
|
let script_result = execution::execute_script(
|
||||||
script_record.script,
|
script_record.script,
|
||||||
"STRINGS",
|
"STRINGS",
|
||||||
@@ -138,7 +147,6 @@ pub async fn post_table_data(
|
|||||||
format!("Script execution failed for '{}': {}", target_column, e)
|
format!("Script execution failed for '{}': {}", target_column, e)
|
||||||
))?;
|
))?;
|
||||||
|
|
||||||
// Validate script output
|
|
||||||
let Value::Strings(mut script_output) = script_result else {
|
let Value::Strings(mut script_output) = script_result else {
|
||||||
return Err(Status::internal("Script must return string values"));
|
return Err(Status::internal("Script must return string values"));
|
||||||
};
|
};
|
||||||
@@ -160,11 +168,16 @@ pub async fn post_table_data(
|
|||||||
let mut placeholders = Vec::new();
|
let mut placeholders = Vec::new();
|
||||||
let mut param_idx = 1;
|
let mut param_idx = 1;
|
||||||
|
|
||||||
for (col, value) in data {
|
// ========================================================================
|
||||||
|
// FIX #2: DATABASE INSERTION LOOP
|
||||||
|
// This loop now correctly handles JSON `null` (which becomes `None`)
|
||||||
|
// without crashing and correctly inserts a SQL NULL.
|
||||||
|
// ========================================================================
|
||||||
|
for (col, proto_value) in request.data {
|
||||||
let sql_type = if system_columns_set.contains(col.as_str()) {
|
let sql_type = if system_columns_set.contains(col.as_str()) {
|
||||||
match col.as_str() {
|
match col.as_str() {
|
||||||
"deleted" => "BOOLEAN",
|
"deleted" => "BOOLEAN",
|
||||||
_ if col.ends_with("_id") => "BIGINT", // Handle foreign keys
|
_ if col.ends_with("_id") => "BIGINT",
|
||||||
_ => return Err(Status::invalid_argument("Invalid system column")),
|
_ => return Err(Status::invalid_argument("Invalid system column")),
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
@@ -174,36 +187,65 @@ pub async fn post_table_data(
|
|||||||
.ok_or_else(|| Status::invalid_argument(format!("Column not found: {}", col)))?
|
.ok_or_else(|| Status::invalid_argument(format!("Column not found: {}", col)))?
|
||||||
};
|
};
|
||||||
|
|
||||||
|
// Check for `None` (from JSON null) or `Some(NullValue)` first.
|
||||||
|
let kind = match &proto_value.kind {
|
||||||
|
None | Some(Kind::NullValue(_)) => {
|
||||||
|
// It's a null value. Add the correct SQL NULL type and continue.
|
||||||
|
match sql_type {
|
||||||
|
"BOOLEAN" => params.add(None::<bool>),
|
||||||
|
"TEXT" | "VARCHAR(15)" | "VARCHAR(255)" => params.add(None::<String>),
|
||||||
|
"TIMESTAMPTZ" => params.add(None::<DateTime<Utc>>),
|
||||||
|
"BIGINT" => params.add(None::<i64>),
|
||||||
|
_ => return Err(Status::invalid_argument(format!("Unsupported type for null value: {}", sql_type))),
|
||||||
|
}.map_err(|e| Status::internal(format!("Failed to add null parameter for {}: {}", col, e)))?;
|
||||||
|
|
||||||
|
columns_list.push(format!("\"{}\"", col));
|
||||||
|
placeholders.push(format!("${}", param_idx));
|
||||||
|
param_idx += 1;
|
||||||
|
continue; // Skip to the next column in the loop
|
||||||
|
}
|
||||||
|
// If it's not null, just pass the inner `Kind` through.
|
||||||
|
Some(k) => k,
|
||||||
|
};
|
||||||
|
|
||||||
|
// From here, we know `kind` is not a null type.
|
||||||
match sql_type {
|
match sql_type {
|
||||||
"TEXT" | "VARCHAR(15)" | "VARCHAR(255)" => {
|
"TEXT" | "VARCHAR(15)" | "VARCHAR(255)" => {
|
||||||
if let Some(max_len) = sql_type.strip_prefix("VARCHAR(")
|
if let Kind::StringValue(value) = kind {
|
||||||
.and_then(|s| s.strip_suffix(')'))
|
if let Some(max_len) = sql_type.strip_prefix("VARCHAR(").and_then(|s| s.strip_suffix(')')).and_then(|s| s.parse::<usize>().ok()) {
|
||||||
.and_then(|s| s.parse::<usize>().ok())
|
if value.len() > max_len {
|
||||||
{
|
return Err(Status::internal(format!("Value too long for {}", col)));
|
||||||
if value.len() > max_len {
|
}
|
||||||
return Err(Status::internal(format!("Value too long for {}", col)));
|
|
||||||
}
|
}
|
||||||
|
params.add(value).map_err(|e| Status::invalid_argument(format!("Failed to add text parameter for {}: {}", col, e)))?;
|
||||||
|
} else {
|
||||||
|
return Err(Status::invalid_argument(format!("Expected string for column '{}'", col)));
|
||||||
}
|
}
|
||||||
params.add(value)
|
|
||||||
.map_err(|e| Status::invalid_argument(format!("Failed to add text parameter for {}: {}", col, e)))?;
|
|
||||||
},
|
},
|
||||||
"BOOLEAN" => {
|
"BOOLEAN" => {
|
||||||
let val = value.parse::<bool>()
|
if let Kind::BoolValue(val) = kind {
|
||||||
.map_err(|_| Status::invalid_argument(format!("Invalid boolean for {}", col)))?;
|
params.add(val).map_err(|e| Status::invalid_argument(format!("Failed to add boolean parameter for {}: {}", col, e)))?;
|
||||||
params.add(val)
|
} else {
|
||||||
.map_err(|e| Status::invalid_argument(format!("Failed to add boolean parameter for {}: {}", col, e)))?;
|
return Err(Status::invalid_argument(format!("Expected boolean for column '{}'", col)));
|
||||||
|
}
|
||||||
},
|
},
|
||||||
"TIMESTAMPTZ" => {
|
"TIMESTAMPTZ" => {
|
||||||
let dt = DateTime::parse_from_rfc3339(&value)
|
if let Kind::StringValue(value) = kind {
|
||||||
.map_err(|_| Status::invalid_argument(format!("Invalid timestamp for {}", col)))?;
|
let dt = DateTime::parse_from_rfc3339(value).map_err(|_| Status::invalid_argument(format!("Invalid timestamp for {}", col)))?;
|
||||||
params.add(dt.with_timezone(&Utc))
|
params.add(dt.with_timezone(&Utc)).map_err(|e| Status::invalid_argument(format!("Failed to add timestamp parameter for {}: {}", col, e)))?;
|
||||||
.map_err(|e| Status::invalid_argument(format!("Failed to add timestamp parameter for {}: {}", col, e)))?;
|
} else {
|
||||||
|
return Err(Status::invalid_argument(format!("Expected ISO 8601 string for column '{}'", col)));
|
||||||
|
}
|
||||||
},
|
},
|
||||||
"BIGINT" => {
|
"BIGINT" => {
|
||||||
let val = value.parse::<i64>()
|
if let Kind::NumberValue(val) = kind {
|
||||||
.map_err(|_| Status::invalid_argument(format!("Invalid integer for {}", col)))?;
|
if val.fract() != 0.0 {
|
||||||
params.add(val)
|
return Err(Status::invalid_argument(format!("Expected integer for column '{}', but got a float", col)));
|
||||||
.map_err(|e| Status::invalid_argument(format!("Failed to add integer parameter for {}: {}", col, e)))?;
|
}
|
||||||
|
params.add(*val as i64).map_err(|e| Status::invalid_argument(format!("Failed to add integer parameter for {}: {}", col, e)))?;
|
||||||
|
} else {
|
||||||
|
return Err(Status::invalid_argument(format!("Expected number for column '{}'", col)));
|
||||||
|
}
|
||||||
},
|
},
|
||||||
_ => return Err(Status::invalid_argument(format!("Unsupported type {}", sql_type))),
|
_ => return Err(Status::invalid_argument(format!("Unsupported type {}", sql_type))),
|
||||||
}
|
}
|
||||||
@@ -218,7 +260,12 @@ pub async fn post_table_data(
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Qualify table name with schema
|
// Qualify table name with schema
|
||||||
let qualified_table = qualify_table_name_for_data(&table_name)?;
|
let qualified_table = crate::shared::schema_qualifier::qualify_table_name_for_data(
|
||||||
|
db_pool,
|
||||||
|
&profile_name,
|
||||||
|
&table_name,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
let sql = format!(
|
let sql = format!(
|
||||||
"INSERT INTO {} ({}) VALUES ({}) RETURNING id",
|
"INSERT INTO {} ({}) VALUES ({}) RETURNING id",
|
||||||
@@ -227,7 +274,6 @@ pub async fn post_table_data(
|
|||||||
placeholders.join(", ")
|
placeholders.join(", ")
|
||||||
);
|
);
|
||||||
|
|
||||||
// Execute query with enhanced error handling
|
|
||||||
let result = sqlx::query_scalar_with::<_, i64, _>(&sql, params)
|
let result = sqlx::query_scalar_with::<_, i64, _>(&sql, params)
|
||||||
.fetch_one(db_pool)
|
.fetch_one(db_pool)
|
||||||
.await;
|
.await;
|
||||||
@@ -235,7 +281,6 @@ pub async fn post_table_data(
|
|||||||
let inserted_id = match result {
|
let inserted_id = match result {
|
||||||
Ok(id) => id,
|
Ok(id) => id,
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
// Handle "relation does not exist" error specifically
|
|
||||||
if let Some(db_err) = e.as_database_error() {
|
if let Some(db_err) = e.as_database_error() {
|
||||||
if db_err.code() == Some(std::borrow::Cow::Borrowed("42P01")) {
|
if db_err.code() == Some(std::borrow::Cow::Borrowed("42P01")) {
|
||||||
return Err(Status::internal(format!(
|
return Err(Status::internal(format!(
|
||||||
@@ -248,15 +293,12 @@ pub async fn post_table_data(
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
// After a successful insert, send a command to the indexer.
|
|
||||||
let command = IndexCommand::AddOrUpdate(IndexCommandData {
|
let command = IndexCommand::AddOrUpdate(IndexCommandData {
|
||||||
table_name: table_name.clone(),
|
table_name: table_name.clone(),
|
||||||
row_id: inserted_id,
|
row_id: inserted_id,
|
||||||
});
|
});
|
||||||
|
|
||||||
if let Err(e) = indexer_tx.send(command).await {
|
if let Err(e) = indexer_tx.send(command).await {
|
||||||
// If sending fails, the DB is updated but the index will be stale.
|
|
||||||
// This is a critical situation to log and monitor.
|
|
||||||
error!(
|
error!(
|
||||||
"CRITICAL: DB insert for table '{}' (id: {}) succeeded but failed to queue for indexing: {}. Search index is now inconsistent.",
|
"CRITICAL: DB insert for table '{}' (id: {}) succeeded but failed to queue for indexing: {}. Search index is now inconsistent.",
|
||||||
table_name, inserted_id, e
|
table_name, inserted_id, e
|
||||||
|
|||||||
@@ -4,8 +4,8 @@ use sqlx::{PgPool, Arguments, Postgres};
|
|||||||
use sqlx::postgres::PgArguments;
|
use sqlx::postgres::PgArguments;
|
||||||
use chrono::{DateTime, Utc};
|
use chrono::{DateTime, Utc};
|
||||||
use common::proto::multieko2::tables_data::{PutTableDataRequest, PutTableDataResponse};
|
use common::proto::multieko2::tables_data::{PutTableDataRequest, PutTableDataResponse};
|
||||||
use std::collections::HashMap;
|
use crate::shared::schema_qualifier::qualify_table_name_for_data;
|
||||||
use crate::shared::schema_qualifier::qualify_table_name_for_data; // Import schema qualifier
|
use prost_types::value::Kind;
|
||||||
|
|
||||||
pub async fn put_table_data(
|
pub async fn put_table_data(
|
||||||
db_pool: &PgPool,
|
db_pool: &PgPool,
|
||||||
@@ -15,20 +15,9 @@ pub async fn put_table_data(
|
|||||||
let table_name = request.table_name;
|
let table_name = request.table_name;
|
||||||
let record_id = request.id;
|
let record_id = request.id;
|
||||||
|
|
||||||
// Preprocess and validate data
|
// If no data is provided to update, it's an invalid request.
|
||||||
let mut processed_data = HashMap::new();
|
if request.data.is_empty() {
|
||||||
let mut null_fields = Vec::new();
|
return Err(Status::invalid_argument("No fields provided to update."));
|
||||||
|
|
||||||
// CORRECTED: Generic handling for all fields.
|
|
||||||
// Any field with an empty string will be added to the null_fields list.
|
|
||||||
// The special, hardcoded logic for "firma" has been removed.
|
|
||||||
for (key, value) in request.data {
|
|
||||||
let trimmed = value.trim().to_string();
|
|
||||||
if trimmed.is_empty() {
|
|
||||||
null_fields.push(key);
|
|
||||||
} else {
|
|
||||||
processed_data.insert(key, trimmed);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Lookup profile
|
// Lookup profile
|
||||||
@@ -70,14 +59,29 @@ pub async fn put_table_data(
|
|||||||
columns.push((name, sql_type));
|
columns.push((name, sql_type));
|
||||||
}
|
}
|
||||||
|
|
||||||
// CORRECTED: "firma" is not a system column.
|
// Get all foreign key columns for this table (needed for validation)
|
||||||
// It should be treated as a user-defined column.
|
let fk_columns = sqlx::query!(
|
||||||
let system_columns = ["deleted"];
|
r#"SELECT ltd.table_name
|
||||||
|
FROM table_definition_links tdl
|
||||||
|
JOIN table_definitions ltd ON tdl.linked_table_id = ltd.id
|
||||||
|
WHERE tdl.source_table_id = $1"#,
|
||||||
|
table_def.id
|
||||||
|
)
|
||||||
|
.fetch_all(db_pool)
|
||||||
|
.await
|
||||||
|
.map_err(|e| Status::internal(format!("Foreign key lookup error: {}", e)))?;
|
||||||
|
|
||||||
|
let mut system_columns = vec!["deleted".to_string()];
|
||||||
|
for fk in fk_columns {
|
||||||
|
let base_name = fk.table_name.split_once('_').map_or(fk.table_name.as_str(), |(_, rest)| rest);
|
||||||
|
system_columns.push(format!("{}_id", base_name));
|
||||||
|
}
|
||||||
|
let system_columns_set: std::collections::HashSet<_> = system_columns.iter().map(|s| s.as_str()).collect();
|
||||||
let user_columns: Vec<&String> = columns.iter().map(|(name, _)| name).collect();
|
let user_columns: Vec<&String> = columns.iter().map(|(name, _)| name).collect();
|
||||||
|
|
||||||
// Validate input columns
|
// Validate input columns
|
||||||
for key in processed_data.keys() {
|
for key in request.data.keys() {
|
||||||
if !system_columns.contains(&key.as_str()) && !user_columns.contains(&key) {
|
if !system_columns_set.contains(key.as_str()) && !user_columns.contains(&key) {
|
||||||
return Err(Status::invalid_argument(format!("Invalid column: {}", key)));
|
return Err(Status::invalid_argument(format!("Invalid column: {}", key)));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -87,54 +91,65 @@ pub async fn put_table_data(
|
|||||||
let mut set_clauses = Vec::new();
|
let mut set_clauses = Vec::new();
|
||||||
let mut param_idx = 1;
|
let mut param_idx = 1;
|
||||||
|
|
||||||
// Add data parameters for non-empty fields
|
for (col, proto_value) in request.data {
|
||||||
for (col, value) in &processed_data {
|
let sql_type = if system_columns_set.contains(col.as_str()) {
|
||||||
// CORRECTED: The logic for "firma" is removed from this match.
|
|
||||||
// It will now fall through to the `else` block and have its type
|
|
||||||
// correctly looked up from the `columns` vector.
|
|
||||||
let sql_type = if system_columns.contains(&col.as_str()) {
|
|
||||||
match col.as_str() {
|
match col.as_str() {
|
||||||
"deleted" => "BOOLEAN",
|
"deleted" => "BOOLEAN",
|
||||||
|
_ if col.ends_with("_id") => "BIGINT",
|
||||||
_ => return Err(Status::invalid_argument("Invalid system column")),
|
_ => return Err(Status::invalid_argument("Invalid system column")),
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
columns.iter()
|
columns.iter()
|
||||||
.find(|(name, _)| name == col)
|
.find(|(name, _)| name == &col)
|
||||||
.map(|(_, sql_type)| sql_type.as_str())
|
.map(|(_, sql_type)| sql_type.as_str())
|
||||||
.ok_or_else(|| Status::invalid_argument(format!("Column not found: {}", col)))?
|
.ok_or_else(|| Status::invalid_argument(format!("Column not found: {}", col)))?
|
||||||
};
|
};
|
||||||
|
|
||||||
|
// A provided value cannot be null or empty in a PUT request.
|
||||||
|
// To clear a field, it should be set to an empty string "" for text,
|
||||||
|
// or a specific value for other types if needed (though typically not done).
|
||||||
|
// For now, we reject nulls.
|
||||||
|
let kind = proto_value.kind.ok_or_else(|| {
|
||||||
|
Status::invalid_argument(format!("Value for column '{}' cannot be empty in a PUT request. To clear a text field, send an empty string.", col))
|
||||||
|
})?;
|
||||||
|
|
||||||
match sql_type {
|
match sql_type {
|
||||||
"TEXT" | "VARCHAR(15)" | "VARCHAR(255)" => {
|
"TEXT" | "VARCHAR(15)" | "VARCHAR(255)" => {
|
||||||
if let Some(max_len) = sql_type.strip_prefix("VARCHAR(")
|
if let Kind::StringValue(value) = kind {
|
||||||
.and_then(|s| s.strip_suffix(')'))
|
params.add(value)
|
||||||
.and_then(|s| s.parse::<usize>().ok())
|
.map_err(|e| Status::internal(format!("Failed to add text parameter for {}: {}", col, e)))?;
|
||||||
{
|
} else {
|
||||||
if value.len() > max_len {
|
return Err(Status::invalid_argument(format!("Expected string for column '{}'", col)));
|
||||||
return Err(Status::internal(format!("Value too long for {}", col)));
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
params.add(value)
|
|
||||||
.map_err(|e| Status::internal(format!("Failed to add text parameter for {}: {}", col, e)))?;
|
|
||||||
},
|
},
|
||||||
"BOOLEAN" => {
|
"BOOLEAN" => {
|
||||||
let val = value.parse::<bool>()
|
if let Kind::BoolValue(val) = kind {
|
||||||
.map_err(|_| Status::invalid_argument(format!("Invalid boolean for {}", col)))?;
|
params.add(val)
|
||||||
params.add(val)
|
.map_err(|e| Status::internal(format!("Failed to add boolean parameter for {}: {}", col, e)))?;
|
||||||
.map_err(|e| Status::internal(format!("Failed to add boolean parameter for {}: {}", col, e)))?;
|
} else {
|
||||||
|
return Err(Status::invalid_argument(format!("Expected boolean for column '{}'", col)));
|
||||||
|
}
|
||||||
},
|
},
|
||||||
"TIMESTAMPTZ" => {
|
"TIMESTAMPTZ" => {
|
||||||
let dt = DateTime::parse_from_rfc3339(value)
|
if let Kind::StringValue(value) = kind {
|
||||||
.map_err(|_| Status::invalid_argument(format!("Invalid timestamp for {}", col)))?;
|
let dt = DateTime::parse_from_rfc3339(&value)
|
||||||
params.add(dt.with_timezone(&Utc))
|
.map_err(|_| Status::invalid_argument(format!("Invalid timestamp for {}", col)))?;
|
||||||
.map_err(|e| Status::internal(format!("Failed to add timestamp parameter for {}: {}", col, e)))?;
|
params.add(dt.with_timezone(&Utc))
|
||||||
|
.map_err(|e| Status::internal(format!("Failed to add timestamp parameter for {}: {}", col, e)))?;
|
||||||
|
} else {
|
||||||
|
return Err(Status::invalid_argument(format!("Expected ISO 8601 string for column '{}'", col)));
|
||||||
|
}
|
||||||
},
|
},
|
||||||
// ADDED: BIGINT handling for completeness, if needed for other columns.
|
|
||||||
"BIGINT" => {
|
"BIGINT" => {
|
||||||
let val = value.parse::<i64>()
|
if let Kind::NumberValue(val) = kind {
|
||||||
.map_err(|_| Status::invalid_argument(format!("Invalid integer for {}", col)))?;
|
if val.fract() != 0.0 {
|
||||||
params.add(val)
|
return Err(Status::invalid_argument(format!("Expected integer for column '{}', but got a float", col)));
|
||||||
.map_err(|e| Status::internal(format!("Failed to add integer parameter for {}: {}", col, e)))?;
|
}
|
||||||
|
params.add(val as i64)
|
||||||
|
.map_err(|e| Status::internal(format!("Failed to add integer parameter for {}: {}", col, e)))?;
|
||||||
|
} else {
|
||||||
|
return Err(Status::invalid_argument(format!("Expected number for column '{}'", col)));
|
||||||
|
}
|
||||||
},
|
},
|
||||||
_ => return Err(Status::invalid_argument(format!("Unsupported type {}", sql_type))),
|
_ => return Err(Status::invalid_argument(format!("Unsupported type {}", sql_type))),
|
||||||
}
|
}
|
||||||
@@ -143,27 +158,15 @@ pub async fn put_table_data(
|
|||||||
param_idx += 1;
|
param_idx += 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Add NULL clauses for empty fields
|
|
||||||
for field in null_fields {
|
|
||||||
// Make sure the field is valid
|
|
||||||
if !system_columns.contains(&field.as_str()) && !user_columns.contains(&&field) {
|
|
||||||
return Err(Status::invalid_argument(format!("Invalid column to set NULL: {}", field)));
|
|
||||||
}
|
|
||||||
set_clauses.push(format!("\"{}\" = NULL", field));
|
|
||||||
}
|
|
||||||
|
|
||||||
// Ensure we have at least one field to update
|
|
||||||
if set_clauses.is_empty() {
|
|
||||||
return Err(Status::invalid_argument("No valid fields to update"));
|
|
||||||
}
|
|
||||||
|
|
||||||
// Add ID parameter at the end
|
|
||||||
params.add(record_id)
|
params.add(record_id)
|
||||||
.map_err(|e| Status::internal(format!("Failed to add record_id parameter: {}", e)))?;
|
.map_err(|e| Status::internal(format!("Failed to add record_id parameter: {}", e)))?;
|
||||||
|
|
||||||
// Qualify table name with schema
|
let qualified_table = qualify_table_name_for_data(
|
||||||
let qualified_table = qualify_table_name_for_data(&table_name)?;
|
db_pool,
|
||||||
|
&profile_name,
|
||||||
|
&table_name,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
let set_clause = set_clauses.join(", ");
|
let set_clause = set_clauses.join(", ");
|
||||||
let sql = format!(
|
let sql = format!(
|
||||||
"UPDATE {} SET {} WHERE id = ${} AND deleted = FALSE RETURNING id",
|
"UPDATE {} SET {} WHERE id = ${} AND deleted = FALSE RETURNING id",
|
||||||
@@ -184,7 +187,6 @@ pub async fn put_table_data(
|
|||||||
}),
|
}),
|
||||||
Ok(None) => Err(Status::not_found("Record not found or already deleted")),
|
Ok(None) => Err(Status::not_found("Record not found or already deleted")),
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
// Handle "relation does not exist" error specifically
|
|
||||||
if let Some(db_err) = e.as_database_error() {
|
if let Some(db_err) = e.as_database_error() {
|
||||||
if db_err.code() == Some(std::borrow::Cow::Borrowed("42P01")) {
|
if db_err.code() == Some(std::borrow::Cow::Borrowed("42P01")) {
|
||||||
return Err(Status::internal(format!(
|
return Err(Status::internal(format!(
|
||||||
|
|||||||
@@ -1,56 +1,75 @@
|
|||||||
// tests/common/mod.rs
|
// tests/common/mod.rs
|
||||||
use dotenvy;
|
|
||||||
use sqlx::{postgres::PgPoolOptions, PgPool};
|
use dotenvy::dotenv;
|
||||||
|
// --- CHANGE 1: Add Alphanumeric to the use statement ---
|
||||||
|
use rand::distr::Alphanumeric;
|
||||||
|
use rand::Rng;
|
||||||
|
use sqlx::{postgres::PgPoolOptions, Connection, Executor, PgConnection, PgPool};
|
||||||
use std::env;
|
use std::env;
|
||||||
use std::path::Path;
|
|
||||||
|
|
||||||
pub async fn setup_test_db() -> PgPool {
|
// (The get_database_url and get_root_connection functions remain the same)
|
||||||
// Get path to server directory
|
fn get_database_url() -> String {
|
||||||
let manifest_dir = env::var("CARGO_MANIFEST_DIR").expect("CARGO_MANIFEST_DIR must be set");
|
dotenv().ok();
|
||||||
let env_path = Path::new(&manifest_dir).join(".env_test");
|
env::var("TEST_DATABASE_URL").expect("TEST_DATABASE_URL must be set")
|
||||||
|
}
|
||||||
|
async fn get_root_connection() -> PgConnection {
|
||||||
|
PgConnection::connect(&get_database_url())
|
||||||
|
.await
|
||||||
|
.expect("Failed to create root connection to test database")
|
||||||
|
}
|
||||||
|
|
||||||
// Load environment variables
|
|
||||||
dotenvy::from_path(env_path).ok();
|
|
||||||
|
|
||||||
// Create connection pool
|
/// The primary test setup function.
|
||||||
let database_url = env::var("TEST_DATABASE_URL").expect("TEST_DATABASE_URL must be set");
|
/// Creates a new, unique schema and returns a connection pool that is scoped to that schema.
|
||||||
|
/// This is the key to test isolation.
|
||||||
|
pub async fn setup_isolated_db() -> PgPool {
|
||||||
|
let mut root_conn = get_root_connection().await;
|
||||||
|
|
||||||
|
let schema_name = format!(
|
||||||
|
"test_{}",
|
||||||
|
rand::thread_rng()
|
||||||
|
// --- CHANGE 2: Pass a reference to Alphanumeric directly ---
|
||||||
|
.sample_iter(&Alphanumeric)
|
||||||
|
.take(12)
|
||||||
|
.map(char::from)
|
||||||
|
.collect::<String>()
|
||||||
|
.to_lowercase()
|
||||||
|
);
|
||||||
|
|
||||||
|
root_conn
|
||||||
|
.execute(format!("CREATE SCHEMA \"{}\"", schema_name).as_str())
|
||||||
|
.await
|
||||||
|
.unwrap_or_else(|_| panic!("Failed to create schema: {}", schema_name));
|
||||||
|
|
||||||
let pool = PgPoolOptions::new()
|
let pool = PgPoolOptions::new()
|
||||||
.max_connections(5)
|
.max_connections(5)
|
||||||
.connect(&database_url)
|
.after_connect(move |conn, _meta| {
|
||||||
|
let schema_name = schema_name.clone();
|
||||||
|
Box::pin(async move {
|
||||||
|
conn.execute(format!("SET search_path TO \"{}\"", schema_name).as_str())
|
||||||
|
.await?;
|
||||||
|
Ok(())
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.connect(&get_database_url())
|
||||||
.await
|
.await
|
||||||
.expect("Failed to create pool");
|
.expect("Failed to create isolated pool");
|
||||||
|
|
||||||
// Run migrations
|
|
||||||
sqlx::migrate!()
|
sqlx::migrate!()
|
||||||
.run(&pool)
|
.run(&pool)
|
||||||
.await
|
.await
|
||||||
.expect("Migrations failed");
|
.expect("Migrations failed in isolated schema");
|
||||||
|
|
||||||
// Insert default profile if it doesn't exist
|
sqlx::query!(
|
||||||
let profile = sqlx::query!(
|
|
||||||
r#"
|
r#"
|
||||||
INSERT INTO profiles (name)
|
INSERT INTO profiles (name)
|
||||||
VALUES ('default')
|
VALUES ('default')
|
||||||
ON CONFLICT (name) DO NOTHING
|
ON CONFLICT (name) DO NOTHING
|
||||||
RETURNING id
|
|
||||||
"#
|
"#
|
||||||
)
|
)
|
||||||
.fetch_optional(&pool)
|
.execute(&pool)
|
||||||
.await
|
.await
|
||||||
.expect("Failed to insert test profile");
|
.expect("Failed to insert test profile in isolated schema");
|
||||||
|
|
||||||
let profile_id = if let Some(profile) = profile {
|
|
||||||
profile.id
|
|
||||||
} else {
|
|
||||||
// If the profile already exists, fetch its ID
|
|
||||||
sqlx::query!(
|
|
||||||
"SELECT id FROM profiles WHERE name = 'default'"
|
|
||||||
)
|
|
||||||
.fetch_one(&pool)
|
|
||||||
.await
|
|
||||||
.expect("Failed to fetch default profile ID")
|
|
||||||
.id
|
|
||||||
};
|
|
||||||
|
|
||||||
pool
|
pool
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
// tests/mod.rs
|
// tests/mod.rs
|
||||||
pub mod adresar;
|
// pub mod adresar;
|
||||||
pub mod tables_data;
|
// pub mod tables_data;
|
||||||
pub mod common;
|
pub mod common;
|
||||||
|
pub mod table_definition;
|
||||||
|
|||||||
3
server/tests/table_definition/mod.rs
Normal file
3
server/tests/table_definition/mod.rs
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
// server/tests/table_definition/mod.rs
|
||||||
|
|
||||||
|
pub mod post_table_definition_test;
|
||||||
511
server/tests/table_definition/post_table_definition_test.rs
Normal file
511
server/tests/table_definition/post_table_definition_test.rs
Normal file
@@ -0,0 +1,511 @@
|
|||||||
|
// tests/table_definition/post_table_definition_test.rs
|
||||||
|
|
||||||
|
// Keep all your normal use statements
|
||||||
|
use common::proto::multieko2::table_definition::{
|
||||||
|
ColumnDefinition, PostTableDefinitionRequest, TableLink,
|
||||||
|
};
|
||||||
|
use rstest::{fixture, rstest};
|
||||||
|
use server::table_definition::handlers::post_table_definition;
|
||||||
|
use sqlx::{postgres::PgPoolOptions, Connection, Executor, PgConnection, PgPool, Row}; // Add PgConnection etc.
|
||||||
|
use tonic::Code;
|
||||||
|
// Add these two new use statements for the isolation logic
|
||||||
|
use rand::distr::Alphanumeric;
|
||||||
|
use rand::Rng;
|
||||||
|
use std::env;
|
||||||
|
use dotenvy;
|
||||||
|
use std::path::Path;
|
||||||
|
|
||||||
|
// ===================================================================
|
||||||
|
// SPECIALIZED SETUP FOR `table_definition` TESTS
|
||||||
|
// This setup logic is now local to this file and will not affect other tests.
|
||||||
|
// ===================================================================
|
||||||
|
async fn setup_isolated_gen_schema_db() -> PgPool {
|
||||||
|
// ---- ADD THIS BLOCK TO LOAD THE .env_test FILE ----
|
||||||
|
let manifest_dir = env::var("CARGO_MANIFEST_DIR").expect("CARGO_MANIFEST_DIR must be set");
|
||||||
|
let env_path = Path::new(&manifest_dir).join(".env_test");
|
||||||
|
dotenvy::from_path(env_path).ok();
|
||||||
|
// ----------------------------------------------------
|
||||||
|
|
||||||
|
let database_url = env::var("TEST_DATABASE_URL").expect("TEST_DATABASE_URL must be set");
|
||||||
|
|
||||||
|
let unique_schema_name = format!(
|
||||||
|
"test_{}",
|
||||||
|
rand::thread_rng()
|
||||||
|
.sample_iter(&Alphanumeric)
|
||||||
|
.take(12)
|
||||||
|
.map(char::from)
|
||||||
|
.collect::<String>()
|
||||||
|
);
|
||||||
|
|
||||||
|
let mut root_conn = PgConnection::connect(&database_url).await.unwrap();
|
||||||
|
root_conn
|
||||||
|
.execute(format!("CREATE SCHEMA \"{}\"", unique_schema_name).as_str())
|
||||||
|
.await
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
let pool = PgPoolOptions::new()
|
||||||
|
.max_connections(5)
|
||||||
|
.after_connect(move |conn, _meta| {
|
||||||
|
let schema = unique_schema_name.clone();
|
||||||
|
Box::pin(async move {
|
||||||
|
conn.execute(format!("SET search_path = '{}'", schema).as_str())
|
||||||
|
.await?;
|
||||||
|
Ok(())
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.connect(&database_url)
|
||||||
|
.await
|
||||||
|
.expect("Failed to create isolated pool");
|
||||||
|
|
||||||
|
sqlx::migrate!()
|
||||||
|
.run(&pool)
|
||||||
|
.await
|
||||||
|
.expect("Migrations failed in isolated schema");
|
||||||
|
|
||||||
|
sqlx::query!("INSERT INTO profiles (name) VALUES ('default') ON CONFLICT (name) DO NOTHING")
|
||||||
|
.execute(&pool)
|
||||||
|
.await
|
||||||
|
.expect("Failed to insert test profile in isolated schema");
|
||||||
|
|
||||||
|
pool
|
||||||
|
}
|
||||||
|
|
||||||
|
// ========= Fixtures for THIS FILE ONLY =========
|
||||||
|
|
||||||
|
#[fixture]
|
||||||
|
async fn pool() -> PgPool {
|
||||||
|
// This fixture now calls the LOCAL, SPECIALIZED setup function.
|
||||||
|
setup_isolated_gen_schema_db().await
|
||||||
|
}
|
||||||
|
|
||||||
|
#[fixture]
|
||||||
|
async fn closed_pool(#[future] pool: PgPool) -> PgPool {
|
||||||
|
let pool = pool.await;
|
||||||
|
pool.close().await;
|
||||||
|
pool
|
||||||
|
}
|
||||||
|
|
||||||
|
/// This fixture now works perfectly and is also isolated,
|
||||||
|
/// because it depends on the `pool` fixture above. No changes needed here!
|
||||||
|
#[fixture]
|
||||||
|
async fn pool_with_preexisting_table(#[future] pool: PgPool) -> PgPool {
|
||||||
|
let pool = pool.await;
|
||||||
|
let create_customers_req = PostTableDefinitionRequest {
|
||||||
|
profile_name: "default".into(),
|
||||||
|
table_name: "customers".into(),
|
||||||
|
columns: vec![ColumnDefinition {
|
||||||
|
name: "customer_name".into(),
|
||||||
|
field_type: "text".into(),
|
||||||
|
}],
|
||||||
|
indexes: vec!["customer_name".into()],
|
||||||
|
links: vec![],
|
||||||
|
};
|
||||||
|
post_table_definition(&pool, create_customers_req)
|
||||||
|
.await
|
||||||
|
.expect("Failed to create pre-requisite 'customers' table");
|
||||||
|
pool
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
// ========= Helper Functions =========
|
||||||
|
|
||||||
|
/// Checks the PostgreSQL information_schema to verify a table and its columns exist.
///
/// `expected_cols` is a list of `(column_name, data_type_substring)` pairs;
/// the type check is a case-insensitive substring match so that e.g.
/// "numeric" matches "numeric(10,2)". Panics (test failure) on any mismatch.
async fn assert_table_structure_is_correct(
    pool: &PgPool,
    table_name: &str,
    expected_cols: &[(&str, &str)],
) {
    // First confirm the table itself exists inside the 'gen' schema.
    let table_exists = sqlx::query_scalar::<_, bool>(
        "SELECT EXISTS (
            SELECT FROM information_schema.tables
            WHERE table_schema = 'gen' AND table_name = $1
        )",
    )
    .bind(table_name)
    .fetch_one(pool)
    .await
    .unwrap();

    assert!(table_exists, "Table 'gen.{}' was not created", table_name);

    // Then verify each expected column is present with a compatible type.
    for (col_name, col_type) in expected_cols {
        let record = sqlx::query(
            "SELECT data_type FROM information_schema.columns
            WHERE table_schema = 'gen' AND table_name = $1 AND column_name = $2",
        )
        .bind(table_name)
        .bind(col_name)
        .fetch_optional(pool)
        .await
        .unwrap();

        let found_type = record.unwrap_or_else(|| panic!("Column '{}' not found in table '{}'", col_name, table_name)).get::<String, _>("data_type");

        // Handle type mappings, e.g., TEXT -> character varying, NUMERIC -> numeric
        let normalized_found_type = found_type.to_lowercase();
        let normalized_expected_type = col_type.to_lowercase();

        assert!(
            normalized_found_type.contains(&normalized_expected_type),
            "Column '{}' has wrong type. Expected: {}, Found: {}",
            col_name,
            col_type,
            found_type
        );
    }
}
|
||||||
|
|
||||||
|
// ========= Tests =========
|
||||||
|
|
||||||
|
// Happy path: a table with text and decimal columns plus an index is created,
// the generated SQL contains the expected DDL, and the live DB matches.
#[rstest]
#[tokio::test]
async fn test_create_table_success(#[future] pool: PgPool) {
    // Arrange
    let pool = pool.await;
    let request = PostTableDefinitionRequest {
        profile_name: "default".into(),
        table_name: "invoices".into(),
        columns: vec![
            ColumnDefinition {
                name: "invoice_number".into(),
                field_type: "text".into(),
            },
            ColumnDefinition {
                name: "amount".into(),
                field_type: "decimal(10, 2)".into(),
            },
        ],
        indexes: vec!["invoice_number".into()],
        links: vec![],
    };

    // Act
    let response = post_table_definition(&pool, request).await.unwrap();

    // Assert: the returned DDL maps decimal -> NUMERIC and quotes identifiers.
    assert!(response.success);
    assert!(response.sql.contains("CREATE TABLE gen.\"invoices\""));
    assert!(response.sql.contains("\"invoice_number\" TEXT"));
    assert!(response.sql.contains("\"amount\" NUMERIC(10, 2)"));
    assert!(response
        .sql
        .contains("CREATE INDEX \"idx_invoices_invoice_number\""));

    // Verify actual DB state, including the implicit system columns.
    assert_table_structure_is_correct(
        &pool,
        "invoices",
        &[
            ("id", "bigint"),
            ("deleted", "boolean"),
            ("invoice_number", "text"),
            ("amount", "numeric"),
            ("created_at", "timestamp with time zone"),
        ],
    )
    .await;
}
|
||||||
|
|
||||||
|
#[rstest]
|
||||||
|
#[tokio::test]
|
||||||
|
async fn test_fail_on_invalid_decimal_format(#[future] pool: PgPool) {
|
||||||
|
let pool = pool.await;
|
||||||
|
let invalid_types = vec![
|
||||||
|
"decimal(0,0)", // precision too small
|
||||||
|
"decimal(5,10)", // scale > precision
|
||||||
|
"decimal(10)", // missing scale
|
||||||
|
"decimal(a,b)", // non-numeric
|
||||||
|
];
|
||||||
|
|
||||||
|
for invalid_type in invalid_types {
|
||||||
|
let request = PostTableDefinitionRequest {
|
||||||
|
profile_name: "default".into(),
|
||||||
|
table_name: format!("table_{}", invalid_type),
|
||||||
|
columns: vec![ColumnDefinition {
|
||||||
|
name: "amount".into(),
|
||||||
|
field_type: invalid_type.into(),
|
||||||
|
}],
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
|
||||||
|
let result = post_table_definition(&pool, request).await;
|
||||||
|
assert_eq!(result.unwrap_err().code(), Code::InvalidArgument);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// A required link to an existing table must generate a NOT NULL FK column
// named "<linked>_id" plus a supporting index on it.
#[rstest]
#[tokio::test]
async fn test_create_table_with_link(
    #[future] pool_with_preexisting_table: PgPool,
) {
    // Arrange: the fixture guarantees gen."customers" already exists.
    let pool = pool_with_preexisting_table.await;
    let request = PostTableDefinitionRequest {
        profile_name: "default".into(),
        table_name: "orders".into(),
        columns: vec![],
        indexes: vec![],
        links: vec![TableLink { // CORRECTED
            linked_table_name: "customers".into(),
            required: true,
        }],
    };

    // Act
    let response = post_table_definition(&pool, request).await.unwrap();

    // Assert: required link => NOT NULL FK referencing the parent's id.
    assert!(response.success);
    assert!(response.sql.contains(
        "\"customers_id\" BIGINT NOT NULL REFERENCES gen.\"customers\"(id)"
    ));
    assert!(response
        .sql
        .contains("CREATE INDEX \"idx_orders_customers_fk\""));

    // Verify actual DB state
    assert_table_structure_is_correct(
        &pool,
        "orders",
        &[("customers_id", "bigint")],
    )
    .await;
}
|
||||||
|
|
||||||
|
// Creating the same table name twice within one profile must fail with
// AlreadyExists and a specific message.
#[rstest]
#[tokio::test]
async fn test_fail_on_duplicate_table_name(#[future] pool: PgPool) {
    // Arrange
    let pool = pool.await;
    let request = PostTableDefinitionRequest {
        profile_name: "default".into(),
        table_name: "reused_name".into(),
        ..Default::default()
    };
    // Create it once
    post_table_definition(&pool, request.clone()).await.unwrap();

    // Act: Try to create it again
    let result = post_table_definition(&pool, request).await;

    // Assert
    let err = result.unwrap_err();
    assert_eq!(err.code(), Code::AlreadyExists);
    assert_eq!(err.message(), "Table already exists in this profile");
}
|
||||||
|
|
||||||
|
#[rstest]
|
||||||
|
#[tokio::test]
|
||||||
|
async fn test_fail_on_invalid_table_name(#[future] pool: PgPool) {
|
||||||
|
let pool = pool.await;
|
||||||
|
let mut request = PostTableDefinitionRequest {
|
||||||
|
profile_name: "default".into(),
|
||||||
|
table_name: "ends_with_id".into(), // Invalid name
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
|
||||||
|
let result = post_table_definition(&pool, request.clone()).await;
|
||||||
|
assert_eq!(result.unwrap_err().code(), Code::InvalidArgument);
|
||||||
|
|
||||||
|
request.table_name = "deleted".into(); // Reserved name
|
||||||
|
let result = post_table_definition(&pool, request.clone()).await;
|
||||||
|
assert_eq!(result.unwrap_err().code(), Code::InvalidArgument);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Unknown field types must be rejected with InvalidArgument and an
// explanatory "Invalid field type" message.
#[rstest]
#[tokio::test]
async fn test_fail_on_invalid_column_type(#[future] pool: PgPool) {
    // Arrange
    let pool = pool.await;
    let request = PostTableDefinitionRequest {
        profile_name: "default".into(),
        table_name: "bad_col_type".into(),
        columns: vec![ColumnDefinition {
            name: "some_col".into(),
            field_type: "super_string_9000".into(), // Invalid type
        }],
        ..Default::default()
    };

    // Act
    let result = post_table_definition(&pool, request).await;

    // Assert
    let err = result.unwrap_err();
    assert_eq!(err.code(), Code::InvalidArgument);
    assert!(err.message().contains("Invalid field type"));
}
|
||||||
|
|
||||||
|
// An index request must name a column declared in the same request; a
// reference to an undeclared column is InvalidArgument.
#[rstest]
#[tokio::test]
async fn test_fail_on_index_for_nonexistent_column(#[future] pool: PgPool) {
    // Arrange
    let pool = pool.await;
    let request = PostTableDefinitionRequest {
        profile_name: "default".into(),
        table_name: "bad_index".into(),
        columns: vec![ColumnDefinition {
            name: "real_column".into(),
            field_type: "text".into(),
        }],
        indexes: vec!["fake_column".into()], // Index on a column not in the list
        ..Default::default()
    };

    // Act
    let result = post_table_definition(&pool, request).await;

    // Assert
    let err = result.unwrap_err();
    assert_eq!(err.code(), Code::InvalidArgument);
    assert!(err.message().contains("Index column fake_column not found"));
}
|
||||||
|
|
||||||
|
// Linking to a table that does not exist in the profile must fail with
// NotFound (not InvalidArgument — the request shape is fine, the target isn't).
#[rstest]
#[tokio::test]
async fn test_fail_on_link_to_nonexistent_table(#[future] pool: PgPool) {
    // Arrange
    let pool = pool.await;
    let request = PostTableDefinitionRequest {
        profile_name: "default".into(),
        table_name: "bad_link".into(),
        links: vec![TableLink { // CORRECTED
            linked_table_name: "i_do_not_exist".into(),
            required: false,
        }],
        ..Default::default()
    };

    // Act
    let result = post_table_definition(&pool, request).await;

    // Assert
    let err = result.unwrap_err();
    assert_eq!(err.code(), Code::NotFound);
    assert!(err.message().contains("Linked table i_do_not_exist not found"));
}
|
||||||
|
|
||||||
|
// Infrastructure failure path: with a closed pool every query errors, and the
// handler must surface that as Code::Internal.
#[rstest]
#[tokio::test]
async fn test_database_error_on_closed_pool(
    #[future] closed_pool: PgPool,
) {
    // Arrange
    let pool = closed_pool.await;
    let request = PostTableDefinitionRequest {
        profile_name: "default".into(),
        table_name: "wont_be_created".into(),
        ..Default::default()
    };

    // Act
    let result = post_table_definition(&pool, request).await;

    // Assert
    assert_eq!(result.unwrap_err().code(), Code::Internal);
}
|
||||||
|
|
||||||
|
// Tests that minimal, uppercase and whitespace‐padded decimal specs
// are accepted and correctly mapped to NUMERIC(p, s).
#[rstest]
#[tokio::test]
async fn test_valid_decimal_variants(#[future] pool: PgPool) {
    let pool = pool.await;
    // (input spec, expected NUMERIC fragment in the generated DDL)
    let cases = vec![
        ("decimal(1,1)", "NUMERIC(1, 1)"),
        ("decimal(1,0)", "NUMERIC(1, 0)"),
        ("DECIMAL(5,2)", "NUMERIC(5, 2)"),
        ("decimal( 5 , 2 )", "NUMERIC(5, 2)"),
    ];
    for (i, (typ, expect)) in cases.into_iter().enumerate() {
        let request = PostTableDefinitionRequest {
            profile_name: "default".into(),
            // Unique table name per case; tables persist within the fixture's schema.
            table_name: format!("dec_valid_{}", i),
            columns: vec![ColumnDefinition {
                name: "amount".into(),
                field_type: typ.into(),
            }],
            ..Default::default()
        };
        let resp = post_table_definition(&pool, request).await.unwrap();
        assert!(resp.success, "{}", typ);
        assert!(
            resp.sql.contains(expect),
            "expected `{}` to map to {}, got `{}`",
            typ,
            expect,
            resp.sql
        );
    }
}
|
||||||
|
|
||||||
|
// Tests that malformed decimal inputs are rejected with InvalidArgument.
#[rstest]
#[tokio::test]
async fn test_fail_on_malformed_decimal_inputs(#[future] pool: PgPool) {
    let pool = pool.await;
    // Each entry is syntactically broken: no args, empty args, or a missing side.
    let bad = vec!["decimal", "decimal()", "decimal(5,)", "decimal(,2)", "decimal(, )"];
    for (i, typ) in bad.into_iter().enumerate() {
        let request = PostTableDefinitionRequest {
            profile_name: "default".into(),
            table_name: format!("dec_bad_{}", i),
            columns: vec![ColumnDefinition {
                name: "amt".into(),
                field_type: typ.into(),
            }],
            ..Default::default()
        };
        let err = post_table_definition(&pool, request).await.unwrap_err();
        assert_eq!(err.code(), Code::InvalidArgument, "{}", typ);
    }
}
|
||||||
|
|
||||||
|
// Tests that obviously invalid column identifiers are rejected
// (start with digit/underscore, contain space or hyphen, or are empty).
#[rstest]
#[tokio::test]
async fn test_fail_on_invalid_column_names(#[future] pool: PgPool) {
    let pool = pool.await;
    let bad_names = vec!["1col", "_col", "col name", "col-name", ""];
    for name in bad_names {
        // Same table name each iteration is fine: every request fails
        // validation, so no table is ever created.
        let request = PostTableDefinitionRequest {
            profile_name: "default".into(),
            table_name: "tbl_invalid_cols".into(),
            columns: vec![ColumnDefinition {
                name: name.into(),
                field_type: "text".into(),
            }],
            ..Default::default()
        };
        let err = post_table_definition(&pool, request).await.unwrap_err();
        assert_eq!(err.code(), Code::InvalidArgument, "{}", name);
    }
}
|
||||||
|
|
||||||
|
// Tests that a user‐supplied column ending in "_id" is rejected
// to avoid collision with system‐generated FKs.
#[rstest]
#[tokio::test]
async fn test_fail_on_column_name_suffix_id(#[future] pool: PgPool) {
    let pool = pool.await;
    let request = PostTableDefinitionRequest {
        profile_name: "default".into(),
        table_name: "tbl_suffix_id".into(),
        columns: vec![ColumnDefinition {
            name: "user_id".into(),
            field_type: "text".into(),
        }],
        ..Default::default()
    };
    let err = post_table_definition(&pool, request).await.unwrap_err();
    assert_eq!(err.code(), Code::InvalidArgument);
    // The message check is case-insensitive to stay robust to wording tweaks.
    assert!(
        err.message().to_lowercase().contains("invalid column name"),
        "unexpected error message: {}",
        err.message()
    );
}
|
||||||
|
|
||||||
|
include!("post_table_definition_test2.rs");
|
||||||
|
include!("post_table_definition_test3.rs");
|
||||||
|
include!("post_table_definition_test4.rs");
|
||||||
490
server/tests/table_definition/post_table_definition_test2.rs
Normal file
490
server/tests/table_definition/post_table_definition_test2.rs
Normal file
@@ -0,0 +1,490 @@
|
|||||||
|
// ============================================================================
|
||||||
|
// Additional edge‐case tests for PostTableDefinition
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// 1) Field‐type mapping for every predefined key, in various casing.
// Each tuple is (request field_type, expected SQL type in the DDL,
// expected data_type as reported by information_schema).
#[rstest]
#[tokio::test]
async fn test_field_type_mapping_various_casing(#[future] pool: PgPool) {
    let pool = pool.await;
    let cases = vec![
        ("text", "TEXT", "text"),
        ("TEXT", "TEXT", "text"),
        ("TeXt", "TEXT", "text"),
        ("string", "TEXT", "text"),
        ("boolean", "BOOLEAN", "boolean"),
        ("Boolean", "BOOLEAN", "boolean"),
        ("timestamp", "TIMESTAMPTZ", "timestamp with time zone"),
        ("time", "TIMESTAMPTZ", "timestamp with time zone"),
        ("money", "NUMERIC(14, 4)", "numeric"),
        ("integer", "INTEGER", "integer"),
        ("date", "DATE", "date"),
    ];
    for (i, &(input, expected_sql, expected_db)) in cases.iter().enumerate() {
        let tbl = format!("ftm_{}", i);
        let req = PostTableDefinitionRequest {
            profile_name: "default".into(),
            table_name: tbl.clone(),
            columns: vec![ColumnDefinition {
                name: "col".into(),
                field_type: input.into(),
            }],
            ..Default::default()
        };
        let resp = post_table_definition(&pool, req).await.unwrap();
        assert!(
            resp.sql.contains(&format!("\"col\" {}", expected_sql)),
            "field‐type {:?} did not map to {} in `{}`",
            input,
            expected_sql,
            resp.sql
        );
        // Also verify the live catalog, including the system columns.
        assert_table_structure_is_correct(
            &pool,
            &tbl,
            &[
                ("id", "bigint"),
                ("deleted", "boolean"),
                ("col", expected_db),
                ("created_at", "timestamp with time zone"),
            ],
        )
        .await;
    }
}
|
||||||
|
|
||||||
|
// 3) Invalid index names must be rejected.
#[rstest]
#[tokio::test]
async fn test_fail_on_invalid_index_names(#[future] pool: PgPool) {
    let pool = pool.await;
    let bad_idxs = vec!["1col", "_col", "col-name"];
    for idx in bad_idxs {
        let req = PostTableDefinitionRequest {
            profile_name: "default".into(),
            table_name: "idx_bad".into(),
            columns: vec![ColumnDefinition {
                name: "good".into(),
                field_type: "text".into(),
            }],
            indexes: vec![idx.into()],
            ..Default::default()
        };
        let err = post_table_definition(&pool, req).await.unwrap_err();
        assert_eq!(err.code(), Code::InvalidArgument);
        assert!(
            err
                .message()
                .to_lowercase()
                .contains("invalid index name"),
            "{:?} yielded wrong message: {}",
            idx,
            err.message()
        );
    }
}
|
||||||
|
|
||||||
|
// 4) More invalid‐table‐name cases: starts-with digit/underscore or sanitizes to empty.
// Names of only symbols/underscores sanitize down to nothing, hence the
// distinct "cannot be empty" message.
#[rstest]
#[tokio::test]
async fn test_fail_on_more_invalid_table_names(#[future] pool: PgPool) {
    let pool = pool.await;
    let cases = vec![
        ("1tbl", "invalid table name"),
        ("_tbl", "invalid table name"),
        ("!@#$", "cannot be empty"),
        ("__", "cannot be empty"),
    ];
    for (name, expected_msg) in cases {
        let req = PostTableDefinitionRequest {
            profile_name: "default".into(),
            table_name: name.into(),
            ..Default::default()
        };
        let err = post_table_definition(&pool, req).await.unwrap_err();
        assert_eq!(err.code(), Code::InvalidArgument);
        assert!(
            err.message().to_lowercase().contains(expected_msg),
            "{:?} => {}",
            name,
            err.message()
        );
    }
}
|
||||||
|
|
||||||
|
// 5) Name‐sanitization: mixed‐case table names and strip invalid characters.
// "My-Table!123" -> "mytable123", "User Name" -> "username".
#[rstest]
#[tokio::test]
async fn test_name_sanitization(#[future] pool: PgPool) {
    let pool = pool.await;
    let req = PostTableDefinitionRequest {
        profile_name: "default".into(),
        table_name: "My-Table!123".into(),
        columns: vec![ColumnDefinition {
            name: "User Name".into(),
            field_type: "text".into(),
        }],
        ..Default::default()
    };
    let resp = post_table_definition(&pool, req).await.unwrap();
    assert!(
        resp.sql.contains("CREATE TABLE gen.\"mytable123\""),
        "{:?}",
        resp.sql
    );
    assert!(
        resp.sql.contains("\"username\" TEXT"),
        "{:?}",
        resp.sql
    );
    // Confirm the sanitized names actually landed in the catalog.
    assert_table_structure_is_correct(
        &pool,
        "mytable123",
        &[
            ("id", "bigint"),
            ("deleted", "boolean"),
            ("username", "text"),
            ("created_at", "timestamp with time zone"),
        ],
    )
    .await;
}
|
||||||
|
|
||||||
|
// 6) Creating a table with no custom columns, indexes, or links → only system columns.
#[rstest]
#[tokio::test]
async fn test_create_minimal_table(#[future] pool: PgPool) {
    let pool = pool.await;
    let req = PostTableDefinitionRequest {
        profile_name: "default".into(),
        table_name: "minimal".into(),
        ..Default::default()
    };
    let resp = post_table_definition(&pool, req).await.unwrap();
    // The three implicit system columns are always emitted.
    assert!(resp.sql.contains("id BIGSERIAL PRIMARY KEY"));
    assert!(resp.sql.contains("deleted BOOLEAN NOT NULL"));
    assert!(resp.sql.contains("created_at TIMESTAMPTZ"));
    assert_table_structure_is_correct(
        &pool,
        "minimal",
        &[
            ("id", "bigint"),
            ("deleted", "boolean"),
            ("created_at", "timestamp with time zone"),
        ],
    )
    .await;
}
|
||||||
|
|
||||||
|
// 7) Required & optional links: NOT NULL vs NULL.
// Two links on one table: "customers" (required => NOT NULL FK) and
// "suppliers" (optional => nullable FK), verified in both DDL and catalog.
#[rstest]
#[tokio::test]
async fn test_nullable_and_multiple_links(#[future] pool_with_preexisting_table: PgPool) {
    let pool = pool_with_preexisting_table.await;
    // create a second link‐target
    let sup = PostTableDefinitionRequest {
        profile_name: "default".into(),
        table_name: "suppliers".into(),
        columns: vec![ColumnDefinition {
            name: "sup_name".into(),
            field_type: "text".into(),
        }],
        indexes: vec!["sup_name".into()],
        links: vec![],
    };
    post_table_definition(&pool, sup).await.unwrap();

    let req = PostTableDefinitionRequest {
        profile_name: "default".into(),
        table_name: "orders_links".into(),
        columns: vec![],
        indexes: vec![],
        links: vec![
            TableLink {
                linked_table_name: "customers".into(),
                required: true,
            },
            TableLink {
                linked_table_name: "suppliers".into(),
                required: false,
            },
        ],
    };
    let resp = post_table_definition(&pool, req).await.unwrap();
    assert!(
        resp
            .sql
            .contains("\"customers_id\" BIGINT NOT NULL"),
        "{:?}",
        resp.sql
    );
    assert!(
        resp.sql.contains("\"suppliers_id\" BIGINT"),
        "{:?}",
        resp.sql
    );
    // DB‐level nullability for optional FK
    let is_nullable: String = sqlx::query_scalar!(
        "SELECT is_nullable \
        FROM information_schema.columns \
        WHERE table_schema='gen' \
        AND table_name=$1 \
        AND column_name='suppliers_id'",
        "orders_links"
    )
    .fetch_one(&pool)
    .await
    .unwrap()
    .unwrap();
    assert_eq!(is_nullable, "YES");
}
|
||||||
|
|
||||||
|
// 8) Duplicate links in one request → Internal.
// NOTE(review): Internal (not InvalidArgument) suggests the duplicate is only
// caught by the database (duplicate column in the generated DDL), not by
// request validation — confirm whether earlier validation is intended.
#[rstest]
#[tokio::test]
async fn test_fail_on_duplicate_links(#[future] pool_with_preexisting_table: PgPool) {
    let pool = pool_with_preexisting_table.await;
    let req = PostTableDefinitionRequest {
        profile_name: "default".into(),
        table_name: "dup_links".into(),
        columns: vec![],
        indexes: vec![],
        links: vec![
            TableLink {
                linked_table_name: "customers".into(),
                required: true,
            },
            TableLink {
                linked_table_name: "customers".into(),
                required: false,
            },
        ],
    };
    let err = post_table_definition(&pool, req).await.unwrap_err();
    assert_eq!(err.code(), Code::Internal);
}
|
||||||
|
|
||||||
|
// 9) Self‐referential FK: link child back to same‐profile parent.
#[rstest]
#[tokio::test]
async fn test_self_referential_link(#[future] pool: PgPool) {
    let pool = pool.await;
    // Parent first — the link target must exist before the child is defined.
    post_table_definition(
        &pool,
        PostTableDefinitionRequest {
            profile_name: "default".into(),
            table_name: "selfref".into(),
            ..Default::default()
        },
    )
    .await
    .unwrap();
    let resp = post_table_definition(
        &pool,
        PostTableDefinitionRequest {
            profile_name: "default".into(),
            table_name: "selfref_child".into(),
            links: vec![TableLink {
                linked_table_name: "selfref".into(),
                required: true,
            }],
            ..Default::default()
        },
    )
    .await
    .unwrap();
    assert!(
        resp
            .sql
            .contains("\"selfref_id\" BIGINT NOT NULL REFERENCES gen.\"selfref\"(id)"),
        "{:?}",
        resp.sql
    );
}
|
||||||
|
|
||||||
|
// 11) Cross‐profile uniqueness & link isolation.
// The same table name may exist in different profiles, but links may only
// target tables within the caller's own profile.
#[rstest]
#[tokio::test]
async fn test_cross_profile_uniqueness_and_link_isolation(#[future] pool: PgPool) {
    let pool = pool.await;

    // Profile A: foo
    post_table_definition(&pool, PostTableDefinitionRequest {
        profile_name: "A".into(),
        table_name: "foo".into(),
        columns: vec![ColumnDefinition { name: "col".into(), field_type: "text".into() }], // at least one column
        ..Default::default()
    }).await.unwrap();

    // Profile B: foo, bar — "foo" duplicates A's name across profiles, which is allowed.
    post_table_definition(&pool, PostTableDefinitionRequest {
        profile_name: "B".into(),
        table_name: "foo".into(),
        columns: vec![ColumnDefinition { name: "col".into(), field_type: "text".into() }],
        ..Default::default()
    }).await.unwrap();

    post_table_definition(&pool, PostTableDefinitionRequest {
        profile_name: "B".into(),
        table_name: "bar".into(),
        columns: vec![ColumnDefinition { name: "col".into(), field_type: "text".into() }],
        ..Default::default()
    }).await.unwrap();

    // A linking to B.bar → NotFound: "bar" is invisible from profile A.
    let err = post_table_definition(&pool, PostTableDefinitionRequest {
        profile_name: "A".into(),
        table_name: "linker".into(),
        columns: vec![ColumnDefinition { name: "col".into(), field_type: "text".into() }],
        links: vec![TableLink {
            linked_table_name: "bar".into(),
            required: false,
        }],
        ..Default::default()
    }).await.unwrap_err();

    assert_eq!(err.code(), Code::NotFound);
}
|
||||||
|
|
||||||
|
// 12) SQL‐injection attempts are sanitized.
// Hostile identifiers are stripped down to alphanumerics rather than rejected:
// "users; DROP TABLE users;" -> "usersdroptableusers", "col\"; DROP" -> "coldrop".
#[rstest]
#[tokio::test]
async fn test_sql_injection_sanitization(#[future] pool: PgPool) {
    let pool = pool.await;
    let req = PostTableDefinitionRequest {
        profile_name: "default".into(),
        table_name: "users; DROP TABLE users;".into(),
        columns: vec![ColumnDefinition {
            name: "col\"; DROP".into(),
            field_type: "text".into(),
        }],
        ..Default::default()
    };
    let resp = post_table_definition(&pool, req).await.unwrap();
    assert!(
        resp
            .sql
            .contains("CREATE TABLE gen.\"usersdroptableusers\""),
        "{:?}",
        resp.sql
    );
    assert!(
        resp.sql.contains("\"coldrop\" TEXT"),
        "{:?}",
        resp.sql
    );
    assert_table_structure_is_correct(
        &pool,
        "usersdroptableusers",
        &[
            ("id", "bigint"),
            ("deleted", "boolean"),
            ("coldrop", "text"),
            ("created_at", "timestamp with time zone"),
        ],
    )
    .await;
}
|
||||||
|
|
||||||
|
// 13) Reserved‐column shadowing: id, deleted, created_at cannot be user‐defined.
// NOTE(review): the expected code is Internal, i.e. the collision surfaces as a
// duplicate-column DB error rather than upfront validation — confirm intent.
#[rstest]
#[tokio::test]
async fn test_reserved_column_shadowing(#[future] pool: PgPool) {
    let pool = pool.await;
    for col in &["id", "deleted", "created_at"] {
        let req = PostTableDefinitionRequest {
            profile_name: "default".into(),
            table_name: format!("tbl_{}", col),
            columns: vec![ColumnDefinition {
                name: (*col).into(),
                field_type: "text".into(),
            }],
            ..Default::default()
        };
        let err = post_table_definition(&pool, req).await.unwrap_err();
        assert_eq!(err.code(), Code::Internal, "{:?}", col);
    }
}
|
||||||
|
|
||||||
|
// 14) Identifier‐length overflow (>63 chars, Postgres's NAMEDATALEN limit)
// yields InvalidArgument. (Comment previously said "Internal", contradicting
// the assertion below.)
#[rstest]
#[tokio::test]
async fn test_long_identifier_length(#[future] pool: PgPool) {
    let pool = pool.await;
    let long = "a".repeat(64);
    let req = PostTableDefinitionRequest {
        profile_name: "default".into(),
        table_name: long.clone(),
        columns: vec![ColumnDefinition {
            name: long.clone(),
            field_type: "text".into(),
        }],
        ..Default::default()
    };
    let err = post_table_definition(&pool, req).await.unwrap_err();
    assert_eq!(err.code(), Code::InvalidArgument);
}
|
||||||
|
|
||||||
|
// 15) Decimal precision overflow must be caught by our parser.
// A precision far beyond numeric limits should fail request validation
// (InvalidArgument with "invalid precision"), never reach the database.
#[rstest]
#[tokio::test]
async fn test_decimal_precision_overflow(#[future] pool: PgPool) {
    let pool = pool.await;
    let req = PostTableDefinitionRequest {
        profile_name: "default".into(),
        table_name: "dp_overflow".into(),
        columns: vec![ColumnDefinition {
            name: "amount".into(),
            field_type: "decimal(9999999999,1)".into(),
        }],
        ..Default::default()
    };
    let err = post_table_definition(&pool, req).await.unwrap_err();
    assert_eq!(err.code(), Code::InvalidArgument);
    assert!(
        err
            .message()
            .to_lowercase()
            .contains("invalid precision"),
        "{}",
        err.message()
    );
}
|
||||||
|
|
||||||
|
// 16) Repeated profile insertion only creates one profile row.
// Two table definitions under the same (new) profile must upsert, not
// duplicate, the profiles row.
#[rstest]
#[tokio::test]
async fn test_repeated_profile_insertion(#[future] pool: PgPool) {
    let pool = pool.await;
    let prof = "repeat_prof";
    post_table_definition(
        &pool,
        PostTableDefinitionRequest {
            profile_name: prof.into(),
            table_name: "t1".into(),
            ..Default::default()
        },
    )
    .await
    .unwrap();
    post_table_definition(
        &pool,
        PostTableDefinitionRequest {
            profile_name: prof.into(),
            table_name: "t2".into(),
            ..Default::default()
        },
    )
    .await
    .unwrap();

    // COUNT(*) comes back as Option<i64> from the query_scalar! macro,
    // hence the double unwrap.
    let cnt: i64 = sqlx::query_scalar!(
        "SELECT COUNT(*) FROM profiles WHERE name = $1",
        prof
    )
    .fetch_one(&pool)
    .await
    .unwrap()
    .unwrap();
    assert_eq!(cnt, 1);
}
|
||||||
242
server/tests/table_definition/post_table_definition_test3.rs
Normal file
242
server/tests/table_definition/post_table_definition_test3.rs
Normal file
@@ -0,0 +1,242 @@
|
|||||||
|
// tests/table_definition/post_table_definition_test3.rs
|
||||||
|
|
||||||
|
// NOTE: All 'use' statements have been removed from this file.
|
||||||
|
// They are inherited from the parent file that includes this one.
|
||||||
|
|
||||||
|
// ========= Helper Functions for this Test File =========
|
||||||
|
|
||||||
|
/// Checks that a table definition does NOT exist for a given profile and table name.
|
||||||
|
async fn assert_table_definition_does_not_exist(pool: &PgPool, profile_name: &str, table_name: &str) {
|
||||||
|
let count: i64 = sqlx::query_scalar!(
|
||||||
|
"SELECT COUNT(*) FROM table_definitions td
|
||||||
|
JOIN profiles p ON td.profile_id = p.id
|
||||||
|
WHERE p.name = $1 AND td.table_name = $2",
|
||||||
|
profile_name,
|
||||||
|
table_name
|
||||||
|
)
|
||||||
|
.fetch_one(pool)
|
||||||
|
.await
|
||||||
|
.expect("Failed to query for table definition")
|
||||||
|
.unwrap_or(0);
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
count, 0,
|
||||||
|
"Table definition for '{}/{}' was found but should have been rolled back.",
|
||||||
|
profile_name, table_name
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// ========= Category 2: Advanced Identifier and Naming Collisions =========
|
||||||
|
|
||||||
|
#[rstest]
|
||||||
|
#[tokio::test]
|
||||||
|
async fn test_fail_on_column_name_collision_with_fk(
|
||||||
|
#[future] pool_with_preexisting_table: PgPool,
|
||||||
|
) {
|
||||||
|
// Scenario: Create a table that links to 'customers' and also defines its own 'customers_id' column.
|
||||||
|
// Expected: The generated CREATE TABLE will have a duplicate column, causing a database error.
|
||||||
|
let pool = pool_with_preexisting_table.await; // Provides 'customers' table
|
||||||
|
let request = PostTableDefinitionRequest {
|
||||||
|
profile_name: "default".into(),
|
||||||
|
table_name: "orders_collision".into(),
|
||||||
|
columns: vec![ColumnDefinition {
|
||||||
|
name: "customers_id".into(), // This will collide with the generated FK
|
||||||
|
field_type: "integer".into(),
|
||||||
|
}],
|
||||||
|
links: vec![TableLink {
|
||||||
|
linked_table_name: "customers".into(),
|
||||||
|
required: true,
|
||||||
|
}],
|
||||||
|
indexes: vec![],
|
||||||
|
};
|
||||||
|
|
||||||
|
// Act
|
||||||
|
let result = post_table_definition(&pool, request).await;
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
let err = result.unwrap_err();
|
||||||
|
assert_eq!(
|
||||||
|
err.code(),
|
||||||
|
Code::Internal,
|
||||||
|
"Expected Internal error due to duplicate column in CREATE TABLE"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[rstest]
|
||||||
|
#[tokio::test]
|
||||||
|
async fn test_fail_on_duplicate_column_names_in_request(#[future] pool: PgPool) {
|
||||||
|
// Scenario: The request itself contains two columns with the same name.
|
||||||
|
// Expected: Database error on CREATE TABLE with duplicate column definition.
|
||||||
|
let pool = pool.await;
|
||||||
|
let request = PostTableDefinitionRequest {
|
||||||
|
profile_name: "default".into(),
|
||||||
|
table_name: "duplicate_cols".into(),
|
||||||
|
columns: vec![
|
||||||
|
ColumnDefinition {
|
||||||
|
name: "product_name".into(),
|
||||||
|
field_type: "text".into(),
|
||||||
|
},
|
||||||
|
ColumnDefinition {
|
||||||
|
name: "product_name".into(),
|
||||||
|
field_type: "text".into(),
|
||||||
|
},
|
||||||
|
],
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
|
||||||
|
// Act
|
||||||
|
let result = post_table_definition(&pool, request).await;
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
let err = result.unwrap_err();
|
||||||
|
assert_eq!(err.code(), Code::Internal);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[rstest]
|
||||||
|
#[tokio::test]
|
||||||
|
async fn test_link_to_sanitized_table_name(#[future] pool: PgPool) {
|
||||||
|
// Scenario: Test that linking requires using the sanitized name, not the original.
|
||||||
|
let pool = pool.await;
|
||||||
|
let original_name = "My Invoices";
|
||||||
|
let sanitized_name = "myinvoices";
|
||||||
|
|
||||||
|
// 1. Create the table with a name that requires sanitization.
|
||||||
|
let create_req = PostTableDefinitionRequest {
|
||||||
|
profile_name: "default".into(),
|
||||||
|
table_name: original_name.into(),
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
let resp = post_table_definition(&pool, create_req).await.unwrap();
|
||||||
|
assert!(resp.sql.contains(&format!("gen.\"{}\"", sanitized_name)));
|
||||||
|
|
||||||
|
// 2. Attempt to link to the *original* name, which should fail.
|
||||||
|
let link_req_fail = PostTableDefinitionRequest {
|
||||||
|
profile_name: "default".into(),
|
||||||
|
table_name: "payments".into(),
|
||||||
|
links: vec![TableLink {
|
||||||
|
linked_table_name: original_name.into(),
|
||||||
|
required: true,
|
||||||
|
}],
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
let err = post_table_definition(&pool, link_req_fail)
|
||||||
|
.await
|
||||||
|
.unwrap_err();
|
||||||
|
assert_eq!(err.code(), Code::NotFound);
|
||||||
|
assert!(err.message().contains("Linked table My Invoices not found"));
|
||||||
|
|
||||||
|
// 3. Attempt to link to the *sanitized* name, which should succeed.
|
||||||
|
let link_req_success = PostTableDefinitionRequest {
|
||||||
|
profile_name: "default".into(),
|
||||||
|
table_name: "payments_sanitized".into(),
|
||||||
|
links: vec![TableLink {
|
||||||
|
linked_table_name: sanitized_name.into(),
|
||||||
|
required: true,
|
||||||
|
}],
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
let success_resp = post_table_definition(&pool, link_req_success).await.unwrap();
|
||||||
|
assert!(success_resp.success);
|
||||||
|
assert!(success_resp
|
||||||
|
.sql
|
||||||
|
.contains(&format!("REFERENCES gen.\"{}\"(id)", sanitized_name)));
|
||||||
|
}
|
||||||
|
|
||||||
|
// ========= Category 3: Complex Link and Profile Logic =========
|
||||||
|
|
||||||
|
#[rstest]
|
||||||
|
#[tokio::test]
|
||||||
|
async fn test_fail_on_true_self_referential_link(#[future] pool: PgPool) {
|
||||||
|
// Scenario: A table attempts to link to itself in the same request.
|
||||||
|
// Expected: NotFound, because the table definition doesn't exist yet at link-check time.
|
||||||
|
let pool = pool.await;
|
||||||
|
let request = PostTableDefinitionRequest {
|
||||||
|
profile_name: "default".into(),
|
||||||
|
table_name: "employees".into(),
|
||||||
|
links: vec![TableLink {
|
||||||
|
linked_table_name: "employees".into(), // Self-reference
|
||||||
|
required: false, // For a manager_id FK
|
||||||
|
}],
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
|
||||||
|
// Act
|
||||||
|
let result = post_table_definition(&pool, request).await;
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
let err = result.unwrap_err();
|
||||||
|
assert_eq!(err.code(), Code::NotFound);
|
||||||
|
assert!(err.message().contains("Linked table employees not found"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[rstest]
|
||||||
|
#[tokio::test]
|
||||||
|
async fn test_behavior_on_empty_profile_name(#[future] pool: PgPool) {
|
||||||
|
// Scenario: Attempt to create a table with an empty profile name.
|
||||||
|
// Expected: This should be rejected by input validation.
|
||||||
|
let pool = pool.await;
|
||||||
|
let request = PostTableDefinitionRequest {
|
||||||
|
profile_name: "".into(),
|
||||||
|
table_name: "table_in_empty_profile".into(),
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
// Act
|
||||||
|
let result = post_table_definition(&pool, request).await;
|
||||||
|
// Assert
|
||||||
|
let err = result.unwrap_err();
|
||||||
|
assert_eq!(
|
||||||
|
err.code(),
|
||||||
|
Code::InvalidArgument, // Changed from Internal
|
||||||
|
"Expected InvalidArgument error from input validation"
|
||||||
|
);
|
||||||
|
assert!(
|
||||||
|
err.message().contains("Profile name cannot be empty"), // Updated message
|
||||||
|
"Unexpected error message: {}",
|
||||||
|
err.message()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// ========= Category 4: Concurrency =========
|
||||||
|
|
||||||
|
#[rstest]
|
||||||
|
#[tokio::test]
|
||||||
|
#[ignore = "Concurrency tests can be flaky and require careful setup"]
|
||||||
|
async fn test_race_condition_on_table_creation(#[future] pool: PgPool) {
|
||||||
|
// Scenario: Two requests try to create the exact same table at the same time.
|
||||||
|
// Expected: One succeeds, the other fails with AlreadyExists.
|
||||||
|
let pool = pool.await;
|
||||||
|
let request1 = PostTableDefinitionRequest {
|
||||||
|
profile_name: "concurrent_profile".into(),
|
||||||
|
table_name: "racy_table".into(),
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
let request2 = request1.clone();
|
||||||
|
|
||||||
|
let pool1 = pool.clone();
|
||||||
|
let pool2 = pool.clone();
|
||||||
|
|
||||||
|
// Act
|
||||||
|
let (res1, res2) = tokio::join!(
|
||||||
|
post_table_definition(&pool1, request1),
|
||||||
|
post_table_definition(&pool2, request2)
|
||||||
|
);
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
let results = vec![res1, res2];
|
||||||
|
let success_count = results.iter().filter(|r| r.is_ok()).count();
|
||||||
|
let failure_count = results.iter().filter(|r| r.is_err()).count();
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
success_count, 1,
|
||||||
|
"Exactly one request should succeed"
|
||||||
|
);
|
||||||
|
assert_eq!(failure_count, 1, "Exactly one request should fail");
|
||||||
|
|
||||||
|
let err = results
|
||||||
|
.into_iter()
|
||||||
|
.find(|r| r.is_err())
|
||||||
|
.unwrap()
|
||||||
|
.unwrap_err();
|
||||||
|
assert_eq!(err.code(), Code::AlreadyExists);
|
||||||
|
assert_eq!(err.message(), "Table already exists in this profile");
|
||||||
|
}
|
||||||
222
server/tests/table_definition/post_table_definition_test4.rs
Normal file
222
server/tests/table_definition/post_table_definition_test4.rs
Normal file
@@ -0,0 +1,222 @@
|
|||||||
|
// tests/table_definition/post_table_definition_test4.rs
|
||||||
|
|
||||||
|
// NOTE: All 'use' statements are inherited from the parent file that includes this one.
|
||||||
|
|
||||||
|
// ========= Category 5: Implementation-Specific Edge Cases =========
|
||||||
|
|
||||||
|
#[rstest]
|
||||||
|
#[tokio::test]
|
||||||
|
async fn test_fail_on_fk_base_name_collision(#[future] pool: PgPool) {
|
||||||
|
// Scenario: Link to two tables (`team1_users`, `team2_users`) that both have a
|
||||||
|
// base name of "users". This should cause a duplicate "users_id" column in the
|
||||||
|
// generated SQL.
|
||||||
|
let pool = pool.await;
|
||||||
|
|
||||||
|
// Arrange: Create the two prerequisite tables
|
||||||
|
let req1 = PostTableDefinitionRequest {
|
||||||
|
profile_name: "default".into(),
|
||||||
|
table_name: "team1_users".into(),
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
post_table_definition(&pool, req1).await.unwrap();
|
||||||
|
|
||||||
|
let req2 = PostTableDefinitionRequest {
|
||||||
|
profile_name: "default".into(),
|
||||||
|
table_name: "team2_users".into(),
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
post_table_definition(&pool, req2).await.unwrap();
|
||||||
|
|
||||||
|
// Arrange: A request that links to both, causing the collision
|
||||||
|
let colliding_req = PostTableDefinitionRequest {
|
||||||
|
profile_name: "default".into(),
|
||||||
|
table_name: "tasks".into(),
|
||||||
|
links: vec![
|
||||||
|
TableLink {
|
||||||
|
linked_table_name: "team1_users".into(),
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
TableLink {
|
||||||
|
linked_table_name: "team2_users".into(),
|
||||||
|
required: false,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
|
||||||
|
// Act
|
||||||
|
let result = post_table_definition(&pool, colliding_req).await;
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
let err = result.unwrap_err();
|
||||||
|
assert_eq!(
|
||||||
|
err.code(),
|
||||||
|
Code::Internal,
|
||||||
|
"Expected Internal error from duplicate column in CREATE TABLE"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
#[rstest]
|
||||||
|
#[tokio::test]
|
||||||
|
async fn test_sql_reserved_keywords_as_identifiers_are_allowed(#[future] pool: PgPool) {
|
||||||
|
// NOTE: This test confirms that the system currently allows SQL reserved keywords
|
||||||
|
// as column names because they are correctly quoted. This is technically correct,
|
||||||
|
// but some systems add validation to block this as a policy to prevent user confusion.
|
||||||
|
let pool = pool.await;
|
||||||
|
let keywords = vec!["user", "select", "group", "order"];
|
||||||
|
|
||||||
|
for (i, keyword) in keywords.into_iter().enumerate() {
|
||||||
|
let table_name = format!("keyword_test_{}", i);
|
||||||
|
let request = PostTableDefinitionRequest {
|
||||||
|
profile_name: "default".into(),
|
||||||
|
table_name: table_name.clone(),
|
||||||
|
columns: vec![ColumnDefinition {
|
||||||
|
name: keyword.into(),
|
||||||
|
field_type: "text".into(),
|
||||||
|
}],
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
|
||||||
|
// Act & Assert
|
||||||
|
let response = post_table_definition(&pool, request)
|
||||||
|
.await
|
||||||
|
.unwrap_or_else(|e| {
|
||||||
|
panic!(
|
||||||
|
"Failed to create table with reserved keyword '{}': {:?}",
|
||||||
|
keyword, e
|
||||||
|
)
|
||||||
|
});
|
||||||
|
|
||||||
|
assert!(response.success);
|
||||||
|
assert!(response.sql.contains(&format!("\"{}\" TEXT", keyword)));
|
||||||
|
|
||||||
|
assert_table_structure_is_correct(&pool, &table_name, &[(keyword, "text")]).await;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ========= Category 6: Environmental and Extreme Edge Cases =========
|
||||||
|
|
||||||
|
#[rstest]
|
||||||
|
#[tokio::test]
|
||||||
|
async fn test_sanitization_of_unicode_and_special_chars(#[future] pool: PgPool) {
|
||||||
|
// Scenario: Use identifiers with characters that should be stripped by sanitization,
|
||||||
|
// including multi-byte unicode (emoji) and a null byte.
|
||||||
|
let pool = pool.await;
|
||||||
|
let request = PostTableDefinitionRequest {
|
||||||
|
profile_name: "default".into(),
|
||||||
|
table_name: "produits_😂".into(), // Should become "produits_"
|
||||||
|
columns: vec![ColumnDefinition {
|
||||||
|
name: "col\0with_null".into(), // Should become "colwith_null"
|
||||||
|
field_type: "text".into(),
|
||||||
|
}],
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
|
||||||
|
// Act
|
||||||
|
let response = post_table_definition(&pool, request).await.unwrap();
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
assert!(response.success);
|
||||||
|
|
||||||
|
// Assert that the generated SQL contains the SANITIZED names
|
||||||
|
assert!(response.sql.contains("CREATE TABLE gen.\"produits_\""));
|
||||||
|
assert!(response.sql.contains("\"colwith_null\" TEXT"));
|
||||||
|
|
||||||
|
// Verify the actual structure in the database
|
||||||
|
assert_table_structure_is_correct(&pool, "produits_", &[("colwith_null", "text")]).await;
|
||||||
|
}
|
||||||
|
|
||||||
|
#[rstest]
|
||||||
|
#[tokio::test]
|
||||||
|
async fn test_fail_gracefully_if_schema_is_missing(#[future] pool: PgPool) {
|
||||||
|
// Scenario: The handler relies on the 'gen' schema existing. This test ensures
|
||||||
|
// it fails gracefully if that assumption is broken.
|
||||||
|
let pool = pool.await;
|
||||||
|
|
||||||
|
// Arrange: Drop the schema that the handler needs
|
||||||
|
sqlx::query("DROP SCHEMA gen CASCADE;")
|
||||||
|
.execute(&pool)
|
||||||
|
.await
|
||||||
|
.expect("Failed to drop 'gen' schema for test setup");
|
||||||
|
|
||||||
|
let request = PostTableDefinitionRequest {
|
||||||
|
profile_name: "default".into(),
|
||||||
|
table_name: "this_will_fail".into(),
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
|
||||||
|
// Act
|
||||||
|
let result = post_table_definition(&pool, request).await;
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
let err = result.unwrap_err();
|
||||||
|
assert_eq!(err.code(), Code::Internal);
|
||||||
|
// Check for the Postgres error message for a missing schema.
|
||||||
|
assert!(err.message().to_lowercase().contains("schema \"gen\" does not exist"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[rstest]
|
||||||
|
#[tokio::test]
|
||||||
|
async fn test_column_name_with_id_suffix_is_rejected(#[future] pool: PgPool) {
|
||||||
|
// Test that column names ending with '_id' are properly rejected during input validation
|
||||||
|
let pool = pool.await;
|
||||||
|
|
||||||
|
// Test 1: Column ending with '_id' should be rejected
|
||||||
|
let request = PostTableDefinitionRequest {
|
||||||
|
profile_name: "default".into(),
|
||||||
|
table_name: "orders".into(), // Valid table name
|
||||||
|
columns: vec![ColumnDefinition {
|
||||||
|
name: "legacy_order_id".into(), // This should be rejected
|
||||||
|
field_type: "integer".into(),
|
||||||
|
}],
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
|
||||||
|
// Act & Assert - should fail validation
|
||||||
|
let result = post_table_definition(&pool, request).await;
|
||||||
|
assert!(result.is_err(), "Column names ending with '_id' should be rejected");
|
||||||
|
if let Err(status) = result {
|
||||||
|
assert_eq!(status.code(), tonic::Code::InvalidArgument);
|
||||||
|
assert!(status.message().contains("Invalid column name"));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Test 2: Column named exactly 'id' should be rejected
|
||||||
|
let request2 = PostTableDefinitionRequest {
|
||||||
|
profile_name: "default".into(),
|
||||||
|
table_name: "orders".into(),
|
||||||
|
columns: vec![ColumnDefinition {
|
||||||
|
name: "id".into(), // This should be rejected
|
||||||
|
field_type: "integer".into(),
|
||||||
|
}],
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
|
||||||
|
let result2 = post_table_definition(&pool, request2).await;
|
||||||
|
assert!(result2.is_err(), "Column named 'id' should be rejected");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[rstest]
|
||||||
|
#[tokio::test]
|
||||||
|
async fn test_table_name_with_id_suffix_is_rejected(#[future] pool: PgPool) {
|
||||||
|
// Test that table names ending with '_id' are properly rejected during input validation
|
||||||
|
let pool = pool.await;
|
||||||
|
|
||||||
|
let request = PostTableDefinitionRequest {
|
||||||
|
profile_name: "default".into(),
|
||||||
|
table_name: "orders_id".into(), // This should be rejected
|
||||||
|
columns: vec![ColumnDefinition {
|
||||||
|
name: "customer_name".into(), // Valid column name
|
||||||
|
field_type: "text".into(),
|
||||||
|
}],
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
|
||||||
|
// Act & Assert - should fail validation
|
||||||
|
let result = post_table_definition(&pool, request).await;
|
||||||
|
assert!(result.is_err(), "Table names ending with '_id' should be rejected");
|
||||||
|
if let Err(status) = result {
|
||||||
|
assert_eq!(status.code(), tonic::Code::InvalidArgument);
|
||||||
|
assert!(status.message().contains("Table name cannot be 'id', 'deleted', 'created_at' or end with '_id'"));
|
||||||
|
}
|
||||||
|
}
|
||||||
Reference in New Issue
Block a user