Compare commits
80 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
625c9b3e09 | ||
|
|
e20623ed53 | ||
|
|
aa9adf7348 | ||
|
|
2e82aba0d1 | ||
|
|
b7a3f0f8d9 | ||
|
|
38c82389f7 | ||
|
|
cb0a2bee17 | ||
|
|
dc99131794 | ||
|
|
5c23f61a10 | ||
|
|
f87e3c03cb | ||
|
|
d346670839 | ||
|
|
560d8b7234 | ||
|
|
b297c2b311 | ||
|
|
d390c567d5 | ||
|
|
029e614b9c | ||
|
|
f9a78e4eec | ||
|
|
d8758f7531 | ||
|
|
4e86ecff84 | ||
|
|
070d091e07 | ||
|
|
7403b3c3f8 | ||
|
|
1b1e7b7205 | ||
|
|
1b8f19f1ce | ||
|
|
2a14eadf34 | ||
|
|
fd36cd5795 | ||
|
|
f4286ac3c9 | ||
|
|
92d5eb4844 | ||
|
|
87b9f6ab87 | ||
|
|
06d98aab5c | ||
|
|
298f56a53c | ||
|
|
714a5f2f1c | ||
|
|
4e29d0084f | ||
|
|
63f1b4da2e | ||
|
|
9477f53432 | ||
|
|
ed786f087c | ||
|
|
8e22ea05ff | ||
|
|
8414657224 | ||
|
|
e25213ed1b | ||
|
|
4843b0778c | ||
|
|
f5fae98c69 | ||
|
|
6faf0a4a31 | ||
|
|
011fafc0ff | ||
|
|
8ebe74484c | ||
|
|
3eb9523103 | ||
|
|
3dfa922b9e | ||
|
|
248d54a30f | ||
|
|
b30fef4ccd | ||
|
|
a9c4527318 | ||
|
|
c31f08d5b8 | ||
|
|
9e0fa9ddb1 | ||
|
|
8fcd28832d | ||
|
|
cccf029464 | ||
|
|
512e7fb9e7 | ||
|
|
0e69df8282 | ||
|
|
eb5532c200 | ||
|
|
49ed1dfe33 | ||
|
|
62d1c3f7f5 | ||
|
|
b49dce3334 | ||
|
|
8ace9bc4d1 | ||
|
|
ce490007ed | ||
|
|
eb96c64e26 | ||
|
|
2ac96a8486 | ||
|
|
b8e6cc22af | ||
|
|
634a01f618 | ||
|
|
6abea062ba | ||
|
|
f50887a326 | ||
|
|
3c0af05a3c | ||
|
|
c9131d4457 | ||
|
|
2af79a3ef2 | ||
|
|
afd9228efa | ||
|
|
495d77fda5 | ||
|
|
679bb3b6ab | ||
|
|
350c522d19 | ||
|
|
4760f42589 | ||
|
|
50d15e321f | ||
|
|
a3e7fd8f0a | ||
|
|
645172747a | ||
|
|
7c4ac1eebc | ||
|
|
4b4301ad49 | ||
|
|
b60e03eb70 | ||
|
|
2c7bda3ff1 |
2
.gitignore
vendored
2
.gitignore
vendored
@@ -1,2 +1,4 @@
|
|||||||
/target
|
/target
|
||||||
.env
|
.env
|
||||||
|
/tantivy_indexes
|
||||||
|
server/tantivy_indexes
|
||||||
|
|||||||
702
Cargo.lock
generated
702
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
28
Cargo.toml
28
Cargo.toml
@@ -1,5 +1,5 @@
|
|||||||
[workspace]
|
[workspace]
|
||||||
members = ["client", "server", "common"]
|
members = ["client", "server", "common", "search"]
|
||||||
resolver = "2"
|
resolver = "2"
|
||||||
|
|
||||||
[workspace.package]
|
[workspace.package]
|
||||||
@@ -16,4 +16,28 @@ categories = ["command-line-interface"]
|
|||||||
|
|
||||||
# [workspace.metadata]
|
# [workspace.metadata]
|
||||||
# TODO:
|
# TODO:
|
||||||
# documentation = "https://docs.rs/accounting-client"`
|
# documentation = "https://docs.rs/accounting-client"
|
||||||
|
|
||||||
|
[workspace.dependencies]
|
||||||
|
# Async and gRPC
|
||||||
|
tokio = { version = "1.44.2", features = ["full"] }
|
||||||
|
tonic = "0.13.0"
|
||||||
|
prost = "0.13.5"
|
||||||
|
async-trait = "0.1.88"
|
||||||
|
prost-types = "0.13.0"
|
||||||
|
|
||||||
|
# Data Handling & Serialization
|
||||||
|
serde = { version = "1.0.219", features = ["derive"] }
|
||||||
|
serde_json = "1.0.140"
|
||||||
|
time = "0.3.41"
|
||||||
|
|
||||||
|
# Utilities & Error Handling
|
||||||
|
anyhow = "1.0.98"
|
||||||
|
dotenvy = "0.15.7"
|
||||||
|
lazy_static = "1.5.0"
|
||||||
|
tracing = "0.1.41"
|
||||||
|
|
||||||
|
# Search crate
|
||||||
|
tantivy = "0.24.1"
|
||||||
|
|
||||||
|
common = { path = "./common" }
|
||||||
|
|||||||
@@ -9,6 +9,7 @@ anyhow = "1.0.98"
|
|||||||
async-trait = "0.1.88"
|
async-trait = "0.1.88"
|
||||||
common = { path = "../common" }
|
common = { path = "../common" }
|
||||||
|
|
||||||
|
prost-types = { workspace = true }
|
||||||
crossterm = "0.28.1"
|
crossterm = "0.28.1"
|
||||||
dirs = "6.0.0"
|
dirs = "6.0.0"
|
||||||
dotenvy = "0.15.7"
|
dotenvy = "0.15.7"
|
||||||
|
|||||||
@@ -17,6 +17,7 @@ toggle_buffer_list = ["ctrl+b"]
|
|||||||
next_field = ["Tab"]
|
next_field = ["Tab"]
|
||||||
prev_field = ["Shift+Tab"]
|
prev_field = ["Shift+Tab"]
|
||||||
exit_table_scroll = ["esc"]
|
exit_table_scroll = ["esc"]
|
||||||
|
open_search = ["ctrl+f"]
|
||||||
|
|
||||||
[keybindings.common]
|
[keybindings.common]
|
||||||
save = ["ctrl+s"]
|
save = ["ctrl+s"]
|
||||||
@@ -69,10 +70,11 @@ prev_field = ["shift+enter"]
|
|||||||
exit = ["esc", "ctrl+e"]
|
exit = ["esc", "ctrl+e"]
|
||||||
delete_char_forward = ["delete"]
|
delete_char_forward = ["delete"]
|
||||||
delete_char_backward = ["backspace"]
|
delete_char_backward = ["backspace"]
|
||||||
move_left = ["left"]
|
move_left = [""]
|
||||||
move_right = ["right"]
|
move_right = ["right"]
|
||||||
suggestion_down = ["ctrl+n", "tab"]
|
suggestion_down = ["ctrl+n", "tab"]
|
||||||
suggestion_up = ["ctrl+p", "shift+tab"]
|
suggestion_up = ["ctrl+p", "shift+tab"]
|
||||||
|
trigger_autocomplete = ["left"]
|
||||||
|
|
||||||
[keybindings.command]
|
[keybindings.command]
|
||||||
exit_command_mode = ["ctrl+g", "esc"]
|
exit_command_mode = ["ctrl+g", "esc"]
|
||||||
|
|||||||
@@ -5,6 +5,7 @@ pub mod text_editor;
|
|||||||
pub mod background;
|
pub mod background;
|
||||||
pub mod dialog;
|
pub mod dialog;
|
||||||
pub mod autocomplete;
|
pub mod autocomplete;
|
||||||
|
pub mod search_palette;
|
||||||
pub mod find_file_palette;
|
pub mod find_file_palette;
|
||||||
|
|
||||||
pub use command_line::*;
|
pub use command_line::*;
|
||||||
@@ -13,4 +14,5 @@ pub use text_editor::*;
|
|||||||
pub use background::*;
|
pub use background::*;
|
||||||
pub use dialog::*;
|
pub use dialog::*;
|
||||||
pub use autocomplete::*;
|
pub use autocomplete::*;
|
||||||
|
pub use search_palette::*;
|
||||||
pub use find_file_palette::*;
|
pub use find_file_palette::*;
|
||||||
|
|||||||
@@ -1,6 +1,8 @@
|
|||||||
// src/components/common/autocomplete.rs
|
// src/components/common/autocomplete.rs
|
||||||
|
|
||||||
use crate::config::colors::themes::Theme;
|
use crate::config::colors::themes::Theme;
|
||||||
|
use crate::state::pages::form::FormState;
|
||||||
|
use common::proto::multieko2::search::search_response::Hit;
|
||||||
use ratatui::{
|
use ratatui::{
|
||||||
layout::Rect,
|
layout::Rect,
|
||||||
style::{Color, Modifier, Style},
|
style::{Color, Modifier, Style},
|
||||||
@@ -9,7 +11,8 @@ use ratatui::{
|
|||||||
};
|
};
|
||||||
use unicode_width::UnicodeWidthStr;
|
use unicode_width::UnicodeWidthStr;
|
||||||
|
|
||||||
/// Renders an opaque dropdown list for autocomplete suggestions.
|
/// Renders an opaque dropdown list for simple string-based suggestions.
|
||||||
|
/// THIS IS THE RESTORED FUNCTION.
|
||||||
pub fn render_autocomplete_dropdown(
|
pub fn render_autocomplete_dropdown(
|
||||||
f: &mut Frame,
|
f: &mut Frame,
|
||||||
input_rect: Rect,
|
input_rect: Rect,
|
||||||
@@ -21,39 +24,32 @@ pub fn render_autocomplete_dropdown(
|
|||||||
if suggestions.is_empty() {
|
if suggestions.is_empty() {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
// --- Calculate Dropdown Size & Position ---
|
let max_suggestion_width =
|
||||||
let max_suggestion_width = suggestions.iter().map(|s| s.width()).max().unwrap_or(0) as u16;
|
suggestions.iter().map(|s| s.width()).max().unwrap_or(0) as u16;
|
||||||
let horizontal_padding: u16 = 2;
|
let horizontal_padding: u16 = 2;
|
||||||
let dropdown_width = (max_suggestion_width + horizontal_padding).max(10);
|
let dropdown_width = (max_suggestion_width + horizontal_padding).max(10);
|
||||||
let dropdown_height = (suggestions.len() as u16).min(5);
|
let dropdown_height = (suggestions.len() as u16).min(5);
|
||||||
|
|
||||||
let mut dropdown_area = Rect {
|
let mut dropdown_area = Rect {
|
||||||
x: input_rect.x, // Align horizontally with input
|
x: input_rect.x,
|
||||||
y: input_rect.y + 1, // Position directly below input
|
y: input_rect.y + 1,
|
||||||
width: dropdown_width,
|
width: dropdown_width,
|
||||||
height: dropdown_height,
|
height: dropdown_height,
|
||||||
};
|
};
|
||||||
|
|
||||||
// --- Clamping Logic (prevent rendering off-screen) ---
|
|
||||||
// Clamp vertically (if it goes below the frame)
|
|
||||||
if dropdown_area.bottom() > frame_area.height {
|
if dropdown_area.bottom() > frame_area.height {
|
||||||
dropdown_area.y = input_rect.y.saturating_sub(dropdown_height); // Try rendering above
|
dropdown_area.y = input_rect.y.saturating_sub(dropdown_height);
|
||||||
}
|
}
|
||||||
// Clamp horizontally (if it goes past the right edge)
|
|
||||||
if dropdown_area.right() > frame_area.width {
|
if dropdown_area.right() > frame_area.width {
|
||||||
dropdown_area.x = frame_area.width.saturating_sub(dropdown_width);
|
dropdown_area.x = frame_area.width.saturating_sub(dropdown_width);
|
||||||
}
|
}
|
||||||
// Ensure x is not negative (if clamping pushes it left)
|
|
||||||
dropdown_area.x = dropdown_area.x.max(0);
|
dropdown_area.x = dropdown_area.x.max(0);
|
||||||
// Ensure y is not negative (if clamping pushes it up)
|
|
||||||
dropdown_area.y = dropdown_area.y.max(0);
|
dropdown_area.y = dropdown_area.y.max(0);
|
||||||
// --- End Clamping ---
|
|
||||||
|
|
||||||
// Render a solid background block first to ensure opacity
|
let background_block =
|
||||||
let background_block = Block::default().style(Style::default().bg(Color::DarkGray));
|
Block::default().style(Style::default().bg(Color::DarkGray));
|
||||||
f.render_widget(background_block, dropdown_area);
|
f.render_widget(background_block, dropdown_area);
|
||||||
|
|
||||||
// Create list items, ensuring each has a defined background
|
|
||||||
let items: Vec<ListItem> = suggestions
|
let items: Vec<ListItem> = suggestions
|
||||||
.iter()
|
.iter()
|
||||||
.enumerate()
|
.enumerate()
|
||||||
@@ -61,30 +57,97 @@ pub fn render_autocomplete_dropdown(
|
|||||||
let is_selected = selected_index == Some(i);
|
let is_selected = selected_index == Some(i);
|
||||||
let s_width = s.width() as u16;
|
let s_width = s.width() as u16;
|
||||||
let padding_needed = dropdown_width.saturating_sub(s_width);
|
let padding_needed = dropdown_width.saturating_sub(s_width);
|
||||||
let padded_s = format!("{}{}", s, " ".repeat(padding_needed as usize));
|
let padded_s =
|
||||||
|
format!("{}{}", s, " ".repeat(padding_needed as usize));
|
||||||
|
|
||||||
ListItem::new(padded_s).style(if is_selected {
|
ListItem::new(padded_s).style(if is_selected {
|
||||||
Style::default()
|
Style::default()
|
||||||
.fg(theme.bg) // Text color on highlight
|
.fg(theme.bg)
|
||||||
.bg(theme.highlight) // Highlight background
|
.bg(theme.highlight)
|
||||||
.add_modifier(Modifier::BOLD)
|
.add_modifier(Modifier::BOLD)
|
||||||
} else {
|
} else {
|
||||||
// Style for non-selected items (matching background block)
|
Style::default().fg(theme.fg).bg(Color::DarkGray)
|
||||||
Style::default()
|
|
||||||
.fg(theme.fg) // Text color on gray
|
|
||||||
.bg(Color::DarkGray) // Explicit gray background
|
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
// Create the list widget (without its own block)
|
|
||||||
let list = List::new(items);
|
let list = List::new(items);
|
||||||
|
let mut list_state = ListState::default();
|
||||||
|
list_state.select(selected_index);
|
||||||
|
|
||||||
// State for managing selection highlight (still needed for logic)
|
f.render_stateful_widget(list, dropdown_area, &mut list_state);
|
||||||
let mut profile_list_state = ListState::default();
|
|
||||||
profile_list_state.select(selected_index);
|
|
||||||
|
|
||||||
// Render the list statefully *over* the background block
|
|
||||||
f.render_stateful_widget(list, dropdown_area, &mut profile_list_state);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Renders an opaque dropdown list for rich `Hit`-based suggestions.
|
||||||
|
/// RENAMED from render_rich_autocomplete_dropdown
|
||||||
|
pub fn render_hit_autocomplete_dropdown(
|
||||||
|
f: &mut Frame,
|
||||||
|
input_rect: Rect,
|
||||||
|
frame_area: Rect,
|
||||||
|
theme: &Theme,
|
||||||
|
suggestions: &[Hit],
|
||||||
|
selected_index: Option<usize>,
|
||||||
|
form_state: &FormState,
|
||||||
|
) {
|
||||||
|
if suggestions.is_empty() {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
let display_names: Vec<String> = suggestions
|
||||||
|
.iter()
|
||||||
|
.map(|hit| form_state.get_display_name_for_hit(hit))
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
let max_suggestion_width =
|
||||||
|
display_names.iter().map(|s| s.width()).max().unwrap_or(0) as u16;
|
||||||
|
let horizontal_padding: u16 = 2;
|
||||||
|
let dropdown_width = (max_suggestion_width + horizontal_padding).max(10);
|
||||||
|
let dropdown_height = (suggestions.len() as u16).min(5);
|
||||||
|
|
||||||
|
let mut dropdown_area = Rect {
|
||||||
|
x: input_rect.x,
|
||||||
|
y: input_rect.y + 1,
|
||||||
|
width: dropdown_width,
|
||||||
|
height: dropdown_height,
|
||||||
|
};
|
||||||
|
|
||||||
|
if dropdown_area.bottom() > frame_area.height {
|
||||||
|
dropdown_area.y = input_rect.y.saturating_sub(dropdown_height);
|
||||||
|
}
|
||||||
|
if dropdown_area.right() > frame_area.width {
|
||||||
|
dropdown_area.x = frame_area.width.saturating_sub(dropdown_width);
|
||||||
|
}
|
||||||
|
dropdown_area.x = dropdown_area.x.max(0);
|
||||||
|
dropdown_area.y = dropdown_area.y.max(0);
|
||||||
|
|
||||||
|
let background_block =
|
||||||
|
Block::default().style(Style::default().bg(Color::DarkGray));
|
||||||
|
f.render_widget(background_block, dropdown_area);
|
||||||
|
|
||||||
|
let items: Vec<ListItem> = display_names
|
||||||
|
.iter()
|
||||||
|
.enumerate()
|
||||||
|
.map(|(i, s)| {
|
||||||
|
let is_selected = selected_index == Some(i);
|
||||||
|
let s_width = s.width() as u16;
|
||||||
|
let padding_needed = dropdown_width.saturating_sub(s_width);
|
||||||
|
let padded_s =
|
||||||
|
format!("{}{}", s, " ".repeat(padding_needed as usize));
|
||||||
|
|
||||||
|
ListItem::new(padded_s).style(if is_selected {
|
||||||
|
Style::default()
|
||||||
|
.fg(theme.bg)
|
||||||
|
.bg(theme.highlight)
|
||||||
|
.add_modifier(Modifier::BOLD)
|
||||||
|
} else {
|
||||||
|
Style::default().fg(theme.fg).bg(Color::DarkGray)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
let list = List::new(items);
|
||||||
|
let mut list_state = ListState::default();
|
||||||
|
list_state.select(selected_index);
|
||||||
|
|
||||||
|
f.render_stateful_widget(list, dropdown_area, &mut list_state);
|
||||||
|
}
|
||||||
|
|||||||
121
client/src/components/common/search_palette.rs
Normal file
121
client/src/components/common/search_palette.rs
Normal file
@@ -0,0 +1,121 @@
|
|||||||
|
// src/components/common/search_palette.rs
|
||||||
|
|
||||||
|
use crate::config::colors::themes::Theme;
|
||||||
|
use crate::state::app::search::SearchState;
|
||||||
|
use ratatui::{
|
||||||
|
layout::{Constraint, Direction, Layout, Rect},
|
||||||
|
style::{Modifier, Style},
|
||||||
|
text::{Line, Span},
|
||||||
|
widgets::{Block, Borders, Clear, List, ListItem, Paragraph},
|
||||||
|
Frame,
|
||||||
|
};
|
||||||
|
|
||||||
|
/// Renders the search palette dialog over the main UI.
|
||||||
|
pub fn render_search_palette(
|
||||||
|
f: &mut Frame,
|
||||||
|
area: Rect,
|
||||||
|
theme: &Theme,
|
||||||
|
state: &SearchState,
|
||||||
|
) {
|
||||||
|
// --- Dialog Area Calculation ---
|
||||||
|
let height = (area.height as f32 * 0.7).min(30.0) as u16;
|
||||||
|
let width = (area.width as f32 * 0.6).min(100.0) as u16;
|
||||||
|
let dialog_area = Rect {
|
||||||
|
x: area.x + (area.width - width) / 2,
|
||||||
|
y: area.y + (area.height - height) / 4,
|
||||||
|
width,
|
||||||
|
height,
|
||||||
|
};
|
||||||
|
|
||||||
|
f.render_widget(Clear, dialog_area); // Clear background
|
||||||
|
|
||||||
|
let block = Block::default()
|
||||||
|
.title(format!(" Search in '{}' ", state.table_name))
|
||||||
|
.borders(Borders::ALL)
|
||||||
|
.border_style(Style::default().fg(theme.accent));
|
||||||
|
f.render_widget(block.clone(), dialog_area);
|
||||||
|
|
||||||
|
// --- Inner Layout (Input + Results) ---
|
||||||
|
let inner_chunks = Layout::default()
|
||||||
|
.direction(Direction::Vertical)
|
||||||
|
.margin(1)
|
||||||
|
.constraints([
|
||||||
|
Constraint::Length(3), // For input box
|
||||||
|
Constraint::Min(0), // For results list
|
||||||
|
])
|
||||||
|
.split(dialog_area);
|
||||||
|
|
||||||
|
// --- Render Input Box ---
|
||||||
|
let input_block = Block::default()
|
||||||
|
.title("Query")
|
||||||
|
.borders(Borders::ALL)
|
||||||
|
.border_style(Style::default().fg(theme.border));
|
||||||
|
let input_text = Paragraph::new(state.input.as_str())
|
||||||
|
.block(input_block)
|
||||||
|
.style(Style::default().fg(theme.fg));
|
||||||
|
f.render_widget(input_text, inner_chunks[0]);
|
||||||
|
// Set cursor position
|
||||||
|
f.set_cursor(
|
||||||
|
inner_chunks[0].x + state.cursor_position as u16 + 1,
|
||||||
|
inner_chunks[0].y + 1,
|
||||||
|
);
|
||||||
|
|
||||||
|
// --- Render Results List ---
|
||||||
|
if state.is_loading {
|
||||||
|
let loading_p = Paragraph::new("Searching...")
|
||||||
|
.style(Style::default().fg(theme.fg).add_modifier(Modifier::ITALIC));
|
||||||
|
f.render_widget(loading_p, inner_chunks[1]);
|
||||||
|
} else {
|
||||||
|
let list_items: Vec<ListItem> = state
|
||||||
|
.results
|
||||||
|
.iter()
|
||||||
|
.map(|hit| {
|
||||||
|
// Parse the JSON string to make it readable
|
||||||
|
let content_summary = match serde_json::from_str::<
|
||||||
|
serde_json::Value,
|
||||||
|
>(&hit.content_json)
|
||||||
|
{
|
||||||
|
Ok(json) => {
|
||||||
|
if let Some(obj) = json.as_object() {
|
||||||
|
// Create a summary from the first few non-null string values
|
||||||
|
obj.values()
|
||||||
|
.filter_map(|v| v.as_str())
|
||||||
|
.filter(|s| !s.is_empty())
|
||||||
|
.take(3)
|
||||||
|
.collect::<Vec<_>>()
|
||||||
|
.join(" | ")
|
||||||
|
} else {
|
||||||
|
"Non-object JSON".to_string()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Err(_) => "Invalid JSON content".to_string(),
|
||||||
|
};
|
||||||
|
|
||||||
|
let line = Line::from(vec![
|
||||||
|
Span::styled(
|
||||||
|
format!("{:<4.2} ", hit.score),
|
||||||
|
Style::default().fg(theme.accent),
|
||||||
|
),
|
||||||
|
Span::raw(content_summary),
|
||||||
|
]);
|
||||||
|
ListItem::new(line)
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
let results_list = List::new(list_items)
|
||||||
|
.block(Block::default().title("Results"))
|
||||||
|
.highlight_style(
|
||||||
|
Style::default()
|
||||||
|
.bg(theme.highlight)
|
||||||
|
.fg(theme.bg)
|
||||||
|
.add_modifier(Modifier::BOLD),
|
||||||
|
)
|
||||||
|
.highlight_symbol(">> ");
|
||||||
|
|
||||||
|
// We need a mutable ListState to render the selection
|
||||||
|
let mut list_state =
|
||||||
|
ratatui::widgets::ListState::default().with_selected(Some(state.selected_index));
|
||||||
|
|
||||||
|
f.render_stateful_widget(results_list, inner_chunks[1], &mut list_state);
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,11 +1,11 @@
|
|||||||
// src/components/common/status_line.rs
|
// client/src/components/common/status_line.rs
|
||||||
use crate::config::colors::themes::Theme;
|
use crate::config::colors::themes::Theme;
|
||||||
use crate::state::app::state::AppState;
|
use crate::state::app::state::AppState;
|
||||||
use ratatui::{
|
use ratatui::{
|
||||||
layout::Rect,
|
layout::Rect,
|
||||||
style::Style,
|
style::Style,
|
||||||
text::{Line, Span},
|
text::{Line, Span, Text},
|
||||||
widgets::Paragraph,
|
widgets::{Paragraph, Wrap}, // Make sure Wrap is imported
|
||||||
Frame,
|
Frame,
|
||||||
};
|
};
|
||||||
use std::path::Path;
|
use std::path::Path;
|
||||||
@@ -20,22 +20,39 @@ pub fn render_status_line(
|
|||||||
current_fps: f64,
|
current_fps: f64,
|
||||||
app_state: &AppState,
|
app_state: &AppState,
|
||||||
) {
|
) {
|
||||||
// --- START FIX ---
|
|
||||||
// Ensure debug_text is always a &str, which implements UnicodeWidthStr.
|
|
||||||
#[cfg(feature = "ui-debug")]
|
#[cfg(feature = "ui-debug")]
|
||||||
let debug_text = app_state.debug_info.as_str();
|
{
|
||||||
#[cfg(not(feature = "ui-debug"))]
|
if let Some(debug_state) = &app_state.debug_state {
|
||||||
let debug_text = "";
|
let paragraph = if debug_state.is_error {
|
||||||
// --- END FIX ---
|
// --- THIS IS THE CRITICAL LOGIC FOR ERRORS ---
|
||||||
|
// 1. Create a `Text` object, which can contain multiple lines.
|
||||||
|
let error_text = Text::from(debug_state.displayed_message.clone());
|
||||||
|
|
||||||
let debug_width = UnicodeWidthStr::width(debug_text);
|
// 2. Create a Paragraph from the Text and TELL IT TO WRAP.
|
||||||
let debug_separator_width = if !debug_text.is_empty() { UnicodeWidthStr::width(" | ") } else { 0 };
|
Paragraph::new(error_text)
|
||||||
|
.wrap(Wrap { trim: true }) // This line makes the text break into new rows.
|
||||||
|
.style(Style::default().bg(theme.highlight).fg(theme.bg))
|
||||||
|
} else {
|
||||||
|
// --- This is for normal, single-line info messages ---
|
||||||
|
Paragraph::new(debug_state.displayed_message.as_str())
|
||||||
|
.style(Style::default().fg(theme.accent).bg(theme.bg))
|
||||||
|
};
|
||||||
|
f.render_widget(paragraph, area);
|
||||||
|
} else {
|
||||||
|
// Fallback for when debug state is None
|
||||||
|
let paragraph = Paragraph::new("").style(Style::default().bg(theme.bg));
|
||||||
|
f.render_widget(paragraph, area);
|
||||||
|
}
|
||||||
|
return; // Stop here and don't render the normal status line.
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- The normal status line rendering logic (unchanged) ---
|
||||||
let program_info = format!("multieko2 v{}", env!("CARGO_PKG_VERSION"));
|
let program_info = format!("multieko2 v{}", env!("CARGO_PKG_VERSION"));
|
||||||
let mode_text = if is_edit_mode { "[EDIT]" } else { "[READ-ONLY]" };
|
let mode_text = if is_edit_mode { "[EDIT]" } else { "[READ-ONLY]" };
|
||||||
|
|
||||||
let home_dir =
|
let home_dir = dirs::home_dir()
|
||||||
dirs::home_dir().map(|p| p.to_string_lossy().into_owned()).unwrap_or_default();
|
.map(|p| p.to_string_lossy().into_owned())
|
||||||
|
.unwrap_or_default();
|
||||||
let display_dir = if current_dir.starts_with(&home_dir) {
|
let display_dir = if current_dir.starts_with(&home_dir) {
|
||||||
current_dir.replacen(&home_dir, "~", 1)
|
current_dir.replacen(&home_dir, "~", 1)
|
||||||
} else {
|
} else {
|
||||||
@@ -50,19 +67,30 @@ pub fn render_status_line(
|
|||||||
let separator = " | ";
|
let separator = " | ";
|
||||||
let separator_width = UnicodeWidthStr::width(separator);
|
let separator_width = UnicodeWidthStr::width(separator);
|
||||||
|
|
||||||
let fixed_width_with_fps = mode_width + separator_width + separator_width +
|
let fixed_width_with_fps = mode_width
|
||||||
program_info_width + separator_width + fps_width +
|
+ separator_width
|
||||||
debug_separator_width + debug_width;
|
+ separator_width
|
||||||
|
+ program_info_width
|
||||||
|
+ separator_width
|
||||||
|
+ fps_width;
|
||||||
|
|
||||||
let show_fps = fixed_width_with_fps <= available_width;
|
let show_fps = fixed_width_with_fps <= available_width;
|
||||||
|
|
||||||
let remaining_width_for_dir = available_width.saturating_sub(
|
let remaining_width_for_dir = available_width.saturating_sub(
|
||||||
mode_width + separator_width +
|
mode_width
|
||||||
separator_width + program_info_width +
|
+ separator_width
|
||||||
(if show_fps { separator_width + fps_width } else { 0 }) +
|
+ separator_width
|
||||||
debug_separator_width + debug_width,
|
+ program_info_width
|
||||||
|
+ (if show_fps {
|
||||||
|
separator_width + fps_width
|
||||||
|
} else {
|
||||||
|
0
|
||||||
|
}),
|
||||||
);
|
);
|
||||||
|
|
||||||
let dir_display_text_str = if UnicodeWidthStr::width(display_dir.as_str()) <= remaining_width_for_dir {
|
let dir_display_text_str = if UnicodeWidthStr::width(display_dir.as_str())
|
||||||
|
<= remaining_width_for_dir
|
||||||
|
{
|
||||||
display_dir
|
display_dir
|
||||||
} else {
|
} else {
|
||||||
let dir_name = Path::new(current_dir)
|
let dir_name = Path::new(current_dir)
|
||||||
@@ -72,14 +100,18 @@ pub fn render_status_line(
|
|||||||
if UnicodeWidthStr::width(dir_name) <= remaining_width_for_dir {
|
if UnicodeWidthStr::width(dir_name) <= remaining_width_for_dir {
|
||||||
dir_name.to_string()
|
dir_name.to_string()
|
||||||
} else {
|
} else {
|
||||||
dir_name.chars().take(remaining_width_for_dir).collect::<String>()
|
dir_name
|
||||||
|
.chars()
|
||||||
|
.take(remaining_width_for_dir)
|
||||||
|
.collect::<String>()
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
let mut current_content_width = mode_width + separator_width +
|
let mut current_content_width = mode_width
|
||||||
UnicodeWidthStr::width(dir_display_text_str.as_str()) +
|
+ separator_width
|
||||||
separator_width + program_info_width +
|
+ UnicodeWidthStr::width(dir_display_text_str.as_str())
|
||||||
debug_separator_width + debug_width;
|
+ separator_width
|
||||||
|
+ program_info_width;
|
||||||
if show_fps {
|
if show_fps {
|
||||||
current_content_width += separator_width + fps_width;
|
current_content_width += separator_width + fps_width;
|
||||||
}
|
}
|
||||||
@@ -87,20 +119,24 @@ pub fn render_status_line(
|
|||||||
let mut line_spans = vec![
|
let mut line_spans = vec![
|
||||||
Span::styled(mode_text, Style::default().fg(theme.accent)),
|
Span::styled(mode_text, Style::default().fg(theme.accent)),
|
||||||
Span::styled(separator, Style::default().fg(theme.border)),
|
Span::styled(separator, Style::default().fg(theme.border)),
|
||||||
Span::styled(dir_display_text_str.as_str(), Style::default().fg(theme.fg)),
|
Span::styled(
|
||||||
|
dir_display_text_str.as_str(),
|
||||||
|
Style::default().fg(theme.fg),
|
||||||
|
),
|
||||||
Span::styled(separator, Style::default().fg(theme.border)),
|
Span::styled(separator, Style::default().fg(theme.border)),
|
||||||
Span::styled(program_info.as_str(), Style::default().fg(theme.secondary)),
|
Span::styled(
|
||||||
|
program_info.as_str(),
|
||||||
|
Style::default().fg(theme.secondary),
|
||||||
|
),
|
||||||
];
|
];
|
||||||
|
|
||||||
if show_fps {
|
if show_fps {
|
||||||
line_spans.push(Span::styled(separator, Style::default().fg(theme.border)));
|
line_spans
|
||||||
line_spans.push(Span::styled(fps_text.as_str(), Style::default().fg(theme.secondary)));
|
.push(Span::styled(separator, Style::default().fg(theme.border)));
|
||||||
}
|
line_spans.push(Span::styled(
|
||||||
|
fps_text.as_str(),
|
||||||
#[cfg(feature = "ui-debug")]
|
Style::default().fg(theme.secondary),
|
||||||
{
|
));
|
||||||
line_spans.push(Span::styled(separator, Style::default().fg(theme.border)));
|
|
||||||
line_spans.push(Span::styled(debug_text, Style::default().fg(theme.accent)));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
let padding_needed = available_width.saturating_sub(current_content_width);
|
let padding_needed = available_width.saturating_sub(current_content_width);
|
||||||
@@ -111,8 +147,8 @@ pub fn render_status_line(
|
|||||||
));
|
));
|
||||||
}
|
}
|
||||||
|
|
||||||
let paragraph = Paragraph::new(Line::from(line_spans))
|
let paragraph =
|
||||||
.style(Style::default().bg(theme.bg));
|
Paragraph::new(Line::from(line_spans)).style(Style::default().bg(theme.bg));
|
||||||
|
|
||||||
f.render_widget(paragraph, area);
|
f.render_widget(paragraph, area);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,36 +1,37 @@
|
|||||||
// src/components/form/form.rs
|
// src/components/form/form.rs
|
||||||
|
use crate::components::common::autocomplete; // <--- ADD THIS IMPORT
|
||||||
|
use crate::components::handlers::canvas::render_canvas;
|
||||||
|
use crate::config::colors::themes::Theme;
|
||||||
|
use crate::state::app::highlight::HighlightState;
|
||||||
|
use crate::state::pages::canvas_state::CanvasState;
|
||||||
|
use crate::state::pages::form::FormState; // <--- CHANGE THIS IMPORT
|
||||||
use ratatui::{
|
use ratatui::{
|
||||||
widgets::{Paragraph, Block, Borders},
|
layout::{Alignment, Constraint, Direction, Layout, Margin, Rect},
|
||||||
layout::{Layout, Constraint, Direction, Rect, Margin, Alignment},
|
|
||||||
style::Style,
|
style::Style,
|
||||||
|
widgets::{Block, Borders, Paragraph},
|
||||||
Frame,
|
Frame,
|
||||||
};
|
};
|
||||||
use crate::config::colors::themes::Theme;
|
|
||||||
use crate::state::pages::canvas_state::CanvasState;
|
|
||||||
use crate::state::app::highlight::HighlightState;
|
|
||||||
use crate::components::handlers::canvas::render_canvas;
|
|
||||||
|
|
||||||
pub fn render_form(
|
pub fn render_form(
|
||||||
f: &mut Frame,
|
f: &mut Frame,
|
||||||
area: Rect,
|
area: Rect,
|
||||||
form_state_param: &impl CanvasState,
|
form_state: &FormState, // <--- CHANGE THIS to the concrete type
|
||||||
fields: &[&str],
|
fields: &[&str],
|
||||||
current_field_idx: &usize,
|
current_field_idx: &usize,
|
||||||
inputs: &[&String],
|
inputs: &[&String],
|
||||||
table_name: &str, // This parameter receives the correct table name
|
table_name: &str,
|
||||||
theme: &Theme,
|
theme: &Theme,
|
||||||
is_edit_mode: bool,
|
is_edit_mode: bool,
|
||||||
highlight_state: &HighlightState,
|
highlight_state: &HighlightState,
|
||||||
total_count: u64,
|
total_count: u64,
|
||||||
current_position: u64,
|
current_position: u64,
|
||||||
) {
|
) {
|
||||||
// Use the dynamic `table_name` parameter for the title instead of a hardcoded string.
|
|
||||||
let card_title = format!(" {} ", table_name);
|
let card_title = format!(" {} ", table_name);
|
||||||
|
|
||||||
let adresar_card = Block::default()
|
let adresar_card = Block::default()
|
||||||
.borders(Borders::ALL)
|
.borders(Borders::ALL)
|
||||||
.border_style(Style::default().fg(theme.border))
|
.border_style(Style::default().fg(theme.border))
|
||||||
.title(card_title) // Use the dynamic title
|
.title(card_title)
|
||||||
.style(Style::default().bg(theme.bg).fg(theme.fg));
|
.style(Style::default().bg(theme.bg).fg(theme.fg));
|
||||||
|
|
||||||
f.render_widget(adresar_card, area);
|
f.render_widget(adresar_card, area);
|
||||||
@@ -42,10 +43,7 @@ pub fn render_form(
|
|||||||
|
|
||||||
let main_layout = Layout::default()
|
let main_layout = Layout::default()
|
||||||
.direction(Direction::Vertical)
|
.direction(Direction::Vertical)
|
||||||
.constraints([
|
.constraints([Constraint::Length(1), Constraint::Min(1)])
|
||||||
Constraint::Length(1),
|
|
||||||
Constraint::Min(1),
|
|
||||||
])
|
|
||||||
.split(inner_area);
|
.split(inner_area);
|
||||||
|
|
||||||
let count_position_text = if total_count == 0 && current_position == 1 {
|
let count_position_text = if total_count == 0 && current_position == 1 {
|
||||||
@@ -54,19 +52,22 @@ pub fn render_form(
|
|||||||
format!("Total: {} | New Entry ({})", total_count, current_position)
|
format!("Total: {} | New Entry ({})", total_count, current_position)
|
||||||
} else if total_count == 0 && current_position > 1 {
|
} else if total_count == 0 && current_position > 1 {
|
||||||
format!("Total: 0 | New Entry ({})", current_position)
|
format!("Total: 0 | New Entry ({})", current_position)
|
||||||
}
|
} else {
|
||||||
else {
|
format!(
|
||||||
format!("Total: {} | Position: {}/{}", total_count, current_position, total_count)
|
"Total: {} | Position: {}/{}",
|
||||||
|
total_count, current_position, total_count
|
||||||
|
)
|
||||||
};
|
};
|
||||||
let count_para = Paragraph::new(count_position_text)
|
let count_para = Paragraph::new(count_position_text)
|
||||||
.style(Style::default().fg(theme.fg))
|
.style(Style::default().fg(theme.fg))
|
||||||
.alignment(Alignment::Left);
|
.alignment(Alignment::Left);
|
||||||
f.render_widget(count_para, main_layout[0]);
|
f.render_widget(count_para, main_layout[0]);
|
||||||
|
|
||||||
render_canvas(
|
// Get the active field's rect from render_canvas
|
||||||
|
let active_field_rect = render_canvas(
|
||||||
f,
|
f,
|
||||||
main_layout[1],
|
main_layout[1],
|
||||||
form_state_param,
|
form_state,
|
||||||
fields,
|
fields,
|
||||||
current_field_idx,
|
current_field_idx,
|
||||||
inputs,
|
inputs,
|
||||||
@@ -74,4 +75,41 @@ pub fn render_form(
|
|||||||
is_edit_mode,
|
is_edit_mode,
|
||||||
highlight_state,
|
highlight_state,
|
||||||
);
|
);
|
||||||
|
|
||||||
|
// --- NEW: RENDER AUTOCOMPLETE ---
|
||||||
|
if form_state.autocomplete_active {
|
||||||
|
if let Some(active_rect) = active_field_rect {
|
||||||
|
let selected_index = form_state.get_selected_suggestion_index();
|
||||||
|
|
||||||
|
if let Some(rich_suggestions) = form_state.get_rich_suggestions() {
|
||||||
|
if !rich_suggestions.is_empty() {
|
||||||
|
// CHANGE THIS to call the renamed function
|
||||||
|
autocomplete::render_hit_autocomplete_dropdown(
|
||||||
|
f,
|
||||||
|
active_rect,
|
||||||
|
f.area(),
|
||||||
|
theme,
|
||||||
|
rich_suggestions,
|
||||||
|
selected_index,
|
||||||
|
form_state,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// The fallback to simple suggestions is now correctly handled
|
||||||
|
// because the original render_autocomplete_dropdown exists again.
|
||||||
|
else if let Some(simple_suggestions) = form_state.get_suggestions() {
|
||||||
|
if !simple_suggestions.is_empty() {
|
||||||
|
autocomplete::render_autocomplete_dropdown(
|
||||||
|
f,
|
||||||
|
active_rect,
|
||||||
|
f.area(),
|
||||||
|
theme,
|
||||||
|
simple_suggestions,
|
||||||
|
selected_index,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -1,16 +1,16 @@
|
|||||||
// src/components/handlers/canvas.rs
|
// src/components/handlers/canvas.rs
|
||||||
|
|
||||||
use ratatui::{
|
use ratatui::{
|
||||||
widgets::{Paragraph, Block, Borders},
|
layout::{Alignment, Constraint, Direction, Layout, Rect},
|
||||||
layout::{Layout, Constraint, Direction, Rect},
|
style::{Modifier, Style},
|
||||||
style::{Style, Modifier},
|
|
||||||
text::{Line, Span},
|
text::{Line, Span},
|
||||||
|
widgets::{Block, Borders, Paragraph},
|
||||||
Frame,
|
Frame,
|
||||||
prelude::Alignment,
|
|
||||||
};
|
};
|
||||||
use crate::config::colors::themes::Theme;
|
use crate::config::colors::themes::Theme;
|
||||||
|
use crate::state::app::highlight::HighlightState;
|
||||||
use crate::state::pages::canvas_state::CanvasState;
|
use crate::state::pages::canvas_state::CanvasState;
|
||||||
use crate::state::app::highlight::HighlightState; // Ensure correct import path
|
use std::cmp::{max, min};
|
||||||
use std::cmp::{min, max};
|
|
||||||
|
|
||||||
pub fn render_canvas(
|
pub fn render_canvas(
|
||||||
f: &mut Frame,
|
f: &mut Frame,
|
||||||
@@ -21,9 +21,8 @@ pub fn render_canvas(
|
|||||||
inputs: &[&String],
|
inputs: &[&String],
|
||||||
theme: &Theme,
|
theme: &Theme,
|
||||||
is_edit_mode: bool,
|
is_edit_mode: bool,
|
||||||
highlight_state: &HighlightState, // Using the enum state
|
highlight_state: &HighlightState,
|
||||||
) -> Option<Rect> {
|
) -> Option<Rect> {
|
||||||
// ... (setup code remains the same) ...
|
|
||||||
let columns = Layout::default()
|
let columns = Layout::default()
|
||||||
.direction(Direction::Horizontal)
|
.direction(Direction::Horizontal)
|
||||||
.constraints([Constraint::Percentage(30), Constraint::Percentage(70)])
|
.constraints([Constraint::Percentage(30), Constraint::Percentage(70)])
|
||||||
@@ -58,46 +57,47 @@ pub fn render_canvas(
|
|||||||
|
|
||||||
let mut active_field_input_rect = None;
|
let mut active_field_input_rect = None;
|
||||||
|
|
||||||
// Render labels
|
|
||||||
for (i, field) in fields.iter().enumerate() {
|
for (i, field) in fields.iter().enumerate() {
|
||||||
let label = Paragraph::new(Line::from(Span::styled(
|
let label = Paragraph::new(Line::from(Span::styled(
|
||||||
format!("{}:", field),
|
format!("{}:", field),
|
||||||
Style::default().fg(theme.fg)),
|
Style::default().fg(theme.fg),
|
||||||
));
|
)));
|
||||||
f.render_widget(label, Rect {
|
f.render_widget(
|
||||||
x: columns[0].x,
|
label,
|
||||||
y: input_block.y + 1 + i as u16,
|
Rect {
|
||||||
width: columns[0].width,
|
x: columns[0].x,
|
||||||
height: 1,
|
y: input_block.y + 1 + i as u16,
|
||||||
});
|
width: columns[0].width,
|
||||||
|
height: 1,
|
||||||
|
},
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
for (i, _input) in inputs.iter().enumerate() {
|
||||||
// Render inputs and cursor
|
|
||||||
for (i, input) in inputs.iter().enumerate() {
|
|
||||||
let is_active = i == *current_field_idx;
|
let is_active = i == *current_field_idx;
|
||||||
let current_cursor_pos = form_state.current_cursor_pos();
|
let current_cursor_pos = form_state.current_cursor_pos();
|
||||||
let text = input.as_str();
|
|
||||||
let text_len = text.chars().count();
|
|
||||||
|
|
||||||
|
// Use the trait method to get display value
|
||||||
|
let text = form_state.get_display_value_for_field(i);
|
||||||
|
let text_len = text.chars().count();
|
||||||
let line: Line;
|
let line: Line;
|
||||||
|
|
||||||
// --- Use match on the highlight_state enum ---
|
|
||||||
match highlight_state {
|
match highlight_state {
|
||||||
HighlightState::Off => {
|
HighlightState::Off => {
|
||||||
// Not in highlight mode, render normally
|
|
||||||
line = Line::from(Span::styled(
|
line = Line::from(Span::styled(
|
||||||
text,
|
text,
|
||||||
if is_active { Style::default().fg(theme.highlight) } else { Style::default().fg(theme.fg) }
|
if is_active {
|
||||||
|
Style::default().fg(theme.highlight)
|
||||||
|
} else {
|
||||||
|
Style::default().fg(theme.fg)
|
||||||
|
},
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
HighlightState::Characterwise { anchor } => {
|
HighlightState::Characterwise { anchor } => {
|
||||||
// --- Character-wise Highlight Logic ---
|
|
||||||
let (anchor_field, anchor_char) = *anchor;
|
let (anchor_field, anchor_char) = *anchor;
|
||||||
let start_field = min(anchor_field, *current_field_idx);
|
let start_field = min(anchor_field, *current_field_idx);
|
||||||
let end_field = max(anchor_field, *current_field_idx);
|
let end_field = max(anchor_field, *current_field_idx);
|
||||||
|
|
||||||
// Use start_char and end_char consistently
|
|
||||||
let (start_char, end_char) = if anchor_field == *current_field_idx {
|
let (start_char, end_char) = if anchor_field == *current_field_idx {
|
||||||
(min(anchor_char, current_cursor_pos), max(anchor_char, current_cursor_pos))
|
(min(anchor_char, current_cursor_pos), max(anchor_char, current_cursor_pos))
|
||||||
} else if anchor_field < *current_field_idx {
|
} else if anchor_field < *current_field_idx {
|
||||||
@@ -111,24 +111,20 @@ pub fn render_canvas(
|
|||||||
let normal_style_outside = Style::default().fg(theme.fg);
|
let normal_style_outside = Style::default().fg(theme.fg);
|
||||||
|
|
||||||
if i >= start_field && i <= end_field {
|
if i >= start_field && i <= end_field {
|
||||||
// This line is within the character-wise highlight range
|
if start_field == end_field {
|
||||||
if start_field == end_field { // Case 1: Single Line Highlight
|
|
||||||
// Use start_char and end_char here
|
|
||||||
let clamped_start = start_char.min(text_len);
|
let clamped_start = start_char.min(text_len);
|
||||||
let clamped_end = end_char.min(text_len); // Use text_len for slicing logic
|
let clamped_end = end_char.min(text_len);
|
||||||
|
|
||||||
let before: String = text.chars().take(clamped_start).collect();
|
let before: String = text.chars().take(clamped_start).collect();
|
||||||
let highlighted: String = text.chars().skip(clamped_start).take(clamped_end.saturating_sub(clamped_start) + 1).collect();
|
let highlighted: String = text.chars().skip(clamped_start).take(clamped_end.saturating_sub(clamped_start) + 1).collect();
|
||||||
// Define 'after' here
|
|
||||||
let after: String = text.chars().skip(clamped_end + 1).collect();
|
let after: String = text.chars().skip(clamped_end + 1).collect();
|
||||||
|
|
||||||
line = Line::from(vec![
|
line = Line::from(vec![
|
||||||
Span::styled(before, normal_style_in_highlight),
|
Span::styled(before, normal_style_in_highlight),
|
||||||
Span::styled(highlighted, highlight_style),
|
Span::styled(highlighted, highlight_style),
|
||||||
Span::styled(after, normal_style_in_highlight), // Use defined 'after'
|
Span::styled(after, normal_style_in_highlight),
|
||||||
]);
|
]);
|
||||||
} else if i == start_field { // Case 2: Multi-Line Highlight - Start Line
|
} else if i == start_field {
|
||||||
// Use start_char here
|
|
||||||
let safe_start = start_char.min(text_len);
|
let safe_start = start_char.min(text_len);
|
||||||
let before: String = text.chars().take(safe_start).collect();
|
let before: String = text.chars().take(safe_start).collect();
|
||||||
let highlighted: String = text.chars().skip(safe_start).collect();
|
let highlighted: String = text.chars().skip(safe_start).collect();
|
||||||
@@ -136,8 +132,7 @@ pub fn render_canvas(
|
|||||||
Span::styled(before, normal_style_in_highlight),
|
Span::styled(before, normal_style_in_highlight),
|
||||||
Span::styled(highlighted, highlight_style),
|
Span::styled(highlighted, highlight_style),
|
||||||
]);
|
]);
|
||||||
} else if i == end_field { // Case 3: Multi-Line Highlight - End Line (Corrected index)
|
} else if i == end_field {
|
||||||
// Use end_char here
|
|
||||||
let safe_end_inclusive = if text_len > 0 { end_char.min(text_len - 1) } else { 0 };
|
let safe_end_inclusive = if text_len > 0 { end_char.min(text_len - 1) } else { 0 };
|
||||||
let highlighted: String = text.chars().take(safe_end_inclusive + 1).collect();
|
let highlighted: String = text.chars().take(safe_end_inclusive + 1).collect();
|
||||||
let after: String = text.chars().skip(safe_end_inclusive + 1).collect();
|
let after: String = text.chars().skip(safe_end_inclusive + 1).collect();
|
||||||
@@ -145,19 +140,17 @@ pub fn render_canvas(
|
|||||||
Span::styled(highlighted, highlight_style),
|
Span::styled(highlighted, highlight_style),
|
||||||
Span::styled(after, normal_style_in_highlight),
|
Span::styled(after, normal_style_in_highlight),
|
||||||
]);
|
]);
|
||||||
} else { // Case 4: Multi-Line Highlight - Middle Line (Corrected index)
|
} else {
|
||||||
line = Line::from(Span::styled(text, highlight_style)); // Highlight whole line
|
line = Line::from(Span::styled(text, highlight_style));
|
||||||
}
|
}
|
||||||
} else { // Case 5: Line Outside Character-wise Highlight Range
|
} else {
|
||||||
line = Line::from(Span::styled(
|
line = Line::from(Span::styled(
|
||||||
text,
|
text,
|
||||||
// Use normal styling (active or inactive)
|
|
||||||
if is_active { normal_style_in_highlight } else { normal_style_outside }
|
if is_active { normal_style_in_highlight } else { normal_style_outside }
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
HighlightState::Linewise { anchor_line } => {
|
HighlightState::Linewise { anchor_line } => {
|
||||||
// --- Linewise Highlight Logic ---
|
|
||||||
let start_field = min(*anchor_line, *current_field_idx);
|
let start_field = min(*anchor_line, *current_field_idx);
|
||||||
let end_field = max(*anchor_line, *current_field_idx);
|
let end_field = max(*anchor_line, *current_field_idx);
|
||||||
let highlight_style = Style::default().fg(theme.highlight).bg(theme.highlight_bg).add_modifier(Modifier::BOLD);
|
let highlight_style = Style::default().fg(theme.highlight).bg(theme.highlight_bg).add_modifier(Modifier::BOLD);
|
||||||
@@ -165,25 +158,31 @@ pub fn render_canvas(
|
|||||||
let normal_style_outside = Style::default().fg(theme.fg);
|
let normal_style_outside = Style::default().fg(theme.fg);
|
||||||
|
|
||||||
if i >= start_field && i <= end_field {
|
if i >= start_field && i <= end_field {
|
||||||
// Highlight the entire line
|
|
||||||
line = Line::from(Span::styled(text, highlight_style));
|
line = Line::from(Span::styled(text, highlight_style));
|
||||||
} else {
|
} else {
|
||||||
// Line outside linewise highlight range
|
|
||||||
line = Line::from(Span::styled(
|
line = Line::from(Span::styled(
|
||||||
text,
|
text,
|
||||||
// Use normal styling (active or inactive)
|
|
||||||
if is_active { normal_style_in_highlight } else { normal_style_outside }
|
if is_active { normal_style_in_highlight } else { normal_style_outside }
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} // End match highlight_state
|
}
|
||||||
|
|
||||||
let input_display = Paragraph::new(line).alignment(Alignment::Left);
|
let input_display = Paragraph::new(line).alignment(Alignment::Left);
|
||||||
f.render_widget(input_display, input_rows[i]);
|
f.render_widget(input_display, input_rows[i]);
|
||||||
|
|
||||||
if is_active {
|
if is_active {
|
||||||
active_field_input_rect = Some(input_rows[i]);
|
active_field_input_rect = Some(input_rows[i]);
|
||||||
let cursor_x = input_rows[i].x + form_state.current_cursor_pos() as u16;
|
|
||||||
|
// --- CORRECTED CURSOR POSITIONING LOGIC ---
|
||||||
|
// Use the new generic trait method to check for an override.
|
||||||
|
let cursor_x = if form_state.has_display_override(i) {
|
||||||
|
// If an override exists, place the cursor at the end.
|
||||||
|
input_rows[i].x + text.chars().count() as u16
|
||||||
|
} else {
|
||||||
|
// Otherwise, use the real cursor position.
|
||||||
|
input_rows[i].x + form_state.current_cursor_pos() as u16
|
||||||
|
};
|
||||||
let cursor_y = input_rows[i].y;
|
let cursor_y = input_rows[i].y;
|
||||||
f.set_cursor_position((cursor_x, cursor_y));
|
f.set_cursor_position((cursor_x, cursor_y));
|
||||||
}
|
}
|
||||||
@@ -191,4 +190,3 @@ pub fn render_canvas(
|
|||||||
|
|
||||||
active_field_input_rect
|
active_field_input_rect
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ use crate::services::grpc_client::GrpcClient;
|
|||||||
use crate::state::pages::canvas_state::CanvasState;
|
use crate::state::pages::canvas_state::CanvasState;
|
||||||
use crate::state::pages::form::FormState;
|
use crate::state::pages::form::FormState;
|
||||||
use crate::state::pages::auth::RegisterState;
|
use crate::state::pages::auth::RegisterState;
|
||||||
|
use crate::state::app::state::AppState;
|
||||||
use crate::tui::functions::common::form::{revert, save};
|
use crate::tui::functions::common::form::{revert, save};
|
||||||
use crossterm::event::{KeyCode, KeyEvent};
|
use crossterm::event::{KeyCode, KeyEvent};
|
||||||
use std::any::Any;
|
use std::any::Any;
|
||||||
@@ -13,6 +14,7 @@ pub async fn execute_common_action<S: CanvasState + Any>(
|
|||||||
action: &str,
|
action: &str,
|
||||||
state: &mut S,
|
state: &mut S,
|
||||||
grpc_client: &mut GrpcClient,
|
grpc_client: &mut GrpcClient,
|
||||||
|
app_state: &AppState,
|
||||||
current_position: &mut u64,
|
current_position: &mut u64,
|
||||||
total_count: u64,
|
total_count: u64,
|
||||||
) -> Result<String> {
|
) -> Result<String> {
|
||||||
@@ -27,6 +29,7 @@ pub async fn execute_common_action<S: CanvasState + Any>(
|
|||||||
match action {
|
match action {
|
||||||
"save" => {
|
"save" => {
|
||||||
let outcome = save(
|
let outcome = save(
|
||||||
|
app_state,
|
||||||
form_state,
|
form_state,
|
||||||
grpc_client,
|
grpc_client,
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -3,6 +3,7 @@
|
|||||||
use crate::services::grpc_client::GrpcClient;
|
use crate::services::grpc_client::GrpcClient;
|
||||||
use crate::state::pages::canvas_state::CanvasState;
|
use crate::state::pages::canvas_state::CanvasState;
|
||||||
use crate::state::pages::form::FormState;
|
use crate::state::pages::form::FormState;
|
||||||
|
use crate::state::app::state::AppState;
|
||||||
use crate::tui::functions::common::form::{revert, save};
|
use crate::tui::functions::common::form::{revert, save};
|
||||||
use crate::tui::functions::common::form::SaveOutcome;
|
use crate::tui::functions::common::form::SaveOutcome;
|
||||||
use crate::modes::handlers::event::EventOutcome;
|
use crate::modes::handlers::event::EventOutcome;
|
||||||
@@ -14,6 +15,7 @@ pub async fn execute_common_action<S: CanvasState + Any>(
|
|||||||
action: &str,
|
action: &str,
|
||||||
state: &mut S,
|
state: &mut S,
|
||||||
grpc_client: &mut GrpcClient,
|
grpc_client: &mut GrpcClient,
|
||||||
|
app_state: &AppState,
|
||||||
) -> Result<EventOutcome> {
|
) -> Result<EventOutcome> {
|
||||||
match action {
|
match action {
|
||||||
"save" | "revert" => {
|
"save" | "revert" => {
|
||||||
@@ -26,10 +28,11 @@ pub async fn execute_common_action<S: CanvasState + Any>(
|
|||||||
match action {
|
match action {
|
||||||
"save" => {
|
"save" => {
|
||||||
let save_result = save(
|
let save_result = save(
|
||||||
|
app_state,
|
||||||
form_state,
|
form_state,
|
||||||
grpc_client,
|
grpc_client,
|
||||||
).await;
|
).await;
|
||||||
|
|
||||||
match save_result {
|
match save_result {
|
||||||
Ok(save_outcome) => {
|
Ok(save_outcome) => {
|
||||||
let message = match save_outcome {
|
let message = match save_outcome {
|
||||||
@@ -47,7 +50,7 @@ pub async fn execute_common_action<S: CanvasState + Any>(
|
|||||||
form_state,
|
form_state,
|
||||||
grpc_client,
|
grpc_client,
|
||||||
).await;
|
).await;
|
||||||
|
|
||||||
match revert_result {
|
match revert_result {
|
||||||
Ok(message) => Ok(EventOutcome::Ok(message)),
|
Ok(message) => Ok(EventOutcome::Ok(message)),
|
||||||
Err(e) => Err(e),
|
Err(e) => Err(e),
|
||||||
|
|||||||
@@ -1,5 +1,7 @@
|
|||||||
// client/src/main.rs
|
// client/src/main.rs
|
||||||
use client::run_ui;
|
use client::run_ui;
|
||||||
|
#[cfg(feature = "ui-debug")]
|
||||||
|
use client::utils::debug_logger::UiDebugWriter;
|
||||||
use dotenvy::dotenv;
|
use dotenvy::dotenv;
|
||||||
use anyhow::Result;
|
use anyhow::Result;
|
||||||
use tracing_subscriber;
|
use tracing_subscriber;
|
||||||
@@ -7,8 +9,22 @@ use std::env;
|
|||||||
|
|
||||||
#[tokio::main]
|
#[tokio::main]
|
||||||
async fn main() -> Result<()> {
|
async fn main() -> Result<()> {
|
||||||
if env::var("ENABLE_TRACING").is_ok() {
|
#[cfg(feature = "ui-debug")]
|
||||||
tracing_subscriber::fmt::init();
|
{
|
||||||
|
// If ui-debug is on, set up our custom writer.
|
||||||
|
let writer = UiDebugWriter::new();
|
||||||
|
tracing_subscriber::fmt()
|
||||||
|
.with_level(false) // Don't show INFO, ERROR, etc.
|
||||||
|
.with_target(false) // Don't show the module path.
|
||||||
|
.without_time() // This is the correct and simpler method.
|
||||||
|
.with_writer(move || writer.clone())
|
||||||
|
.init();
|
||||||
|
}
|
||||||
|
#[cfg(not(feature = "ui-debug"))]
|
||||||
|
{
|
||||||
|
if env::var("ENABLE_TRACING").is_ok() {
|
||||||
|
tracing_subscriber::fmt::init();
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
dotenv().ok();
|
dotenv().ok();
|
||||||
|
|||||||
@@ -32,6 +32,7 @@ pub async fn handle_core_action(
|
|||||||
Ok(EventOutcome::Ok(message))
|
Ok(EventOutcome::Ok(message))
|
||||||
} else {
|
} else {
|
||||||
let save_outcome = form_save(
|
let save_outcome = form_save(
|
||||||
|
app_state,
|
||||||
form_state,
|
form_state,
|
||||||
grpc_client,
|
grpc_client,
|
||||||
).await.context("Register save action failed")?;
|
).await.context("Register save action failed")?;
|
||||||
@@ -52,6 +53,7 @@ pub async fn handle_core_action(
|
|||||||
login_save(auth_state, login_state, auth_client, app_state).await.context("Login save n quit action failed")?
|
login_save(auth_state, login_state, auth_client, app_state).await.context("Login save n quit action failed")?
|
||||||
} else {
|
} else {
|
||||||
let save_outcome = form_save(
|
let save_outcome = form_save(
|
||||||
|
app_state,
|
||||||
form_state,
|
form_state,
|
||||||
grpc_client,
|
grpc_client,
|
||||||
).await?;
|
).await?;
|
||||||
|
|||||||
@@ -1,20 +1,22 @@
|
|||||||
// src/modes/canvas/edit.rs
|
// src/modes/canvas/edit.rs
|
||||||
use crate::config::binds::config::Config;
|
use crate::config::binds::config::Config;
|
||||||
|
use crate::functions::modes::edit::{
|
||||||
|
add_logic_e, add_table_e, auth_e, form_e,
|
||||||
|
};
|
||||||
|
use crate::modes::handlers::event::EventHandler;
|
||||||
use crate::services::grpc_client::GrpcClient;
|
use crate::services::grpc_client::GrpcClient;
|
||||||
|
use crate::state::app::state::AppState;
|
||||||
|
use crate::state::pages::admin::AdminState;
|
||||||
use crate::state::pages::{
|
use crate::state::pages::{
|
||||||
auth::{LoginState, RegisterState},
|
auth::{LoginState, RegisterState},
|
||||||
canvas_state::CanvasState,
|
canvas_state::CanvasState,
|
||||||
|
form::FormState,
|
||||||
};
|
};
|
||||||
use crate::state::pages::form::FormState; // <<< ADD THIS LINE
|
|
||||||
// AddLogicState is already imported
|
|
||||||
// AddTableState is already imported
|
|
||||||
use crate::state::pages::admin::AdminState;
|
|
||||||
use crate::modes::handlers::event::EventOutcome;
|
|
||||||
use crate::functions::modes::edit::{add_logic_e, auth_e, form_e, add_table_e};
|
|
||||||
use crate::state::app::state::AppState;
|
|
||||||
use anyhow::Result;
|
use anyhow::Result;
|
||||||
use crossterm::event::KeyEvent; // Removed KeyCode, KeyModifiers as they were unused
|
use common::proto::multieko2::search::search_response::Hit;
|
||||||
use tracing::debug;
|
use crossterm::event::{KeyCode, KeyEvent};
|
||||||
|
use tokio::sync::mpsc;
|
||||||
|
use tracing::{debug, info};
|
||||||
|
|
||||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||||
pub enum EditEventOutcome {
|
pub enum EditEventOutcome {
|
||||||
@@ -22,231 +24,313 @@ pub enum EditEventOutcome {
|
|||||||
ExitEditMode,
|
ExitEditMode,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Helper function to spawn a non-blocking search task for autocomplete.
|
||||||
|
async fn trigger_form_autocomplete_search(
|
||||||
|
form_state: &mut FormState,
|
||||||
|
grpc_client: &mut GrpcClient,
|
||||||
|
sender: mpsc::UnboundedSender<Vec<Hit>>,
|
||||||
|
) {
|
||||||
|
if let Some(field_def) = form_state.fields.get(form_state.current_field) {
|
||||||
|
if field_def.is_link {
|
||||||
|
if let Some(target_table) = &field_def.link_target_table {
|
||||||
|
// 1. Update state for immediate UI feedback
|
||||||
|
form_state.autocomplete_loading = true;
|
||||||
|
form_state.autocomplete_active = true;
|
||||||
|
form_state.autocomplete_suggestions.clear();
|
||||||
|
form_state.selected_suggestion_index = None;
|
||||||
|
|
||||||
|
// 2. Clone everything needed for the background task
|
||||||
|
let query = form_state.get_current_input().to_string();
|
||||||
|
let table_to_search = target_table.clone();
|
||||||
|
let mut grpc_client_clone = grpc_client.clone();
|
||||||
|
|
||||||
|
info!(
|
||||||
|
"[Autocomplete] Spawning search in '{}' for query: '{}'",
|
||||||
|
table_to_search, query
|
||||||
|
);
|
||||||
|
|
||||||
|
// 3. Spawn the non-blocking task
|
||||||
|
tokio::spawn(async move {
|
||||||
|
match grpc_client_clone
|
||||||
|
.search_table(table_to_search, query)
|
||||||
|
.await
|
||||||
|
{
|
||||||
|
Ok(response) => {
|
||||||
|
// Send results back through the channel
|
||||||
|
let _ = sender.send(response.hits);
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
tracing::error!(
|
||||||
|
"[Autocomplete] Search failed: {:?}",
|
||||||
|
e
|
||||||
|
);
|
||||||
|
// Send an empty vec on error so the UI can stop loading
|
||||||
|
let _ = sender.send(vec![]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
#[allow(clippy::too_many_arguments)]
|
||||||
pub async fn handle_edit_event(
|
pub async fn handle_edit_event(
|
||||||
key: KeyEvent,
|
key: KeyEvent,
|
||||||
config: &Config,
|
config: &Config,
|
||||||
form_state: &mut FormState, // Now FormState is in scope
|
form_state: &mut FormState,
|
||||||
login_state: &mut LoginState,
|
login_state: &mut LoginState,
|
||||||
register_state: &mut RegisterState,
|
register_state: &mut RegisterState,
|
||||||
admin_state: &mut AdminState,
|
admin_state: &mut AdminState,
|
||||||
ideal_cursor_column: &mut usize,
|
|
||||||
current_position: &mut u64,
|
current_position: &mut u64,
|
||||||
total_count: u64,
|
total_count: u64,
|
||||||
grpc_client: &mut GrpcClient,
|
event_handler: &mut EventHandler,
|
||||||
app_state: &AppState,
|
app_state: &AppState,
|
||||||
) -> Result<EditEventOutcome> {
|
) -> Result<EditEventOutcome> {
|
||||||
// --- Global command mode check ---
|
// --- AUTOCOMPLETE-SPECIFIC KEY HANDLING ---
|
||||||
if let Some("enter_command_mode") = config.get_action_for_key_in_mode(
|
if app_state.ui.show_form && form_state.autocomplete_active {
|
||||||
&config.keybindings.global, // Assuming command mode can be entered globally
|
if let Some(action) =
|
||||||
key.code,
|
config.get_edit_action_for_key(key.code, key.modifiers)
|
||||||
key.modifiers,
|
{
|
||||||
) {
|
match action {
|
||||||
// This check might be redundant if EventHandler already prevents entering Edit mode
|
"suggestion_down" => {
|
||||||
// when command_mode is true. However, it's a safeguard.
|
if !form_state.autocomplete_suggestions.is_empty() {
|
||||||
return Ok(EditEventOutcome::Message(
|
let current =
|
||||||
"Cannot enter command mode from edit mode here.".to_string(),
|
form_state.selected_suggestion_index.unwrap_or(0);
|
||||||
));
|
let next = (current + 1)
|
||||||
}
|
% form_state.autocomplete_suggestions.len();
|
||||||
|
form_state.selected_suggestion_index = Some(next);
|
||||||
// --- Common actions (save, revert) ---
|
}
|
||||||
if let Some(action) = config.get_action_for_key_in_mode(
|
return Ok(EditEventOutcome::Message(String::new()));
|
||||||
&config.keybindings.common,
|
|
||||||
key.code,
|
|
||||||
key.modifiers,
|
|
||||||
).as_deref() {
|
|
||||||
if matches!(action, "save" | "revert") {
|
|
||||||
let message_string: String = if app_state.ui.show_login {
|
|
||||||
auth_e::execute_common_action(action, login_state, grpc_client, current_position, total_count).await?
|
|
||||||
} else if app_state.ui.show_register {
|
|
||||||
auth_e::execute_common_action(action, register_state, grpc_client, current_position, total_count).await?
|
|
||||||
} else if app_state.ui.show_add_table {
|
|
||||||
// TODO: Implement common actions for AddTable if needed
|
|
||||||
format!("Action '{}' not implemented for Add Table in edit mode.", action)
|
|
||||||
} else if app_state.ui.show_add_logic {
|
|
||||||
// TODO: Implement common actions for AddLogic if needed
|
|
||||||
format!("Action '{}' not implemented for Add Logic in edit mode.", action)
|
|
||||||
} else { // Assuming Form view
|
|
||||||
let outcome = form_e::execute_common_action(action, form_state, grpc_client).await?;
|
|
||||||
match outcome {
|
|
||||||
EventOutcome::Ok(msg) | EventOutcome::DataSaved(_, msg) => msg,
|
|
||||||
_ => format!("Unexpected outcome from common action: {:?}", outcome),
|
|
||||||
}
|
}
|
||||||
};
|
"suggestion_up" => {
|
||||||
return Ok(EditEventOutcome::Message(message_string));
|
if !form_state.autocomplete_suggestions.is_empty() {
|
||||||
|
let current =
|
||||||
|
form_state.selected_suggestion_index.unwrap_or(0);
|
||||||
|
let prev = if current == 0 {
|
||||||
|
form_state.autocomplete_suggestions.len() - 1
|
||||||
|
} else {
|
||||||
|
current - 1
|
||||||
|
};
|
||||||
|
form_state.selected_suggestion_index = Some(prev);
|
||||||
|
}
|
||||||
|
return Ok(EditEventOutcome::Message(String::new()));
|
||||||
|
}
|
||||||
|
"exit" => {
|
||||||
|
form_state.deactivate_autocomplete();
|
||||||
|
return Ok(EditEventOutcome::Message(
|
||||||
|
"Autocomplete cancelled".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
"enter_decider" => {
|
||||||
|
if let Some(selected_idx) =
|
||||||
|
form_state.selected_suggestion_index
|
||||||
|
{
|
||||||
|
if let Some(selection) = form_state
|
||||||
|
.autocomplete_suggestions
|
||||||
|
.get(selected_idx)
|
||||||
|
.cloned()
|
||||||
|
{
|
||||||
|
// --- THIS IS THE CORE LOGIC CHANGE ---
|
||||||
|
|
||||||
|
// 1. Get the friendly display name for the UI
|
||||||
|
let display_name =
|
||||||
|
form_state.get_display_name_for_hit(&selection);
|
||||||
|
|
||||||
|
// 2. Store the REAL ID in the form's values
|
||||||
|
let current_input =
|
||||||
|
form_state.get_current_input_mut();
|
||||||
|
*current_input = selection.id.to_string();
|
||||||
|
|
||||||
|
// 3. Set the persistent display override in the map
|
||||||
|
form_state.link_display_map.insert(
|
||||||
|
form_state.current_field,
|
||||||
|
display_name,
|
||||||
|
);
|
||||||
|
|
||||||
|
// 4. Finalize state
|
||||||
|
form_state.deactivate_autocomplete();
|
||||||
|
form_state.set_has_unsaved_changes(true);
|
||||||
|
return Ok(EditEventOutcome::Message(
|
||||||
|
"Selection made".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
form_state.deactivate_autocomplete();
|
||||||
|
// Fall through to default 'enter' behavior
|
||||||
|
}
|
||||||
|
_ => {} // Let other keys fall through to the live search logic
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// --- Edit-specific actions ---
|
// --- LIVE AUTOCOMPLETE TRIGGER LOGIC ---
|
||||||
if let Some(action_str) = config.get_edit_action_for_key(key.code, key.modifiers).as_deref() {
|
let mut trigger_search = false;
|
||||||
// --- Handle "enter_decider" (Enter key) ---
|
|
||||||
if action_str == "enter_decider" {
|
|
||||||
let effective_action = if app_state.ui.show_register
|
|
||||||
&& register_state.in_suggestion_mode
|
|
||||||
&& register_state.current_field() == 4 { // Role field
|
|
||||||
"select_suggestion"
|
|
||||||
} else if app_state.ui.show_add_logic
|
|
||||||
&& admin_state.add_logic_state.in_target_column_suggestion_mode
|
|
||||||
&& admin_state.add_logic_state.current_field() == 1 { // Target Column field
|
|
||||||
"select_suggestion"
|
|
||||||
} else {
|
|
||||||
"next_field" // Default action for Enter
|
|
||||||
};
|
|
||||||
|
|
||||||
let msg = if app_state.ui.show_login {
|
if app_state.ui.show_form {
|
||||||
auth_e::execute_edit_action(effective_action, key, login_state, ideal_cursor_column).await?
|
// Manual trigger
|
||||||
} else if app_state.ui.show_add_table {
|
if let Some("trigger_autocomplete") =
|
||||||
add_table_e::execute_edit_action(effective_action, key, &mut admin_state.add_table_state, ideal_cursor_column).await?
|
config.get_edit_action_for_key(key.code, key.modifiers)
|
||||||
} else if app_state.ui.show_add_logic {
|
{
|
||||||
add_logic_e::execute_edit_action(effective_action, key, &mut admin_state.add_logic_state, ideal_cursor_column).await?
|
if !form_state.autocomplete_active {
|
||||||
} else if app_state.ui.show_register {
|
trigger_search = true;
|
||||||
auth_e::execute_edit_action(effective_action, key, register_state, ideal_cursor_column).await?
|
}
|
||||||
} else { // Form view
|
}
|
||||||
form_e::execute_edit_action(effective_action, key, form_state, ideal_cursor_column).await?
|
// Live search trigger while typing
|
||||||
};
|
else if form_state.autocomplete_active {
|
||||||
|
if let KeyCode::Char(_) | KeyCode::Backspace = key.code {
|
||||||
|
let action = if let KeyCode::Backspace = key.code {
|
||||||
|
"delete_char_backward"
|
||||||
|
} else {
|
||||||
|
"insert_char"
|
||||||
|
};
|
||||||
|
// FIX: Pass &mut event_handler.ideal_cursor_column
|
||||||
|
form_e::execute_edit_action(
|
||||||
|
action,
|
||||||
|
key,
|
||||||
|
form_state,
|
||||||
|
&mut event_handler.ideal_cursor_column,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
trigger_search = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if trigger_search {
|
||||||
|
trigger_form_autocomplete_search(
|
||||||
|
form_state,
|
||||||
|
&mut event_handler.grpc_client,
|
||||||
|
event_handler.autocomplete_result_sender.clone(),
|
||||||
|
)
|
||||||
|
.await;
|
||||||
|
return Ok(EditEventOutcome::Message("Searching...".to_string()));
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- GENERAL EDIT MODE EVENT HANDLING (IF NOT AUTOCOMPLETE) ---
|
||||||
|
|
||||||
|
if let Some(action_str) =
|
||||||
|
config.get_edit_action_for_key(key.code, key.modifiers)
|
||||||
|
{
|
||||||
|
// Handle Enter key (next field)
|
||||||
|
if action_str == "enter_decider" {
|
||||||
|
// FIX: Pass &mut event_handler.ideal_cursor_column
|
||||||
|
let msg = form_e::execute_edit_action(
|
||||||
|
"next_field",
|
||||||
|
key,
|
||||||
|
form_state,
|
||||||
|
&mut event_handler.ideal_cursor_column,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
return Ok(EditEventOutcome::Message(msg));
|
return Ok(EditEventOutcome::Message(msg));
|
||||||
}
|
}
|
||||||
|
|
||||||
// --- Handle "exit" (Escape key) ---
|
// Handle exiting edit mode
|
||||||
if action_str == "exit" {
|
if action_str == "exit" {
|
||||||
if app_state.ui.show_register && register_state.in_suggestion_mode {
|
return Ok(EditEventOutcome::ExitEditMode);
|
||||||
let msg = auth_e::execute_edit_action("exit_suggestion_mode", key, register_state, ideal_cursor_column).await?;
|
|
||||||
return Ok(EditEventOutcome::Message(msg));
|
|
||||||
} else if app_state.ui.show_add_logic && admin_state.add_logic_state.in_target_column_suggestion_mode {
|
|
||||||
admin_state.add_logic_state.in_target_column_suggestion_mode = false;
|
|
||||||
admin_state.add_logic_state.show_target_column_suggestions = false;
|
|
||||||
admin_state.add_logic_state.selected_target_column_suggestion_index = None;
|
|
||||||
return Ok(EditEventOutcome::Message("Exited column suggestions".to_string()));
|
|
||||||
} else {
|
|
||||||
return Ok(EditEventOutcome::ExitEditMode);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// --- Autocomplete for AddLogicState Target Column ---
|
// Handle all other edit actions
|
||||||
if app_state.ui.show_add_logic && admin_state.add_logic_state.current_field() == 1 { // Target Column field
|
|
||||||
if action_str == "suggestion_down" { // "Tab" is mapped to suggestion_down
|
|
||||||
if !admin_state.add_logic_state.in_target_column_suggestion_mode {
|
|
||||||
// Attempt to open suggestions
|
|
||||||
if let Some(profile_name) = admin_state.add_logic_state.profile_name.clone().into() {
|
|
||||||
if let Some(table_name) = admin_state.add_logic_state.selected_table_name.clone() {
|
|
||||||
debug!("Fetching table structure for autocomplete: Profile='{}', Table='{}'", profile_name, table_name);
|
|
||||||
match grpc_client.get_table_structure(profile_name, table_name).await {
|
|
||||||
Ok(ts_response) => {
|
|
||||||
admin_state.add_logic_state.table_columns_for_suggestions =
|
|
||||||
ts_response.columns.into_iter().map(|c| c.name).collect();
|
|
||||||
admin_state.add_logic_state.update_target_column_suggestions();
|
|
||||||
if !admin_state.add_logic_state.target_column_suggestions.is_empty() {
|
|
||||||
admin_state.add_logic_state.in_target_column_suggestion_mode = true;
|
|
||||||
// update_target_column_suggestions handles initial selection
|
|
||||||
return Ok(EditEventOutcome::Message("Column suggestions shown".to_string()));
|
|
||||||
} else {
|
|
||||||
return Ok(EditEventOutcome::Message("No column suggestions for current input".to_string()));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Err(e) => {
|
|
||||||
debug!("Error fetching table structure: {}", e);
|
|
||||||
admin_state.add_logic_state.table_columns_for_suggestions.clear(); // Clear old data on error
|
|
||||||
admin_state.add_logic_state.update_target_column_suggestions();
|
|
||||||
return Ok(EditEventOutcome::Message(format!("Error fetching columns: {}", e)));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
return Ok(EditEventOutcome::Message("No table selected for column suggestions".to_string()));
|
|
||||||
}
|
|
||||||
} else { // Should not happen if AddLogic is properly initialized
|
|
||||||
return Ok(EditEventOutcome::Message("Profile name missing for column suggestions".to_string()));
|
|
||||||
}
|
|
||||||
} else { // Already in suggestion mode, navigate down
|
|
||||||
let msg = add_logic_e::execute_edit_action(action_str, key, &mut admin_state.add_logic_state, ideal_cursor_column).await?;
|
|
||||||
return Ok(EditEventOutcome::Message(msg));
|
|
||||||
}
|
|
||||||
} else if admin_state.add_logic_state.in_target_column_suggestion_mode && action_str == "suggestion_up" {
|
|
||||||
let msg = add_logic_e::execute_edit_action(action_str, key, &mut admin_state.add_logic_state, ideal_cursor_column).await?;
|
|
||||||
return Ok(EditEventOutcome::Message(msg));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// --- Autocomplete for RegisterState Role Field ---
|
|
||||||
if app_state.ui.show_register && register_state.current_field() == 4 { // Role field
|
|
||||||
if !register_state.in_suggestion_mode && action_str == "suggestion_down" { // Tab
|
|
||||||
register_state.update_role_suggestions();
|
|
||||||
if !register_state.role_suggestions.is_empty() {
|
|
||||||
register_state.in_suggestion_mode = true;
|
|
||||||
// update_role_suggestions should handle initial selection
|
|
||||||
return Ok(EditEventOutcome::Message("Role suggestions shown".to_string()));
|
|
||||||
} else {
|
|
||||||
// If Tab doesn't open suggestions, it might fall through to "next_field"
|
|
||||||
// or you might want specific behavior. For now, let it fall through.
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if register_state.in_suggestion_mode && matches!(action_str, "suggestion_down" | "suggestion_up") {
|
|
||||||
let msg = auth_e::execute_edit_action(action_str, key, register_state, ideal_cursor_column).await?;
|
|
||||||
return Ok(EditEventOutcome::Message(msg));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// --- Dispatch other edit actions ---
|
|
||||||
let msg = if app_state.ui.show_login {
|
let msg = if app_state.ui.show_login {
|
||||||
auth_e::execute_edit_action(action_str, key, login_state, ideal_cursor_column).await?
|
// FIX: Pass &mut event_handler.ideal_cursor_column
|
||||||
|
auth_e::execute_edit_action(
|
||||||
|
action_str,
|
||||||
|
key,
|
||||||
|
login_state,
|
||||||
|
&mut event_handler.ideal_cursor_column,
|
||||||
|
)
|
||||||
|
.await?
|
||||||
} else if app_state.ui.show_add_table {
|
} else if app_state.ui.show_add_table {
|
||||||
add_table_e::execute_edit_action(action_str, key, &mut admin_state.add_table_state, ideal_cursor_column).await?
|
// FIX: Pass &mut event_handler.ideal_cursor_column
|
||||||
|
add_table_e::execute_edit_action(
|
||||||
|
action_str,
|
||||||
|
key,
|
||||||
|
&mut admin_state.add_table_state,
|
||||||
|
&mut event_handler.ideal_cursor_column,
|
||||||
|
)
|
||||||
|
.await?
|
||||||
} else if app_state.ui.show_add_logic {
|
} else if app_state.ui.show_add_logic {
|
||||||
// If not a suggestion action handled above for AddLogic
|
// FIX: Pass &mut event_handler.ideal_cursor_column
|
||||||
if !(admin_state.add_logic_state.in_target_column_suggestion_mode && matches!(action_str, "suggestion_down" | "suggestion_up")) {
|
add_logic_e::execute_edit_action(
|
||||||
add_logic_e::execute_edit_action(action_str, key, &mut admin_state.add_logic_state, ideal_cursor_column).await?
|
action_str,
|
||||||
} else { String::new() /* Already handled */ }
|
key,
|
||||||
|
&mut admin_state.add_logic_state,
|
||||||
|
&mut event_handler.ideal_cursor_column,
|
||||||
|
)
|
||||||
|
.await?
|
||||||
} else if app_state.ui.show_register {
|
} else if app_state.ui.show_register {
|
||||||
if !(register_state.in_suggestion_mode && matches!(action_str, "suggestion_down" | "suggestion_up")) {
|
// FIX: Pass &mut event_handler.ideal_cursor_column
|
||||||
auth_e::execute_edit_action(action_str, key, register_state, ideal_cursor_column).await?
|
auth_e::execute_edit_action(
|
||||||
} else { String::new() /* Already handled */ }
|
action_str,
|
||||||
} else { // Form view
|
key,
|
||||||
form_e::execute_edit_action(action_str, key, form_state, ideal_cursor_column).await?
|
register_state,
|
||||||
|
&mut event_handler.ideal_cursor_column,
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
} else {
|
||||||
|
// FIX: Pass &mut event_handler.ideal_cursor_column
|
||||||
|
form_e::execute_edit_action(
|
||||||
|
action_str,
|
||||||
|
key,
|
||||||
|
form_state,
|
||||||
|
&mut event_handler.ideal_cursor_column,
|
||||||
|
)
|
||||||
|
.await?
|
||||||
};
|
};
|
||||||
return Ok(EditEventOutcome::Message(msg));
|
return Ok(EditEventOutcome::Message(msg));
|
||||||
}
|
}
|
||||||
|
|
||||||
// --- Character insertion ---
|
// --- FALLBACK FOR CHARACTER INSERTION (IF NO OTHER BINDING MATCHED) ---
|
||||||
// If character insertion happens while in suggestion mode, exit suggestion mode first.
|
if let KeyCode::Char(_) = key.code {
|
||||||
let mut exited_suggestion_mode_for_typing = false;
|
let msg = if app_state.ui.show_login {
|
||||||
if app_state.ui.show_register && register_state.in_suggestion_mode {
|
// FIX: Pass &mut event_handler.ideal_cursor_column
|
||||||
register_state.in_suggestion_mode = false;
|
auth_e::execute_edit_action(
|
||||||
register_state.show_role_suggestions = false;
|
"insert_char",
|
||||||
register_state.selected_suggestion_index = None;
|
key,
|
||||||
exited_suggestion_mode_for_typing = true;
|
login_state,
|
||||||
}
|
&mut event_handler.ideal_cursor_column,
|
||||||
if app_state.ui.show_add_logic && admin_state.add_logic_state.in_target_column_suggestion_mode {
|
)
|
||||||
admin_state.add_logic_state.in_target_column_suggestion_mode = false;
|
.await?
|
||||||
admin_state.add_logic_state.show_target_column_suggestions = false;
|
} else if app_state.ui.show_add_table {
|
||||||
admin_state.add_logic_state.selected_target_column_suggestion_index = None;
|
// FIX: Pass &mut event_handler.ideal_cursor_column
|
||||||
exited_suggestion_mode_for_typing = true;
|
add_table_e::execute_edit_action(
|
||||||
|
"insert_char",
|
||||||
|
key,
|
||||||
|
&mut admin_state.add_table_state,
|
||||||
|
&mut event_handler.ideal_cursor_column,
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
} else if app_state.ui.show_add_logic {
|
||||||
|
// FIX: Pass &mut event_handler.ideal_cursor_column
|
||||||
|
add_logic_e::execute_edit_action(
|
||||||
|
"insert_char",
|
||||||
|
key,
|
||||||
|
&mut admin_state.add_logic_state,
|
||||||
|
&mut event_handler.ideal_cursor_column,
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
} else if app_state.ui.show_register {
|
||||||
|
// FIX: Pass &mut event_handler.ideal_cursor_column
|
||||||
|
auth_e::execute_edit_action(
|
||||||
|
"insert_char",
|
||||||
|
key,
|
||||||
|
register_state,
|
||||||
|
&mut event_handler.ideal_cursor_column,
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
} else {
|
||||||
|
// FIX: Pass &mut event_handler.ideal_cursor_column
|
||||||
|
form_e::execute_edit_action(
|
||||||
|
"insert_char",
|
||||||
|
key,
|
||||||
|
form_state,
|
||||||
|
&mut event_handler.ideal_cursor_column,
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
};
|
||||||
|
return Ok(EditEventOutcome::Message(msg));
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut char_insert_msg = if app_state.ui.show_login {
|
Ok(EditEventOutcome::Message(String::new())) // No action taken
|
||||||
auth_e::execute_edit_action("insert_char", key, login_state, ideal_cursor_column).await?
|
|
||||||
} else if app_state.ui.show_add_table {
|
|
||||||
add_table_e::execute_edit_action("insert_char", key, &mut admin_state.add_table_state, ideal_cursor_column).await?
|
|
||||||
} else if app_state.ui.show_add_logic {
|
|
||||||
add_logic_e::execute_edit_action("insert_char", key, &mut admin_state.add_logic_state, ideal_cursor_column).await?
|
|
||||||
} else if app_state.ui.show_register {
|
|
||||||
auth_e::execute_edit_action("insert_char", key, register_state, ideal_cursor_column).await?
|
|
||||||
} else { // Form view
|
|
||||||
form_e::execute_edit_action("insert_char", key, form_state, ideal_cursor_column).await?
|
|
||||||
};
|
|
||||||
|
|
||||||
// After character insertion, update suggestions if applicable
|
|
||||||
if app_state.ui.show_register && register_state.current_field() == 4 {
|
|
||||||
register_state.update_role_suggestions();
|
|
||||||
// If we just exited suggestion mode by typing, don't immediately show them again unless Tab is pressed.
|
|
||||||
// However, update_role_suggestions will set show_role_suggestions if matches are found.
|
|
||||||
// This is fine, as the render logic checks in_suggestion_mode.
|
|
||||||
}
|
|
||||||
if app_state.ui.show_add_logic && admin_state.add_logic_state.current_field() == 1 {
|
|
||||||
admin_state.add_logic_state.update_target_column_suggestions();
|
|
||||||
}
|
|
||||||
|
|
||||||
if exited_suggestion_mode_for_typing && char_insert_msg.is_empty() {
|
|
||||||
char_insert_msg = "Suggestions hidden".to_string();
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
Ok(EditEventOutcome::Message(char_insert_msg))
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -15,7 +15,7 @@ use anyhow::Result;
|
|||||||
pub async fn handle_command_event(
|
pub async fn handle_command_event(
|
||||||
key: KeyEvent,
|
key: KeyEvent,
|
||||||
config: &Config,
|
config: &Config,
|
||||||
app_state: &AppState,
|
app_state: &mut AppState,
|
||||||
login_state: &LoginState,
|
login_state: &LoginState,
|
||||||
register_state: &RegisterState,
|
register_state: &RegisterState,
|
||||||
form_state: &mut FormState,
|
form_state: &mut FormState,
|
||||||
@@ -74,7 +74,7 @@ pub async fn handle_command_event(
|
|||||||
async fn process_command(
|
async fn process_command(
|
||||||
config: &Config,
|
config: &Config,
|
||||||
form_state: &mut FormState,
|
form_state: &mut FormState,
|
||||||
app_state: &AppState,
|
app_state: &mut AppState,
|
||||||
login_state: &LoginState,
|
login_state: &LoginState,
|
||||||
register_state: &RegisterState,
|
register_state: &RegisterState,
|
||||||
command_input: &mut String,
|
command_input: &mut String,
|
||||||
@@ -117,6 +117,7 @@ async fn process_command(
|
|||||||
},
|
},
|
||||||
"save" => {
|
"save" => {
|
||||||
let outcome = save(
|
let outcome = save(
|
||||||
|
app_state,
|
||||||
form_state,
|
form_state,
|
||||||
grpc_client,
|
grpc_client,
|
||||||
).await?;
|
).await?;
|
||||||
|
|||||||
@@ -21,6 +21,7 @@ use crate::state::{
|
|||||||
app::{
|
app::{
|
||||||
buffer::{AppView, BufferState},
|
buffer::{AppView, BufferState},
|
||||||
highlight::HighlightState,
|
highlight::HighlightState,
|
||||||
|
search::SearchState, // Correctly imported
|
||||||
state::AppState,
|
state::AppState,
|
||||||
},
|
},
|
||||||
pages::{
|
pages::{
|
||||||
@@ -41,10 +42,12 @@ use crate::tui::{
|
|||||||
use crate::ui::handlers::context::UiContext;
|
use crate::ui::handlers::context::UiContext;
|
||||||
use crate::ui::handlers::rat_state::UiStateHandler;
|
use crate::ui::handlers::rat_state::UiStateHandler;
|
||||||
use anyhow::Result;
|
use anyhow::Result;
|
||||||
|
use common::proto::multieko2::search::search_response::Hit;
|
||||||
use crossterm::cursor::SetCursorStyle;
|
use crossterm::cursor::SetCursorStyle;
|
||||||
use crossterm::event::KeyCode;
|
use crossterm::event::{Event, KeyCode, KeyEvent};
|
||||||
use crossterm::event::{Event, KeyEvent};
|
|
||||||
use tokio::sync::mpsc;
|
use tokio::sync::mpsc;
|
||||||
|
use tokio::sync::mpsc::unbounded_channel;
|
||||||
|
use tracing::{error, info};
|
||||||
|
|
||||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||||
pub enum EventOutcome {
|
pub enum EventOutcome {
|
||||||
@@ -74,11 +77,17 @@ pub struct EventHandler {
|
|||||||
pub ideal_cursor_column: usize,
|
pub ideal_cursor_column: usize,
|
||||||
pub key_sequence_tracker: KeySequenceTracker,
|
pub key_sequence_tracker: KeySequenceTracker,
|
||||||
pub auth_client: AuthClient,
|
pub auth_client: AuthClient,
|
||||||
|
pub grpc_client: GrpcClient,
|
||||||
pub login_result_sender: mpsc::Sender<LoginResult>,
|
pub login_result_sender: mpsc::Sender<LoginResult>,
|
||||||
pub register_result_sender: mpsc::Sender<RegisterResult>,
|
pub register_result_sender: mpsc::Sender<RegisterResult>,
|
||||||
pub save_table_result_sender: SaveTableResultSender,
|
pub save_table_result_sender: SaveTableResultSender,
|
||||||
pub save_logic_result_sender: SaveLogicResultSender,
|
pub save_logic_result_sender: SaveLogicResultSender,
|
||||||
pub navigation_state: NavigationState,
|
pub navigation_state: NavigationState,
|
||||||
|
pub search_result_sender: mpsc::UnboundedSender<Vec<Hit>>,
|
||||||
|
pub search_result_receiver: mpsc::UnboundedReceiver<Vec<Hit>>,
|
||||||
|
// --- ADDED FOR LIVE AUTOCOMPLETE ---
|
||||||
|
pub autocomplete_result_sender: mpsc::UnboundedSender<Vec<Hit>>,
|
||||||
|
pub autocomplete_result_receiver: mpsc::UnboundedReceiver<Vec<Hit>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl EventHandler {
|
impl EventHandler {
|
||||||
@@ -87,7 +96,10 @@ impl EventHandler {
|
|||||||
register_result_sender: mpsc::Sender<RegisterResult>,
|
register_result_sender: mpsc::Sender<RegisterResult>,
|
||||||
save_table_result_sender: SaveTableResultSender,
|
save_table_result_sender: SaveTableResultSender,
|
||||||
save_logic_result_sender: SaveLogicResultSender,
|
save_logic_result_sender: SaveLogicResultSender,
|
||||||
|
grpc_client: GrpcClient,
|
||||||
) -> Result<Self> {
|
) -> Result<Self> {
|
||||||
|
let (search_tx, search_rx) = unbounded_channel();
|
||||||
|
let (autocomplete_tx, autocomplete_rx) = unbounded_channel(); // ADDED
|
||||||
Ok(EventHandler {
|
Ok(EventHandler {
|
||||||
command_mode: false,
|
command_mode: false,
|
||||||
command_input: String::new(),
|
command_input: String::new(),
|
||||||
@@ -98,11 +110,17 @@ impl EventHandler {
|
|||||||
ideal_cursor_column: 0,
|
ideal_cursor_column: 0,
|
||||||
key_sequence_tracker: KeySequenceTracker::new(400),
|
key_sequence_tracker: KeySequenceTracker::new(400),
|
||||||
auth_client: AuthClient::new().await?,
|
auth_client: AuthClient::new().await?,
|
||||||
|
grpc_client,
|
||||||
login_result_sender,
|
login_result_sender,
|
||||||
register_result_sender,
|
register_result_sender,
|
||||||
save_table_result_sender,
|
save_table_result_sender,
|
||||||
save_logic_result_sender,
|
save_logic_result_sender,
|
||||||
navigation_state: NavigationState::new(),
|
navigation_state: NavigationState::new(),
|
||||||
|
search_result_sender: search_tx,
|
||||||
|
search_result_receiver: search_rx,
|
||||||
|
// --- ADDED ---
|
||||||
|
autocomplete_result_sender: autocomplete_tx,
|
||||||
|
autocomplete_result_receiver: autocomplete_rx,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -114,13 +132,122 @@ impl EventHandler {
|
|||||||
self.navigation_state.activate_find_file(options);
|
self.navigation_state.activate_find_file(options);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// This function handles state changes.
|
||||||
|
async fn handle_search_palette_event(
|
||||||
|
&mut self,
|
||||||
|
key_event: KeyEvent,
|
||||||
|
form_state: &mut FormState,
|
||||||
|
app_state: &mut AppState,
|
||||||
|
) -> Result<EventOutcome> {
|
||||||
|
let mut should_close = false;
|
||||||
|
let mut outcome_message = String::new();
|
||||||
|
let mut trigger_search = false;
|
||||||
|
|
||||||
|
if let Some(search_state) = app_state.search_state.as_mut() {
|
||||||
|
match key_event.code {
|
||||||
|
KeyCode::Esc => {
|
||||||
|
should_close = true;
|
||||||
|
outcome_message = "Search cancelled".to_string();
|
||||||
|
}
|
||||||
|
KeyCode::Enter => {
|
||||||
|
if let Some(selected_hit) =
|
||||||
|
search_state.results.get(search_state.selected_index)
|
||||||
|
{
|
||||||
|
if let Ok(data) = serde_json::from_str::<
|
||||||
|
std::collections::HashMap<String, String>,
|
||||||
|
>(&selected_hit.content_json)
|
||||||
|
{
|
||||||
|
let detached_pos = form_state.total_count + 2;
|
||||||
|
form_state
|
||||||
|
.update_from_response(&data, detached_pos);
|
||||||
|
}
|
||||||
|
should_close = true;
|
||||||
|
outcome_message =
|
||||||
|
format!("Loaded record ID {}", selected_hit.id);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
KeyCode::Up => search_state.previous_result(),
|
||||||
|
KeyCode::Down => search_state.next_result(),
|
||||||
|
KeyCode::Char(c) => {
|
||||||
|
search_state
|
||||||
|
.input
|
||||||
|
.insert(search_state.cursor_position, c);
|
||||||
|
search_state.cursor_position += 1;
|
||||||
|
trigger_search = true;
|
||||||
|
}
|
||||||
|
KeyCode::Backspace => {
|
||||||
|
if search_state.cursor_position > 0 {
|
||||||
|
search_state.cursor_position -= 1;
|
||||||
|
search_state.input.remove(search_state.cursor_position);
|
||||||
|
trigger_search = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
KeyCode::Left => {
|
||||||
|
search_state.cursor_position =
|
||||||
|
search_state.cursor_position.saturating_sub(1);
|
||||||
|
}
|
||||||
|
KeyCode::Right => {
|
||||||
|
if search_state.cursor_position < search_state.input.len()
|
||||||
|
{
|
||||||
|
search_state.cursor_position += 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- START CORRECTED LOGIC ---
|
||||||
|
if trigger_search {
|
||||||
|
search_state.is_loading = true;
|
||||||
|
search_state.results.clear();
|
||||||
|
search_state.selected_index = 0;
|
||||||
|
|
||||||
|
let query = search_state.input.clone();
|
||||||
|
let table_name = search_state.table_name.clone();
|
||||||
|
let sender = self.search_result_sender.clone();
|
||||||
|
let mut grpc_client = self.grpc_client.clone();
|
||||||
|
|
||||||
|
info!(
|
||||||
|
"--- 1. Spawning search task for query: '{}' ---",
|
||||||
|
query
|
||||||
|
);
|
||||||
|
// We now move the grpc_client into the task, just like with login.
|
||||||
|
tokio::spawn(async move {
|
||||||
|
info!("--- 2. Background task started. ---");
|
||||||
|
match grpc_client.search_table(table_name, query).await {
|
||||||
|
Ok(response) => {
|
||||||
|
info!(
|
||||||
|
"--- 3a. gRPC call successful. Found {} hits. ---",
|
||||||
|
response.hits.len()
|
||||||
|
);
|
||||||
|
let _ = sender.send(response.hits);
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
// THE FIX: Use the debug formatter `{:?}` to print the full error chain.
|
||||||
|
error!("--- 3b. gRPC call failed: {:?} ---", e);
|
||||||
|
let _ = sender.send(vec![]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// The borrow on `app_state.search_state` ends here.
|
||||||
|
// Now we can safely modify the Option itself.
|
||||||
|
if should_close {
|
||||||
|
app_state.search_state = None;
|
||||||
|
app_state.ui.show_search_palette = false;
|
||||||
|
app_state.ui.focus_outside_canvas = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(EventOutcome::Ok(outcome_message))
|
||||||
|
}
|
||||||
|
|
||||||
#[allow(clippy::too_many_arguments)]
|
#[allow(clippy::too_many_arguments)]
|
||||||
pub async fn handle_event(
|
pub async fn handle_event(
|
||||||
&mut self,
|
&mut self,
|
||||||
event: Event,
|
event: Event,
|
||||||
config: &Config,
|
config: &Config,
|
||||||
terminal: &mut TerminalCore,
|
terminal: &mut TerminalCore,
|
||||||
grpc_client: &mut GrpcClient,
|
|
||||||
command_handler: &mut CommandHandler,
|
command_handler: &mut CommandHandler,
|
||||||
form_state: &mut FormState,
|
form_state: &mut FormState,
|
||||||
auth_state: &mut AuthState,
|
auth_state: &mut AuthState,
|
||||||
@@ -131,17 +258,36 @@ impl EventHandler {
|
|||||||
buffer_state: &mut BufferState,
|
buffer_state: &mut BufferState,
|
||||||
app_state: &mut AppState,
|
app_state: &mut AppState,
|
||||||
) -> Result<EventOutcome> {
|
) -> Result<EventOutcome> {
|
||||||
let mut current_mode = ModeManager::derive_mode(app_state, self, admin_state);
|
if app_state.ui.show_search_palette {
|
||||||
|
if let Event::Key(key_event) = event {
|
||||||
|
// The call no longer passes grpc_client
|
||||||
|
return self
|
||||||
|
.handle_search_palette_event(
|
||||||
|
key_event,
|
||||||
|
form_state,
|
||||||
|
app_state,
|
||||||
|
)
|
||||||
|
.await;
|
||||||
|
}
|
||||||
|
return Ok(EventOutcome::Ok(String::new()));
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut current_mode =
|
||||||
|
ModeManager::derive_mode(app_state, self, admin_state);
|
||||||
|
|
||||||
if current_mode == AppMode::General && self.navigation_state.active {
|
if current_mode == AppMode::General && self.navigation_state.active {
|
||||||
if let Event::Key(key_event) = event {
|
if let Event::Key(key_event) = event {
|
||||||
let outcome =
|
let outcome = handle_command_navigation_event(
|
||||||
handle_command_navigation_event(&mut self.navigation_state, key_event, config)
|
&mut self.navigation_state,
|
||||||
.await?;
|
key_event,
|
||||||
|
config,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
if !self.navigation_state.active {
|
if !self.navigation_state.active {
|
||||||
self.command_message = outcome.get_message_if_ok();
|
self.command_message = outcome.get_message_if_ok();
|
||||||
current_mode = ModeManager::derive_mode(app_state, self, admin_state);
|
current_mode =
|
||||||
|
ModeManager::derive_mode(app_state, self, admin_state);
|
||||||
}
|
}
|
||||||
app_state.update_mode(current_mode);
|
app_state.update_mode(current_mode);
|
||||||
return Ok(outcome);
|
return Ok(outcome);
|
||||||
@@ -154,23 +300,39 @@ impl EventHandler {
|
|||||||
|
|
||||||
let current_view = {
|
let current_view = {
|
||||||
let ui = &app_state.ui;
|
let ui = &app_state.ui;
|
||||||
if ui.show_intro { AppView::Intro }
|
if ui.show_intro {
|
||||||
else if ui.show_login { AppView::Login }
|
AppView::Intro
|
||||||
else if ui.show_register { AppView::Register }
|
} else if ui.show_login {
|
||||||
else if ui.show_admin { AppView::Admin }
|
AppView::Login
|
||||||
else if ui.show_add_logic { AppView::AddLogic }
|
} else if ui.show_register {
|
||||||
else if ui.show_add_table { AppView::AddTable }
|
AppView::Register
|
||||||
else if ui.show_form { AppView::Form }
|
} else if ui.show_admin {
|
||||||
else { AppView::Scratch }
|
AppView::Admin
|
||||||
|
} else if ui.show_add_logic {
|
||||||
|
AppView::AddLogic
|
||||||
|
} else if ui.show_add_table {
|
||||||
|
AppView::AddTable
|
||||||
|
} else if ui.show_form {
|
||||||
|
AppView::Form
|
||||||
|
} else {
|
||||||
|
AppView::Scratch
|
||||||
|
}
|
||||||
};
|
};
|
||||||
buffer_state.update_history(current_view);
|
buffer_state.update_history(current_view);
|
||||||
|
|
||||||
if app_state.ui.dialog.dialog_show {
|
if app_state.ui.dialog.dialog_show {
|
||||||
if let Event::Key(key_event) = event {
|
if let Event::Key(key_event) = event {
|
||||||
if let Some(dialog_result) = dialog::handle_dialog_event(
|
if let Some(dialog_result) = dialog::handle_dialog_event(
|
||||||
&Event::Key(key_event), config, app_state, login_state,
|
&Event::Key(key_event),
|
||||||
register_state, buffer_state, admin_state,
|
config,
|
||||||
).await {
|
app_state,
|
||||||
|
login_state,
|
||||||
|
register_state,
|
||||||
|
buffer_state,
|
||||||
|
admin_state,
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
{
|
||||||
return dialog_result;
|
return dialog_result;
|
||||||
}
|
}
|
||||||
} else if let Event::Resize(_, _) = event {
|
} else if let Event::Resize(_, _) = event {
|
||||||
@@ -182,99 +344,227 @@ impl EventHandler {
|
|||||||
let key_code = key_event.code;
|
let key_code = key_event.code;
|
||||||
let modifiers = key_event.modifiers;
|
let modifiers = key_event.modifiers;
|
||||||
|
|
||||||
if UiStateHandler::toggle_sidebar(&mut app_state.ui, config, key_code, modifiers) {
|
if UiStateHandler::toggle_sidebar(
|
||||||
let message = format!("Sidebar {}", if app_state.ui.show_sidebar { "shown" } else { "hidden" });
|
&mut app_state.ui,
|
||||||
|
config,
|
||||||
|
key_code,
|
||||||
|
modifiers,
|
||||||
|
) {
|
||||||
|
let message = format!(
|
||||||
|
"Sidebar {}",
|
||||||
|
if app_state.ui.show_sidebar {
|
||||||
|
"shown"
|
||||||
|
} else {
|
||||||
|
"hidden"
|
||||||
|
}
|
||||||
|
);
|
||||||
return Ok(EventOutcome::Ok(message));
|
return Ok(EventOutcome::Ok(message));
|
||||||
}
|
}
|
||||||
if UiStateHandler::toggle_buffer_list(&mut app_state.ui, config, key_code, modifiers) {
|
if UiStateHandler::toggle_buffer_list(
|
||||||
let message = format!("Buffer {}", if app_state.ui.show_buffer_list { "shown" } else { "hidden" });
|
&mut app_state.ui,
|
||||||
|
config,
|
||||||
|
key_code,
|
||||||
|
modifiers,
|
||||||
|
) {
|
||||||
|
let message = format!(
|
||||||
|
"Buffer {}",
|
||||||
|
if app_state.ui.show_buffer_list {
|
||||||
|
"shown"
|
||||||
|
} else {
|
||||||
|
"hidden"
|
||||||
|
}
|
||||||
|
);
|
||||||
return Ok(EventOutcome::Ok(message));
|
return Ok(EventOutcome::Ok(message));
|
||||||
}
|
}
|
||||||
|
|
||||||
if !matches!(current_mode, AppMode::Edit | AppMode::Command) {
|
if !matches!(current_mode, AppMode::Edit | AppMode::Command) {
|
||||||
if let Some(action) = config.get_action_for_key_in_mode(&config.keybindings.global, key_code, modifiers) {
|
if let Some(action) = config.get_action_for_key_in_mode(
|
||||||
|
&config.keybindings.global,
|
||||||
|
key_code,
|
||||||
|
modifiers,
|
||||||
|
) {
|
||||||
match action {
|
match action {
|
||||||
"next_buffer" => {
|
"next_buffer" => {
|
||||||
if buffer::switch_buffer(buffer_state, true) {
|
if buffer::switch_buffer(buffer_state, true) {
|
||||||
return Ok(EventOutcome::Ok("Switched to next buffer".to_string()));
|
return Ok(EventOutcome::Ok(
|
||||||
|
"Switched to next buffer".to_string(),
|
||||||
|
));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
"previous_buffer" => {
|
"previous_buffer" => {
|
||||||
if buffer::switch_buffer(buffer_state, false) {
|
if buffer::switch_buffer(buffer_state, false) {
|
||||||
return Ok(EventOutcome::Ok("Switched to previous buffer".to_string()));
|
return Ok(EventOutcome::Ok(
|
||||||
|
"Switched to previous buffer".to_string(),
|
||||||
|
));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
"close_buffer" => {
|
"close_buffer" => {
|
||||||
let current_table_name = app_state.current_view_table_name.as_deref();
|
let current_table_name =
|
||||||
let message = buffer_state.close_buffer_with_intro_fallback(current_table_name);
|
app_state.current_view_table_name.as_deref();
|
||||||
|
let message = buffer_state
|
||||||
|
.close_buffer_with_intro_fallback(
|
||||||
|
current_table_name,
|
||||||
|
);
|
||||||
return Ok(EventOutcome::Ok(message));
|
return Ok(EventOutcome::Ok(message));
|
||||||
}
|
}
|
||||||
_ => {}
|
_ => {}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if let Some(action) =
|
||||||
|
config.get_general_action(key_code, modifiers)
|
||||||
|
{
|
||||||
|
if action == "open_search" {
|
||||||
|
if app_state.ui.show_form {
|
||||||
|
if let Some(table_name) =
|
||||||
|
app_state.current_view_table_name.clone()
|
||||||
|
{
|
||||||
|
app_state.ui.show_search_palette = true;
|
||||||
|
app_state.search_state =
|
||||||
|
Some(SearchState::new(table_name));
|
||||||
|
app_state.ui.focus_outside_canvas = true;
|
||||||
|
return Ok(EventOutcome::Ok(
|
||||||
|
"Search palette opened".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
match current_mode {
|
match current_mode {
|
||||||
AppMode::General => {
|
AppMode::General => {
|
||||||
if app_state.ui.show_admin && auth_state.role.as_deref() == Some("admin") {
|
if app_state.ui.show_admin
|
||||||
if admin_nav::handle_admin_navigation(key_event, config, app_state, admin_state, buffer_state, &mut self.command_message) {
|
&& auth_state.role.as_deref() == Some("admin")
|
||||||
return Ok(EventOutcome::Ok(self.command_message.clone()));
|
{
|
||||||
|
if admin_nav::handle_admin_navigation(
|
||||||
|
key_event,
|
||||||
|
config,
|
||||||
|
app_state,
|
||||||
|
admin_state,
|
||||||
|
buffer_state,
|
||||||
|
&mut self.command_message,
|
||||||
|
) {
|
||||||
|
return Ok(EventOutcome::Ok(
|
||||||
|
self.command_message.clone(),
|
||||||
|
));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if app_state.ui.show_add_logic {
|
if app_state.ui.show_add_logic {
|
||||||
let client_clone = grpc_client.clone();
|
let client_clone = self.grpc_client.clone();
|
||||||
let sender_clone = self.save_logic_result_sender.clone();
|
let sender_clone = self.save_logic_result_sender.clone();
|
||||||
if add_logic_nav::handle_add_logic_navigation(
|
if add_logic_nav::handle_add_logic_navigation(
|
||||||
key_event, config, app_state, &mut admin_state.add_logic_state,
|
key_event,
|
||||||
&mut self.is_edit_mode, buffer_state, client_clone, sender_clone, &mut self.command_message,
|
config,
|
||||||
|
app_state,
|
||||||
|
&mut admin_state.add_logic_state,
|
||||||
|
&mut self.is_edit_mode,
|
||||||
|
buffer_state,
|
||||||
|
client_clone,
|
||||||
|
sender_clone,
|
||||||
|
&mut self.command_message,
|
||||||
) {
|
) {
|
||||||
return Ok(EventOutcome::Ok(self.command_message.clone()));
|
return Ok(EventOutcome::Ok(
|
||||||
|
self.command_message.clone(),
|
||||||
|
));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if app_state.ui.show_add_table {
|
if app_state.ui.show_add_table {
|
||||||
let client_clone = grpc_client.clone();
|
let client_clone = self.grpc_client.clone();
|
||||||
let sender_clone = self.save_table_result_sender.clone();
|
let sender_clone = self.save_table_result_sender.clone();
|
||||||
if add_table_nav::handle_add_table_navigation(
|
if add_table_nav::handle_add_table_navigation(
|
||||||
key_event, config, app_state, &mut admin_state.add_table_state,
|
key_event,
|
||||||
client_clone, sender_clone, &mut self.command_message,
|
config,
|
||||||
|
app_state,
|
||||||
|
&mut admin_state.add_table_state,
|
||||||
|
client_clone,
|
||||||
|
sender_clone,
|
||||||
|
&mut self.command_message,
|
||||||
) {
|
) {
|
||||||
return Ok(EventOutcome::Ok(self.command_message.clone()));
|
return Ok(EventOutcome::Ok(
|
||||||
|
self.command_message.clone(),
|
||||||
|
));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let nav_outcome = navigation::handle_navigation_event(
|
let nav_outcome = navigation::handle_navigation_event(
|
||||||
key_event, config, form_state, app_state, login_state, register_state,
|
key_event,
|
||||||
intro_state, admin_state, &mut self.command_mode, &mut self.command_input,
|
config,
|
||||||
&mut self.command_message, &mut self.navigation_state,
|
form_state,
|
||||||
).await;
|
app_state,
|
||||||
|
login_state,
|
||||||
|
register_state,
|
||||||
|
intro_state,
|
||||||
|
admin_state,
|
||||||
|
&mut self.command_mode,
|
||||||
|
&mut self.command_input,
|
||||||
|
&mut self.command_message,
|
||||||
|
&mut self.navigation_state,
|
||||||
|
)
|
||||||
|
.await;
|
||||||
|
|
||||||
match nav_outcome {
|
match nav_outcome {
|
||||||
Ok(EventOutcome::ButtonSelected { context, index }) => {
|
Ok(EventOutcome::ButtonSelected { context, index }) => {
|
||||||
let message = match context {
|
let message = match context {
|
||||||
UiContext::Intro => {
|
UiContext::Intro => {
|
||||||
intro::handle_intro_selection(app_state, buffer_state, index);
|
intro::handle_intro_selection(
|
||||||
if app_state.ui.show_admin && !app_state.profile_tree.profiles.is_empty() {
|
app_state,
|
||||||
admin_state.profile_list_state.select(Some(0));
|
buffer_state,
|
||||||
|
index,
|
||||||
|
);
|
||||||
|
if app_state.ui.show_admin
|
||||||
|
&& !app_state
|
||||||
|
.profile_tree
|
||||||
|
.profiles
|
||||||
|
.is_empty()
|
||||||
|
{
|
||||||
|
admin_state
|
||||||
|
.profile_list_state
|
||||||
|
.select(Some(0));
|
||||||
}
|
}
|
||||||
format!("Intro Option {} selected", index)
|
format!("Intro Option {} selected", index)
|
||||||
}
|
}
|
||||||
UiContext::Login => match index {
|
UiContext::Login => match index {
|
||||||
0 => login::initiate_login(login_state, app_state, self.auth_client.clone(), self.login_result_sender.clone()),
|
0 => login::initiate_login(
|
||||||
1 => login::back_to_main(login_state, app_state, buffer_state).await,
|
login_state,
|
||||||
|
app_state,
|
||||||
|
self.auth_client.clone(),
|
||||||
|
self.login_result_sender.clone(),
|
||||||
|
),
|
||||||
|
1 => login::back_to_main(
|
||||||
|
login_state,
|
||||||
|
app_state,
|
||||||
|
buffer_state,
|
||||||
|
)
|
||||||
|
.await,
|
||||||
_ => "Invalid Login Option".to_string(),
|
_ => "Invalid Login Option".to_string(),
|
||||||
},
|
},
|
||||||
UiContext::Register => match index {
|
UiContext::Register => match index {
|
||||||
0 => register::initiate_registration(register_state, app_state, self.auth_client.clone(), self.register_result_sender.clone()),
|
0 => register::initiate_registration(
|
||||||
1 => register::back_to_login(register_state, app_state, buffer_state).await,
|
register_state,
|
||||||
|
app_state,
|
||||||
|
self.auth_client.clone(),
|
||||||
|
self.register_result_sender.clone(),
|
||||||
|
),
|
||||||
|
1 => register::back_to_login(
|
||||||
|
register_state,
|
||||||
|
app_state,
|
||||||
|
buffer_state,
|
||||||
|
)
|
||||||
|
.await,
|
||||||
_ => "Invalid Login Option".to_string(),
|
_ => "Invalid Login Option".to_string(),
|
||||||
},
|
},
|
||||||
UiContext::Admin => {
|
UiContext::Admin => {
|
||||||
admin::handle_admin_selection(app_state, admin_state);
|
admin::handle_admin_selection(
|
||||||
|
app_state,
|
||||||
|
admin_state,
|
||||||
|
);
|
||||||
format!("Admin Option {} selected", index)
|
format!("Admin Option {} selected", index)
|
||||||
}
|
}
|
||||||
UiContext::Dialog => "Internal error: Unexpected dialog state".to_string(),
|
UiContext::Dialog => "Internal error: Unexpected dialog state"
|
||||||
|
.to_string(),
|
||||||
};
|
};
|
||||||
return Ok(EventOutcome::Ok(message));
|
return Ok(EventOutcome::Ok(message));
|
||||||
}
|
}
|
||||||
@@ -326,35 +616,46 @@ impl EventHandler {
|
|||||||
return Ok(EventOutcome::Ok(String::new()));
|
return Ok(EventOutcome::Ok(String::new()));
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Some(action) = config.get_common_action(key_code, modifiers) {
|
if let Some(action) =
|
||||||
|
config.get_common_action(key_code, modifiers)
|
||||||
|
{
|
||||||
match action {
|
match action {
|
||||||
"save" | "force_quit" | "save_and_quit" | "revert" => {
|
"save" | "force_quit" | "save_and_quit"
|
||||||
|
| "revert" => {
|
||||||
return common_mode::handle_core_action(
|
return common_mode::handle_core_action(
|
||||||
action, form_state, auth_state, login_state, register_state,
|
action,
|
||||||
grpc_client, &mut self.auth_client, terminal, app_state,
|
form_state,
|
||||||
).await;
|
auth_state,
|
||||||
|
login_state,
|
||||||
|
register_state,
|
||||||
|
&mut self.grpc_client,
|
||||||
|
&mut self.auth_client,
|
||||||
|
terminal,
|
||||||
|
app_state,
|
||||||
|
)
|
||||||
|
.await;
|
||||||
}
|
}
|
||||||
_ => {}
|
_ => {}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let (_should_exit, message) = read_only::handle_read_only_event(
|
let (_should_exit, message) =
|
||||||
app_state,
|
read_only::handle_read_only_event(
|
||||||
key_event,
|
app_state,
|
||||||
config,
|
key_event,
|
||||||
form_state,
|
config,
|
||||||
login_state,
|
form_state,
|
||||||
register_state,
|
login_state,
|
||||||
&mut admin_state.add_table_state,
|
register_state,
|
||||||
&mut admin_state.add_logic_state,
|
&mut admin_state.add_table_state,
|
||||||
&mut self.key_sequence_tracker,
|
&mut admin_state.add_logic_state,
|
||||||
// No more current_position or total_count arguments
|
&mut self.key_sequence_tracker,
|
||||||
grpc_client,
|
&mut self.grpc_client, // <-- FIX 1
|
||||||
&mut self.command_message,
|
&mut self.command_message,
|
||||||
&mut self.edit_mode_cooldown,
|
&mut self.edit_mode_cooldown,
|
||||||
&mut self.ideal_cursor_column,
|
&mut self.ideal_cursor_column,
|
||||||
)
|
)
|
||||||
.await?;
|
.await?;
|
||||||
return Ok(EventOutcome::Ok(message));
|
return Ok(EventOutcome::Ok(message));
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -373,33 +674,45 @@ impl EventHandler {
|
|||||||
return Ok(EventOutcome::Ok("".to_string()));
|
return Ok(EventOutcome::Ok("".to_string()));
|
||||||
}
|
}
|
||||||
|
|
||||||
let (_should_exit, message) = read_only::handle_read_only_event(
|
let (_should_exit, message) =
|
||||||
app_state,
|
read_only::handle_read_only_event(
|
||||||
key_event,
|
app_state,
|
||||||
config,
|
key_event,
|
||||||
form_state,
|
config,
|
||||||
login_state,
|
form_state,
|
||||||
register_state,
|
login_state,
|
||||||
&mut admin_state.add_table_state,
|
register_state,
|
||||||
&mut admin_state.add_logic_state,
|
&mut admin_state.add_table_state,
|
||||||
&mut self.key_sequence_tracker,
|
&mut admin_state.add_logic_state,
|
||||||
grpc_client,
|
&mut self.key_sequence_tracker,
|
||||||
&mut self.command_message,
|
&mut self.grpc_client, // <-- FIX 2
|
||||||
&mut self.edit_mode_cooldown,
|
&mut self.command_message,
|
||||||
&mut self.ideal_cursor_column,
|
&mut self.edit_mode_cooldown,
|
||||||
)
|
&mut self.ideal_cursor_column,
|
||||||
.await?;
|
)
|
||||||
|
.await?;
|
||||||
return Ok(EventOutcome::Ok(message));
|
return Ok(EventOutcome::Ok(message));
|
||||||
}
|
}
|
||||||
|
|
||||||
AppMode::Edit => {
|
AppMode::Edit => {
|
||||||
if let Some(action) = config.get_common_action(key_code, modifiers) {
|
if let Some(action) =
|
||||||
|
config.get_common_action(key_code, modifiers)
|
||||||
|
{
|
||||||
match action {
|
match action {
|
||||||
"save" | "force_quit" | "save_and_quit" | "revert" => {
|
"save" | "force_quit" | "save_and_quit"
|
||||||
|
| "revert" => {
|
||||||
return common_mode::handle_core_action(
|
return common_mode::handle_core_action(
|
||||||
action, form_state, auth_state, login_state, register_state,
|
action,
|
||||||
grpc_client, &mut self.auth_client, terminal, app_state,
|
form_state,
|
||||||
).await;
|
auth_state,
|
||||||
|
login_state,
|
||||||
|
register_state,
|
||||||
|
&mut self.grpc_client,
|
||||||
|
&mut self.auth_client,
|
||||||
|
terminal,
|
||||||
|
app_state,
|
||||||
|
)
|
||||||
|
.await;
|
||||||
}
|
}
|
||||||
_ => {}
|
_ => {}
|
||||||
}
|
}
|
||||||
@@ -407,11 +720,20 @@ impl EventHandler {
|
|||||||
|
|
||||||
let mut current_position = form_state.current_position;
|
let mut current_position = form_state.current_position;
|
||||||
let total_count = form_state.total_count;
|
let total_count = form_state.total_count;
|
||||||
|
// --- MODIFIED: Pass `self` instead of `grpc_client` ---
|
||||||
let edit_result = edit::handle_edit_event(
|
let edit_result = edit::handle_edit_event(
|
||||||
key_event, config, form_state, login_state, register_state, admin_state,
|
key_event,
|
||||||
&mut self.ideal_cursor_column, &mut current_position, total_count,
|
config,
|
||||||
grpc_client, app_state,
|
form_state,
|
||||||
).await;
|
login_state,
|
||||||
|
register_state,
|
||||||
|
admin_state,
|
||||||
|
&mut current_position,
|
||||||
|
total_count,
|
||||||
|
self,
|
||||||
|
app_state,
|
||||||
|
)
|
||||||
|
.await;
|
||||||
|
|
||||||
match edit_result {
|
match edit_result {
|
||||||
Ok(edit::EditEventOutcome::ExitEditMode) => {
|
Ok(edit::EditEventOutcome::ExitEditMode) => {
|
||||||
@@ -428,14 +750,22 @@ impl EventHandler {
|
|||||||
target_state.set_current_cursor_pos(new_pos);
|
target_state.set_current_cursor_pos(new_pos);
|
||||||
self.ideal_cursor_column = new_pos;
|
self.ideal_cursor_column = new_pos;
|
||||||
}
|
}
|
||||||
return Ok(EventOutcome::Ok(self.command_message.clone()));
|
return Ok(EventOutcome::Ok(
|
||||||
|
self.command_message.clone(),
|
||||||
|
));
|
||||||
}
|
}
|
||||||
Ok(edit::EditEventOutcome::Message(msg)) => {
|
Ok(edit::EditEventOutcome::Message(msg)) => {
|
||||||
if !msg.is_empty() { self.command_message = msg; }
|
if !msg.is_empty() {
|
||||||
|
self.command_message = msg;
|
||||||
|
}
|
||||||
self.key_sequence_tracker.reset();
|
self.key_sequence_tracker.reset();
|
||||||
return Ok(EventOutcome::Ok(self.command_message.clone()));
|
return Ok(EventOutcome::Ok(
|
||||||
|
self.command_message.clone(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
return Err(e.into());
|
||||||
}
|
}
|
||||||
Err(e) => { return Err(e.into()); }
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -445,21 +775,38 @@ impl EventHandler {
|
|||||||
self.command_message.clear();
|
self.command_message.clear();
|
||||||
self.command_mode = false;
|
self.command_mode = false;
|
||||||
self.key_sequence_tracker.reset();
|
self.key_sequence_tracker.reset();
|
||||||
return Ok(EventOutcome::Ok("Exited command mode".to_string()));
|
return Ok(EventOutcome::Ok(
|
||||||
|
"Exited command mode".to_string(),
|
||||||
|
));
|
||||||
}
|
}
|
||||||
|
|
||||||
if config.is_command_execute(key_code, modifiers) {
|
if config.is_command_execute(key_code, modifiers) {
|
||||||
let mut current_position = form_state.current_position;
|
let mut current_position = form_state.current_position;
|
||||||
let total_count = form_state.total_count;
|
let total_count = form_state.total_count;
|
||||||
let outcome = command_mode::handle_command_event(
|
let outcome = command_mode::handle_command_event(
|
||||||
key_event, config, app_state, login_state, register_state, form_state,
|
key_event,
|
||||||
&mut self.command_input, &mut self.command_message, grpc_client,
|
config,
|
||||||
command_handler, terminal, &mut current_position, total_count,
|
app_state,
|
||||||
).await?;
|
login_state,
|
||||||
|
register_state,
|
||||||
|
form_state,
|
||||||
|
&mut self.command_input,
|
||||||
|
&mut self.command_message,
|
||||||
|
&mut self.grpc_client, // <-- FIX 5
|
||||||
|
command_handler,
|
||||||
|
terminal,
|
||||||
|
&mut current_position,
|
||||||
|
total_count,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
form_state.current_position = current_position;
|
form_state.current_position = current_position;
|
||||||
self.command_mode = false;
|
self.command_mode = false;
|
||||||
self.key_sequence_tracker.reset();
|
self.key_sequence_tracker.reset();
|
||||||
let new_mode = ModeManager::derive_mode(app_state, self, admin_state);
|
let new_mode = ModeManager::derive_mode(
|
||||||
|
app_state,
|
||||||
|
self,
|
||||||
|
admin_state,
|
||||||
|
);
|
||||||
app_state.update_mode(new_mode);
|
app_state.update_mode(new_mode);
|
||||||
return Ok(outcome);
|
return Ok(outcome);
|
||||||
}
|
}
|
||||||
@@ -473,39 +820,59 @@ impl EventHandler {
|
|||||||
if let KeyCode::Char(c) = key_code {
|
if let KeyCode::Char(c) = key_code {
|
||||||
if c == 'f' {
|
if c == 'f' {
|
||||||
self.key_sequence_tracker.add_key(key_code);
|
self.key_sequence_tracker.add_key(key_code);
|
||||||
let sequence = self.key_sequence_tracker.get_sequence();
|
let sequence =
|
||||||
|
self.key_sequence_tracker.get_sequence();
|
||||||
|
|
||||||
if config.matches_key_sequence_generalized(&sequence) == Some("find_file_palette_toggle") {
|
if config.matches_key_sequence_generalized(
|
||||||
if app_state.ui.show_form || app_state.ui.show_intro {
|
&sequence,
|
||||||
// --- START FIX ---
|
) == Some("find_file_palette_toggle")
|
||||||
let mut all_table_paths: Vec<String> = app_state
|
{
|
||||||
.profile_tree
|
if app_state.ui.show_form
|
||||||
.profiles
|
|| app_state.ui.show_intro
|
||||||
.iter()
|
{
|
||||||
.flat_map(|profile| {
|
let mut all_table_paths: Vec<String> =
|
||||||
profile.tables.iter().map(move |table| {
|
app_state
|
||||||
format!("{}/{}", profile.name, table.name)
|
.profile_tree
|
||||||
|
.profiles
|
||||||
|
.iter()
|
||||||
|
.flat_map(|profile| {
|
||||||
|
profile.tables.iter().map(
|
||||||
|
move |table| {
|
||||||
|
format!(
|
||||||
|
"{}/{}",
|
||||||
|
profile.name,
|
||||||
|
table.name
|
||||||
|
)
|
||||||
|
},
|
||||||
|
)
|
||||||
})
|
})
|
||||||
})
|
.collect();
|
||||||
.collect();
|
|
||||||
all_table_paths.sort();
|
all_table_paths.sort();
|
||||||
|
|
||||||
self.navigation_state.activate_find_file(all_table_paths);
|
self.navigation_state
|
||||||
// --- END FIX ---
|
.activate_find_file(all_table_paths);
|
||||||
|
|
||||||
self.command_mode = false;
|
self.command_mode = false;
|
||||||
self.command_input.clear();
|
self.command_input.clear();
|
||||||
self.command_message.clear();
|
self.command_message.clear();
|
||||||
self.key_sequence_tracker.reset();
|
self.key_sequence_tracker.reset();
|
||||||
return Ok(EventOutcome::Ok("Table selection palette activated".to_string()));
|
return Ok(EventOutcome::Ok(
|
||||||
|
"Table selection palette activated"
|
||||||
|
.to_string(),
|
||||||
|
));
|
||||||
} else {
|
} else {
|
||||||
self.key_sequence_tracker.reset();
|
self.key_sequence_tracker.reset();
|
||||||
self.command_input.push('f');
|
self.command_input.push('f');
|
||||||
if sequence.len() > 1 && sequence[0] == KeyCode::Char('f') {
|
if sequence.len() > 1
|
||||||
|
&& sequence[0] == KeyCode::Char('f')
|
||||||
|
{
|
||||||
self.command_input.push('f');
|
self.command_input.push('f');
|
||||||
}
|
}
|
||||||
self.command_message = "Find File not available in this view.".to_string();
|
self.command_message = "Find File not available in this view."
|
||||||
return Ok(EventOutcome::Ok(self.command_message.clone()));
|
.to_string();
|
||||||
|
return Ok(EventOutcome::Ok(
|
||||||
|
self.command_message.clone(),
|
||||||
|
));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -514,7 +881,9 @@ impl EventHandler {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if c != 'f' && !self.key_sequence_tracker.current_sequence.is_empty() {
|
if c != 'f'
|
||||||
|
&& !self.key_sequence_tracker.current_sequence.is_empty()
|
||||||
|
{
|
||||||
self.key_sequence_tracker.reset();
|
self.key_sequence_tracker.reset();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -1,7 +1,6 @@
|
|||||||
// src/services/grpc_client.rs
|
// src/services/grpc_client.rs
|
||||||
|
|
||||||
use tonic::transport::Channel;
|
use common::proto::multieko2::common::Empty;
|
||||||
use common::proto::multieko2::common::{CountResponse, Empty};
|
|
||||||
use common::proto::multieko2::table_structure::table_structure_service_client::TableStructureServiceClient;
|
use common::proto::multieko2::table_structure::table_structure_service_client::TableStructureServiceClient;
|
||||||
use common::proto::multieko2::table_structure::{GetTableStructureRequest, TableStructureResponse};
|
use common::proto::multieko2::table_structure::{GetTableStructureRequest, TableStructureResponse};
|
||||||
use common::proto::multieko2::table_definition::{
|
use common::proto::multieko2::table_definition::{
|
||||||
@@ -20,44 +19,44 @@ use common::proto::multieko2::tables_data::{
|
|||||||
PostTableDataRequest, PostTableDataResponse, PutTableDataRequest,
|
PostTableDataRequest, PostTableDataResponse, PutTableDataRequest,
|
||||||
PutTableDataResponse,
|
PutTableDataResponse,
|
||||||
};
|
};
|
||||||
use anyhow::{Context, Result}; // Added Context
|
use common::proto::multieko2::search::{
|
||||||
use std::collections::HashMap; // NEW
|
searcher_client::SearcherClient, SearchRequest, SearchResponse,
|
||||||
|
};
|
||||||
|
use anyhow::{Context, Result};
|
||||||
|
use std::collections::HashMap;
|
||||||
|
use tonic::transport::Channel;
|
||||||
|
use prost_types::Value;
|
||||||
|
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub struct GrpcClient {
|
pub struct GrpcClient {
|
||||||
table_structure_client: TableStructureServiceClient<Channel>,
|
table_structure_client: TableStructureServiceClient<Channel>,
|
||||||
table_definition_client: TableDefinitionClient<Channel>,
|
table_definition_client: TableDefinitionClient<Channel>,
|
||||||
table_script_client: TableScriptClient<Channel>,
|
table_script_client: TableScriptClient<Channel>,
|
||||||
tables_data_client: TablesDataClient<Channel>, // NEW
|
tables_data_client: TablesDataClient<Channel>,
|
||||||
|
search_client: SearcherClient<Channel>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl GrpcClient {
|
impl GrpcClient {
|
||||||
pub async fn new() -> Result<Self> {
|
pub async fn new() -> Result<Self> {
|
||||||
let table_structure_client = TableStructureServiceClient::connect(
|
let channel = Channel::from_static("http://[::1]:50051")
|
||||||
"http://[::1]:50051",
|
.connect()
|
||||||
)
|
.await
|
||||||
.await
|
.context("Failed to create gRPC channel")?;
|
||||||
.context("Failed to connect to TableStructureService")?;
|
|
||||||
let table_definition_client = TableDefinitionClient::connect(
|
let table_structure_client =
|
||||||
"http://[::1]:50051",
|
TableStructureServiceClient::new(channel.clone());
|
||||||
)
|
let table_definition_client =
|
||||||
.await
|
TableDefinitionClient::new(channel.clone());
|
||||||
.context("Failed to connect to TableDefinitionService")?;
|
let table_script_client = TableScriptClient::new(channel.clone());
|
||||||
let table_script_client =
|
let tables_data_client = TablesDataClient::new(channel.clone());
|
||||||
TableScriptClient::connect("http://[::1]:50051")
|
let search_client = SearcherClient::new(channel.clone());
|
||||||
.await
|
|
||||||
.context("Failed to connect to TableScriptService")?;
|
|
||||||
let tables_data_client =
|
|
||||||
TablesDataClient::connect("http://[::1]:50051")
|
|
||||||
.await
|
|
||||||
.context("Failed to connect to TablesDataService")?; // NEW
|
|
||||||
|
|
||||||
Ok(Self {
|
Ok(Self {
|
||||||
// adresar_client, // REMOVE
|
|
||||||
table_structure_client,
|
table_structure_client,
|
||||||
table_definition_client,
|
table_definition_client,
|
||||||
table_script_client,
|
table_script_client,
|
||||||
tables_data_client, // NEW
|
tables_data_client,
|
||||||
|
search_client,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -136,7 +135,7 @@ impl GrpcClient {
|
|||||||
Ok(response.into_inner().count as u64)
|
Ok(response.into_inner().count as u64)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn get_table_data_by_position(
|
pub async fn get_table_data_by_position(
|
||||||
&mut self,
|
&mut self,
|
||||||
profile_name: String,
|
profile_name: String,
|
||||||
table_name: String,
|
table_name: String,
|
||||||
@@ -160,12 +159,14 @@ impl GrpcClient {
|
|||||||
&mut self,
|
&mut self,
|
||||||
profile_name: String,
|
profile_name: String,
|
||||||
table_name: String,
|
table_name: String,
|
||||||
data: HashMap<String, String>,
|
// CHANGE THIS: Accept the pre-converted data
|
||||||
|
data: HashMap<String, Value>,
|
||||||
) -> Result<PostTableDataResponse> {
|
) -> Result<PostTableDataResponse> {
|
||||||
|
// The conversion logic is now gone from here.
|
||||||
let grpc_request = PostTableDataRequest {
|
let grpc_request = PostTableDataRequest {
|
||||||
profile_name,
|
profile_name,
|
||||||
table_name,
|
table_name,
|
||||||
data,
|
data, // This is now the correct type
|
||||||
};
|
};
|
||||||
let request = tonic::Request::new(grpc_request);
|
let request = tonic::Request::new(grpc_request);
|
||||||
let response = self
|
let response = self
|
||||||
@@ -181,13 +182,15 @@ impl GrpcClient {
|
|||||||
profile_name: String,
|
profile_name: String,
|
||||||
table_name: String,
|
table_name: String,
|
||||||
id: i64,
|
id: i64,
|
||||||
data: HashMap<String, String>,
|
// CHANGE THIS: Accept the pre-converted data
|
||||||
|
data: HashMap<String, Value>,
|
||||||
) -> Result<PutTableDataResponse> {
|
) -> Result<PutTableDataResponse> {
|
||||||
|
// The conversion logic is now gone from here.
|
||||||
let grpc_request = PutTableDataRequest {
|
let grpc_request = PutTableDataRequest {
|
||||||
profile_name,
|
profile_name,
|
||||||
table_name,
|
table_name,
|
||||||
id,
|
id,
|
||||||
data,
|
data, // This is now the correct type
|
||||||
};
|
};
|
||||||
let request = tonic::Request::new(grpc_request);
|
let request = tonic::Request::new(grpc_request);
|
||||||
let response = self
|
let response = self
|
||||||
@@ -197,4 +200,17 @@ impl GrpcClient {
|
|||||||
.context("gRPC PutTableData call failed")?;
|
.context("gRPC PutTableData call failed")?;
|
||||||
Ok(response.into_inner())
|
Ok(response.into_inner())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub async fn search_table(
|
||||||
|
&mut self,
|
||||||
|
table_name: String,
|
||||||
|
query: String,
|
||||||
|
) -> Result<SearchResponse> {
|
||||||
|
let request = tonic::Request::new(SearchRequest { table_name, query });
|
||||||
|
let response = self
|
||||||
|
.search_client
|
||||||
|
.search_table(request)
|
||||||
|
.await?;
|
||||||
|
Ok(response.into_inner())
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,16 +1,100 @@
|
|||||||
// src/services/ui_service.rs
|
// src/services/ui_service.rs
|
||||||
|
|
||||||
use crate::services::grpc_client::GrpcClient;
|
use crate::services::grpc_client::GrpcClient;
|
||||||
use crate::state::pages::form::FormState;
|
|
||||||
use crate::tui::functions::common::form::SaveOutcome;
|
|
||||||
use crate::state::pages::add_logic::AddLogicState;
|
|
||||||
use crate::state::app::state::AppState;
|
use crate::state::app::state::AppState;
|
||||||
|
use crate::state::pages::add_logic::AddLogicState;
|
||||||
|
use crate::state::pages::form::{FieldDefinition, FormState};
|
||||||
|
use crate::tui::functions::common::form::SaveOutcome;
|
||||||
use crate::utils::columns::filter_user_columns;
|
use crate::utils::columns::filter_user_columns;
|
||||||
use anyhow::{Context, Result};
|
use anyhow::{anyhow, Context, Result};
|
||||||
|
use std::sync::Arc;
|
||||||
|
|
||||||
pub struct UiService;
|
pub struct UiService;
|
||||||
|
|
||||||
impl UiService {
|
impl UiService {
|
||||||
|
pub async fn load_table_view(
|
||||||
|
grpc_client: &mut GrpcClient,
|
||||||
|
app_state: &mut AppState,
|
||||||
|
profile_name: &str,
|
||||||
|
table_name: &str,
|
||||||
|
) -> Result<FormState> {
|
||||||
|
// 1. & 2. Fetch and Cache Schema - UNCHANGED
|
||||||
|
let table_structure = grpc_client
|
||||||
|
.get_table_structure(profile_name.to_string(), table_name.to_string())
|
||||||
|
.await
|
||||||
|
.context(format!(
|
||||||
|
"Failed to get table structure for {}.{}",
|
||||||
|
profile_name, table_name
|
||||||
|
))?;
|
||||||
|
let cache_key = format!("{}.{}", profile_name, table_name);
|
||||||
|
app_state
|
||||||
|
.schema_cache
|
||||||
|
.insert(cache_key, Arc::new(table_structure.clone()));
|
||||||
|
tracing::info!("Schema for '{}.{}' cached.", profile_name, table_name);
|
||||||
|
|
||||||
|
// --- START: FINAL, SIMPLIFIED, CORRECT LOGIC ---
|
||||||
|
|
||||||
|
// 3a. Create definitions for REGULAR fields first.
|
||||||
|
let mut fields: Vec<FieldDefinition> = table_structure
|
||||||
|
.columns
|
||||||
|
.iter()
|
||||||
|
.filter(|col| {
|
||||||
|
!col.is_primary_key
|
||||||
|
&& col.name != "deleted"
|
||||||
|
&& col.name != "created_at"
|
||||||
|
&& !col.name.ends_with("_id") // Filter out ALL potential links
|
||||||
|
})
|
||||||
|
.map(|col| FieldDefinition {
|
||||||
|
display_name: col.name.clone(),
|
||||||
|
data_key: col.name.clone(),
|
||||||
|
is_link: false,
|
||||||
|
link_target_table: None,
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
// 3b. Now, find and APPEND definitions for LINK fields based on the `_id` convention.
|
||||||
|
let link_fields: Vec<FieldDefinition> = table_structure
|
||||||
|
.columns
|
||||||
|
.iter()
|
||||||
|
.filter(|col| col.name.ends_with("_id")) // Find all foreign key columns
|
||||||
|
.map(|col| {
|
||||||
|
// The table we link to is derived from the column name.
|
||||||
|
// e.g., "test_diacritics_id" -> "test_diacritics"
|
||||||
|
let target_table_base = col
|
||||||
|
.name
|
||||||
|
.strip_suffix("_id")
|
||||||
|
.unwrap_or(&col.name);
|
||||||
|
|
||||||
|
// Find the full table name from the profile tree for display.
|
||||||
|
// e.g., "test_diacritics" -> "2025_test_diacritics"
|
||||||
|
let full_target_table_name = app_state
|
||||||
|
.profile_tree
|
||||||
|
.profiles
|
||||||
|
.iter()
|
||||||
|
.find(|p| p.name == profile_name)
|
||||||
|
.and_then(|p| p.tables.iter().find(|t| t.name.ends_with(target_table_base)))
|
||||||
|
.map_or(target_table_base.to_string(), |t| t.name.clone());
|
||||||
|
|
||||||
|
FieldDefinition {
|
||||||
|
display_name: full_target_table_name.clone(),
|
||||||
|
data_key: col.name.clone(), // The actual FK column name
|
||||||
|
is_link: true,
|
||||||
|
link_target_table: Some(full_target_table_name),
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
fields.extend(link_fields); // Append the link fields to the end
|
||||||
|
|
||||||
|
// --- END: FINAL, SIMPLIFIED, CORRECT LOGIC ---
|
||||||
|
|
||||||
|
Ok(FormState::new(
|
||||||
|
profile_name.to_string(),
|
||||||
|
table_name.to_string(),
|
||||||
|
fields,
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
pub async fn initialize_add_logic_table_data(
|
pub async fn initialize_add_logic_table_data(
|
||||||
grpc_client: &mut GrpcClient,
|
grpc_client: &mut GrpcClient,
|
||||||
add_logic_state: &mut AddLogicState,
|
add_logic_state: &mut AddLogicState,
|
||||||
@@ -92,6 +176,7 @@ impl UiService {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// REFACTOR THIS FUNCTION
|
||||||
pub async fn initialize_app_state_and_form(
|
pub async fn initialize_app_state_and_form(
|
||||||
grpc_client: &mut GrpcClient,
|
grpc_client: &mut GrpcClient,
|
||||||
app_state: &mut AppState,
|
app_state: &mut AppState,
|
||||||
@@ -102,7 +187,6 @@ impl UiService {
|
|||||||
.context("Failed to get profile tree")?;
|
.context("Failed to get profile tree")?;
|
||||||
app_state.profile_tree = profile_tree;
|
app_state.profile_tree = profile_tree;
|
||||||
|
|
||||||
// Determine initial table to load (e.g., first table of first profile, or a default)
|
|
||||||
let initial_profile_name = app_state
|
let initial_profile_name = app_state
|
||||||
.profile_tree
|
.profile_tree
|
||||||
.profiles
|
.profiles
|
||||||
@@ -115,33 +199,26 @@ impl UiService {
|
|||||||
.profiles
|
.profiles
|
||||||
.first()
|
.first()
|
||||||
.and_then(|p| p.tables.first().map(|t| t.name.clone()))
|
.and_then(|p| p.tables.first().map(|t| t.name.clone()))
|
||||||
.unwrap_or_else(|| "2025_company_data1".to_string()); // Fallback if no tables
|
.unwrap_or_else(|| "2025_company_data1".to_string());
|
||||||
|
|
||||||
app_state.set_current_view_table(
|
app_state.set_current_view_table(
|
||||||
initial_profile_name.clone(),
|
initial_profile_name.clone(),
|
||||||
initial_table_name.clone(),
|
initial_table_name.clone(),
|
||||||
);
|
);
|
||||||
|
|
||||||
let table_structure = grpc_client
|
// NOW, just call our new central function. This avoids code duplication.
|
||||||
.get_table_structure(
|
let form_state = Self::load_table_view(
|
||||||
initial_profile_name.clone(),
|
grpc_client,
|
||||||
initial_table_name.clone(),
|
app_state,
|
||||||
)
|
&initial_profile_name,
|
||||||
.await
|
&initial_table_name,
|
||||||
.context(format!(
|
)
|
||||||
"Failed to get initial table structure for {}.{}",
|
.await?;
|
||||||
initial_profile_name, initial_table_name
|
|
||||||
))?;
|
|
||||||
|
|
||||||
let column_names: Vec<String> = table_structure
|
// The field names for the UI are derived from the new form_state
|
||||||
.columns
|
let field_names = form_state.fields.iter().map(|f| f.display_name.clone()).collect();
|
||||||
.iter()
|
|
||||||
.map(|col| col.name.clone())
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
let filtered_columns = filter_user_columns(column_names);
|
Ok((initial_profile_name, initial_table_name, field_names))
|
||||||
|
|
||||||
Ok((initial_profile_name, initial_table_name, filtered_columns))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn fetch_and_set_table_count(
|
pub async fn fetch_and_set_table_count(
|
||||||
|
|||||||
@@ -2,4 +2,5 @@
|
|||||||
|
|
||||||
pub mod state;
|
pub mod state;
|
||||||
pub mod buffer;
|
pub mod buffer;
|
||||||
|
pub mod search;
|
||||||
pub mod highlight;
|
pub mod highlight;
|
||||||
|
|||||||
56
client/src/state/app/search.rs
Normal file
56
client/src/state/app/search.rs
Normal file
@@ -0,0 +1,56 @@
|
|||||||
|
// src/state/app/search.rs
|
||||||
|
|
||||||
|
use common::proto::multieko2::search::search_response::Hit;
|
||||||
|
|
||||||
|
/// Holds the complete state for the search palette.
|
||||||
|
pub struct SearchState {
|
||||||
|
/// The name of the table being searched.
|
||||||
|
pub table_name: String,
|
||||||
|
/// The current text entered by the user.
|
||||||
|
pub input: String,
|
||||||
|
/// The position of the cursor within the input text.
|
||||||
|
pub cursor_position: usize,
|
||||||
|
/// The search results returned from the server.
|
||||||
|
pub results: Vec<Hit>,
|
||||||
|
/// The index of the currently selected search result.
|
||||||
|
pub selected_index: usize,
|
||||||
|
/// A flag to indicate if a search is currently in progress.
|
||||||
|
pub is_loading: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl SearchState {
|
||||||
|
/// Creates a new SearchState for a given table.
|
||||||
|
pub fn new(table_name: String) -> Self {
|
||||||
|
Self {
|
||||||
|
table_name,
|
||||||
|
input: String::new(),
|
||||||
|
cursor_position: 0,
|
||||||
|
results: Vec::new(),
|
||||||
|
selected_index: 0,
|
||||||
|
is_loading: false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Moves the selection to the next item, wrapping around if at the end.
|
||||||
|
pub fn next_result(&mut self) {
|
||||||
|
if !self.results.is_empty() {
|
||||||
|
let next = self.selected_index + 1;
|
||||||
|
self.selected_index = if next >= self.results.len() {
|
||||||
|
0 // Wrap to the start
|
||||||
|
} else {
|
||||||
|
next
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Moves the selection to the previous item, wrapping around if at the beginning.
|
||||||
|
pub fn previous_result(&mut self) {
|
||||||
|
if !self.results.is_empty() {
|
||||||
|
self.selected_index = if self.selected_index == 0 {
|
||||||
|
self.results.len() - 1 // Wrap to the end
|
||||||
|
} else {
|
||||||
|
self.selected_index - 1
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,11 +1,19 @@
|
|||||||
// src/state/state.rs
|
// src/state/app/state.rs
|
||||||
|
|
||||||
use std::env;
|
|
||||||
use common::proto::multieko2::table_definition::ProfileTreeResponse;
|
|
||||||
use crate::modes::handlers::mode_manager::AppMode;
|
|
||||||
use crate::ui::handlers::context::DialogPurpose;
|
|
||||||
use anyhow::Result;
|
use anyhow::Result;
|
||||||
|
use common::proto::multieko2::table_definition::ProfileTreeResponse;
|
||||||
|
// NEW: Import the types we need for the cache
|
||||||
|
use common::proto::multieko2::table_structure::TableStructureResponse;
|
||||||
|
use crate::modes::handlers::mode_manager::AppMode;
|
||||||
|
use crate::state::app::search::SearchState;
|
||||||
|
use crate::ui::handlers::context::DialogPurpose;
|
||||||
|
use std::collections::HashMap;
|
||||||
|
use std::env;
|
||||||
|
use std::sync::Arc;
|
||||||
|
#[cfg(feature = "ui-debug")]
|
||||||
|
use std::time::Instant;
|
||||||
|
|
||||||
|
// --- DialogState and UiState are unchanged ---
|
||||||
pub struct DialogState {
|
pub struct DialogState {
|
||||||
pub dialog_show: bool,
|
pub dialog_show: bool,
|
||||||
pub dialog_title: String,
|
pub dialog_title: String,
|
||||||
@@ -26,10 +34,19 @@ pub struct UiState {
|
|||||||
pub show_form: bool,
|
pub show_form: bool,
|
||||||
pub show_login: bool,
|
pub show_login: bool,
|
||||||
pub show_register: bool,
|
pub show_register: bool,
|
||||||
|
pub show_search_palette: bool,
|
||||||
pub focus_outside_canvas: bool,
|
pub focus_outside_canvas: bool,
|
||||||
pub dialog: DialogState,
|
pub dialog: DialogState,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[cfg(feature = "ui-debug")]
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub struct DebugState {
|
||||||
|
pub displayed_message: String,
|
||||||
|
pub is_error: bool,
|
||||||
|
pub display_start_time: Instant,
|
||||||
|
}
|
||||||
|
|
||||||
pub struct AppState {
|
pub struct AppState {
|
||||||
// Core editor state
|
// Core editor state
|
||||||
pub current_dir: String,
|
pub current_dir: String,
|
||||||
@@ -39,21 +56,24 @@ pub struct AppState {
|
|||||||
pub current_view_profile_name: Option<String>,
|
pub current_view_profile_name: Option<String>,
|
||||||
pub current_view_table_name: Option<String>,
|
pub current_view_table_name: Option<String>,
|
||||||
|
|
||||||
|
// NEW: The "Rulebook" cache. We use Arc for efficient sharing.
|
||||||
|
pub schema_cache: HashMap<String, Arc<TableStructureResponse>>,
|
||||||
|
|
||||||
pub focused_button_index: usize,
|
pub focused_button_index: usize,
|
||||||
pub pending_table_structure_fetch: Option<(String, String)>,
|
pub pending_table_structure_fetch: Option<(String, String)>,
|
||||||
|
|
||||||
|
pub search_state: Option<SearchState>,
|
||||||
|
|
||||||
// UI preferences
|
// UI preferences
|
||||||
pub ui: UiState,
|
pub ui: UiState,
|
||||||
|
|
||||||
#[cfg(feature = "ui-debug")]
|
#[cfg(feature = "ui-debug")]
|
||||||
pub debug_info: String,
|
pub debug_state: Option<DebugState>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl AppState {
|
impl AppState {
|
||||||
pub fn new() -> Result<Self> {
|
pub fn new() -> Result<Self> {
|
||||||
let current_dir = env::current_dir()?
|
let current_dir = env::current_dir()?.to_string_lossy().to_string();
|
||||||
.to_string_lossy()
|
|
||||||
.to_string();
|
|
||||||
Ok(AppState {
|
Ok(AppState {
|
||||||
current_dir,
|
current_dir,
|
||||||
profile_tree: ProfileTreeResponse::default(),
|
profile_tree: ProfileTreeResponse::default(),
|
||||||
@@ -61,27 +81,28 @@ impl AppState {
|
|||||||
current_view_profile_name: None,
|
current_view_profile_name: None,
|
||||||
current_view_table_name: None,
|
current_view_table_name: None,
|
||||||
current_mode: AppMode::General,
|
current_mode: AppMode::General,
|
||||||
|
schema_cache: HashMap::new(), // NEW: Initialize the cache
|
||||||
focused_button_index: 0,
|
focused_button_index: 0,
|
||||||
pending_table_structure_fetch: None,
|
pending_table_structure_fetch: None,
|
||||||
|
search_state: None,
|
||||||
ui: UiState::default(),
|
ui: UiState::default(),
|
||||||
|
|
||||||
#[cfg(feature = "ui-debug")]
|
#[cfg(feature = "ui-debug")]
|
||||||
debug_info: String::new(),
|
debug_state: None,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// --- ALL YOUR EXISTING METHODS ARE UNTOUCHED ---
|
||||||
|
|
||||||
pub fn update_mode(&mut self, mode: AppMode) {
|
pub fn update_mode(&mut self, mode: AppMode) {
|
||||||
self.current_mode = mode;
|
self.current_mode = mode;
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn set_current_view_table(&mut self, profile_name: String, table_name: String) {
|
pub fn set_current_view_table(&mut self, profile_name: String, table_name: String) {
|
||||||
self.current_view_profile_name = Some(profile_name);
|
self.current_view_profile_name = Some(profile_name);
|
||||||
self.current_view_table_name = Some(table_name);
|
self.current_view_table_name = Some(table_name);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Add dialog helper methods
|
|
||||||
/// Shows a dialog with the given title, message, and buttons.
|
|
||||||
/// The first button (index 0) is active by default.
|
|
||||||
pub fn show_dialog(
|
pub fn show_dialog(
|
||||||
&mut self,
|
&mut self,
|
||||||
title: &str,
|
title: &str,
|
||||||
@@ -99,19 +120,17 @@ impl AppState {
|
|||||||
self.ui.focus_outside_canvas = true;
|
self.ui.focus_outside_canvas = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Shows a dialog specifically for loading states.
|
|
||||||
pub fn show_loading_dialog(&mut self, title: &str, message: &str) {
|
pub fn show_loading_dialog(&mut self, title: &str, message: &str) {
|
||||||
self.ui.dialog.dialog_title = title.to_string();
|
self.ui.dialog.dialog_title = title.to_string();
|
||||||
self.ui.dialog.dialog_message = message.to_string();
|
self.ui.dialog.dialog_message = message.to_string();
|
||||||
self.ui.dialog.dialog_buttons.clear(); // No buttons during loading
|
self.ui.dialog.dialog_buttons.clear();
|
||||||
self.ui.dialog.dialog_active_button_index = 0;
|
self.ui.dialog.dialog_active_button_index = 0;
|
||||||
self.ui.dialog.purpose = None; // Purpose is set when loading finishes
|
self.ui.dialog.purpose = None;
|
||||||
self.ui.dialog.is_loading = true;
|
self.ui.dialog.is_loading = true;
|
||||||
self.ui.dialog.dialog_show = true;
|
self.ui.dialog.dialog_show = true;
|
||||||
self.ui.focus_outside_canvas = true; // Keep focus management consistent
|
self.ui.focus_outside_canvas = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Updates the content of an existing dialog, typically after loading.
|
|
||||||
pub fn update_dialog_content(
|
pub fn update_dialog_content(
|
||||||
&mut self,
|
&mut self,
|
||||||
message: &str,
|
message: &str,
|
||||||
@@ -121,16 +140,12 @@ impl AppState {
|
|||||||
if self.ui.dialog.dialog_show {
|
if self.ui.dialog.dialog_show {
|
||||||
self.ui.dialog.dialog_message = message.to_string();
|
self.ui.dialog.dialog_message = message.to_string();
|
||||||
self.ui.dialog.dialog_buttons = buttons;
|
self.ui.dialog.dialog_buttons = buttons;
|
||||||
self.ui.dialog.dialog_active_button_index = 0; // Reset focus
|
self.ui.dialog.dialog_active_button_index = 0;
|
||||||
self.ui.dialog.purpose = Some(purpose);
|
self.ui.dialog.purpose = Some(purpose);
|
||||||
self.ui.dialog.is_loading = false; // Loading finished
|
self.ui.dialog.is_loading = false;
|
||||||
// Keep dialog_show = true
|
|
||||||
// Keep focus_outside_canvas = true
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
/// Hides the dialog and clears its content.
|
|
||||||
pub fn hide_dialog(&mut self) {
|
pub fn hide_dialog(&mut self) {
|
||||||
self.ui.dialog.dialog_show = false;
|
self.ui.dialog.dialog_show = false;
|
||||||
self.ui.dialog.dialog_title.clear();
|
self.ui.dialog.dialog_title.clear();
|
||||||
@@ -142,30 +157,27 @@ impl AppState {
|
|||||||
self.ui.dialog.is_loading = false;
|
self.ui.dialog.is_loading = false;
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Sets the active button index, wrapping around if necessary.
|
|
||||||
pub fn next_dialog_button(&mut self) {
|
pub fn next_dialog_button(&mut self) {
|
||||||
if !self.ui.dialog.dialog_buttons.is_empty() {
|
if !self.ui.dialog.dialog_buttons.is_empty() {
|
||||||
let next_index = (self.ui.dialog.dialog_active_button_index + 1)
|
let next_index = (self.ui.dialog.dialog_active_button_index + 1)
|
||||||
% self.ui.dialog.dialog_buttons.len();
|
% self.ui.dialog.dialog_buttons.len();
|
||||||
self.ui.dialog.dialog_active_button_index = next_index; // Use new name
|
self.ui.dialog.dialog_active_button_index = next_index;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Sets the active button index, wrapping around if necessary.
|
|
||||||
pub fn previous_dialog_button(&mut self) {
|
pub fn previous_dialog_button(&mut self) {
|
||||||
if !self.ui.dialog.dialog_buttons.is_empty() {
|
if !self.ui.dialog.dialog_buttons.is_empty() {
|
||||||
let len = self.ui.dialog.dialog_buttons.len();
|
let len = self.ui.dialog.dialog_buttons.len();
|
||||||
let prev_index =
|
let prev_index =
|
||||||
(self.ui.dialog.dialog_active_button_index + len - 1) % len;
|
(self.ui.dialog.dialog_active_button_index + len - 1) % len;
|
||||||
self.ui.dialog.dialog_active_button_index = prev_index; // Use new name
|
self.ui.dialog.dialog_active_button_index = prev_index;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Gets the label of the currently active button, if any.
|
|
||||||
pub fn get_active_dialog_button_label(&self) -> Option<&str> {
|
pub fn get_active_dialog_button_label(&self) -> Option<&str> {
|
||||||
self.ui.dialog
|
self.ui.dialog
|
||||||
.dialog_buttons // Use new name
|
.dialog_buttons
|
||||||
.get(self.ui.dialog.dialog_active_button_index) // Use new name
|
.get(self.ui.dialog.dialog_active_button_index)
|
||||||
.map(|s| s.as_str())
|
.map(|s| s.as_str())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -182,13 +194,13 @@ impl Default for UiState {
|
|||||||
show_login: false,
|
show_login: false,
|
||||||
show_register: false,
|
show_register: false,
|
||||||
show_buffer_list: true,
|
show_buffer_list: true,
|
||||||
|
show_search_palette: false, // ADDED
|
||||||
focus_outside_canvas: false,
|
focus_outside_canvas: false,
|
||||||
dialog: DialogState::default(),
|
dialog: DialogState::default(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Update the Default implementation for DialogState itself
|
|
||||||
impl Default for DialogState {
|
impl Default for DialogState {
|
||||||
fn default() -> Self {
|
fn default() -> Self {
|
||||||
Self {
|
Self {
|
||||||
|
|||||||
@@ -1,7 +1,9 @@
|
|||||||
// src/state/canvas_state.rs
|
// src/state/pages/canvas_state.rs
|
||||||
|
|
||||||
|
use common::proto::multieko2::search::search_response::Hit;
|
||||||
|
|
||||||
pub trait CanvasState {
|
pub trait CanvasState {
|
||||||
|
// --- Existing methods (unchanged) ---
|
||||||
fn current_field(&self) -> usize;
|
fn current_field(&self) -> usize;
|
||||||
fn current_cursor_pos(&self) -> usize;
|
fn current_cursor_pos(&self) -> usize;
|
||||||
fn has_unsaved_changes(&self) -> bool;
|
fn has_unsaved_changes(&self) -> bool;
|
||||||
@@ -9,12 +11,22 @@ pub trait CanvasState {
|
|||||||
fn get_current_input(&self) -> &str;
|
fn get_current_input(&self) -> &str;
|
||||||
fn get_current_input_mut(&mut self) -> &mut String;
|
fn get_current_input_mut(&mut self) -> &mut String;
|
||||||
fn fields(&self) -> Vec<&str>;
|
fn fields(&self) -> Vec<&str>;
|
||||||
|
|
||||||
fn set_current_field(&mut self, index: usize);
|
fn set_current_field(&mut self, index: usize);
|
||||||
fn set_current_cursor_pos(&mut self, pos: usize);
|
fn set_current_cursor_pos(&mut self, pos: usize);
|
||||||
fn set_has_unsaved_changes(&mut self, changed: bool);
|
fn set_has_unsaved_changes(&mut self, changed: bool);
|
||||||
|
|
||||||
// --- Autocomplete Support ---
|
|
||||||
fn get_suggestions(&self) -> Option<&[String]>;
|
fn get_suggestions(&self) -> Option<&[String]>;
|
||||||
fn get_selected_suggestion_index(&self) -> Option<usize>;
|
fn get_selected_suggestion_index(&self) -> Option<usize>;
|
||||||
|
fn get_rich_suggestions(&self) -> Option<&[Hit]> {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_display_value_for_field(&self, index: usize) -> &str {
|
||||||
|
self.inputs()
|
||||||
|
.get(index)
|
||||||
|
.map(|s| s.as_str())
|
||||||
|
.unwrap_or("")
|
||||||
|
}
|
||||||
|
fn has_display_override(&self, _index: usize) -> bool {
|
||||||
|
false
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,49 +1,109 @@
|
|||||||
// src/state/pages/form.rs
|
// src/state/pages/form.rs
|
||||||
|
|
||||||
use std::collections::HashMap; // NEW
|
|
||||||
use crate::config::colors::themes::Theme;
|
use crate::config::colors::themes::Theme;
|
||||||
use ratatui::layout::Rect;
|
|
||||||
use ratatui::Frame;
|
|
||||||
use crate::state::app::highlight::HighlightState;
|
use crate::state::app::highlight::HighlightState;
|
||||||
use crate::state::pages::canvas_state::CanvasState;
|
use crate::state::pages::canvas_state::CanvasState;
|
||||||
|
use common::proto::multieko2::search::search_response::Hit;
|
||||||
|
use ratatui::layout::Rect;
|
||||||
|
use ratatui::Frame;
|
||||||
|
use std::collections::HashMap;
|
||||||
|
|
||||||
|
fn json_value_to_string(value: &serde_json::Value) -> String {
|
||||||
|
match value {
|
||||||
|
serde_json::Value::String(s) => s.clone(),
|
||||||
|
serde_json::Value::Number(n) => n.to_string(),
|
||||||
|
serde_json::Value::Bool(b) => b.to_string(),
|
||||||
|
_ => String::new(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub struct FieldDefinition {
|
||||||
|
pub display_name: String,
|
||||||
|
pub data_key: String,
|
||||||
|
pub is_link: bool,
|
||||||
|
pub link_target_table: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
pub struct FormState {
|
pub struct FormState {
|
||||||
pub id: i64,
|
pub id: i64,
|
||||||
// NEW fields for dynamic table context
|
|
||||||
pub profile_name: String,
|
pub profile_name: String,
|
||||||
pub table_name: String,
|
pub table_name: String,
|
||||||
pub total_count: u64,
|
pub total_count: u64,
|
||||||
pub current_position: u64, // 1-based index, 0 or total_count + 1 for new entry
|
pub current_position: u64,
|
||||||
|
pub fields: Vec<FieldDefinition>,
|
||||||
pub fields: Vec<String>, // Already dynamic, which is good
|
|
||||||
pub values: Vec<String>,
|
pub values: Vec<String>,
|
||||||
pub current_field: usize,
|
pub current_field: usize,
|
||||||
pub has_unsaved_changes: bool,
|
pub has_unsaved_changes: bool,
|
||||||
pub current_cursor_pos: usize,
|
pub current_cursor_pos: usize,
|
||||||
|
pub autocomplete_active: bool,
|
||||||
|
pub autocomplete_suggestions: Vec<Hit>,
|
||||||
|
pub selected_suggestion_index: Option<usize>,
|
||||||
|
pub autocomplete_loading: bool,
|
||||||
|
pub link_display_map: HashMap<usize, String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl FormState {
|
impl FormState {
|
||||||
/// Creates a new, empty FormState for a given table.
|
|
||||||
/// The position defaults to 1, representing either the first record
|
|
||||||
/// or the position for a new entry if the table is empty.
|
|
||||||
pub fn new(
|
pub fn new(
|
||||||
profile_name: String,
|
profile_name: String,
|
||||||
table_name: String,
|
table_name: String,
|
||||||
fields: Vec<String>,
|
fields: Vec<FieldDefinition>,
|
||||||
) -> Self {
|
) -> Self {
|
||||||
let values = vec![String::new(); fields.len()];
|
let values = vec![String::new(); fields.len()];
|
||||||
FormState {
|
FormState {
|
||||||
id: 0, // Default to 0, indicating a new or unloaded record
|
id: 0,
|
||||||
profile_name,
|
profile_name,
|
||||||
table_name,
|
table_name,
|
||||||
total_count: 0, // Will be fetched after initialization
|
total_count: 0,
|
||||||
// FIX: Default to 1. A position of 0 is an invalid state.
|
|
||||||
current_position: 1,
|
current_position: 1,
|
||||||
fields,
|
fields,
|
||||||
values,
|
values,
|
||||||
current_field: 0,
|
current_field: 0,
|
||||||
has_unsaved_changes: false,
|
has_unsaved_changes: false,
|
||||||
current_cursor_pos: 0,
|
current_cursor_pos: 0,
|
||||||
|
autocomplete_active: false,
|
||||||
|
autocomplete_suggestions: Vec::new(),
|
||||||
|
selected_suggestion_index: None,
|
||||||
|
autocomplete_loading: false,
|
||||||
|
link_display_map: HashMap::new(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_display_name_for_hit(&self, hit: &Hit) -> String {
|
||||||
|
if let Ok(content_map) =
|
||||||
|
serde_json::from_str::<HashMap<String, serde_json::Value>>(
|
||||||
|
&hit.content_json,
|
||||||
|
)
|
||||||
|
{
|
||||||
|
const IGNORED_KEYS: &[&str] = &["id", "deleted", "created_at"];
|
||||||
|
let mut keys: Vec<_> = content_map
|
||||||
|
.keys()
|
||||||
|
.filter(|k| !IGNORED_KEYS.contains(&k.as_str()))
|
||||||
|
.cloned()
|
||||||
|
.collect();
|
||||||
|
keys.sort();
|
||||||
|
|
||||||
|
let values: Vec<_> = keys
|
||||||
|
.iter()
|
||||||
|
.map(|key| {
|
||||||
|
content_map
|
||||||
|
.get(key)
|
||||||
|
.map(json_value_to_string)
|
||||||
|
.unwrap_or_default()
|
||||||
|
})
|
||||||
|
.filter(|s| !s.is_empty())
|
||||||
|
.take(1)
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
let display_part = values.first().cloned().unwrap_or_default();
|
||||||
|
if display_part.is_empty() {
|
||||||
|
format!("ID: {}", hit.id)
|
||||||
|
} else {
|
||||||
|
format!("{} | ID: {}", display_part, hit.id)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
format!("ID: {} (parse error)", hit.id)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -56,13 +116,13 @@ impl FormState {
|
|||||||
highlight_state: &HighlightState,
|
highlight_state: &HighlightState,
|
||||||
) {
|
) {
|
||||||
let fields_str_slice: Vec<&str> =
|
let fields_str_slice: Vec<&str> =
|
||||||
self.fields.iter().map(|s| s.as_str()).collect();
|
self.fields().iter().map(|s| *s).collect();
|
||||||
let values_str_slice: Vec<&String> = self.values.iter().collect();
|
let values_str_slice: Vec<&String> = self.values.iter().collect();
|
||||||
|
|
||||||
crate::components::form::form::render_form(
|
crate::components::form::form::render_form(
|
||||||
f,
|
f,
|
||||||
area,
|
area,
|
||||||
self, // Pass self as CanvasState
|
self,
|
||||||
&fields_str_slice,
|
&fields_str_slice,
|
||||||
&self.current_field,
|
&self.current_field,
|
||||||
&values_str_slice,
|
&values_str_slice,
|
||||||
@@ -75,20 +135,19 @@ impl FormState {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Resets the form to a state for creating a new entry.
|
|
||||||
/// It clears all values and sets the position to be one after the last record.
|
|
||||||
pub fn reset_to_empty(&mut self) {
|
pub fn reset_to_empty(&mut self) {
|
||||||
self.id = 0;
|
self.id = 0;
|
||||||
self.values.iter_mut().for_each(|v| v.clear());
|
self.values.iter_mut().for_each(|v| v.clear());
|
||||||
self.current_field = 0;
|
self.current_field = 0;
|
||||||
self.current_cursor_pos = 0;
|
self.current_cursor_pos = 0;
|
||||||
self.has_unsaved_changes = false;
|
self.has_unsaved_changes = false;
|
||||||
// Set the position for a new entry.
|
|
||||||
if self.total_count > 0 {
|
if self.total_count > 0 {
|
||||||
self.current_position = self.total_count + 1;
|
self.current_position = self.total_count + 1;
|
||||||
} else {
|
} else {
|
||||||
self.current_position = 1; // If table is empty, new record is at position 1
|
self.current_position = 1;
|
||||||
}
|
}
|
||||||
|
self.deactivate_autocomplete();
|
||||||
|
self.link_display_map.clear();
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn get_current_input(&self) -> &str {
|
pub fn get_current_input(&self) -> &str {
|
||||||
@@ -99,31 +158,28 @@ impl FormState {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn get_current_input_mut(&mut self) -> &mut String {
|
pub fn get_current_input_mut(&mut self) -> &mut String {
|
||||||
|
self.link_display_map.remove(&self.current_field);
|
||||||
self.values
|
self.values
|
||||||
.get_mut(self.current_field)
|
.get_mut(self.current_field)
|
||||||
.expect("Invalid current_field index")
|
.expect("Invalid current_field index")
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Updates the form's values from a data response and sets its position.
|
|
||||||
/// This is the single source of truth for populating the form after a data fetch.
|
|
||||||
pub fn update_from_response(
|
pub fn update_from_response(
|
||||||
&mut self,
|
&mut self,
|
||||||
response_data: &HashMap<String, String>,
|
response_data: &HashMap<String, String>,
|
||||||
// FIX: Add new_position to make this method authoritative.
|
|
||||||
new_position: u64,
|
new_position: u64,
|
||||||
) {
|
) {
|
||||||
// Create a new vector for the values, ensuring they are in the correct order.
|
self.values = self
|
||||||
self.values = self.fields.iter().map(|field_from_schema| {
|
.fields
|
||||||
// For each field from our schema, find the corresponding key in the
|
.iter()
|
||||||
// response data by doing a case-insensitive comparison.
|
.map(|field_def| {
|
||||||
response_data
|
response_data
|
||||||
.iter()
|
.get(&field_def.data_key)
|
||||||
.find(|(key_from_data, _)| key_from_data.eq_ignore_ascii_case(field_from_schema))
|
.cloned()
|
||||||
.map(|(_, value)| value.clone()) // If found, clone its value.
|
.unwrap_or_default()
|
||||||
.unwrap_or_default() // If not found, use an empty string.
|
})
|
||||||
}).collect();
|
.collect();
|
||||||
|
|
||||||
// Now, do the same case-insensitive lookup for the 'id' field.
|
|
||||||
let id_str_opt = response_data
|
let id_str_opt = response_data
|
||||||
.iter()
|
.iter()
|
||||||
.find(|(k, _)| k.eq_ignore_ascii_case("id"))
|
.find(|(k, _)| k.eq_ignore_ascii_case("id"))
|
||||||
@@ -133,18 +189,31 @@ impl FormState {
|
|||||||
if let Ok(parsed_id) = id_str.parse::<i64>() {
|
if let Ok(parsed_id) = id_str.parse::<i64>() {
|
||||||
self.id = parsed_id;
|
self.id = parsed_id;
|
||||||
} else {
|
} else {
|
||||||
tracing::error!( "Failed to parse 'id' field '{}' for table {}.{}", id_str, self.profile_name, self.table_name);
|
tracing::error!(
|
||||||
|
"Failed to parse 'id' field '{}' for table {}.{}",
|
||||||
|
id_str,
|
||||||
|
self.profile_name,
|
||||||
|
self.table_name
|
||||||
|
);
|
||||||
self.id = 0;
|
self.id = 0;
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
self.id = 0;
|
self.id = 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
// FIX: Set the position from the provided parameter.
|
|
||||||
self.current_position = new_position;
|
self.current_position = new_position;
|
||||||
self.has_unsaved_changes = false;
|
self.has_unsaved_changes = false;
|
||||||
self.current_field = 0;
|
self.current_field = 0;
|
||||||
self.current_cursor_pos = 0;
|
self.current_cursor_pos = 0;
|
||||||
|
self.deactivate_autocomplete();
|
||||||
|
self.link_display_map.clear();
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn deactivate_autocomplete(&mut self) {
|
||||||
|
self.autocomplete_active = false;
|
||||||
|
self.autocomplete_suggestions.clear();
|
||||||
|
self.selected_suggestion_index = None;
|
||||||
|
self.autocomplete_loading = false;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -152,52 +221,69 @@ impl CanvasState for FormState {
|
|||||||
fn current_field(&self) -> usize {
|
fn current_field(&self) -> usize {
|
||||||
self.current_field
|
self.current_field
|
||||||
}
|
}
|
||||||
|
|
||||||
fn current_cursor_pos(&self) -> usize {
|
fn current_cursor_pos(&self) -> usize {
|
||||||
self.current_cursor_pos
|
self.current_cursor_pos
|
||||||
}
|
}
|
||||||
|
|
||||||
fn has_unsaved_changes(&self) -> bool {
|
fn has_unsaved_changes(&self) -> bool {
|
||||||
self.has_unsaved_changes
|
self.has_unsaved_changes
|
||||||
}
|
}
|
||||||
|
|
||||||
fn inputs(&self) -> Vec<&String> {
|
fn inputs(&self) -> Vec<&String> {
|
||||||
self.values.iter().collect()
|
self.values.iter().collect()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_current_input(&self) -> &str {
|
fn get_current_input(&self) -> &str {
|
||||||
// Re-use the struct's own method
|
|
||||||
FormState::get_current_input(self)
|
FormState::get_current_input(self)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_current_input_mut(&mut self) -> &mut String {
|
fn get_current_input_mut(&mut self) -> &mut String {
|
||||||
// Re-use the struct's own method
|
|
||||||
FormState::get_current_input_mut(self)
|
FormState::get_current_input_mut(self)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn fields(&self) -> Vec<&str> {
|
fn fields(&self) -> Vec<&str> {
|
||||||
self.fields.iter().map(|s| s.as_str()).collect()
|
self.fields
|
||||||
|
.iter()
|
||||||
|
.map(|f| f.display_name.as_str())
|
||||||
|
.collect()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn set_current_field(&mut self, index: usize) {
|
fn set_current_field(&mut self, index: usize) {
|
||||||
if index < self.fields.len() {
|
if index < self.fields.len() {
|
||||||
self.current_field = index;
|
self.current_field = index;
|
||||||
}
|
}
|
||||||
|
self.deactivate_autocomplete();
|
||||||
}
|
}
|
||||||
|
|
||||||
fn set_current_cursor_pos(&mut self, pos: usize) {
|
fn set_current_cursor_pos(&mut self, pos: usize) {
|
||||||
self.current_cursor_pos = pos;
|
self.current_cursor_pos = pos;
|
||||||
}
|
}
|
||||||
|
|
||||||
fn set_has_unsaved_changes(&mut self, changed: bool) {
|
fn set_has_unsaved_changes(&mut self, changed: bool) {
|
||||||
self.has_unsaved_changes = changed;
|
self.has_unsaved_changes = changed;
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_suggestions(&self) -> Option<&[String]> {
|
fn get_suggestions(&self) -> Option<&[String]> {
|
||||||
None
|
None
|
||||||
}
|
}
|
||||||
|
fn get_rich_suggestions(&self) -> Option<&[Hit]> {
|
||||||
|
if self.autocomplete_active {
|
||||||
|
Some(&self.autocomplete_suggestions)
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
fn get_selected_suggestion_index(&self) -> Option<usize> {
|
fn get_selected_suggestion_index(&self) -> Option<usize> {
|
||||||
None
|
if self.autocomplete_active {
|
||||||
|
self.selected_suggestion_index
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_display_value_for_field(&self, index: usize) -> &str {
|
||||||
|
if let Some(display_text) = self.link_display_map.get(&index) {
|
||||||
|
return display_text.as_str();
|
||||||
|
}
|
||||||
|
self.inputs()
|
||||||
|
.get(index)
|
||||||
|
.map(|s| s.as_str())
|
||||||
|
.unwrap_or("")
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- IMPLEMENT THE NEW TRAIT METHOD ---
|
||||||
|
fn has_display_override(&self, index: usize) -> bool {
|
||||||
|
self.link_display_map.contains_key(&index)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,19 +1,22 @@
|
|||||||
// src/tui/functions/common/form.rs
|
// src/tui/functions/common/form.rs
|
||||||
|
|
||||||
use crate::services::grpc_client::GrpcClient;
|
use crate::services::grpc_client::GrpcClient;
|
||||||
|
use crate::state::app::state::AppState; // NEW: Import AppState
|
||||||
use crate::state::pages::form::FormState;
|
use crate::state::pages::form::FormState;
|
||||||
use anyhow::{Context, Result}; // Added Context
|
use crate::utils::data_converter; // NEW: Import our translator
|
||||||
use std::collections::HashMap; // NEW
|
use anyhow::{anyhow, Context, Result};
|
||||||
|
use std::collections::HashMap;
|
||||||
|
|
||||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||||
pub enum SaveOutcome {
|
pub enum SaveOutcome {
|
||||||
NoChange,
|
NoChange,
|
||||||
UpdatedExisting,
|
UpdatedExisting,
|
||||||
CreatedNew(i64), // Keep the ID
|
CreatedNew(i64),
|
||||||
}
|
}
|
||||||
|
|
||||||
// MODIFIED save function
|
// MODIFIED save function signature and logic
|
||||||
pub async fn save(
|
pub async fn save(
|
||||||
|
app_state: &AppState, // NEW: Pass in AppState
|
||||||
form_state: &mut FormState,
|
form_state: &mut FormState,
|
||||||
grpc_client: &mut GrpcClient,
|
grpc_client: &mut GrpcClient,
|
||||||
) -> Result<SaveOutcome> {
|
) -> Result<SaveOutcome> {
|
||||||
@@ -21,42 +24,64 @@ pub async fn save(
|
|||||||
return Ok(SaveOutcome::NoChange);
|
return Ok(SaveOutcome::NoChange);
|
||||||
}
|
}
|
||||||
|
|
||||||
let data_map: HashMap<String, String> = form_state.fields.iter()
|
// --- NEW: VALIDATION & CONVERSION STEP ---
|
||||||
|
let cache_key =
|
||||||
|
format!("{}.{}", form_state.profile_name, form_state.table_name);
|
||||||
|
let schema = match app_state.schema_cache.get(&cache_key) {
|
||||||
|
Some(s) => s,
|
||||||
|
None => {
|
||||||
|
return Err(anyhow!(
|
||||||
|
"Schema for table '{}' not found in cache. Cannot save.",
|
||||||
|
form_state.table_name
|
||||||
|
));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let data_map: HashMap<String, String> = form_state
|
||||||
|
.fields
|
||||||
|
.iter()
|
||||||
.zip(form_state.values.iter())
|
.zip(form_state.values.iter())
|
||||||
.map(|(field, value)| (field.clone(), value.clone()))
|
.map(|(field_def, value)| (field_def.data_key.clone(), value.clone()))
|
||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
|
// Use our new translator. It returns a user-friendly error on failure.
|
||||||
|
let converted_data =
|
||||||
|
match data_converter::convert_and_validate_data(&data_map, schema) {
|
||||||
|
Ok(data) => data,
|
||||||
|
Err(user_error) => return Err(anyhow!(user_error)),
|
||||||
|
};
|
||||||
|
// --- END OF NEW STEP ---
|
||||||
|
|
||||||
let outcome: SaveOutcome;
|
let outcome: SaveOutcome;
|
||||||
|
let is_new_entry = form_state.id == 0
|
||||||
let is_new_entry = form_state.id == 0 || (form_state.total_count > 0 && form_state.current_position > form_state.total_count) || (form_state.total_count == 0 && form_state.current_position == 1) ;
|
|| (form_state.total_count > 0
|
||||||
|
&& form_state.current_position > form_state.total_count)
|
||||||
|
|| (form_state.total_count == 0 && form_state.current_position == 1);
|
||||||
|
|
||||||
if is_new_entry {
|
if is_new_entry {
|
||||||
let response = grpc_client
|
let response = grpc_client
|
||||||
.post_table_data(
|
.post_table_data(
|
||||||
form_state.profile_name.clone(),
|
form_state.profile_name.clone(),
|
||||||
form_state.table_name.clone(),
|
form_state.table_name.clone(),
|
||||||
data_map,
|
converted_data, // Use the validated & converted data
|
||||||
)
|
)
|
||||||
.await
|
.await
|
||||||
.context("Failed to post new table data")?;
|
.context("Failed to post new table data")?;
|
||||||
|
|
||||||
if response.success {
|
if response.success {
|
||||||
form_state.id = response.inserted_id;
|
form_state.id = response.inserted_id;
|
||||||
// After creating a new entry, total_count increases, and current_position becomes this new total_count
|
|
||||||
form_state.total_count += 1;
|
form_state.total_count += 1;
|
||||||
form_state.current_position = form_state.total_count;
|
form_state.current_position = form_state.total_count;
|
||||||
outcome = SaveOutcome::CreatedNew(response.inserted_id);
|
outcome = SaveOutcome::CreatedNew(response.inserted_id);
|
||||||
} else {
|
} else {
|
||||||
return Err(anyhow::anyhow!(
|
return Err(anyhow!(
|
||||||
"Server failed to insert data: {}",
|
"Server failed to insert data: {}",
|
||||||
response.message
|
response.message
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
// This assumes form_state.id is valid for an existing record
|
|
||||||
if form_state.id == 0 {
|
if form_state.id == 0 {
|
||||||
return Err(anyhow::anyhow!(
|
return Err(anyhow!(
|
||||||
"Cannot update record: ID is 0, but not classified as new entry."
|
"Cannot update record: ID is 0, but not classified as new entry."
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
@@ -65,7 +90,7 @@ pub async fn save(
|
|||||||
form_state.profile_name.clone(),
|
form_state.profile_name.clone(),
|
||||||
form_state.table_name.clone(),
|
form_state.table_name.clone(),
|
||||||
form_state.id,
|
form_state.id,
|
||||||
data_map,
|
converted_data, // Use the validated & converted data
|
||||||
)
|
)
|
||||||
.await
|
.await
|
||||||
.context("Failed to put (update) table data")?;
|
.context("Failed to put (update) table data")?;
|
||||||
@@ -73,7 +98,7 @@ pub async fn save(
|
|||||||
if response.success {
|
if response.success {
|
||||||
outcome = SaveOutcome::UpdatedExisting;
|
outcome = SaveOutcome::UpdatedExisting;
|
||||||
} else {
|
} else {
|
||||||
return Err(anyhow::anyhow!(
|
return Err(anyhow!(
|
||||||
"Server failed to update data: {}",
|
"Server failed to update data: {}",
|
||||||
response.message
|
response.message
|
||||||
));
|
));
|
||||||
|
|||||||
@@ -1,34 +1,36 @@
|
|||||||
// client/src/ui/handlers/render.rs
|
// src/ui/handlers/render.rs
|
||||||
|
|
||||||
use crate::components::{
|
use crate::components::{
|
||||||
|
admin::add_logic::render_add_logic,
|
||||||
|
admin::render_add_table,
|
||||||
|
auth::{login::render_login, register::render_register},
|
||||||
|
common::dialog::render_dialog,
|
||||||
|
common::find_file_palette,
|
||||||
|
common::search_palette::render_search_palette,
|
||||||
|
form::form::render_form,
|
||||||
|
handlers::sidebar::{self, calculate_sidebar_layout},
|
||||||
|
intro::intro::render_intro,
|
||||||
render_background,
|
render_background,
|
||||||
render_buffer_list,
|
render_buffer_list,
|
||||||
render_command_line,
|
render_command_line,
|
||||||
render_status_line,
|
render_status_line,
|
||||||
intro::intro::render_intro,
|
|
||||||
handlers::sidebar::{self, calculate_sidebar_layout},
|
|
||||||
form::form::render_form,
|
|
||||||
admin::render_add_table,
|
|
||||||
admin::add_logic::render_add_logic,
|
|
||||||
auth::{login::render_login, register::render_register},
|
|
||||||
common::find_file_palette,
|
|
||||||
};
|
};
|
||||||
use crate::config::colors::themes::Theme;
|
use crate::config::colors::themes::Theme;
|
||||||
|
use crate::modes::general::command_navigation::NavigationState;
|
||||||
|
use crate::state::pages::canvas_state::CanvasState;
|
||||||
|
use crate::state::app::buffer::BufferState;
|
||||||
|
use crate::state::app::highlight::HighlightState;
|
||||||
|
use crate::state::app::state::AppState;
|
||||||
|
use crate::state::pages::admin::AdminState;
|
||||||
|
use crate::state::pages::auth::AuthState;
|
||||||
|
use crate::state::pages::auth::LoginState;
|
||||||
|
use crate::state::pages::auth::RegisterState;
|
||||||
|
use crate::state::pages::form::FormState;
|
||||||
|
use crate::state::pages::intro::IntroState;
|
||||||
use ratatui::{
|
use ratatui::{
|
||||||
layout::{Constraint, Direction, Layout},
|
layout::{Constraint, Direction, Layout},
|
||||||
Frame,
|
Frame,
|
||||||
};
|
};
|
||||||
use crate::state::pages::canvas_state::CanvasState;
|
|
||||||
use crate::state::pages::form::FormState;
|
|
||||||
use crate::state::pages::auth::AuthState;
|
|
||||||
use crate::state::pages::auth::LoginState;
|
|
||||||
use crate::state::pages::auth::RegisterState;
|
|
||||||
use crate::state::pages::intro::IntroState;
|
|
||||||
use crate::state::app::buffer::BufferState;
|
|
||||||
use crate::state::app::state::AppState;
|
|
||||||
use crate::state::pages::admin::AdminState;
|
|
||||||
use crate::state::app::highlight::HighlightState;
|
|
||||||
use crate::modes::general::command_navigation::NavigationState;
|
|
||||||
|
|
||||||
#[allow(clippy::too_many_arguments)]
|
#[allow(clippy::too_many_arguments)]
|
||||||
pub fn render_ui(
|
pub fn render_ui(
|
||||||
@@ -53,16 +55,28 @@ pub fn render_ui(
|
|||||||
) {
|
) {
|
||||||
render_background(f, f.area(), theme);
|
render_background(f, f.area(), theme);
|
||||||
|
|
||||||
|
// --- START DYNAMIC LAYOUT LOGIC ---
|
||||||
|
let mut status_line_height = 1;
|
||||||
|
#[cfg(feature = "ui-debug")]
|
||||||
|
{
|
||||||
|
if let Some(debug_state) = &app_state.debug_state {
|
||||||
|
if debug_state.is_error {
|
||||||
|
status_line_height = 4;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// --- END DYNAMIC LAYOUT LOGIC ---
|
||||||
|
|
||||||
const PALETTE_OPTIONS_HEIGHT_FOR_LAYOUT: u16 = 15;
|
const PALETTE_OPTIONS_HEIGHT_FOR_LAYOUT: u16 = 15;
|
||||||
|
|
||||||
let mut bottom_area_constraints: Vec<Constraint> = vec![Constraint::Length(1)];
|
|
||||||
|
|
||||||
|
let mut bottom_area_constraints: Vec<Constraint> = vec![Constraint::Length(status_line_height)];
|
||||||
let command_palette_area_height = if navigation_state.active {
|
let command_palette_area_height = if navigation_state.active {
|
||||||
1 + PALETTE_OPTIONS_HEIGHT_FOR_LAYOUT
|
1 + PALETTE_OPTIONS_HEIGHT_FOR_LAYOUT
|
||||||
} else if event_handler_command_mode_active {
|
} else if event_handler_command_mode_active {
|
||||||
1
|
1
|
||||||
} else {
|
} else {
|
||||||
0 // Neither is active
|
0
|
||||||
};
|
};
|
||||||
|
|
||||||
if command_palette_area_height > 0 {
|
if command_palette_area_height > 0 {
|
||||||
@@ -75,7 +89,6 @@ pub fn render_ui(
|
|||||||
}
|
}
|
||||||
main_layout_constraints.extend(bottom_area_constraints);
|
main_layout_constraints.extend(bottom_area_constraints);
|
||||||
|
|
||||||
|
|
||||||
let root_chunks = Layout::default()
|
let root_chunks = Layout::default()
|
||||||
.direction(Direction::Vertical)
|
.direction(Direction::Vertical)
|
||||||
.constraints(main_layout_constraints)
|
.constraints(main_layout_constraints)
|
||||||
@@ -106,77 +119,95 @@ pub fn render_ui(
|
|||||||
None
|
None
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
||||||
if app_state.ui.show_intro {
|
if app_state.ui.show_intro {
|
||||||
render_intro(f, intro_state, main_content_area, theme);
|
render_intro(f, intro_state, main_content_area, theme);
|
||||||
} else if app_state.ui.show_register {
|
} else if app_state.ui.show_register {
|
||||||
render_register(
|
render_register(
|
||||||
f, main_content_area, theme, register_state, app_state,
|
f,
|
||||||
|
main_content_area,
|
||||||
|
theme,
|
||||||
|
register_state,
|
||||||
|
app_state,
|
||||||
register_state.current_field() < 4,
|
register_state.current_field() < 4,
|
||||||
highlight_state,
|
highlight_state,
|
||||||
);
|
);
|
||||||
} else if app_state.ui.show_add_table {
|
} else if app_state.ui.show_add_table {
|
||||||
render_add_table(
|
render_add_table(
|
||||||
f, main_content_area, theme, app_state, &mut admin_state.add_table_state,
|
f,
|
||||||
|
main_content_area,
|
||||||
|
theme,
|
||||||
|
app_state,
|
||||||
|
&mut admin_state.add_table_state,
|
||||||
is_event_handler_edit_mode,
|
is_event_handler_edit_mode,
|
||||||
highlight_state,
|
highlight_state,
|
||||||
);
|
);
|
||||||
} else if app_state.ui.show_add_logic {
|
} else if app_state.ui.show_add_logic {
|
||||||
render_add_logic(
|
render_add_logic(
|
||||||
f, main_content_area, theme, app_state, &mut admin_state.add_logic_state,
|
f,
|
||||||
is_event_handler_edit_mode, highlight_state,
|
main_content_area,
|
||||||
|
theme,
|
||||||
|
app_state,
|
||||||
|
&mut admin_state.add_logic_state,
|
||||||
|
is_event_handler_edit_mode,
|
||||||
|
highlight_state,
|
||||||
);
|
);
|
||||||
} else if app_state.ui.show_login {
|
} else if app_state.ui.show_login {
|
||||||
render_login(
|
render_login(
|
||||||
f, main_content_area, theme, login_state, app_state,
|
f,
|
||||||
|
main_content_area,
|
||||||
|
theme,
|
||||||
|
login_state,
|
||||||
|
app_state,
|
||||||
login_state.current_field() < 2,
|
login_state.current_field() < 2,
|
||||||
highlight_state,
|
highlight_state,
|
||||||
);
|
);
|
||||||
} else if app_state.ui.show_admin {
|
} else if app_state.ui.show_admin {
|
||||||
crate::components::admin::admin_panel::render_admin_panel(
|
crate::components::admin::admin_panel::render_admin_panel(
|
||||||
f, app_state, auth_state, admin_state, main_content_area, theme,
|
f,
|
||||||
&app_state.profile_tree, &app_state.selected_profile,
|
app_state,
|
||||||
|
auth_state,
|
||||||
|
admin_state,
|
||||||
|
main_content_area,
|
||||||
|
theme,
|
||||||
|
&app_state.profile_tree,
|
||||||
|
&app_state.selected_profile,
|
||||||
);
|
);
|
||||||
|
|
||||||
} else if app_state.ui.show_form {
|
} else if app_state.ui.show_form {
|
||||||
let (sidebar_area, form_actual_area) = calculate_sidebar_layout(
|
let (sidebar_area, form_actual_area) =
|
||||||
app_state.ui.show_sidebar, main_content_area
|
calculate_sidebar_layout(app_state.ui.show_sidebar, main_content_area);
|
||||||
);
|
|
||||||
if let Some(sidebar_rect) = sidebar_area {
|
if let Some(sidebar_rect) = sidebar_area {
|
||||||
sidebar::render_sidebar(
|
sidebar::render_sidebar(
|
||||||
f, sidebar_rect, theme, &app_state.profile_tree, &app_state.selected_profile
|
f,
|
||||||
|
sidebar_rect,
|
||||||
|
theme,
|
||||||
|
&app_state.profile_tree,
|
||||||
|
&app_state.selected_profile,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
let available_width = form_actual_area.width;
|
let available_width = form_actual_area.width;
|
||||||
let form_render_area = if available_width >= 80 {
|
let form_render_area = if available_width >= 80 {
|
||||||
Layout::default().direction(Direction::Horizontal)
|
Layout::default()
|
||||||
|
.direction(Direction::Horizontal)
|
||||||
.constraints([Constraint::Min(0), Constraint::Length(80), Constraint::Min(0)])
|
.constraints([Constraint::Min(0), Constraint::Length(80), Constraint::Min(0)])
|
||||||
.split(form_actual_area)[1]
|
.split(form_actual_area)[1]
|
||||||
} else {
|
} else {
|
||||||
Layout::default().direction(Direction::Horizontal)
|
Layout::default()
|
||||||
.constraints([Constraint::Min(0), Constraint::Length(available_width), Constraint::Min(0)])
|
.direction(Direction::Horizontal)
|
||||||
|
.constraints([
|
||||||
|
Constraint::Min(0),
|
||||||
|
Constraint::Length(available_width),
|
||||||
|
Constraint::Min(0),
|
||||||
|
])
|
||||||
.split(form_actual_area)[1]
|
.split(form_actual_area)[1]
|
||||||
};
|
};
|
||||||
let fields_vec: Vec<&str> = form_state.fields.iter().map(AsRef::as_ref).collect();
|
|
||||||
let values_vec: Vec<&String> = form_state.values.iter().collect();
|
form_state.render(
|
||||||
|
|
||||||
// --- START FIX ---
|
|
||||||
// Add the missing `&form_state.table_name` argument to this function call.
|
|
||||||
render_form(
|
|
||||||
f,
|
f,
|
||||||
form_render_area,
|
form_render_area,
|
||||||
form_state,
|
|
||||||
&fields_vec,
|
|
||||||
&form_state.current_field,
|
|
||||||
&values_vec,
|
|
||||||
&form_state.table_name, // <-- THIS ARGUMENT WAS MISSING
|
|
||||||
theme,
|
theme,
|
||||||
is_event_handler_edit_mode,
|
is_event_handler_edit_mode,
|
||||||
highlight_state,
|
highlight_state,
|
||||||
form_state.total_count,
|
|
||||||
form_state.current_position,
|
|
||||||
);
|
);
|
||||||
// --- END FIX ---
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Some(area) = buffer_list_area {
|
if let Some(area) = buffer_list_area {
|
||||||
@@ -193,23 +224,41 @@ pub fn render_ui(
|
|||||||
app_state,
|
app_state,
|
||||||
);
|
);
|
||||||
|
|
||||||
if let Some(palette_or_command_area) = command_render_area { // Use the calculated area
|
if let Some(palette_or_command_area) = command_render_area {
|
||||||
if navigation_state.active {
|
if navigation_state.active {
|
||||||
find_file_palette::render_find_file_palette(
|
find_file_palette::render_find_file_palette(
|
||||||
f,
|
f,
|
||||||
palette_or_command_area, // Use the correct area
|
palette_or_command_area,
|
||||||
theme,
|
theme,
|
||||||
navigation_state, // Pass the navigation_state directly
|
navigation_state,
|
||||||
);
|
);
|
||||||
} else if event_handler_command_mode_active {
|
} else if event_handler_command_mode_active {
|
||||||
render_command_line(
|
render_command_line(
|
||||||
f,
|
f,
|
||||||
palette_or_command_area, // Use the correct area
|
palette_or_command_area,
|
||||||
event_handler_command_input,
|
event_handler_command_input,
|
||||||
true, // Assuming it's always active when this branch is hit
|
true,
|
||||||
theme,
|
theme,
|
||||||
event_handler_command_message,
|
event_handler_command_message,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// This block now correctly handles drawing popups over any view.
|
||||||
|
if app_state.ui.show_search_palette {
|
||||||
|
if let Some(search_state) = &app_state.search_state {
|
||||||
|
render_search_palette(f, f.area(), theme, search_state);
|
||||||
|
}
|
||||||
|
} else if app_state.ui.dialog.dialog_show {
|
||||||
|
render_dialog(
|
||||||
|
f,
|
||||||
|
f.area(),
|
||||||
|
theme,
|
||||||
|
&app_state.ui.dialog.dialog_title,
|
||||||
|
&app_state.ui.dialog.dialog_message,
|
||||||
|
&app_state.ui.dialog.dialog_buttons,
|
||||||
|
app_state.ui.dialog.dialog_active_button_index,
|
||||||
|
app_state.ui.dialog.is_loading,
|
||||||
|
);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -9,7 +9,7 @@ use crate::modes::common::commands::CommandHandler;
|
|||||||
use crate::modes::handlers::event::{EventHandler, EventOutcome};
|
use crate::modes::handlers::event::{EventHandler, EventOutcome};
|
||||||
use crate::modes::handlers::mode_manager::{AppMode, ModeManager};
|
use crate::modes::handlers::mode_manager::{AppMode, ModeManager};
|
||||||
use crate::state::pages::canvas_state::CanvasState;
|
use crate::state::pages::canvas_state::CanvasState;
|
||||||
use crate::state::pages::form::FormState;
|
use crate::state::pages::form::{FormState, FieldDefinition}; // Import FieldDefinition
|
||||||
use crate::state::pages::auth::AuthState;
|
use crate::state::pages::auth::AuthState;
|
||||||
use crate::state::pages::auth::LoginState;
|
use crate::state::pages::auth::LoginState;
|
||||||
use crate::state::pages::auth::RegisterState;
|
use crate::state::pages::auth::RegisterState;
|
||||||
@@ -27,12 +27,16 @@ use crate::ui::handlers::context::DialogPurpose;
|
|||||||
use crate::tui::functions::common::login;
|
use crate::tui::functions::common::login;
|
||||||
use crate::tui::functions::common::register;
|
use crate::tui::functions::common::register;
|
||||||
use crate::utils::columns::filter_user_columns;
|
use crate::utils::columns::filter_user_columns;
|
||||||
use std::time::Instant;
|
|
||||||
use anyhow::{anyhow, Context, Result};
|
use anyhow::{anyhow, Context, Result};
|
||||||
use crossterm::cursor::SetCursorStyle;
|
use crossterm::cursor::SetCursorStyle;
|
||||||
use crossterm::event as crossterm_event;
|
use crossterm::event as crossterm_event;
|
||||||
use tracing::{error, info, warn};
|
use tracing::{error, info, warn};
|
||||||
use tokio::sync::mpsc;
|
use tokio::sync::mpsc;
|
||||||
|
use std::time::{Duration, Instant};
|
||||||
|
#[cfg(feature = "ui-debug")]
|
||||||
|
use crate::state::app::state::DebugState;
|
||||||
|
#[cfg(feature = "ui-debug")]
|
||||||
|
use crate::utils::debug_logger::pop_next_debug_message;
|
||||||
|
|
||||||
pub async fn run_ui() -> Result<()> {
|
pub async fn run_ui() -> Result<()> {
|
||||||
let config = Config::load().context("Failed to load configuration")?;
|
let config = Config::load().context("Failed to load configuration")?;
|
||||||
@@ -51,6 +55,7 @@ pub async fn run_ui() -> Result<()> {
|
|||||||
register_result_sender.clone(),
|
register_result_sender.clone(),
|
||||||
save_table_result_sender.clone(),
|
save_table_result_sender.clone(),
|
||||||
save_logic_result_sender.clone(),
|
save_logic_result_sender.clone(),
|
||||||
|
grpc_client.clone(),
|
||||||
)
|
)
|
||||||
.await
|
.await
|
||||||
.context("Failed to create event handler")?;
|
.context("Failed to create event handler")?;
|
||||||
@@ -87,12 +92,20 @@ pub async fn run_ui() -> Result<()> {
|
|||||||
.await
|
.await
|
||||||
.context("Failed to initialize app state and form")?;
|
.context("Failed to initialize app state and form")?;
|
||||||
|
|
||||||
let filtered_columns = filter_user_columns(initial_columns_from_service);
|
let initial_field_defs: Vec<FieldDefinition> = filter_user_columns(initial_columns_from_service)
|
||||||
|
.into_iter()
|
||||||
|
.map(|col_name| FieldDefinition {
|
||||||
|
display_name: col_name.clone(),
|
||||||
|
data_key: col_name,
|
||||||
|
is_link: false,
|
||||||
|
link_target_table: None,
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
let mut form_state = FormState::new(
|
let mut form_state = FormState::new(
|
||||||
initial_profile.clone(),
|
initial_profile.clone(),
|
||||||
initial_table.clone(),
|
initial_table.clone(),
|
||||||
filtered_columns,
|
initial_field_defs,
|
||||||
);
|
);
|
||||||
|
|
||||||
UiService::fetch_and_set_table_count(&mut grpc_client, &mut form_state)
|
UiService::fetch_and_set_table_count(&mut grpc_client, &mut form_state)
|
||||||
@@ -126,6 +139,51 @@ pub async fn run_ui() -> Result<()> {
|
|||||||
loop {
|
loop {
|
||||||
let position_before_event = form_state.current_position;
|
let position_before_event = form_state.current_position;
|
||||||
let mut event_processed = false;
|
let mut event_processed = false;
|
||||||
|
|
||||||
|
// --- CHANNEL RECEIVERS ---
|
||||||
|
|
||||||
|
// For main search palette
|
||||||
|
match event_handler.search_result_receiver.try_recv() {
|
||||||
|
Ok(hits) => {
|
||||||
|
info!("--- 4. Main loop received message from channel. ---");
|
||||||
|
if let Some(search_state) = app_state.search_state.as_mut() {
|
||||||
|
search_state.results = hits;
|
||||||
|
search_state.is_loading = false;
|
||||||
|
}
|
||||||
|
needs_redraw = true;
|
||||||
|
}
|
||||||
|
Err(mpsc::error::TryRecvError::Empty) => {
|
||||||
|
}
|
||||||
|
Err(mpsc::error::TryRecvError::Disconnected) => {
|
||||||
|
error!("Search result channel disconnected!");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- ADDED: For live form autocomplete ---
|
||||||
|
match event_handler.autocomplete_result_receiver.try_recv() {
|
||||||
|
Ok(hits) => {
|
||||||
|
if form_state.autocomplete_active {
|
||||||
|
form_state.autocomplete_suggestions = hits;
|
||||||
|
form_state.autocomplete_loading = false;
|
||||||
|
if !form_state.autocomplete_suggestions.is_empty() {
|
||||||
|
form_state.selected_suggestion_index = Some(0);
|
||||||
|
} else {
|
||||||
|
form_state.selected_suggestion_index = None;
|
||||||
|
}
|
||||||
|
event_handler.command_message = format!("Found {} suggestions.", form_state.autocomplete_suggestions.len());
|
||||||
|
}
|
||||||
|
needs_redraw = true;
|
||||||
|
}
|
||||||
|
Err(mpsc::error::TryRecvError::Empty) => {}
|
||||||
|
Err(mpsc::error::TryRecvError::Disconnected) => {
|
||||||
|
error!("Autocomplete result channel disconnected!");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
if app_state.ui.show_search_palette {
|
||||||
|
needs_redraw = true;
|
||||||
|
}
|
||||||
if crossterm_event::poll(std::time::Duration::from_millis(1))? {
|
if crossterm_event::poll(std::time::Duration::from_millis(1))? {
|
||||||
let event = event_reader.read_event().context("Failed to read terminal event")?;
|
let event = event_reader.read_event().context("Failed to read terminal event")?;
|
||||||
event_processed = true;
|
event_processed = true;
|
||||||
@@ -133,7 +191,6 @@ pub async fn run_ui() -> Result<()> {
|
|||||||
event,
|
event,
|
||||||
&config,
|
&config,
|
||||||
&mut terminal,
|
&mut terminal,
|
||||||
&mut grpc_client,
|
|
||||||
&mut command_handler,
|
&mut command_handler,
|
||||||
&mut form_state,
|
&mut form_state,
|
||||||
&mut auth_state,
|
&mut auth_state,
|
||||||
@@ -293,83 +350,91 @@ pub async fn run_ui() -> Result<()> {
|
|||||||
let current_view_profile = app_state.current_view_profile_name.clone();
|
let current_view_profile = app_state.current_view_profile_name.clone();
|
||||||
let current_view_table = app_state.current_view_table_name.clone();
|
let current_view_table = app_state.current_view_table_name.clone();
|
||||||
|
|
||||||
|
// This condition correctly detects a table switch.
|
||||||
if prev_view_profile_name != current_view_profile
|
if prev_view_profile_name != current_view_profile
|
||||||
|| prev_view_table_name != current_view_table
|
|| prev_view_table_name != current_view_table
|
||||||
{
|
{
|
||||||
if let (Some(prof_name), Some(tbl_name)) =
|
if let (Some(prof_name), Some(tbl_name)) =
|
||||||
(current_view_profile.as_ref(), current_view_table.as_ref())
|
(current_view_profile.as_ref(), current_view_table.as_ref())
|
||||||
{
|
{
|
||||||
|
// --- START OF REFACTORED LOGIC ---
|
||||||
app_state.show_loading_dialog(
|
app_state.show_loading_dialog(
|
||||||
"Loading Table",
|
"Loading Table",
|
||||||
&format!("Fetching data for {}.{}...", prof_name, tbl_name),
|
&format!("Fetching data for {}.{}...", prof_name, tbl_name),
|
||||||
);
|
);
|
||||||
needs_redraw = true;
|
needs_redraw = true;
|
||||||
|
|
||||||
match grpc_client
|
// 1. Call our new, central function. It handles fetching AND caching.
|
||||||
.get_table_structure(prof_name.clone(), tbl_name.clone())
|
match UiService::load_table_view(
|
||||||
.await
|
&mut grpc_client,
|
||||||
|
&mut app_state,
|
||||||
|
prof_name,
|
||||||
|
tbl_name,
|
||||||
|
)
|
||||||
|
.await
|
||||||
{
|
{
|
||||||
Ok(structure_response) => {
|
Ok(mut new_form_state) => {
|
||||||
let new_columns: Vec<String> = structure_response
|
// 2. The function succeeded, we have a new FormState.
|
||||||
.columns
|
// Now, fetch its data.
|
||||||
.iter()
|
|
||||||
.map(|c| c.name.clone())
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
let filtered_columns = filter_user_columns(new_columns);
|
|
||||||
form_state = FormState::new(
|
|
||||||
prof_name.clone(),
|
|
||||||
tbl_name.clone(),
|
|
||||||
filtered_columns,
|
|
||||||
);
|
|
||||||
|
|
||||||
if let Err(e) = UiService::fetch_and_set_table_count(
|
if let Err(e) = UiService::fetch_and_set_table_count(
|
||||||
&mut grpc_client,
|
&mut grpc_client,
|
||||||
&mut form_state,
|
&mut new_form_state,
|
||||||
)
|
)
|
||||||
.await
|
.await
|
||||||
{
|
{
|
||||||
|
// Handle count fetching error
|
||||||
app_state.update_dialog_content(
|
app_state.update_dialog_content(
|
||||||
&format!("Error fetching count: {}", e),
|
&format!("Error fetching count: {}", e),
|
||||||
vec!["OK".to_string()],
|
vec!["OK".to_string()],
|
||||||
DialogPurpose::LoginFailed,
|
DialogPurpose::LoginFailed, // Or a more appropriate purpose
|
||||||
);
|
);
|
||||||
} else if form_state.total_count > 0 {
|
} else if new_form_state.total_count > 0 {
|
||||||
|
// If there are records, load the first/last one
|
||||||
if let Err(e) = UiService::load_table_data_by_position(
|
if let Err(e) = UiService::load_table_data_by_position(
|
||||||
&mut grpc_client,
|
&mut grpc_client,
|
||||||
&mut form_state,
|
&mut new_form_state,
|
||||||
)
|
)
|
||||||
.await
|
.await
|
||||||
{
|
{
|
||||||
|
// Handle data loading error
|
||||||
app_state.update_dialog_content(
|
app_state.update_dialog_content(
|
||||||
&format!("Error loading data: {}", e),
|
&format!("Error loading data: {}", e),
|
||||||
vec!["OK".to_string()],
|
vec!["OK".to_string()],
|
||||||
DialogPurpose::LoginFailed,
|
DialogPurpose::LoginFailed, // Or a more appropriate purpose
|
||||||
);
|
);
|
||||||
} else {
|
} else {
|
||||||
|
// Success! Hide the loading dialog.
|
||||||
app_state.hide_dialog();
|
app_state.hide_dialog();
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
form_state.reset_to_empty();
|
// No records, so just reset to an empty form.
|
||||||
|
new_form_state.reset_to_empty();
|
||||||
app_state.hide_dialog();
|
app_state.hide_dialog();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// 3. CRITICAL: Replace the old form_state with the new one.
|
||||||
|
form_state = new_form_state;
|
||||||
|
|
||||||
|
// 4. Update our tracking variables.
|
||||||
prev_view_profile_name = current_view_profile;
|
prev_view_profile_name = current_view_profile;
|
||||||
prev_view_table_name = current_view_table;
|
prev_view_table_name = current_view_table;
|
||||||
table_just_switched = true;
|
table_just_switched = true;
|
||||||
}
|
}
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
|
// This handles errors from load_table_view (e.g., schema fetch failed)
|
||||||
app_state.update_dialog_content(
|
app_state.update_dialog_content(
|
||||||
&format!("Error fetching table structure: {}", e),
|
&format!("Error loading table: {}", e),
|
||||||
vec!["OK".to_string()],
|
vec!["OK".to_string()],
|
||||||
DialogPurpose::LoginFailed,
|
DialogPurpose::LoginFailed, // Or a more appropriate purpose
|
||||||
);
|
);
|
||||||
|
// Revert the view change in app_state to avoid a loop
|
||||||
app_state.current_view_profile_name =
|
app_state.current_view_profile_name =
|
||||||
prev_view_profile_name.clone();
|
prev_view_profile_name.clone();
|
||||||
app_state.current_view_table_name =
|
app_state.current_view_table_name =
|
||||||
prev_view_table_name.clone();
|
prev_view_table_name.clone();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
// --- END OF REFACTORED LOGIC ---
|
||||||
}
|
}
|
||||||
needs_redraw = true;
|
needs_redraw = true;
|
||||||
}
|
}
|
||||||
@@ -499,10 +564,20 @@ pub async fn run_ui() -> Result<()> {
|
|||||||
|
|
||||||
#[cfg(feature = "ui-debug")]
|
#[cfg(feature = "ui-debug")]
|
||||||
{
|
{
|
||||||
app_state.debug_info = format!(
|
let can_display_next = match &app_state.debug_state {
|
||||||
"Redraw -> event: {}, needs_redraw: {}, pos_changed: {}",
|
Some(current) => current.display_start_time.elapsed() >= Duration::from_secs(2),
|
||||||
event_processed, needs_redraw, position_changed
|
None => true,
|
||||||
);
|
};
|
||||||
|
|
||||||
|
if can_display_next {
|
||||||
|
if let Some((new_message, is_error)) = pop_next_debug_message() {
|
||||||
|
app_state.debug_state = Some(DebugState {
|
||||||
|
displayed_message: new_message,
|
||||||
|
is_error,
|
||||||
|
display_start_time: Instant::now(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if event_processed || needs_redraw || position_changed {
|
if event_processed || needs_redraw || position_changed {
|
||||||
|
|||||||
50
client/src/utils/data_converter.rs
Normal file
50
client/src/utils/data_converter.rs
Normal file
@@ -0,0 +1,50 @@
|
|||||||
|
// src/utils/data_converter.rs
|
||||||
|
|
||||||
|
use common::proto::multieko2::table_structure::TableStructureResponse;
|
||||||
|
use prost_types::{value::Kind, NullValue, Value};
|
||||||
|
use std::collections::HashMap;
|
||||||
|
|
||||||
|
pub fn convert_and_validate_data(
|
||||||
|
data: &HashMap<String, String>,
|
||||||
|
schema: &TableStructureResponse,
|
||||||
|
) -> Result<HashMap<String, Value>, String> {
|
||||||
|
let type_map: HashMap<_, _> = schema
|
||||||
|
.columns
|
||||||
|
.iter()
|
||||||
|
.map(|col| (col.name.as_str(), col.data_type.as_str()))
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
data.iter()
|
||||||
|
.map(|(key, str_value)| {
|
||||||
|
let expected_type = type_map.get(key.as_str()).unwrap_or(&"TEXT");
|
||||||
|
|
||||||
|
let kind = if str_value.is_empty() {
|
||||||
|
// TODO: Use the correct enum variant
|
||||||
|
Kind::NullValue(NullValue::NullValue.into())
|
||||||
|
} else {
|
||||||
|
// Attempt to parse the string based on the expected type
|
||||||
|
match *expected_type {
|
||||||
|
"BOOL" => match str_value.to_lowercase().parse::<bool>() {
|
||||||
|
Ok(v) => Kind::BoolValue(v),
|
||||||
|
Err(_) => return Err(format!("Invalid boolean for '{}': must be 'true' or 'false'", key)),
|
||||||
|
},
|
||||||
|
"INT8" | "INT4" | "INT2" | "SERIAL" | "BIGSERIAL" => {
|
||||||
|
match str_value.parse::<f64>() {
|
||||||
|
Ok(v) => Kind::NumberValue(v),
|
||||||
|
Err(_) => return Err(format!("Invalid number for '{}': must be a whole number", key)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
"NUMERIC" | "FLOAT4" | "FLOAT8" => match str_value.parse::<f64>() {
|
||||||
|
Ok(v) => Kind::NumberValue(v),
|
||||||
|
Err(_) => return Err(format!("Invalid decimal for '{}': must be a number", key)),
|
||||||
|
},
|
||||||
|
"TIMESTAMPTZ" | "DATE" | "TIME" | "TEXT" | "VARCHAR" | "UUID" => {
|
||||||
|
Kind::StringValue(str_value.clone())
|
||||||
|
}
|
||||||
|
_ => Kind::StringValue(str_value.clone()),
|
||||||
|
}
|
||||||
|
};
|
||||||
|
Ok((key.clone(), Value { kind: Some(kind) }))
|
||||||
|
})
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
46
client/src/utils/debug_logger.rs
Normal file
46
client/src/utils/debug_logger.rs
Normal file
@@ -0,0 +1,46 @@
|
|||||||
|
// client/src/utils/debug_logger.rs
|
||||||
|
use lazy_static::lazy_static;
|
||||||
|
use std::collections::VecDeque; // <-- FIX: Import VecDeque
|
||||||
|
use std::io;
|
||||||
|
use std::sync::{Arc, Mutex}; // <-- FIX: Import Mutex
|
||||||
|
|
||||||
|
lazy_static! {
|
||||||
|
static ref UI_DEBUG_BUFFER: Arc<Mutex<VecDeque<(String, bool)>>> =
|
||||||
|
Arc::new(Mutex::new(VecDeque::from([(String::from("Logger initialized..."), false)])));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct UiDebugWriter;
|
||||||
|
|
||||||
|
impl Default for UiDebugWriter {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self::new()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl UiDebugWriter {
|
||||||
|
pub fn new() -> Self {
|
||||||
|
Self
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl io::Write for UiDebugWriter {
|
||||||
|
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
|
||||||
|
let mut buffer = UI_DEBUG_BUFFER.lock().unwrap();
|
||||||
|
let message = String::from_utf8_lossy(buf);
|
||||||
|
let trimmed_message = message.trim().to_string();
|
||||||
|
let is_error = trimmed_message.starts_with("ERROR");
|
||||||
|
// Add the new message to the back of the queue
|
||||||
|
buffer.push_back((trimmed_message, is_error));
|
||||||
|
Ok(buf.len())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn flush(&mut self) -> io::Result<()> {
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// A public function to pop the next message from the front of the queue.
|
||||||
|
pub fn pop_next_debug_message() -> Option<(String, bool)> {
|
||||||
|
UI_DEBUG_BUFFER.lock().unwrap().pop_front()
|
||||||
|
}
|
||||||
@@ -1,4 +1,9 @@
|
|||||||
// src/utils/mod.rs
|
// src/utils/mod.rs
|
||||||
|
|
||||||
pub mod columns;
|
pub mod columns;
|
||||||
|
pub mod debug_logger;
|
||||||
|
pub mod data_converter;
|
||||||
|
|
||||||
pub use columns::*;
|
pub use columns::*;
|
||||||
|
pub use debug_logger::*;
|
||||||
|
pub use data_converter::*;
|
||||||
|
|||||||
@@ -5,9 +5,14 @@ edition.workspace = true
|
|||||||
license.workspace = true
|
license.workspace = true
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
|
prost-types = { workspace = true }
|
||||||
|
|
||||||
tonic = "0.13.0"
|
tonic = "0.13.0"
|
||||||
prost = "0.13.5"
|
prost = "0.13.5"
|
||||||
serde = { version = "1.0.219", features = ["derive"] }
|
serde = { version = "1.0.219", features = ["derive"] }
|
||||||
|
|
||||||
|
# Search
|
||||||
|
tantivy = { workspace = true }
|
||||||
|
|
||||||
[build-dependencies]
|
[build-dependencies]
|
||||||
tonic-build = "0.13.0"
|
tonic-build = "0.13.0"
|
||||||
|
|||||||
@@ -14,6 +14,7 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
|
|||||||
"proto/table_definition.proto",
|
"proto/table_definition.proto",
|
||||||
"proto/tables_data.proto",
|
"proto/tables_data.proto",
|
||||||
"proto/table_script.proto",
|
"proto/table_script.proto",
|
||||||
|
"proto/search.proto",
|
||||||
],
|
],
|
||||||
&["proto"],
|
&["proto"],
|
||||||
)?;
|
)?;
|
||||||
|
|||||||
20
common/proto/search.proto
Normal file
20
common/proto/search.proto
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
// In common/proto/search.proto
|
||||||
|
syntax = "proto3";
|
||||||
|
package multieko2.search;
|
||||||
|
|
||||||
|
service Searcher {
|
||||||
|
rpc SearchTable(SearchRequest) returns (SearchResponse);
|
||||||
|
}
|
||||||
|
|
||||||
|
message SearchRequest {
|
||||||
|
string table_name = 1;
|
||||||
|
string query = 2;
|
||||||
|
}
|
||||||
|
message SearchResponse {
|
||||||
|
message Hit {
|
||||||
|
int64 id = 1; // PostgreSQL row ID
|
||||||
|
float score = 2;
|
||||||
|
string content_json = 3;
|
||||||
|
}
|
||||||
|
repeated Hit hits = 1;
|
||||||
|
}
|
||||||
@@ -3,6 +3,7 @@ syntax = "proto3";
|
|||||||
package multieko2.tables_data;
|
package multieko2.tables_data;
|
||||||
|
|
||||||
import "common.proto";
|
import "common.proto";
|
||||||
|
import "google/protobuf/struct.proto";
|
||||||
|
|
||||||
service TablesData {
|
service TablesData {
|
||||||
rpc PostTableData (PostTableDataRequest) returns (PostTableDataResponse);
|
rpc PostTableData (PostTableDataRequest) returns (PostTableDataResponse);
|
||||||
@@ -16,7 +17,7 @@ service TablesData {
|
|||||||
message PostTableDataRequest {
|
message PostTableDataRequest {
|
||||||
string profile_name = 1;
|
string profile_name = 1;
|
||||||
string table_name = 2;
|
string table_name = 2;
|
||||||
map<string, string> data = 3;
|
map<string, google.protobuf.Value> data = 3;
|
||||||
}
|
}
|
||||||
|
|
||||||
message PostTableDataResponse {
|
message PostTableDataResponse {
|
||||||
@@ -29,7 +30,7 @@ message PutTableDataRequest {
|
|||||||
string profile_name = 1;
|
string profile_name = 1;
|
||||||
string table_name = 2;
|
string table_name = 2;
|
||||||
int64 id = 3;
|
int64 id = 3;
|
||||||
map<string, string> data = 4;
|
map<string, google.protobuf.Value> data = 4;
|
||||||
}
|
}
|
||||||
|
|
||||||
message PutTableDataResponse {
|
message PutTableDataResponse {
|
||||||
|
|||||||
@@ -1,4 +1,7 @@
|
|||||||
// common/src/lib.rs
|
// common/src/lib.rs
|
||||||
|
|
||||||
|
pub mod search;
|
||||||
|
|
||||||
pub mod proto {
|
pub mod proto {
|
||||||
pub mod multieko2 {
|
pub mod multieko2 {
|
||||||
pub mod adresar {
|
pub mod adresar {
|
||||||
@@ -25,6 +28,9 @@ pub mod proto {
|
|||||||
pub mod table_script {
|
pub mod table_script {
|
||||||
include!("proto/multieko2.table_script.rs");
|
include!("proto/multieko2.table_script.rs");
|
||||||
}
|
}
|
||||||
|
pub mod search {
|
||||||
|
include!("proto/multieko2.search.rs");
|
||||||
|
}
|
||||||
pub const FILE_DESCRIPTOR_SET: &[u8] =
|
pub const FILE_DESCRIPTOR_SET: &[u8] =
|
||||||
include_bytes!("proto/descriptor.bin");
|
include_bytes!("proto/descriptor.bin");
|
||||||
}
|
}
|
||||||
|
|||||||
Binary file not shown.
317
common/src/proto/multieko2.search.rs
Normal file
317
common/src/proto/multieko2.search.rs
Normal file
@@ -0,0 +1,317 @@
|
|||||||
|
// This file is @generated by prost-build.
|
||||||
|
#[derive(Clone, PartialEq, ::prost::Message)]
|
||||||
|
pub struct SearchRequest {
|
||||||
|
#[prost(string, tag = "1")]
|
||||||
|
pub table_name: ::prost::alloc::string::String,
|
||||||
|
#[prost(string, tag = "2")]
|
||||||
|
pub query: ::prost::alloc::string::String,
|
||||||
|
}
|
||||||
|
#[derive(Clone, PartialEq, ::prost::Message)]
|
||||||
|
pub struct SearchResponse {
|
||||||
|
#[prost(message, repeated, tag = "1")]
|
||||||
|
pub hits: ::prost::alloc::vec::Vec<search_response::Hit>,
|
||||||
|
}
|
||||||
|
/// Nested message and enum types in `SearchResponse`.
|
||||||
|
pub mod search_response {
|
||||||
|
#[derive(Clone, PartialEq, ::prost::Message)]
|
||||||
|
pub struct Hit {
|
||||||
|
/// PostgreSQL row ID
|
||||||
|
#[prost(int64, tag = "1")]
|
||||||
|
pub id: i64,
|
||||||
|
#[prost(float, tag = "2")]
|
||||||
|
pub score: f32,
|
||||||
|
#[prost(string, tag = "3")]
|
||||||
|
pub content_json: ::prost::alloc::string::String,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/// Generated client implementations.
|
||||||
|
pub mod searcher_client {
|
||||||
|
#![allow(
|
||||||
|
unused_variables,
|
||||||
|
dead_code,
|
||||||
|
missing_docs,
|
||||||
|
clippy::wildcard_imports,
|
||||||
|
clippy::let_unit_value,
|
||||||
|
)]
|
||||||
|
use tonic::codegen::*;
|
||||||
|
use tonic::codegen::http::Uri;
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub struct SearcherClient<T> {
|
||||||
|
inner: tonic::client::Grpc<T>,
|
||||||
|
}
|
||||||
|
impl SearcherClient<tonic::transport::Channel> {
|
||||||
|
/// Attempt to create a new client by connecting to a given endpoint.
|
||||||
|
pub async fn connect<D>(dst: D) -> Result<Self, tonic::transport::Error>
|
||||||
|
where
|
||||||
|
D: TryInto<tonic::transport::Endpoint>,
|
||||||
|
D::Error: Into<StdError>,
|
||||||
|
{
|
||||||
|
let conn = tonic::transport::Endpoint::new(dst)?.connect().await?;
|
||||||
|
Ok(Self::new(conn))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
impl<T> SearcherClient<T>
|
||||||
|
where
|
||||||
|
T: tonic::client::GrpcService<tonic::body::Body>,
|
||||||
|
T::Error: Into<StdError>,
|
||||||
|
T::ResponseBody: Body<Data = Bytes> + std::marker::Send + 'static,
|
||||||
|
<T::ResponseBody as Body>::Error: Into<StdError> + std::marker::Send,
|
||||||
|
{
|
||||||
|
pub fn new(inner: T) -> Self {
|
||||||
|
let inner = tonic::client::Grpc::new(inner);
|
||||||
|
Self { inner }
|
||||||
|
}
|
||||||
|
pub fn with_origin(inner: T, origin: Uri) -> Self {
|
||||||
|
let inner = tonic::client::Grpc::with_origin(inner, origin);
|
||||||
|
Self { inner }
|
||||||
|
}
|
||||||
|
pub fn with_interceptor<F>(
|
||||||
|
inner: T,
|
||||||
|
interceptor: F,
|
||||||
|
) -> SearcherClient<InterceptedService<T, F>>
|
||||||
|
where
|
||||||
|
F: tonic::service::Interceptor,
|
||||||
|
T::ResponseBody: Default,
|
||||||
|
T: tonic::codegen::Service<
|
||||||
|
http::Request<tonic::body::Body>,
|
||||||
|
Response = http::Response<
|
||||||
|
<T as tonic::client::GrpcService<tonic::body::Body>>::ResponseBody,
|
||||||
|
>,
|
||||||
|
>,
|
||||||
|
<T as tonic::codegen::Service<
|
||||||
|
http::Request<tonic::body::Body>,
|
||||||
|
>>::Error: Into<StdError> + std::marker::Send + std::marker::Sync,
|
||||||
|
{
|
||||||
|
SearcherClient::new(InterceptedService::new(inner, interceptor))
|
||||||
|
}
|
||||||
|
/// Compress requests with the given encoding.
|
||||||
|
///
|
||||||
|
/// This requires the server to support it otherwise it might respond with an
|
||||||
|
/// error.
|
||||||
|
#[must_use]
|
||||||
|
pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self {
|
||||||
|
self.inner = self.inner.send_compressed(encoding);
|
||||||
|
self
|
||||||
|
}
|
||||||
|
/// Enable decompressing responses.
|
||||||
|
#[must_use]
|
||||||
|
pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self {
|
||||||
|
self.inner = self.inner.accept_compressed(encoding);
|
||||||
|
self
|
||||||
|
}
|
||||||
|
/// Limits the maximum size of a decoded message.
|
||||||
|
///
|
||||||
|
/// Default: `4MB`
|
||||||
|
#[must_use]
|
||||||
|
pub fn max_decoding_message_size(mut self, limit: usize) -> Self {
|
||||||
|
self.inner = self.inner.max_decoding_message_size(limit);
|
||||||
|
self
|
||||||
|
}
|
||||||
|
/// Limits the maximum size of an encoded message.
|
||||||
|
///
|
||||||
|
/// Default: `usize::MAX`
|
||||||
|
#[must_use]
|
||||||
|
pub fn max_encoding_message_size(mut self, limit: usize) -> Self {
|
||||||
|
self.inner = self.inner.max_encoding_message_size(limit);
|
||||||
|
self
|
||||||
|
}
|
||||||
|
pub async fn search_table(
|
||||||
|
&mut self,
|
||||||
|
request: impl tonic::IntoRequest<super::SearchRequest>,
|
||||||
|
) -> std::result::Result<tonic::Response<super::SearchResponse>, tonic::Status> {
|
||||||
|
self.inner
|
||||||
|
.ready()
|
||||||
|
.await
|
||||||
|
.map_err(|e| {
|
||||||
|
tonic::Status::unknown(
|
||||||
|
format!("Service was not ready: {}", e.into()),
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
let codec = tonic::codec::ProstCodec::default();
|
||||||
|
let path = http::uri::PathAndQuery::from_static(
|
||||||
|
"/multieko2.search.Searcher/SearchTable",
|
||||||
|
);
|
||||||
|
let mut req = request.into_request();
|
||||||
|
req.extensions_mut()
|
||||||
|
.insert(GrpcMethod::new("multieko2.search.Searcher", "SearchTable"));
|
||||||
|
self.inner.unary(req, path, codec).await
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/// Generated server implementations.
|
||||||
|
pub mod searcher_server {
|
||||||
|
#![allow(
|
||||||
|
unused_variables,
|
||||||
|
dead_code,
|
||||||
|
missing_docs,
|
||||||
|
clippy::wildcard_imports,
|
||||||
|
clippy::let_unit_value,
|
||||||
|
)]
|
||||||
|
use tonic::codegen::*;
|
||||||
|
/// Generated trait containing gRPC methods that should be implemented for use with SearcherServer.
|
||||||
|
#[async_trait]
|
||||||
|
pub trait Searcher: std::marker::Send + std::marker::Sync + 'static {
|
||||||
|
async fn search_table(
|
||||||
|
&self,
|
||||||
|
request: tonic::Request<super::SearchRequest>,
|
||||||
|
) -> std::result::Result<tonic::Response<super::SearchResponse>, tonic::Status>;
|
||||||
|
}
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub struct SearcherServer<T> {
|
||||||
|
inner: Arc<T>,
|
||||||
|
accept_compression_encodings: EnabledCompressionEncodings,
|
||||||
|
send_compression_encodings: EnabledCompressionEncodings,
|
||||||
|
max_decoding_message_size: Option<usize>,
|
||||||
|
max_encoding_message_size: Option<usize>,
|
||||||
|
}
|
||||||
|
impl<T> SearcherServer<T> {
|
||||||
|
pub fn new(inner: T) -> Self {
|
||||||
|
Self::from_arc(Arc::new(inner))
|
||||||
|
}
|
||||||
|
pub fn from_arc(inner: Arc<T>) -> Self {
|
||||||
|
Self {
|
||||||
|
inner,
|
||||||
|
accept_compression_encodings: Default::default(),
|
||||||
|
send_compression_encodings: Default::default(),
|
||||||
|
max_decoding_message_size: None,
|
||||||
|
max_encoding_message_size: None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
pub fn with_interceptor<F>(
|
||||||
|
inner: T,
|
||||||
|
interceptor: F,
|
||||||
|
) -> InterceptedService<Self, F>
|
||||||
|
where
|
||||||
|
F: tonic::service::Interceptor,
|
||||||
|
{
|
||||||
|
InterceptedService::new(Self::new(inner), interceptor)
|
||||||
|
}
|
||||||
|
/// Enable decompressing requests with the given encoding.
|
||||||
|
#[must_use]
|
||||||
|
pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self {
|
||||||
|
self.accept_compression_encodings.enable(encoding);
|
||||||
|
self
|
||||||
|
}
|
||||||
|
/// Compress responses with the given encoding, if the client supports it.
|
||||||
|
#[must_use]
|
||||||
|
pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self {
|
||||||
|
self.send_compression_encodings.enable(encoding);
|
||||||
|
self
|
||||||
|
}
|
||||||
|
/// Limits the maximum size of a decoded message.
|
||||||
|
///
|
||||||
|
/// Default: `4MB`
|
||||||
|
#[must_use]
|
||||||
|
pub fn max_decoding_message_size(mut self, limit: usize) -> Self {
|
||||||
|
self.max_decoding_message_size = Some(limit);
|
||||||
|
self
|
||||||
|
}
|
||||||
|
/// Limits the maximum size of an encoded message.
|
||||||
|
///
|
||||||
|
/// Default: `usize::MAX`
|
||||||
|
#[must_use]
|
||||||
|
pub fn max_encoding_message_size(mut self, limit: usize) -> Self {
|
||||||
|
self.max_encoding_message_size = Some(limit);
|
||||||
|
self
|
||||||
|
}
|
||||||
|
}
|
||||||
|
impl<T, B> tonic::codegen::Service<http::Request<B>> for SearcherServer<T>
|
||||||
|
where
|
||||||
|
T: Searcher,
|
||||||
|
B: Body + std::marker::Send + 'static,
|
||||||
|
B::Error: Into<StdError> + std::marker::Send + 'static,
|
||||||
|
{
|
||||||
|
type Response = http::Response<tonic::body::Body>;
|
||||||
|
type Error = std::convert::Infallible;
|
||||||
|
type Future = BoxFuture<Self::Response, Self::Error>;
|
||||||
|
fn poll_ready(
|
||||||
|
&mut self,
|
||||||
|
_cx: &mut Context<'_>,
|
||||||
|
) -> Poll<std::result::Result<(), Self::Error>> {
|
||||||
|
Poll::Ready(Ok(()))
|
||||||
|
}
|
||||||
|
fn call(&mut self, req: http::Request<B>) -> Self::Future {
|
||||||
|
match req.uri().path() {
|
||||||
|
"/multieko2.search.Searcher/SearchTable" => {
|
||||||
|
#[allow(non_camel_case_types)]
|
||||||
|
struct SearchTableSvc<T: Searcher>(pub Arc<T>);
|
||||||
|
impl<T: Searcher> tonic::server::UnaryService<super::SearchRequest>
|
||||||
|
for SearchTableSvc<T> {
|
||||||
|
type Response = super::SearchResponse;
|
||||||
|
type Future = BoxFuture<
|
||||||
|
tonic::Response<Self::Response>,
|
||||||
|
tonic::Status,
|
||||||
|
>;
|
||||||
|
fn call(
|
||||||
|
&mut self,
|
||||||
|
request: tonic::Request<super::SearchRequest>,
|
||||||
|
) -> Self::Future {
|
||||||
|
let inner = Arc::clone(&self.0);
|
||||||
|
let fut = async move {
|
||||||
|
<T as Searcher>::search_table(&inner, request).await
|
||||||
|
};
|
||||||
|
Box::pin(fut)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let accept_compression_encodings = self.accept_compression_encodings;
|
||||||
|
let send_compression_encodings = self.send_compression_encodings;
|
||||||
|
let max_decoding_message_size = self.max_decoding_message_size;
|
||||||
|
let max_encoding_message_size = self.max_encoding_message_size;
|
||||||
|
let inner = self.inner.clone();
|
||||||
|
let fut = async move {
|
||||||
|
let method = SearchTableSvc(inner);
|
||||||
|
let codec = tonic::codec::ProstCodec::default();
|
||||||
|
let mut grpc = tonic::server::Grpc::new(codec)
|
||||||
|
.apply_compression_config(
|
||||||
|
accept_compression_encodings,
|
||||||
|
send_compression_encodings,
|
||||||
|
)
|
||||||
|
.apply_max_message_size_config(
|
||||||
|
max_decoding_message_size,
|
||||||
|
max_encoding_message_size,
|
||||||
|
);
|
||||||
|
let res = grpc.unary(method, req).await;
|
||||||
|
Ok(res)
|
||||||
|
};
|
||||||
|
Box::pin(fut)
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
Box::pin(async move {
|
||||||
|
let mut response = http::Response::new(
|
||||||
|
tonic::body::Body::default(),
|
||||||
|
);
|
||||||
|
let headers = response.headers_mut();
|
||||||
|
headers
|
||||||
|
.insert(
|
||||||
|
tonic::Status::GRPC_STATUS,
|
||||||
|
(tonic::Code::Unimplemented as i32).into(),
|
||||||
|
);
|
||||||
|
headers
|
||||||
|
.insert(
|
||||||
|
http::header::CONTENT_TYPE,
|
||||||
|
tonic::metadata::GRPC_CONTENT_TYPE,
|
||||||
|
);
|
||||||
|
Ok(response)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
impl<T> Clone for SearcherServer<T> {
|
||||||
|
fn clone(&self) -> Self {
|
||||||
|
let inner = self.inner.clone();
|
||||||
|
Self {
|
||||||
|
inner,
|
||||||
|
accept_compression_encodings: self.accept_compression_encodings,
|
||||||
|
send_compression_encodings: self.send_compression_encodings,
|
||||||
|
max_decoding_message_size: self.max_decoding_message_size,
|
||||||
|
max_encoding_message_size: self.max_encoding_message_size,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/// Generated gRPC service name
|
||||||
|
pub const SERVICE_NAME: &str = "multieko2.search.Searcher";
|
||||||
|
impl<T> tonic::server::NamedService for SearcherServer<T> {
|
||||||
|
const NAME: &'static str = SERVICE_NAME;
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -5,10 +5,10 @@ pub struct PostTableDataRequest {
|
|||||||
pub profile_name: ::prost::alloc::string::String,
|
pub profile_name: ::prost::alloc::string::String,
|
||||||
#[prost(string, tag = "2")]
|
#[prost(string, tag = "2")]
|
||||||
pub table_name: ::prost::alloc::string::String,
|
pub table_name: ::prost::alloc::string::String,
|
||||||
#[prost(map = "string, string", tag = "3")]
|
#[prost(map = "string, message", tag = "3")]
|
||||||
pub data: ::std::collections::HashMap<
|
pub data: ::std::collections::HashMap<
|
||||||
::prost::alloc::string::String,
|
::prost::alloc::string::String,
|
||||||
::prost::alloc::string::String,
|
::prost_types::Value,
|
||||||
>,
|
>,
|
||||||
}
|
}
|
||||||
#[derive(Clone, PartialEq, ::prost::Message)]
|
#[derive(Clone, PartialEq, ::prost::Message)]
|
||||||
@@ -28,10 +28,10 @@ pub struct PutTableDataRequest {
|
|||||||
pub table_name: ::prost::alloc::string::String,
|
pub table_name: ::prost::alloc::string::String,
|
||||||
#[prost(int64, tag = "3")]
|
#[prost(int64, tag = "3")]
|
||||||
pub id: i64,
|
pub id: i64,
|
||||||
#[prost(map = "string, string", tag = "4")]
|
#[prost(map = "string, message", tag = "4")]
|
||||||
pub data: ::std::collections::HashMap<
|
pub data: ::std::collections::HashMap<
|
||||||
::prost::alloc::string::String,
|
::prost::alloc::string::String,
|
||||||
::prost::alloc::string::String,
|
::prost_types::Value,
|
||||||
>,
|
>,
|
||||||
}
|
}
|
||||||
#[derive(Clone, PartialEq, ::prost::Message)]
|
#[derive(Clone, PartialEq, ::prost::Message)]
|
||||||
|
|||||||
78
common/src/search.rs
Normal file
78
common/src/search.rs
Normal file
@@ -0,0 +1,78 @@
|
|||||||
|
// common/src/search.rs
|
||||||
|
|
||||||
|
use tantivy::schema::*;
|
||||||
|
use tantivy::tokenizer::*;
|
||||||
|
use tantivy::Index;
|
||||||
|
|
||||||
|
/// Creates a hybrid Slovak search schema with optimized prefix fields.
|
||||||
|
pub fn create_search_schema() -> Schema {
|
||||||
|
let mut schema_builder = Schema::builder();
|
||||||
|
|
||||||
|
schema_builder.add_u64_field("pg_id", INDEXED | STORED);
|
||||||
|
|
||||||
|
// FIELD 1: For prefixes (1-4 chars).
|
||||||
|
let short_prefix_indexing = TextFieldIndexing::default()
|
||||||
|
.set_tokenizer("slovak_prefix_edge")
|
||||||
|
.set_index_option(IndexRecordOption::WithFreqsAndPositions);
|
||||||
|
let short_prefix_options = TextOptions::default()
|
||||||
|
.set_indexing_options(short_prefix_indexing)
|
||||||
|
.set_stored();
|
||||||
|
schema_builder.add_text_field("prefix_edge", short_prefix_options);
|
||||||
|
|
||||||
|
// FIELD 2: For the full word.
|
||||||
|
let full_word_indexing = TextFieldIndexing::default()
|
||||||
|
.set_tokenizer("slovak_prefix_full")
|
||||||
|
.set_index_option(IndexRecordOption::WithFreqsAndPositions);
|
||||||
|
let full_word_options = TextOptions::default()
|
||||||
|
.set_indexing_options(full_word_indexing)
|
||||||
|
.set_stored();
|
||||||
|
schema_builder.add_text_field("prefix_full", full_word_options);
|
||||||
|
|
||||||
|
// NGRAM FIELD: For substring matching.
|
||||||
|
let ngram_field_indexing = TextFieldIndexing::default()
|
||||||
|
.set_tokenizer("slovak_ngram")
|
||||||
|
.set_index_option(IndexRecordOption::WithFreqsAndPositions);
|
||||||
|
let ngram_options = TextOptions::default()
|
||||||
|
.set_indexing_options(ngram_field_indexing)
|
||||||
|
.set_stored();
|
||||||
|
schema_builder.add_text_field("text_ngram", ngram_options);
|
||||||
|
|
||||||
|
schema_builder.build()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Registers all necessary Slovak tokenizers with the index.
|
||||||
|
///
|
||||||
|
/// This must be called by ANY process that opens the index
|
||||||
|
/// to ensure the tokenizers are loaded into memory.
|
||||||
|
pub fn register_slovak_tokenizers(index: &Index) -> tantivy::Result<()> {
|
||||||
|
let tokenizer_manager = index.tokenizers();
|
||||||
|
|
||||||
|
// TOKENIZER for `prefix_edge`: Edge N-gram (1-4 chars)
|
||||||
|
let edge_tokenizer =
|
||||||
|
TextAnalyzer::builder(NgramTokenizer::new(1, 4, true)?)
|
||||||
|
.filter(RemoveLongFilter::limit(40))
|
||||||
|
.filter(LowerCaser)
|
||||||
|
.filter(AsciiFoldingFilter)
|
||||||
|
.build();
|
||||||
|
tokenizer_manager.register("slovak_prefix_edge", edge_tokenizer);
|
||||||
|
|
||||||
|
// TOKENIZER for `prefix_full`: Simple word tokenizer
|
||||||
|
let full_tokenizer =
|
||||||
|
TextAnalyzer::builder(SimpleTokenizer::default())
|
||||||
|
.filter(RemoveLongFilter::limit(40))
|
||||||
|
.filter(LowerCaser)
|
||||||
|
.filter(AsciiFoldingFilter)
|
||||||
|
.build();
|
||||||
|
tokenizer_manager.register("slovak_prefix_full", full_tokenizer);
|
||||||
|
|
||||||
|
// NGRAM TOKENIZER: For substring matching.
|
||||||
|
let ngram_tokenizer =
|
||||||
|
TextAnalyzer::builder(NgramTokenizer::new(3, 3, false)?)
|
||||||
|
.filter(RemoveLongFilter::limit(40))
|
||||||
|
.filter(LowerCaser)
|
||||||
|
.filter(AsciiFoldingFilter)
|
||||||
|
.build();
|
||||||
|
tokenizer_manager.register("slovak_ngram", ngram_tokenizer);
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
19
search/Cargo.toml
Normal file
19
search/Cargo.toml
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
[package]
|
||||||
|
name = "search"
|
||||||
|
version.workspace = true
|
||||||
|
edition.workspace = true
|
||||||
|
license = "AGPL-3.0-or-later"
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
anyhow = { workspace = true }
|
||||||
|
prost = { workspace = true }
|
||||||
|
serde = { workspace = true }
|
||||||
|
serde_json = { workspace = true }
|
||||||
|
tokio = { workspace = true }
|
||||||
|
tonic = { workspace = true }
|
||||||
|
tracing = { workspace = true }
|
||||||
|
tantivy = { workspace = true }
|
||||||
|
|
||||||
|
common = { path = "../common" }
|
||||||
|
tonic-reflection = "0.13.1"
|
||||||
|
sqlx = { version = "0.8.6", features = ["postgres"] }
|
||||||
302
search/src/lib.rs
Normal file
302
search/src/lib.rs
Normal file
@@ -0,0 +1,302 @@
|
|||||||
|
// src/lib.rs
|
||||||
|
|
||||||
|
use std::collections::HashMap;
|
||||||
|
use std::path::Path;
|
||||||
|
use tantivy::collector::TopDocs;
|
||||||
|
use tantivy::query::{
|
||||||
|
BooleanQuery, BoostQuery, FuzzyTermQuery, Occur, Query, QueryParser,
|
||||||
|
TermQuery,
|
||||||
|
};
|
||||||
|
use tantivy::schema::{IndexRecordOption, Value};
|
||||||
|
use tantivy::{Index, TantivyDocument, Term};
|
||||||
|
use tonic::{Request, Response, Status};
|
||||||
|
|
||||||
|
use common::proto::multieko2::search::{
|
||||||
|
search_response::Hit, SearchRequest, SearchResponse,
|
||||||
|
};
|
||||||
|
pub use common::proto::multieko2::search::searcher_server::SearcherServer;
|
||||||
|
use common::proto::multieko2::search::searcher_server::Searcher;
|
||||||
|
use common::search::register_slovak_tokenizers;
|
||||||
|
use sqlx::{PgPool, Row};
|
||||||
|
use tracing::info;
|
||||||
|
|
||||||
|
// We need to hold the database pool in our service struct.
|
||||||
|
pub struct SearcherService {
|
||||||
|
pub pool: PgPool,
|
||||||
|
}
|
||||||
|
|
||||||
|
// normalize_slovak_text function remains unchanged...
|
||||||
|
fn normalize_slovak_text(text: &str) -> String {
|
||||||
|
// ... function content is unchanged ...
|
||||||
|
text.chars()
|
||||||
|
.map(|c| match c {
|
||||||
|
'á' | 'à' | 'â' | 'ä' | 'ă' | 'ā' => 'a',
|
||||||
|
'Á' | 'À' | 'Â' | 'Ä' | 'Ă' | 'Ā' => 'A',
|
||||||
|
'é' | 'è' | 'ê' | 'ë' | 'ě' | 'ē' => 'e',
|
||||||
|
'É' | 'È' | 'Ê' | 'Ë' | 'Ě' | 'Ē' => 'E',
|
||||||
|
'í' | 'ì' | 'î' | 'ï' | 'ī' => 'i',
|
||||||
|
'Í' | 'Ì' | 'Î' | 'Ï' | 'Ī' => 'I',
|
||||||
|
'ó' | 'ò' | 'ô' | 'ö' | 'ō' | 'ő' => 'o',
|
||||||
|
'Ó' | 'Ò' | 'Ô' | 'Ö' | 'Ō' | 'Ő' => 'O',
|
||||||
|
'ú' | 'ù' | 'û' | 'ü' | 'ū' | 'ű' => 'u',
|
||||||
|
'Ú' | 'Ù' | 'Û' | 'Ü' | 'Ū' | 'Ű' => 'U',
|
||||||
|
'ý' | 'ỳ' | 'ŷ' | 'ÿ' => 'y',
|
||||||
|
'Ý' | 'Ỳ' | 'Ŷ' | 'Ÿ' => 'Y',
|
||||||
|
'č' => 'c',
|
||||||
|
'Č' => 'C',
|
||||||
|
'ď' => 'd',
|
||||||
|
'Ď' => 'D',
|
||||||
|
'ľ' => 'l',
|
||||||
|
'Ľ' => 'L',
|
||||||
|
'ň' => 'n',
|
||||||
|
'Ň' => 'N',
|
||||||
|
'ř' => 'r',
|
||||||
|
'Ř' => 'R',
|
||||||
|
'š' => 's',
|
||||||
|
'Š' => 'S',
|
||||||
|
'ť' => 't',
|
||||||
|
'Ť' => 'T',
|
||||||
|
'ž' => 'z',
|
||||||
|
'Ž' => 'Z',
|
||||||
|
_ => c,
|
||||||
|
})
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tonic::async_trait]
|
||||||
|
impl Searcher for SearcherService {
|
||||||
|
async fn search_table(
|
||||||
|
&self,
|
||||||
|
request: Request<SearchRequest>,
|
||||||
|
) -> Result<Response<SearchResponse>, Status> {
|
||||||
|
let req = request.into_inner();
|
||||||
|
let table_name = req.table_name;
|
||||||
|
let query_str = req.query;
|
||||||
|
|
||||||
|
// --- MODIFIED LOGIC ---
|
||||||
|
// If the query is empty, fetch the 5 most recent records.
|
||||||
|
if query_str.trim().is_empty() {
|
||||||
|
info!(
|
||||||
|
"Empty query for table '{}'. Fetching default results.",
|
||||||
|
table_name
|
||||||
|
);
|
||||||
|
let qualified_table = format!("gen.\"{}\"", table_name);
|
||||||
|
let sql = format!(
|
||||||
|
"SELECT id, to_jsonb(t) AS data FROM {} t ORDER BY id DESC LIMIT 5",
|
||||||
|
qualified_table
|
||||||
|
);
|
||||||
|
|
||||||
|
let rows = sqlx::query(&sql)
|
||||||
|
.fetch_all(&self.pool)
|
||||||
|
.await
|
||||||
|
.map_err(|e| {
|
||||||
|
Status::internal(format!(
|
||||||
|
"DB query for default results failed: {}",
|
||||||
|
e
|
||||||
|
))
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let hits: Vec<Hit> = rows
|
||||||
|
.into_iter()
|
||||||
|
.map(|row| {
|
||||||
|
let id: i64 = row.try_get("id").unwrap_or_default();
|
||||||
|
let json_data: serde_json::Value =
|
||||||
|
row.try_get("data").unwrap_or_default();
|
||||||
|
Hit {
|
||||||
|
id,
|
||||||
|
// Score is 0.0 as this is not a relevance-ranked search
|
||||||
|
score: 0.0,
|
||||||
|
content_json: json_data.to_string(),
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
info!("--- SERVER: Successfully processed empty query. Returning {} default hits. ---", hits.len());
|
||||||
|
return Ok(Response::new(SearchResponse { hits }));
|
||||||
|
}
|
||||||
|
// --- END OF MODIFIED LOGIC ---
|
||||||
|
|
||||||
|
let index_path = Path::new("./tantivy_indexes").join(&table_name);
|
||||||
|
if !index_path.exists() {
|
||||||
|
return Err(Status::not_found(format!(
|
||||||
|
"No search index found for table '{}'",
|
||||||
|
table_name
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
|
||||||
|
let index = Index::open_in_dir(&index_path)
|
||||||
|
.map_err(|e| Status::internal(format!("Failed to open index: {}", e)))?;
|
||||||
|
|
||||||
|
register_slovak_tokenizers(&index).map_err(|e| {
|
||||||
|
Status::internal(format!("Failed to register Slovak tokenizers: {}", e))
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let reader = index.reader().map_err(|e| {
|
||||||
|
Status::internal(format!("Failed to create index reader: {}", e))
|
||||||
|
})?;
|
||||||
|
let searcher = reader.searcher();
|
||||||
|
let schema = index.schema();
|
||||||
|
|
||||||
|
let pg_id_field = schema.get_field("pg_id").map_err(|_| {
|
||||||
|
Status::internal("Schema is missing the 'pg_id' field.")
|
||||||
|
})?;
|
||||||
|
|
||||||
|
// --- Query Building Logic (no changes here) ---
|
||||||
|
let prefix_edge_field = schema.get_field("prefix_edge").unwrap();
|
||||||
|
let prefix_full_field = schema.get_field("prefix_full").unwrap();
|
||||||
|
let text_ngram_field = schema.get_field("text_ngram").unwrap();
|
||||||
|
let normalized_query = normalize_slovak_text(&query_str);
|
||||||
|
let words: Vec<&str> = normalized_query.split_whitespace().collect();
|
||||||
|
if words.is_empty() {
|
||||||
|
return Ok(Response::new(SearchResponse { hits: vec![] }));
|
||||||
|
}
|
||||||
|
let mut query_layers: Vec<(Occur, Box<dyn Query>)> = Vec::new();
|
||||||
|
// ... all your query building layers remain exactly the same ...
|
||||||
|
// ===============================
|
||||||
|
// LAYER 1: PREFIX MATCHING (HIGHEST PRIORITY, Boost: 4.0)
|
||||||
|
// ===============================
|
||||||
|
{
|
||||||
|
let mut must_clauses: Vec<(Occur, Box<dyn Query>)> = Vec::new();
|
||||||
|
for word in &words {
|
||||||
|
let edge_term =
|
||||||
|
Term::from_field_text(prefix_edge_field, word);
|
||||||
|
let full_term =
|
||||||
|
Term::from_field_text(prefix_full_field, word);
|
||||||
|
|
||||||
|
let per_word_query = BooleanQuery::new(vec![
|
||||||
|
(
|
||||||
|
Occur::Should,
|
||||||
|
Box::new(TermQuery::new(
|
||||||
|
edge_term,
|
||||||
|
IndexRecordOption::Basic,
|
||||||
|
)),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
Occur::Should,
|
||||||
|
Box::new(TermQuery::new(
|
||||||
|
full_term,
|
||||||
|
IndexRecordOption::Basic,
|
||||||
|
)),
|
||||||
|
),
|
||||||
|
]);
|
||||||
|
must_clauses.push((Occur::Must, Box::new(per_word_query) as Box<dyn Query>));
|
||||||
|
}
|
||||||
|
|
||||||
|
if !must_clauses.is_empty() {
|
||||||
|
let prefix_query = BooleanQuery::new(must_clauses);
|
||||||
|
let boosted_query =
|
||||||
|
BoostQuery::new(Box::new(prefix_query), 4.0);
|
||||||
|
query_layers.push((Occur::Should, Box::new(boosted_query)));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ===============================
|
||||||
|
// LAYER 2: FUZZY MATCHING (HIGH PRIORITY, Boost: 3.0)
|
||||||
|
// ===============================
|
||||||
|
{
|
||||||
|
let last_word = words.last().unwrap();
|
||||||
|
let fuzzy_term =
|
||||||
|
Term::from_field_text(prefix_full_field, last_word);
|
||||||
|
let fuzzy_query = FuzzyTermQuery::new(fuzzy_term, 2, true);
|
||||||
|
let boosted_query = BoostQuery::new(Box::new(fuzzy_query), 3.0);
|
||||||
|
query_layers.push((Occur::Should, Box::new(boosted_query)));
|
||||||
|
}
|
||||||
|
|
||||||
|
// ===============================
|
||||||
|
// LAYER 3: PHRASE MATCHING WITH SLOP (MEDIUM PRIORITY, Boost: 2.0)
|
||||||
|
// ===============================
|
||||||
|
if words.len() > 1 {
|
||||||
|
let slop_parser =
|
||||||
|
QueryParser::for_index(&index, vec![prefix_full_field]);
|
||||||
|
let slop_query_str = format!("\"{}\"~3", normalized_query);
|
||||||
|
if let Ok(slop_query) = slop_parser.parse_query(&slop_query_str) {
|
||||||
|
let boosted_query = BoostQuery::new(slop_query, 2.0);
|
||||||
|
query_layers.push((Occur::Should, Box::new(boosted_query)));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ===============================
|
||||||
|
// LAYER 4: NGRAM SUBSTRING MATCHING (LOWEST PRIORITY, Boost: 1.0)
|
||||||
|
// ===============================
|
||||||
|
{
|
||||||
|
let ngram_parser =
|
||||||
|
QueryParser::for_index(&index, vec![text_ngram_field]);
|
||||||
|
if let Ok(ngram_query) =
|
||||||
|
ngram_parser.parse_query(&normalized_query)
|
||||||
|
{
|
||||||
|
let boosted_query = BoostQuery::new(ngram_query, 1.0);
|
||||||
|
query_layers.push((Occur::Should, Box::new(boosted_query)));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let master_query = BooleanQuery::new(query_layers);
|
||||||
|
// --- End of Query Building Logic ---
|
||||||
|
|
||||||
|
let top_docs = searcher
|
||||||
|
.search(&master_query, &TopDocs::with_limit(100))
|
||||||
|
.map_err(|e| Status::internal(format!("Search failed: {}", e)))?;
|
||||||
|
|
||||||
|
if top_docs.is_empty() {
|
||||||
|
return Ok(Response::new(SearchResponse { hits: vec![] }));
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- NEW LOGIC: Fetch from DB and combine results ---
|
||||||
|
|
||||||
|
// Step 1: Extract (score, pg_id) from Tantivy results.
|
||||||
|
let mut scored_ids: Vec<(f32, u64)> = Vec::new();
|
||||||
|
for (score, doc_address) in top_docs {
|
||||||
|
let doc: TantivyDocument = searcher.doc(doc_address).map_err(|e| {
|
||||||
|
Status::internal(format!("Failed to retrieve document: {}", e))
|
||||||
|
})?;
|
||||||
|
if let Some(pg_id_value) = doc.get_first(pg_id_field) {
|
||||||
|
if let Some(pg_id) = pg_id_value.as_u64() {
|
||||||
|
scored_ids.push((score, pg_id));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Step 2: Fetch all corresponding rows from Postgres in a single query.
|
||||||
|
let pg_ids: Vec<i64> =
|
||||||
|
scored_ids.iter().map(|(_, id)| *id as i64).collect();
|
||||||
|
let qualified_table = format!("gen.\"{}\"", table_name);
|
||||||
|
let query_str = format!(
|
||||||
|
"SELECT id, to_jsonb(t) AS data FROM {} t WHERE id = ANY($1)",
|
||||||
|
qualified_table
|
||||||
|
);
|
||||||
|
|
||||||
|
let rows = sqlx::query(&query_str)
|
||||||
|
.bind(&pg_ids)
|
||||||
|
.fetch_all(&self.pool)
|
||||||
|
.await
|
||||||
|
.map_err(|e| {
|
||||||
|
Status::internal(format!("Database query failed: {}", e))
|
||||||
|
})?;
|
||||||
|
|
||||||
|
// Step 3: Map the database results by ID for quick lookup.
|
||||||
|
let mut content_map: HashMap<i64, String> = HashMap::new();
|
||||||
|
for row in rows {
|
||||||
|
let id: i64 = row.try_get("id").unwrap_or(0);
|
||||||
|
let json_data: serde_json::Value =
|
||||||
|
row.try_get("data").unwrap_or(serde_json::Value::Null);
|
||||||
|
content_map.insert(id, json_data.to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
// Step 4: Build the final response, combining Tantivy scores with PG content.
|
||||||
|
let hits: Vec<Hit> = scored_ids
|
||||||
|
.into_iter()
|
||||||
|
.filter_map(|(score, pg_id)| {
|
||||||
|
content_map
|
||||||
|
.get(&(pg_id as i64))
|
||||||
|
.map(|content_json| Hit {
|
||||||
|
id: pg_id as i64,
|
||||||
|
score,
|
||||||
|
content_json: content_json.clone(),
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
info!("--- SERVER: Successfully processed search. Returning {} hits. ---", hits.len());
|
||||||
|
|
||||||
|
let response = SearchResponse { hits };
|
||||||
|
Ok(Response::new(response))
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -6,13 +6,17 @@ license = "AGPL-3.0-or-later"
|
|||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
common = { path = "../common" }
|
common = { path = "../common" }
|
||||||
|
search = { path = "../search" }
|
||||||
|
|
||||||
|
anyhow = { workspace = true }
|
||||||
|
tantivy = { workspace = true }
|
||||||
|
prost-types = { workspace = true }
|
||||||
chrono = { version = "0.4.40", features = ["serde"] }
|
chrono = { version = "0.4.40", features = ["serde"] }
|
||||||
dotenvy = "0.15.7"
|
dotenvy = "0.15.7"
|
||||||
prost = "0.13.5"
|
prost = "0.13.5"
|
||||||
serde = { version = "1.0.219", features = ["derive"] }
|
serde = { version = "1.0.219", features = ["derive"] }
|
||||||
serde_json = "1.0.140"
|
serde_json = "1.0.140"
|
||||||
sqlx = { version = "0.8.5", features = ["chrono", "postgres", "runtime-tokio", "runtime-tokio-native-tls", "time", "uuid"] }
|
sqlx = { version = "0.8.5", features = ["chrono", "postgres", "runtime-tokio", "runtime-tokio-native-tls", "rust_decimal", "time", "uuid"] }
|
||||||
tokio = { version = "1.44.2", features = ["full", "macros"] }
|
tokio = { version = "1.44.2", features = ["full", "macros"] }
|
||||||
tonic = "0.13.0"
|
tonic = "0.13.0"
|
||||||
tonic-reflection = "0.13.0"
|
tonic-reflection = "0.13.0"
|
||||||
@@ -28,6 +32,9 @@ bcrypt = "0.17.0"
|
|||||||
validator = { version = "0.20.0", features = ["derive"] }
|
validator = { version = "0.20.0", features = ["derive"] }
|
||||||
uuid = { version = "1.16.0", features = ["serde", "v4"] }
|
uuid = { version = "1.16.0", features = ["serde", "v4"] }
|
||||||
jsonwebtoken = "9.3.1"
|
jsonwebtoken = "9.3.1"
|
||||||
|
rust-stemmers = "1.2.0"
|
||||||
|
rust_decimal = "1.37.2"
|
||||||
|
rust_decimal_macros = "1.37.1"
|
||||||
|
|
||||||
[lib]
|
[lib]
|
||||||
name = "server"
|
name = "server"
|
||||||
@@ -37,3 +44,5 @@ path = "src/lib.rs"
|
|||||||
tokio = { version = "1.44", features = ["full", "test-util"] }
|
tokio = { version = "1.44", features = ["full", "test-util"] }
|
||||||
rstest = "0.25.0"
|
rstest = "0.25.0"
|
||||||
lazy_static = "1.5.0"
|
lazy_static = "1.5.0"
|
||||||
|
rand = "0.9.1"
|
||||||
|
futures = "0.3.31"
|
||||||
|
|||||||
13
server/Makefile
Normal file
13
server/Makefile
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
# Makefile
|
||||||
|
|
||||||
|
test: reset_db run_tests
|
||||||
|
|
||||||
|
reset_db:
|
||||||
|
@echo "Resetting test database..."
|
||||||
|
@./scripts/reset_test_db.sh
|
||||||
|
|
||||||
|
run_tests:
|
||||||
|
@echo "Running tests..."
|
||||||
|
@cargo test --test mod -- --test-threads=1
|
||||||
|
|
||||||
|
.PHONY: test
|
||||||
@@ -1,24 +0,0 @@
|
|||||||
-- Add migration script here
|
|
||||||
CREATE TABLE adresar (
|
|
||||||
id BIGSERIAL PRIMARY KEY,
|
|
||||||
deleted BOOLEAN NOT NULL DEFAULT FALSE,
|
|
||||||
firma TEXT NOT NULL,
|
|
||||||
kz TEXT,
|
|
||||||
drc TEXT,
|
|
||||||
ulica TEXT,
|
|
||||||
psc TEXT,
|
|
||||||
mesto TEXT,
|
|
||||||
stat TEXT,
|
|
||||||
banka TEXT,
|
|
||||||
ucet TEXT,
|
|
||||||
skladm TEXT,
|
|
||||||
ico TEXT,
|
|
||||||
kontakt TEXT,
|
|
||||||
telefon TEXT,
|
|
||||||
skladu TEXT,
|
|
||||||
fax TEXT,
|
|
||||||
created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP
|
|
||||||
);
|
|
||||||
|
|
||||||
CREATE INDEX idx_adresar_firma ON adresar (firma);
|
|
||||||
CREATE INDEX idx_adresar_mesto ON adresar (mesto);
|
|
||||||
@@ -1,22 +0,0 @@
|
|||||||
-- Add migration script here
|
|
||||||
CREATE TABLE uctovnictvo (
|
|
||||||
id BIGSERIAL PRIMARY KEY,
|
|
||||||
deleted BOOLEAN NOT NULL DEFAULT FALSE,
|
|
||||||
adresar_id BIGINT NOT NULL REFERENCES adresar(id), -- Link to adresar table
|
|
||||||
c_dokladu TEXT NOT NULL,
|
|
||||||
datum DATE NOT NULL,
|
|
||||||
c_faktury TEXT NOT NULL,
|
|
||||||
obsah TEXT,
|
|
||||||
stredisko TEXT,
|
|
||||||
c_uctu TEXT,
|
|
||||||
md TEXT,
|
|
||||||
identif TEXT,
|
|
||||||
poznanka TEXT,
|
|
||||||
firma TEXT NOT NULL,
|
|
||||||
created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP
|
|
||||||
);
|
|
||||||
|
|
||||||
CREATE INDEX idx_uctovnictvo_adresar_id ON uctovnictvo (adresar_id);
|
|
||||||
CREATE INDEX idx_uctovnictvo_firma ON uctovnictvo (firma);
|
|
||||||
CREATE INDEX idx_uctovnictvo_c_dokladu ON uctovnictvo (c_dokladu);
|
|
||||||
CREATE INDEX idx_uctovnictvo_poznanka ON uctovnictvo (poznanka);
|
|
||||||
@@ -1,9 +1,12 @@
|
|||||||
-- Add migration script here
|
-- Add migration script here
|
||||||
CREATE TABLE profiles (
|
CREATE TABLE schemas (
|
||||||
id BIGSERIAL PRIMARY KEY,
|
id BIGSERIAL PRIMARY KEY,
|
||||||
name TEXT NOT NULL UNIQUE,
|
name TEXT NOT NULL UNIQUE,
|
||||||
created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP
|
created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
description TEXT,
|
||||||
|
is_active BOOLEAN DEFAULT TRUE
|
||||||
);
|
);
|
||||||
|
|
||||||
-- Create default profile for existing data
|
-- Create default profile for existing data
|
||||||
INSERT INTO profiles (name) VALUES ('default');
|
INSERT INTO schemas (name) VALUES ('default');
|
||||||
|
CREATE SCHEMA IF NOT EXISTS "default";
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
-- Main table definitions
|
-- Main table definitions
|
||||||
|
|
||||||
CREATE TABLE table_definitions (
|
CREATE TABLE table_definitions (
|
||||||
id BIGSERIAL PRIMARY KEY,
|
id BIGSERIAL PRIMARY KEY,
|
||||||
deleted BOOLEAN NOT NULL DEFAULT FALSE,
|
deleted BOOLEAN NOT NULL DEFAULT FALSE,
|
||||||
@@ -6,7 +7,7 @@ CREATE TABLE table_definitions (
|
|||||||
columns JSONB NOT NULL,
|
columns JSONB NOT NULL,
|
||||||
indexes JSONB NOT NULL,
|
indexes JSONB NOT NULL,
|
||||||
created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP,
|
created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP,
|
||||||
profile_id BIGINT NOT NULL REFERENCES profiles(id) DEFAULT 1
|
schema_id BIGINT NOT NULL REFERENCES schemas(id)
|
||||||
);
|
);
|
||||||
|
|
||||||
-- Relationship table for multiple links
|
-- Relationship table for multiple links
|
||||||
@@ -18,9 +19,10 @@ CREATE TABLE table_definition_links (
|
|||||||
PRIMARY KEY (source_table_id, linked_table_id)
|
PRIMARY KEY (source_table_id, linked_table_id)
|
||||||
);
|
);
|
||||||
|
|
||||||
-- Create composite unique index for profile+table combination
|
-- Create composite unique index for schema+table combination
|
||||||
CREATE UNIQUE INDEX idx_table_definitions_profile_table
|
CREATE UNIQUE INDEX idx_table_definitions_schema_table
|
||||||
ON table_definitions (profile_id, table_name);
|
ON table_definitions (schema_id, table_name);
|
||||||
|
|
||||||
CREATE INDEX idx_links_source ON table_definition_links (source_table_id);
|
CREATE INDEX idx_links_source ON table_definition_links (source_table_id);
|
||||||
CREATE INDEX idx_links_target ON table_definition_links (linked_table_id);
|
CREATE INDEX idx_links_target ON table_definition_links (linked_table_id);
|
||||||
|
|
||||||
|
|||||||
@@ -8,7 +8,7 @@ CREATE TABLE table_scripts (
|
|||||||
script TEXT NOT NULL,
|
script TEXT NOT NULL,
|
||||||
description TEXT,
|
description TEXT,
|
||||||
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
profile_id BIGINT NOT NULL REFERENCES profiles(id) DEFAULT 1,
|
schema_id BIGINT NOT NULL REFERENCES schemas(id),
|
||||||
UNIQUE(table_definitions_id, target_column)
|
UNIQUE(table_definitions_id, target_column)
|
||||||
);
|
);
|
||||||
|
|
||||||
|
|||||||
@@ -1,3 +0,0 @@
|
|||||||
-- Add migration script here
|
|
||||||
|
|
||||||
CREATE SCHEMA IF NOT EXISTS gen;
|
|
||||||
9
server/scripts/reset_test_db.sh
Executable file
9
server/scripts/reset_test_db.sh
Executable file
@@ -0,0 +1,9 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
# scripts/reset_test_db.sh
|
||||||
|
|
||||||
|
DATABASE_URL=${TEST_DATABASE_URL:-"postgres://multi_psql_dev:3@localhost:5432/multi_rust_test"}
|
||||||
|
|
||||||
|
echo "Reset db script"
|
||||||
|
yes | sqlx database drop --database-url "$DATABASE_URL"
|
||||||
|
sqlx database create --database-url "$DATABASE_URL"
|
||||||
|
echo "Test database reset complete."
|
||||||
@@ -1,156 +0,0 @@
|
|||||||
❯ grpcurl -plaintext -d '{"id": 1}' localhost:50051 multieko2.adresar.Adresar/GetAdresar
|
|
||||||
{
|
|
||||||
"id": "1",
|
|
||||||
"firma": "Updated Firma",
|
|
||||||
"kz": "Updated KZ",
|
|
||||||
"drc": "Updated DRC",
|
|
||||||
"ulica": "Updated Ulica",
|
|
||||||
"psc": "Updated PSC",
|
|
||||||
"mesto": "Updated Mesto",
|
|
||||||
"stat": "Updated Stat",
|
|
||||||
"banka": "Updated Banka",
|
|
||||||
"ucet": "Updated Ucet",
|
|
||||||
"skladm": "Updated Skladm",
|
|
||||||
"ico": "Updated ICO",
|
|
||||||
"kontakt": "Updated Kontakt",
|
|
||||||
"telefon": "Updated Telefon",
|
|
||||||
"skladu": "Updated Skladu",
|
|
||||||
"fax": "Updated Fax"
|
|
||||||
}
|
|
||||||
❯ grpcurl -plaintext -d '{"id": 2}' localhost:50051 multieko2.adresar.Adresar/GetAdresar
|
|
||||||
{
|
|
||||||
"id": "2",
|
|
||||||
"firma": "asdfasf",
|
|
||||||
"kz": " ",
|
|
||||||
"drc": " ",
|
|
||||||
"ulica": " ",
|
|
||||||
"psc": "sdfasdf",
|
|
||||||
"mesto": "asf",
|
|
||||||
"stat": "as",
|
|
||||||
"banka": "df",
|
|
||||||
"ucet": "asf",
|
|
||||||
"skladm": "f",
|
|
||||||
"ico": "f",
|
|
||||||
"kontakt": "f",
|
|
||||||
"telefon": "f",
|
|
||||||
"skladu": "f",
|
|
||||||
"fax": " "
|
|
||||||
}
|
|
||||||
❯ grpcurl -plaintext -d '{"id": 1}' localhost:50051 multieko2.adresar.Adresar/DeleteAdresar
|
|
||||||
{
|
|
||||||
"success": true
|
|
||||||
}
|
|
||||||
❯ grpcurl -plaintext -d '{"id": 1}' localhost:50051 multieko2.adresar.Adresar/GetAdresar
|
|
||||||
ERROR:
|
|
||||||
Code: NotFound
|
|
||||||
Message: no rows returned by a query that expected to return at least one row
|
|
||||||
❯ grpcurl -plaintext -d '{"id": 2}' localhost:50051 multieko2.adresar.Adresar/GetAdresar
|
|
||||||
{
|
|
||||||
"id": "2",
|
|
||||||
"firma": "asdfasf",
|
|
||||||
"kz": " ",
|
|
||||||
"drc": " ",
|
|
||||||
"ulica": " ",
|
|
||||||
"psc": "sdfasdf",
|
|
||||||
"mesto": "asf",
|
|
||||||
"stat": "as",
|
|
||||||
"banka": "df",
|
|
||||||
"ucet": "asf",
|
|
||||||
"skladm": "f",
|
|
||||||
"ico": "f",
|
|
||||||
"kontakt": "f",
|
|
||||||
"telefon": "f",
|
|
||||||
"skladu": "f",
|
|
||||||
"fax": " "
|
|
||||||
}
|
|
||||||
|
|
||||||
❯ grpcurl -plaintext -d '{
|
|
||||||
"firma": "New Firma",
|
|
||||||
"kz": "New KZ",
|
|
||||||
"drc": "New DRC",
|
|
||||||
"ulica": "New Ulica",
|
|
||||||
"psc": "New PSC",
|
|
||||||
"mesto": "New Mesto",
|
|
||||||
"stat": "New Stat",
|
|
||||||
"banka": "New Banka",
|
|
||||||
"ucet": "New Ucet",
|
|
||||||
"skladm": "New Skladm",
|
|
||||||
"ico": "New ICO",
|
|
||||||
"kontakt": "New Kontakt",
|
|
||||||
"telefon": "New Telefon",
|
|
||||||
"skladu": "New Skladu",
|
|
||||||
"fax": "New Fax"
|
|
||||||
}' localhost:50051 multieko2.adresar.Adresar/PostAdresar
|
|
||||||
{
|
|
||||||
"id": "43",
|
|
||||||
"firma": "New Firma",
|
|
||||||
"kz": "New KZ",
|
|
||||||
"drc": "New DRC",
|
|
||||||
"ulica": "New Ulica",
|
|
||||||
"psc": "New PSC",
|
|
||||||
"mesto": "New Mesto",
|
|
||||||
"stat": "New Stat",
|
|
||||||
"banka": "New Banka",
|
|
||||||
"ucet": "New Ucet",
|
|
||||||
"skladm": "New Skladm",
|
|
||||||
"ico": "New ICO",
|
|
||||||
"kontakt": "New Kontakt",
|
|
||||||
"telefon": "New Telefon",
|
|
||||||
"skladu": "New Skladu",
|
|
||||||
"fax": "New Fax"
|
|
||||||
}
|
|
||||||
❯ grpcurl -plaintext -d '{
|
|
||||||
"id": 43,
|
|
||||||
"firma": "Updated Firma",
|
|
||||||
"kz": "Updated KZ",
|
|
||||||
"drc": "Updated DRC",
|
|
||||||
"ulica": "Updated Ulica",
|
|
||||||
"psc": "Updated PSC",
|
|
||||||
"mesto": "Updated Mesto",
|
|
||||||
"stat": "Updated Stat",
|
|
||||||
"banka": "Updated Banka",
|
|
||||||
"ucet": "Updated Ucet",
|
|
||||||
"skladm": "Updated Skladm",
|
|
||||||
"ico": "Updated ICO",
|
|
||||||
"kontakt": "Updated Kontakt",
|
|
||||||
"telefon": "Updated Telefon",
|
|
||||||
"skladu": "Updated Skladu",
|
|
||||||
"fax": "Updated Fax"
|
|
||||||
}' localhost:50051 multieko2.adresar.Adresar/PutAdresar
|
|
||||||
{
|
|
||||||
"id": "43",
|
|
||||||
"firma": "Updated Firma",
|
|
||||||
"kz": "Updated KZ",
|
|
||||||
"drc": "Updated DRC",
|
|
||||||
"ulica": "Updated Ulica",
|
|
||||||
"psc": "Updated PSC",
|
|
||||||
"mesto": "Updated Mesto",
|
|
||||||
"stat": "Updated Stat",
|
|
||||||
"banka": "Updated Banka",
|
|
||||||
"ucet": "Updated Ucet",
|
|
||||||
"skladm": "Updated Skladm",
|
|
||||||
"ico": "Updated ICO",
|
|
||||||
"kontakt": "Updated Kontakt",
|
|
||||||
"telefon": "Updated Telefon",
|
|
||||||
"skladu": "Updated Skladu",
|
|
||||||
"fax": "Updated Fax"
|
|
||||||
}
|
|
||||||
❯ grpcurl -plaintext -d '{"id": 43}' localhost:50051 multieko2.adresar.Adresar/GetAdresar
|
|
||||||
{
|
|
||||||
"id": "43",
|
|
||||||
"firma": "Updated Firma",
|
|
||||||
"kz": "Updated KZ",
|
|
||||||
"drc": "Updated DRC",
|
|
||||||
"ulica": "Updated Ulica",
|
|
||||||
"psc": "Updated PSC",
|
|
||||||
"mesto": "Updated Mesto",
|
|
||||||
"stat": "Updated Stat",
|
|
||||||
"banka": "Updated Banka",
|
|
||||||
"ucet": "Updated Ucet",
|
|
||||||
"skladm": "Updated Skladm",
|
|
||||||
"ico": "Updated ICO",
|
|
||||||
"kontakt": "Updated Kontakt",
|
|
||||||
"telefon": "Updated Telefon",
|
|
||||||
"skladu": "Updated Skladu",
|
|
||||||
"fax": "Updated Fax"
|
|
||||||
}
|
|
||||||
@@ -1,29 +0,0 @@
|
|||||||
|
|
||||||
# TOTAL items in the adresar
|
|
||||||
❯ grpcurl -plaintext localhost:50051 multieko2.adresar.Adresar/GetAdresarCount
|
|
||||||
{
|
|
||||||
"count": "5"
|
|
||||||
}
|
|
||||||
# Item at this count. If there are 43 items, number 1 is the first item
|
|
||||||
❯ grpcurl -plaintext -d '{"position": 1}' localhost:50051 multieko2.adresar.Adresar/GetAdresarByPosition
|
|
||||||
{
|
|
||||||
"id": "1",
|
|
||||||
"firma": "ks555",
|
|
||||||
"kz": "f",
|
|
||||||
"drc": "asdf",
|
|
||||||
"ulica": "as",
|
|
||||||
"psc": "f",
|
|
||||||
"mesto": "asf",
|
|
||||||
"stat": "as",
|
|
||||||
"banka": "fa",
|
|
||||||
"telefon": "a",
|
|
||||||
"skladu": "fd",
|
|
||||||
"fax": "asf"
|
|
||||||
}
|
|
||||||
# Item fetched by id. The first item was created and marked as deleted, therefore number 1 in ids shouldnt be fetched.
|
|
||||||
❯ grpcurl -plaintext -d '{"id": 1}' localhost:50051 multieko2.adresar.Adresar/GetAdresar
|
|
||||||
ERROR:
|
|
||||||
Code: NotFound
|
|
||||||
Message: no rows returned by a query that expected to return at least one row
|
|
||||||
╭─ ~ ············································· 69 ✘
|
|
||||||
╰─
|
|
||||||
@@ -1,15 +0,0 @@
|
|||||||
// src/adresar/handlers.rs
|
|
||||||
|
|
||||||
pub mod post_adresar;
|
|
||||||
pub mod get_adresar;
|
|
||||||
pub mod put_adresar;
|
|
||||||
pub mod delete_adresar;
|
|
||||||
pub mod get_adresar_count;
|
|
||||||
pub mod get_adresar_by_position;
|
|
||||||
|
|
||||||
pub use post_adresar::post_adresar;
|
|
||||||
pub use get_adresar::get_adresar;
|
|
||||||
pub use put_adresar::put_adresar;
|
|
||||||
pub use delete_adresar::delete_adresar;
|
|
||||||
pub use get_adresar_count::get_adresar_count;
|
|
||||||
pub use get_adresar_by_position::get_adresar_by_position;
|
|
||||||
@@ -1,27 +0,0 @@
|
|||||||
// src/adresar/handlers/delete_adresar.rs
|
|
||||||
use tonic::Status;
|
|
||||||
use sqlx::PgPool;
|
|
||||||
use common::proto::multieko2::adresar::{DeleteAdresarRequest, DeleteAdresarResponse};
|
|
||||||
|
|
||||||
pub async fn delete_adresar(
|
|
||||||
db_pool: &PgPool,
|
|
||||||
request: DeleteAdresarRequest,
|
|
||||||
) -> Result<DeleteAdresarResponse, Status> {
|
|
||||||
let rows_affected = sqlx::query!(
|
|
||||||
r#"
|
|
||||||
UPDATE adresar
|
|
||||||
SET deleted = true
|
|
||||||
WHERE id = $1 AND deleted = false
|
|
||||||
"#,
|
|
||||||
request.id
|
|
||||||
)
|
|
||||||
.execute(db_pool)
|
|
||||||
.await
|
|
||||||
.map_err(|e| Status::internal(e.to_string()))?
|
|
||||||
.rows_affected();
|
|
||||||
|
|
||||||
Ok(DeleteAdresarResponse {
|
|
||||||
success: rows_affected > 0,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
@@ -1,63 +0,0 @@
|
|||||||
// src/adresar/handlers/get_adresar.rs
|
|
||||||
use tonic::Status;
|
|
||||||
use sqlx::PgPool;
|
|
||||||
use crate::adresar::models::Adresar;
|
|
||||||
use common::proto::multieko2::adresar::{GetAdresarRequest, AdresarResponse};
|
|
||||||
|
|
||||||
pub async fn get_adresar(
|
|
||||||
db_pool: &PgPool,
|
|
||||||
request: GetAdresarRequest,
|
|
||||||
) -> Result<AdresarResponse, Status> {
|
|
||||||
let adresar = sqlx::query_as!(
|
|
||||||
Adresar,
|
|
||||||
r#"
|
|
||||||
SELECT
|
|
||||||
id,
|
|
||||||
deleted,
|
|
||||||
firma,
|
|
||||||
kz,
|
|
||||||
drc,
|
|
||||||
ulica,
|
|
||||||
psc,
|
|
||||||
mesto,
|
|
||||||
stat,
|
|
||||||
banka,
|
|
||||||
ucet,
|
|
||||||
skladm,
|
|
||||||
ico,
|
|
||||||
kontakt,
|
|
||||||
telefon,
|
|
||||||
skladu,
|
|
||||||
fax
|
|
||||||
FROM adresar
|
|
||||||
WHERE id = $1 AND deleted = false
|
|
||||||
"#,
|
|
||||||
request.id
|
|
||||||
)
|
|
||||||
.fetch_one(db_pool)
|
|
||||||
.await
|
|
||||||
.map_err(|e| match e {
|
|
||||||
sqlx::Error::RowNotFound => Status::not_found("Record not found"),
|
|
||||||
_ => Status::internal(format!("Database error: {}", e)),
|
|
||||||
})?;
|
|
||||||
|
|
||||||
Ok(AdresarResponse {
|
|
||||||
id: adresar.id,
|
|
||||||
firma: adresar.firma,
|
|
||||||
kz: adresar.kz.unwrap_or_default(),
|
|
||||||
drc: adresar.drc.unwrap_or_default(),
|
|
||||||
ulica: adresar.ulica.unwrap_or_default(),
|
|
||||||
psc: adresar.psc.unwrap_or_default(),
|
|
||||||
mesto: adresar.mesto.unwrap_or_default(),
|
|
||||||
stat: adresar.stat.unwrap_or_default(),
|
|
||||||
banka: adresar.banka.unwrap_or_default(),
|
|
||||||
ucet: adresar.ucet.unwrap_or_default(),
|
|
||||||
skladm: adresar.skladm.unwrap_or_default(),
|
|
||||||
ico: adresar.ico.unwrap_or_default(),
|
|
||||||
kontakt: adresar.kontakt.unwrap_or_default(),
|
|
||||||
telefon: adresar.telefon.unwrap_or_default(),
|
|
||||||
skladu: adresar.skladu.unwrap_or_default(),
|
|
||||||
fax: adresar.fax.unwrap_or_default(),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
@@ -1,35 +0,0 @@
|
|||||||
// src/adresar/handlers/get_adresar_by_position.rs
|
|
||||||
use tonic::{Status};
|
|
||||||
use sqlx::PgPool;
|
|
||||||
use common::proto::multieko2::adresar::{AdresarResponse, GetAdresarRequest};
|
|
||||||
use common::proto::multieko2::common::PositionRequest;
|
|
||||||
use super::get_adresar;
|
|
||||||
|
|
||||||
pub async fn get_adresar_by_position(
|
|
||||||
db_pool: &PgPool,
|
|
||||||
request: PositionRequest,
|
|
||||||
) -> Result<AdresarResponse, Status> {
|
|
||||||
if request.position < 1 {
|
|
||||||
return Err(Status::invalid_argument("Position must be at least 1"));
|
|
||||||
}
|
|
||||||
|
|
||||||
// Find the ID of the Nth non-deleted record
|
|
||||||
let id: i64 = sqlx::query_scalar!(
|
|
||||||
r#"
|
|
||||||
SELECT id
|
|
||||||
FROM adresar
|
|
||||||
WHERE deleted = FALSE
|
|
||||||
ORDER BY id ASC
|
|
||||||
OFFSET $1
|
|
||||||
LIMIT 1
|
|
||||||
"#,
|
|
||||||
request.position - 1
|
|
||||||
)
|
|
||||||
.fetch_optional(db_pool)
|
|
||||||
.await
|
|
||||||
.map_err(|e| Status::internal(e.to_string()))?
|
|
||||||
.ok_or_else(|| Status::not_found("Position out of bounds"))?;
|
|
||||||
|
|
||||||
// Now fetch the complete record using the existing get_adresar function
|
|
||||||
get_adresar(db_pool, GetAdresarRequest { id }).await
|
|
||||||
}
|
|
||||||
@@ -1,23 +0,0 @@
|
|||||||
// src/adresar/handlers/get_adresar_count.rs
|
|
||||||
use tonic::Status;
|
|
||||||
use sqlx::PgPool;
|
|
||||||
use common::proto::multieko2::common::{CountResponse, Empty};
|
|
||||||
|
|
||||||
pub async fn get_adresar_count(
|
|
||||||
db_pool: &PgPool,
|
|
||||||
_request: Empty,
|
|
||||||
) -> Result<CountResponse, Status> {
|
|
||||||
let count: i64 = sqlx::query_scalar!(
|
|
||||||
r#"
|
|
||||||
SELECT COUNT(*) AS count
|
|
||||||
FROM adresar
|
|
||||||
WHERE deleted = FALSE
|
|
||||||
"#
|
|
||||||
)
|
|
||||||
.fetch_one(db_pool)
|
|
||||||
.await
|
|
||||||
.map_err(|e| Status::internal(e.to_string()))?
|
|
||||||
.unwrap_or(0);
|
|
||||||
|
|
||||||
Ok(CountResponse { count })
|
|
||||||
}
|
|
||||||
@@ -1,99 +0,0 @@
|
|||||||
// src/adresar/handlers/post_adresar.rs
|
|
||||||
use tonic::Status;
|
|
||||||
use sqlx::PgPool;
|
|
||||||
use crate::adresar::models::Adresar;
|
|
||||||
use common::proto::multieko2::adresar::{PostAdresarRequest, AdresarResponse};
|
|
||||||
|
|
||||||
// Helper function to sanitize inputs
|
|
||||||
fn sanitize_input(input: &str) -> Option<String> {
|
|
||||||
let trimmed = input.trim().to_string();
|
|
||||||
if trimmed.is_empty() {
|
|
||||||
None
|
|
||||||
} else {
|
|
||||||
Some(trimmed)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn post_adresar(
|
|
||||||
db_pool: &PgPool,
|
|
||||||
mut request: PostAdresarRequest,
|
|
||||||
) -> Result<AdresarResponse, Status> {
|
|
||||||
request.firma = request.firma.trim().to_string();
|
|
||||||
if request.firma.is_empty() {
|
|
||||||
return Err(Status::invalid_argument("Firma je povinne pole"));
|
|
||||||
}
|
|
||||||
|
|
||||||
// Sanitize optional fields
|
|
||||||
let kz = sanitize_input(&request.kz);
|
|
||||||
let drc = sanitize_input(&request.drc);
|
|
||||||
let ulica = sanitize_input(&request.ulica);
|
|
||||||
let psc = sanitize_input(&request.psc);
|
|
||||||
let mesto = sanitize_input(&request.mesto);
|
|
||||||
let stat = sanitize_input(&request.stat);
|
|
||||||
let banka = sanitize_input(&request.banka);
|
|
||||||
let ucet = sanitize_input(&request.ucet);
|
|
||||||
let skladm = sanitize_input(&request.skladm);
|
|
||||||
let ico = sanitize_input(&request.ico);
|
|
||||||
let kontakt = sanitize_input(&request.kontakt);
|
|
||||||
let telefon = sanitize_input(&request.telefon);
|
|
||||||
let skladu = sanitize_input(&request.skladu);
|
|
||||||
let fax = sanitize_input(&request.fax);
|
|
||||||
|
|
||||||
let adresar = sqlx::query_as!(
|
|
||||||
Adresar,
|
|
||||||
r#"
|
|
||||||
INSERT INTO adresar (
|
|
||||||
firma, kz, drc, ulica, psc, mesto, stat, banka, ucet,
|
|
||||||
skladm, ico, kontakt, telefon, skladu, fax, deleted
|
|
||||||
)
|
|
||||||
VALUES (
|
|
||||||
$1, $2, $3, $4, $5, $6, $7, $8, $9,
|
|
||||||
$10, $11, $12, $13, $14, $15, $16
|
|
||||||
)
|
|
||||||
RETURNING
|
|
||||||
id, deleted, firma, kz, drc, ulica, psc, mesto, stat,
|
|
||||||
banka, ucet, skladm, ico, kontakt, telefon, skladu, fax
|
|
||||||
"#,
|
|
||||||
request.firma,
|
|
||||||
kz,
|
|
||||||
drc,
|
|
||||||
ulica,
|
|
||||||
psc,
|
|
||||||
mesto,
|
|
||||||
stat,
|
|
||||||
banka,
|
|
||||||
ucet,
|
|
||||||
skladm,
|
|
||||||
ico,
|
|
||||||
kontakt,
|
|
||||||
telefon,
|
|
||||||
skladu,
|
|
||||||
fax,
|
|
||||||
false
|
|
||||||
)
|
|
||||||
.fetch_one(db_pool)
|
|
||||||
.await
|
|
||||||
.map_err(|e| Status::internal(e.to_string()))?;
|
|
||||||
|
|
||||||
Ok(AdresarResponse {
|
|
||||||
id: adresar.id,
|
|
||||||
// Do not include `deleted` in the response since it's not
|
|
||||||
// defined in the proto message.
|
|
||||||
firma: adresar.firma,
|
|
||||||
kz: adresar.kz.unwrap_or_default(),
|
|
||||||
drc: adresar.drc.unwrap_or_default(),
|
|
||||||
ulica: adresar.ulica.unwrap_or_default(),
|
|
||||||
psc: adresar.psc.unwrap_or_default(),
|
|
||||||
mesto: adresar.mesto.unwrap_or_default(),
|
|
||||||
stat: adresar.stat.unwrap_or_default(),
|
|
||||||
banka: adresar.banka.unwrap_or_default(),
|
|
||||||
ucet: adresar.ucet.unwrap_or_default(),
|
|
||||||
skladm: adresar.skladm.unwrap_or_default(),
|
|
||||||
ico: adresar.ico.unwrap_or_default(),
|
|
||||||
kontakt: adresar.kontakt.unwrap_or_default(),
|
|
||||||
telefon: adresar.telefon.unwrap_or_default(),
|
|
||||||
skladu: adresar.skladu.unwrap_or_default(),
|
|
||||||
fax: adresar.fax.unwrap_or_default(),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
@@ -1,122 +0,0 @@
|
|||||||
// src/adresar/handlers/put_adresar.rs
|
|
||||||
use tonic::Status;
|
|
||||||
use sqlx::PgPool;
|
|
||||||
use crate::adresar::models::Adresar;
|
|
||||||
use common::proto::multieko2::adresar::{PutAdresarRequest, AdresarResponse};
|
|
||||||
|
|
||||||
// Add the same sanitize_input helper as in POST handler
|
|
||||||
fn sanitize_input(input: &str) -> Option<String> {
|
|
||||||
let trimmed = input.trim().to_string();
|
|
||||||
if trimmed.is_empty() {
|
|
||||||
None
|
|
||||||
} else {
|
|
||||||
Some(trimmed)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn put_adresar(
|
|
||||||
db_pool: &PgPool,
|
|
||||||
mut request: PutAdresarRequest,
|
|
||||||
) -> Result<AdresarResponse, Status> {
|
|
||||||
// Add validation for required fields like in POST
|
|
||||||
request.firma = request.firma.trim().to_string();
|
|
||||||
if request.firma.is_empty() {
|
|
||||||
return Err(Status::invalid_argument("Firma je povinne pole"));
|
|
||||||
}
|
|
||||||
|
|
||||||
// Sanitize optional fields like in POST
|
|
||||||
let kz = sanitize_input(&request.kz);
|
|
||||||
let drc = sanitize_input(&request.drc);
|
|
||||||
let ulica = sanitize_input(&request.ulica);
|
|
||||||
let psc = sanitize_input(&request.psc);
|
|
||||||
let mesto = sanitize_input(&request.mesto);
|
|
||||||
let stat = sanitize_input(&request.stat);
|
|
||||||
let banka = sanitize_input(&request.banka);
|
|
||||||
let ucet = sanitize_input(&request.ucet);
|
|
||||||
let skladm = sanitize_input(&request.skladm);
|
|
||||||
let ico = sanitize_input(&request.ico);
|
|
||||||
let kontakt = sanitize_input(&request.kontakt);
|
|
||||||
let telefon = sanitize_input(&request.telefon);
|
|
||||||
let skladu = sanitize_input(&request.skladu);
|
|
||||||
let fax = sanitize_input(&request.fax);
|
|
||||||
|
|
||||||
let adresar = sqlx::query_as!(
|
|
||||||
Adresar,
|
|
||||||
r#"
|
|
||||||
UPDATE adresar
|
|
||||||
SET
|
|
||||||
firma = $2,
|
|
||||||
kz = $3,
|
|
||||||
drc = $4,
|
|
||||||
ulica = $5,
|
|
||||||
psc = $6,
|
|
||||||
mesto = $7,
|
|
||||||
stat = $8,
|
|
||||||
banka = $9,
|
|
||||||
ucet = $10,
|
|
||||||
skladm = $11,
|
|
||||||
ico = $12,
|
|
||||||
kontakt = $13,
|
|
||||||
telefon = $14,
|
|
||||||
skladu = $15,
|
|
||||||
fax = $16
|
|
||||||
WHERE id = $1 AND deleted = FALSE
|
|
||||||
RETURNING
|
|
||||||
id,
|
|
||||||
deleted,
|
|
||||||
firma,
|
|
||||||
kz,
|
|
||||||
drc,
|
|
||||||
ulica,
|
|
||||||
psc,
|
|
||||||
mesto,
|
|
||||||
stat,
|
|
||||||
banka,
|
|
||||||
ucet,
|
|
||||||
skladm,
|
|
||||||
ico,
|
|
||||||
kontakt,
|
|
||||||
telefon,
|
|
||||||
skladu,
|
|
||||||
fax
|
|
||||||
"#,
|
|
||||||
request.id,
|
|
||||||
request.firma,
|
|
||||||
kz,
|
|
||||||
drc,
|
|
||||||
ulica,
|
|
||||||
psc,
|
|
||||||
mesto,
|
|
||||||
stat,
|
|
||||||
banka,
|
|
||||||
ucet,
|
|
||||||
skladm,
|
|
||||||
ico,
|
|
||||||
kontakt,
|
|
||||||
telefon,
|
|
||||||
skladu,
|
|
||||||
fax
|
|
||||||
)
|
|
||||||
.fetch_one(db_pool)
|
|
||||||
.await
|
|
||||||
.map_err(|e| Status::internal(e.to_string()))?;
|
|
||||||
|
|
||||||
Ok(AdresarResponse {
|
|
||||||
id: adresar.id,
|
|
||||||
firma: adresar.firma,
|
|
||||||
kz: adresar.kz.unwrap_or_default(),
|
|
||||||
drc: adresar.drc.unwrap_or_default(),
|
|
||||||
ulica: adresar.ulica.unwrap_or_default(),
|
|
||||||
psc: adresar.psc.unwrap_or_default(),
|
|
||||||
mesto: adresar.mesto.unwrap_or_default(),
|
|
||||||
stat: adresar.stat.unwrap_or_default(),
|
|
||||||
banka: adresar.banka.unwrap_or_default(),
|
|
||||||
ucet: adresar.ucet.unwrap_or_default(),
|
|
||||||
skladm: adresar.skladm.unwrap_or_default(),
|
|
||||||
ico: adresar.ico.unwrap_or_default(),
|
|
||||||
kontakt: adresar.kontakt.unwrap_or_default(),
|
|
||||||
telefon: adresar.telefon.unwrap_or_default(),
|
|
||||||
skladu: adresar.skladu.unwrap_or_default(),
|
|
||||||
fax: adresar.fax.unwrap_or_default(),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
@@ -1,7 +0,0 @@
|
|||||||
// src/adresar/mod.rs
|
|
||||||
|
|
||||||
pub mod models;
|
|
||||||
pub mod handlers;
|
|
||||||
|
|
||||||
// #[cfg(test)]
|
|
||||||
// pub mod tests;
|
|
||||||
@@ -1,23 +0,0 @@
|
|||||||
// src/adresar/models.rs
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
|
|
||||||
#[derive(Debug, Serialize, Deserialize)]
|
|
||||||
pub struct Adresar {
|
|
||||||
pub id: i64,
|
|
||||||
pub deleted: bool,
|
|
||||||
pub firma: String,
|
|
||||||
pub kz: Option<String>,
|
|
||||||
pub drc: Option<String>,
|
|
||||||
pub ulica: Option<String>,
|
|
||||||
pub psc: Option<String>,
|
|
||||||
pub mesto: Option<String>,
|
|
||||||
pub stat: Option<String>,
|
|
||||||
pub banka: Option<String>,
|
|
||||||
pub ucet: Option<String>,
|
|
||||||
pub skladm: Option<String>,
|
|
||||||
pub ico: Option<String>,
|
|
||||||
pub kontakt: Option<String>,
|
|
||||||
pub telefon: Option<String>,
|
|
||||||
pub skladu: Option<String>,
|
|
||||||
pub fax: Option<String>,
|
|
||||||
}
|
|
||||||
@@ -3,6 +3,8 @@
|
|||||||
use tower::ServiceBuilder;
|
use tower::ServiceBuilder;
|
||||||
use crate::auth::logic::rbac;
|
use crate::auth::logic::rbac;
|
||||||
|
|
||||||
|
// TODO redesign this, adresar and uctovnictvo are nonexistent, but we are keeping this code for
|
||||||
|
// the reference. Please adjust in the future rbac.
|
||||||
pub async fn run_server(db_pool: sqlx::PgPool) -> Result<(), Box<dyn std::error::Error>> {
|
pub async fn run_server(db_pool: sqlx::PgPool) -> Result<(), Box<dyn std::error::Error>> {
|
||||||
// ... existing setup code ...
|
// ... existing setup code ...
|
||||||
|
|
||||||
|
|||||||
137
server/src/indexer.rs
Normal file
137
server/src/indexer.rs
Normal file
@@ -0,0 +1,137 @@
|
|||||||
|
// server/src/indexer.rs
|
||||||
|
|
||||||
|
use sqlx::{PgPool, Row};
|
||||||
|
use tantivy::schema::Term;
|
||||||
|
use tantivy::{doc, IndexWriter};
|
||||||
|
use tokio::sync::mpsc::Receiver;
|
||||||
|
use tracing::{error, info, warn};
|
||||||
|
use tantivy::schema::Schema;
|
||||||
|
use crate::search_schema;
|
||||||
|
|
||||||
|
/// Defines the commands that can be sent to the indexer task.
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub enum IndexCommand {
|
||||||
|
/// Add a new document or update an existing one.
|
||||||
|
AddOrUpdate(IndexCommandData),
|
||||||
|
/// Remove a document from the index.
|
||||||
|
Delete(IndexCommandData),
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub struct IndexCommandData {
|
||||||
|
pub table_name: String,
|
||||||
|
pub row_id: i64,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The main loop for the background indexer task.
|
||||||
|
pub async fn indexer_task(pool: PgPool, mut receiver: Receiver<IndexCommand>) {
|
||||||
|
info!("Background indexer task started.");
|
||||||
|
while let Some(command) = receiver.recv().await {
|
||||||
|
info!("Indexer received command: {:?}", command);
|
||||||
|
let result = match command {
|
||||||
|
IndexCommand::AddOrUpdate(data) => {
|
||||||
|
handle_add_or_update(&pool, data).await
|
||||||
|
}
|
||||||
|
IndexCommand::Delete(data) => handle_delete(&pool, data).await,
|
||||||
|
};
|
||||||
|
|
||||||
|
if let Err(e) = result {
|
||||||
|
error!("Failed to process index command: {}", e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
warn!("Indexer channel closed. Task is shutting down.");
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Handles adding or updating a document in a Tantivy index.
|
||||||
|
async fn handle_add_or_update(
|
||||||
|
pool: &PgPool,
|
||||||
|
data: IndexCommandData,
|
||||||
|
) -> anyhow::Result<()> {
|
||||||
|
let qualified_table = format!("gen.\"{}\"", data.table_name);
|
||||||
|
let query_str = format!(
|
||||||
|
"SELECT to_jsonb(t) AS data FROM {} t WHERE id = $1",
|
||||||
|
qualified_table
|
||||||
|
);
|
||||||
|
let row = sqlx::query(&query_str)
|
||||||
|
.bind(data.row_id)
|
||||||
|
.fetch_one(pool)
|
||||||
|
.await?;
|
||||||
|
let json_data: serde_json::Value = row.try_get("data")?;
|
||||||
|
let slovak_text = extract_text_content(&json_data);
|
||||||
|
|
||||||
|
let (mut writer, schema) = get_index_writer(&data.table_name)?;
|
||||||
|
let pg_id_field = schema.get_field("pg_id").unwrap();
|
||||||
|
let prefix_edge_field = schema.get_field("prefix_edge").unwrap();
|
||||||
|
let prefix_full_field = schema.get_field("prefix_full").unwrap();
|
||||||
|
let text_ngram_field = schema.get_field("text_ngram").unwrap();
|
||||||
|
|
||||||
|
let id_term = Term::from_field_u64(pg_id_field, data.row_id as u64);
|
||||||
|
writer.delete_term(id_term);
|
||||||
|
|
||||||
|
writer.add_document(doc!(
|
||||||
|
pg_id_field => data.row_id as u64,
|
||||||
|
prefix_edge_field => slovak_text.clone(),
|
||||||
|
prefix_full_field => slovak_text.clone(),
|
||||||
|
text_ngram_field => slovak_text
|
||||||
|
))?;
|
||||||
|
|
||||||
|
writer.commit()?;
|
||||||
|
info!(
|
||||||
|
"Successfully indexed document id:{} for table:{}",
|
||||||
|
data.row_id, data.table_name
|
||||||
|
);
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Handles deleting a document from a Tantivy index.
|
||||||
|
async fn handle_delete(
|
||||||
|
_pool: &PgPool,
|
||||||
|
data: IndexCommandData,
|
||||||
|
) -> anyhow::Result<()> {
|
||||||
|
let (mut writer, schema) = get_index_writer(&data.table_name)?;
|
||||||
|
let pg_id_field = schema.get_field("pg_id").unwrap();
|
||||||
|
|
||||||
|
let id_term = Term::from_field_u64(pg_id_field, data.row_id as u64);
|
||||||
|
writer.delete_term(id_term);
|
||||||
|
writer.commit()?;
|
||||||
|
|
||||||
|
info!(
|
||||||
|
"Successfully deleted document id:{} from table:{}",
|
||||||
|
data.row_id, data.table_name
|
||||||
|
);
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Helper to get or create an index and return its writer and schema.
|
||||||
|
fn get_index_writer(
|
||||||
|
table_name: &str,
|
||||||
|
) -> anyhow::Result<(IndexWriter, Schema)> {
|
||||||
|
let index = search_schema::get_or_create_index(table_name)?;
|
||||||
|
let schema = index.schema();
|
||||||
|
let writer = index.writer(100_000_000)?; // 100MB heap
|
||||||
|
Ok((writer, schema))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Extract all text content from a JSON object for indexing
|
||||||
|
fn extract_text_content(json_data: &serde_json::Value) -> String {
|
||||||
|
let mut full_text = String::new();
|
||||||
|
|
||||||
|
if let Some(obj) = json_data.as_object() {
|
||||||
|
for value in obj.values() {
|
||||||
|
match value {
|
||||||
|
serde_json::Value::String(s) => {
|
||||||
|
full_text.push_str(s);
|
||||||
|
full_text.push(' ');
|
||||||
|
}
|
||||||
|
serde_json::Value::Number(n) => {
|
||||||
|
full_text.push_str(&n.to_string());
|
||||||
|
full_text.push(' ');
|
||||||
|
}
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
full_text.trim().to_string()
|
||||||
|
}
|
||||||
@@ -1,9 +1,9 @@
|
|||||||
// src/lib.rs
|
// src/lib.rs
|
||||||
pub mod db;
|
pub mod db;
|
||||||
pub mod auth;
|
pub mod auth;
|
||||||
|
pub mod indexer;
|
||||||
|
pub mod search_schema;
|
||||||
pub mod server;
|
pub mod server;
|
||||||
pub mod adresar;
|
|
||||||
pub mod uctovnictvo;
|
|
||||||
pub mod shared;
|
pub mod shared;
|
||||||
pub mod table_structure;
|
pub mod table_structure;
|
||||||
pub mod table_definition;
|
pub mod table_definition;
|
||||||
|
|||||||
26
server/src/search_schema.rs
Normal file
26
server/src/search_schema.rs
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
// server/src/search_schema.rs
|
||||||
|
|
||||||
|
use std::path::Path;
|
||||||
|
use tantivy::Index;
|
||||||
|
|
||||||
|
// Re-export the functions from the common crate.
|
||||||
|
// This makes them available as `crate::search_schema::create_search_schema`, etc.
|
||||||
|
pub use common::search::{create_search_schema, register_slovak_tokenizers};
|
||||||
|
|
||||||
|
/// Gets an existing index or creates a new one.
|
||||||
|
/// This function now uses the shared logic from the `common` crate.
|
||||||
|
pub fn get_or_create_index(table_name: &str) -> tantivy::Result<Index> {
|
||||||
|
let index_path = Path::new("./tantivy_indexes").join(table_name);
|
||||||
|
std::fs::create_dir_all(&index_path)?;
|
||||||
|
|
||||||
|
let index = if index_path.join("meta.json").exists() {
|
||||||
|
Index::open_in_dir(&index_path)?
|
||||||
|
} else {
|
||||||
|
let schema = create_search_schema();
|
||||||
|
Index::create_in_dir(&index_path, schema)?
|
||||||
|
};
|
||||||
|
|
||||||
|
// This now calls the single, authoritative function from `common`.
|
||||||
|
register_slovak_tokenizers(&index)?;
|
||||||
|
Ok(index)
|
||||||
|
}
|
||||||
@@ -1,4 +1,2 @@
|
|||||||
// src/server/handlers.rs
|
// src/server/handlers.rs
|
||||||
pub use crate::server::services::adresar_service::AdresarService;
|
|
||||||
pub use crate::server::services::uctovnictvo_service::UctovnictvoService;
|
|
||||||
pub use crate::server::services::table_structure_service::TableStructureHandler;
|
pub use crate::server::services::table_structure_service::TableStructureHandler;
|
||||||
|
|||||||
@@ -1,11 +1,11 @@
|
|||||||
// src/server/run.rs
|
|
||||||
use tonic::transport::Server;
|
use tonic::transport::Server;
|
||||||
use tonic_reflection::server::Builder as ReflectionBuilder;
|
use tonic_reflection::server::Builder as ReflectionBuilder;
|
||||||
|
|
||||||
|
use tokio::sync::mpsc;
|
||||||
|
use crate::indexer::{indexer_task, IndexCommand};
|
||||||
|
|
||||||
use common::proto::multieko2::FILE_DESCRIPTOR_SET;
|
use common::proto::multieko2::FILE_DESCRIPTOR_SET;
|
||||||
use crate::server::services::{
|
use crate::server::services::{
|
||||||
AdresarService,
|
|
||||||
UctovnictvoService,
|
|
||||||
TableStructureHandler,
|
TableStructureHandler,
|
||||||
TableDefinitionService,
|
TableDefinitionService,
|
||||||
TablesDataService,
|
TablesDataService,
|
||||||
@@ -13,39 +13,51 @@ use crate::server::services::{
|
|||||||
AuthServiceImpl
|
AuthServiceImpl
|
||||||
};
|
};
|
||||||
use common::proto::multieko2::{
|
use common::proto::multieko2::{
|
||||||
adresar::adresar_server::AdresarServer,
|
|
||||||
uctovnictvo::uctovnictvo_server::UctovnictvoServer,
|
|
||||||
table_structure::table_structure_service_server::TableStructureServiceServer,
|
table_structure::table_structure_service_server::TableStructureServiceServer,
|
||||||
table_definition::table_definition_server::TableDefinitionServer,
|
table_definition::table_definition_server::TableDefinitionServer,
|
||||||
tables_data::tables_data_server::TablesDataServer,
|
tables_data::tables_data_server::TablesDataServer,
|
||||||
table_script::table_script_server::TableScriptServer,
|
table_script::table_script_server::TableScriptServer,
|
||||||
auth::auth_service_server::AuthServiceServer
|
auth::auth_service_server::AuthServiceServer
|
||||||
};
|
};
|
||||||
|
use search::{SearcherService, SearcherServer};
|
||||||
|
|
||||||
pub async fn run_server(db_pool: sqlx::PgPool) -> Result<(), Box<dyn std::error::Error>> {
|
pub async fn run_server(db_pool: sqlx::PgPool) -> Result<(), Box<dyn std::error::Error>> {
|
||||||
// Initialize JWT for authentication
|
// Initialize JWT for authentication
|
||||||
crate::auth::logic::jwt::init_jwt()?;
|
crate::auth::logic::jwt::init_jwt()?;
|
||||||
|
|
||||||
let addr = "[::1]:50051".parse()?;
|
let addr = "[::1]:50051".parse()?;
|
||||||
|
println!("Unified Server listening on {}", addr);
|
||||||
|
|
||||||
|
// 1. Create the MPSC channel for indexer commands
|
||||||
|
let (indexer_tx, indexer_rx) = mpsc::channel::<IndexCommand>(100); // Buffer of 100 messages
|
||||||
|
|
||||||
|
// 2. Spawn the background indexer task
|
||||||
|
let indexer_pool = db_pool.clone();
|
||||||
|
tokio::spawn(indexer_task(indexer_pool, indexer_rx));
|
||||||
|
|
||||||
let reflection_service = ReflectionBuilder::configure()
|
let reflection_service = ReflectionBuilder::configure()
|
||||||
.register_encoded_file_descriptor_set(FILE_DESCRIPTOR_SET)
|
.register_encoded_file_descriptor_set(FILE_DESCRIPTOR_SET)
|
||||||
.build_v1()?;
|
.build_v1()?;
|
||||||
|
|
||||||
// Initialize services
|
// Initialize services, passing the indexer sender to the relevant ones
|
||||||
let table_definition_service = TableDefinitionService { db_pool: db_pool.clone() };
|
let table_definition_service = TableDefinitionService { db_pool: db_pool.clone() };
|
||||||
let tables_data_service = TablesDataService { db_pool: db_pool.clone() };
|
let tables_data_service = TablesDataService {
|
||||||
|
db_pool: db_pool.clone(),
|
||||||
|
indexer_tx: indexer_tx.clone(),
|
||||||
|
};
|
||||||
let table_script_service = TableScriptService { db_pool: db_pool.clone() };
|
let table_script_service = TableScriptService { db_pool: db_pool.clone() };
|
||||||
let auth_service = AuthServiceImpl { db_pool: db_pool.clone() };
|
let auth_service = AuthServiceImpl { db_pool: db_pool.clone() };
|
||||||
|
|
||||||
|
// MODIFIED: Instantiate SearcherService with the database pool
|
||||||
|
let search_service = SearcherService { pool: db_pool.clone() };
|
||||||
|
|
||||||
Server::builder()
|
Server::builder()
|
||||||
.add_service(AdresarServer::new(AdresarService { db_pool: db_pool.clone() }))
|
|
||||||
.add_service(UctovnictvoServer::new(UctovnictvoService { db_pool: db_pool.clone() }))
|
|
||||||
.add_service(TableStructureServiceServer::new(TableStructureHandler { db_pool: db_pool.clone() }))
|
.add_service(TableStructureServiceServer::new(TableStructureHandler { db_pool: db_pool.clone() }))
|
||||||
.add_service(TableDefinitionServer::new(table_definition_service))
|
.add_service(TableDefinitionServer::new(table_definition_service))
|
||||||
.add_service(TablesDataServer::new(tables_data_service))
|
.add_service(TablesDataServer::new(tables_data_service))
|
||||||
.add_service(TableScriptServer::new(table_script_service))
|
.add_service(TableScriptServer::new(table_script_service))
|
||||||
.add_service(AuthServiceServer::new(auth_service))
|
.add_service(AuthServiceServer::new(auth_service))
|
||||||
|
.add_service(SearcherServer::new(search_service))
|
||||||
.add_service(reflection_service)
|
.add_service(reflection_service)
|
||||||
.serve(addr)
|
.serve(addr)
|
||||||
.await?;
|
.await?;
|
||||||
|
|||||||
@@ -1,69 +0,0 @@
|
|||||||
// src/server/services/adresar_service.rs
|
|
||||||
use tonic::{Request, Response, Status};
|
|
||||||
use common::proto::multieko2::adresar::{
|
|
||||||
adresar_server::Adresar,
|
|
||||||
PostAdresarRequest, AdresarResponse, GetAdresarRequest, PutAdresarRequest,
|
|
||||||
DeleteAdresarRequest, DeleteAdresarResponse,
|
|
||||||
};
|
|
||||||
use common::proto::multieko2::common::{Empty, CountResponse, PositionRequest};
|
|
||||||
use crate::adresar::handlers::{
|
|
||||||
post_adresar, get_adresar, put_adresar, delete_adresar,
|
|
||||||
get_adresar_count, get_adresar_by_position,
|
|
||||||
};
|
|
||||||
use sqlx::PgPool;
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct AdresarService {
|
|
||||||
pub db_pool: PgPool,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tonic::async_trait]
|
|
||||||
impl Adresar for AdresarService {
|
|
||||||
async fn post_adresar(
|
|
||||||
&self,
|
|
||||||
request: Request<PostAdresarRequest>,
|
|
||||||
) -> Result<Response<AdresarResponse>, Status> {
|
|
||||||
let response = post_adresar(&self.db_pool, request.into_inner()).await?;
|
|
||||||
Ok(Response::new(response))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn get_adresar(
|
|
||||||
&self,
|
|
||||||
request: Request<GetAdresarRequest>,
|
|
||||||
) -> Result<Response<AdresarResponse>, Status> {
|
|
||||||
let response = get_adresar(&self.db_pool, request.into_inner()).await?;
|
|
||||||
Ok(Response::new(response))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn put_adresar(
|
|
||||||
&self,
|
|
||||||
request: Request<PutAdresarRequest>,
|
|
||||||
) -> Result<Response<AdresarResponse>, Status> {
|
|
||||||
let response = put_adresar(&self.db_pool, request.into_inner()).await?;
|
|
||||||
Ok(Response::new(response))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn delete_adresar(
|
|
||||||
&self,
|
|
||||||
request: Request<DeleteAdresarRequest>,
|
|
||||||
) -> Result<Response<DeleteAdresarResponse>, Status> {
|
|
||||||
let response = delete_adresar(&self.db_pool, request.into_inner()).await?;
|
|
||||||
Ok(Response::new(response))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn get_adresar_count(
|
|
||||||
&self,
|
|
||||||
request: Request<Empty>,
|
|
||||||
) -> Result<Response<CountResponse>, Status> {
|
|
||||||
let response = get_adresar_count(&self.db_pool, request.into_inner()).await?;
|
|
||||||
Ok(Response::new(response))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn get_adresar_by_position(
|
|
||||||
&self,
|
|
||||||
request: Request<PositionRequest>,
|
|
||||||
) -> Result<Response<AdresarResponse>, Status> {
|
|
||||||
let response = get_adresar_by_position(&self.db_pool, request.into_inner()).await?;
|
|
||||||
Ok(Response::new(response))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,16 +1,12 @@
|
|||||||
// src/server/services/mod.rs
|
// src/server/services/mod.rs
|
||||||
|
|
||||||
pub mod adresar_service;
|
|
||||||
pub mod table_structure_service;
|
pub mod table_structure_service;
|
||||||
pub mod uctovnictvo_service;
|
|
||||||
pub mod table_definition_service;
|
pub mod table_definition_service;
|
||||||
pub mod tables_data_service;
|
pub mod tables_data_service;
|
||||||
pub mod table_script_service;
|
pub mod table_script_service;
|
||||||
pub mod auth_service;
|
pub mod auth_service;
|
||||||
|
|
||||||
pub use adresar_service::AdresarService;
|
|
||||||
pub use table_structure_service::TableStructureHandler;
|
pub use table_structure_service::TableStructureHandler;
|
||||||
pub use uctovnictvo_service::UctovnictvoService;
|
|
||||||
pub use table_definition_service::TableDefinitionService;
|
pub use table_definition_service::TableDefinitionService;
|
||||||
pub use tables_data_service::TablesDataService;
|
pub use tables_data_service::TablesDataService;
|
||||||
pub use table_script_service::TableScriptService;
|
pub use table_script_service::TableScriptService;
|
||||||
|
|||||||
@@ -1,5 +1,10 @@
|
|||||||
// src/server/services/tables_data_service.rs
|
// src/server/services/tables_data_service.rs
|
||||||
|
|
||||||
use tonic::{Request, Response, Status};
|
use tonic::{Request, Response, Status};
|
||||||
|
// Add these imports
|
||||||
|
use tokio::sync::mpsc;
|
||||||
|
use crate::indexer::IndexCommand;
|
||||||
|
|
||||||
use common::proto::multieko2::tables_data::tables_data_server::TablesData;
|
use common::proto::multieko2::tables_data::tables_data_server::TablesData;
|
||||||
use common::proto::multieko2::common::CountResponse;
|
use common::proto::multieko2::common::CountResponse;
|
||||||
use common::proto::multieko2::tables_data::{
|
use common::proto::multieko2::tables_data::{
|
||||||
@@ -15,6 +20,8 @@ use sqlx::PgPool;
|
|||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub struct TablesDataService {
|
pub struct TablesDataService {
|
||||||
pub db_pool: PgPool,
|
pub db_pool: PgPool,
|
||||||
|
// MODIFIED: Add the sender field
|
||||||
|
pub indexer_tx: mpsc::Sender<IndexCommand>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tonic::async_trait]
|
#[tonic::async_trait]
|
||||||
@@ -24,25 +31,37 @@ impl TablesData for TablesDataService {
|
|||||||
request: Request<PostTableDataRequest>,
|
request: Request<PostTableDataRequest>,
|
||||||
) -> Result<Response<PostTableDataResponse>, Status> {
|
) -> Result<Response<PostTableDataResponse>, Status> {
|
||||||
let request = request.into_inner();
|
let request = request.into_inner();
|
||||||
let response = post_table_data(&self.db_pool, request).await?;
|
// MODIFIED: Pass the indexer_tx to the handler
|
||||||
|
let response = post_table_data(
|
||||||
|
&self.db_pool,
|
||||||
|
request,
|
||||||
|
&self.indexer_tx,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
Ok(Response::new(response))
|
Ok(Response::new(response))
|
||||||
}
|
}
|
||||||
|
|
||||||
// Add the new method implementation
|
|
||||||
async fn put_table_data(
|
async fn put_table_data(
|
||||||
&self,
|
&self,
|
||||||
request: Request<PutTableDataRequest>,
|
request: Request<PutTableDataRequest>,
|
||||||
) -> Result<Response<PutTableDataResponse>, Status> {
|
) -> Result<Response<PutTableDataResponse>, Status> {
|
||||||
let request = request.into_inner();
|
let request = request.into_inner();
|
||||||
let response = put_table_data(&self.db_pool, request).await?;
|
let response = put_table_data(
|
||||||
|
&self.db_pool,
|
||||||
|
request,
|
||||||
|
&self.indexer_tx,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
Ok(Response::new(response))
|
Ok(Response::new(response))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ...and delete_table_data
|
||||||
async fn delete_table_data(
|
async fn delete_table_data(
|
||||||
&self,
|
&self,
|
||||||
request: Request<DeleteTableDataRequest>,
|
request: Request<DeleteTableDataRequest>,
|
||||||
) -> Result<Response<DeleteTableDataResponse>, Status> {
|
) -> Result<Response<DeleteTableDataResponse>, Status> {
|
||||||
let request = request.into_inner();
|
let request = request.into_inner();
|
||||||
|
// TODO: Update delete_table_data handler to accept and use indexer_tx
|
||||||
let response = delete_table_data(&self.db_pool, request).await?;
|
let response = delete_table_data(&self.db_pool, request).await?;
|
||||||
Ok(Response::new(response))
|
Ok(Response::new(response))
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,60 +0,0 @@
|
|||||||
// src/server/services/uctovnictvo_service.rs
|
|
||||||
use tonic::{Request, Response, Status};
|
|
||||||
use common::proto::multieko2::uctovnictvo::{
|
|
||||||
uctovnictvo_server::Uctovnictvo,
|
|
||||||
PostUctovnictvoRequest, UctovnictvoResponse, GetUctovnictvoRequest, PutUctovnictvoRequest,
|
|
||||||
};
|
|
||||||
use crate::uctovnictvo::handlers::{
|
|
||||||
post_uctovnictvo, get_uctovnictvo, get_uctovnictvo_count,
|
|
||||||
get_uctovnictvo_by_position, put_uctovnictvo,
|
|
||||||
};
|
|
||||||
use common::proto::multieko2::common::{Empty, CountResponse, PositionRequest};
|
|
||||||
use sqlx::PgPool;
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct UctovnictvoService {
|
|
||||||
pub db_pool: PgPool,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tonic::async_trait]
|
|
||||||
impl Uctovnictvo for UctovnictvoService {
|
|
||||||
async fn post_uctovnictvo(
|
|
||||||
&self,
|
|
||||||
request: Request<PostUctovnictvoRequest>,
|
|
||||||
) -> Result<Response<UctovnictvoResponse>, Status> {
|
|
||||||
let response = post_uctovnictvo(&self.db_pool, request.into_inner()).await?;
|
|
||||||
Ok(Response::new(response))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn get_uctovnictvo(
|
|
||||||
&self,
|
|
||||||
request: Request<GetUctovnictvoRequest>,
|
|
||||||
) -> Result<Response<UctovnictvoResponse>, Status> {
|
|
||||||
let response = get_uctovnictvo(&self.db_pool, request.into_inner()).await?;
|
|
||||||
Ok(Response::new(response))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn get_uctovnictvo_count(
|
|
||||||
&self,
|
|
||||||
request: Request<Empty>,
|
|
||||||
) -> Result<Response<CountResponse>, Status> {
|
|
||||||
let response = get_uctovnictvo_count(&self.db_pool, request.into_inner()).await?;
|
|
||||||
Ok(Response::new(response))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn get_uctovnictvo_by_position(
|
|
||||||
&self,
|
|
||||||
request: Request<PositionRequest>,
|
|
||||||
) -> Result<Response<UctovnictvoResponse>, Status> {
|
|
||||||
let response = get_uctovnictvo_by_position(&self.db_pool, request.into_inner()).await?;
|
|
||||||
Ok(Response::new(response))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn put_uctovnictvo(
|
|
||||||
&self,
|
|
||||||
request: Request<PutUctovnictvoRequest>,
|
|
||||||
) -> Result<Response<UctovnictvoResponse>, Status> {
|
|
||||||
let response = put_uctovnictvo(&self.db_pool, request.into_inner()).await?;
|
|
||||||
Ok(Response::new(response))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,34 +1,50 @@
|
|||||||
// src/shared/schema_qualifier.rs
|
// src/shared/schema_qualifier.rs
|
||||||
|
use sqlx::PgPool;
|
||||||
use tonic::Status;
|
use tonic::Status;
|
||||||
|
|
||||||
/// Qualifies table names with the appropriate schema
|
// TODO in the future, remove database query on every request and implement caching for scalable
|
||||||
///
|
// solution with many data and requests
|
||||||
|
|
||||||
|
/// Qualifies a table name by checking for its existence in the table_definitions table.
|
||||||
|
/// This is the robust, "source of truth" approach.
|
||||||
|
///
|
||||||
/// Rules:
|
/// Rules:
|
||||||
/// - Tables created via PostTableDefinition (dynamically created tables) are in 'gen' schema
|
/// - If a table is found in `table_definitions`, it is qualified with the 'gen' schema.
|
||||||
/// - System tables (like users, profiles) remain in 'public' schema
|
/// - Otherwise, it is assumed to be a system table in the 'public' schema.
|
||||||
pub fn qualify_table_name(table_name: &str) -> String {
|
pub async fn qualify_table_name(
|
||||||
// Check if table matches the pattern of dynamically created tables (e.g., 2025_something)
|
db_pool: &PgPool,
|
||||||
if table_name.starts_with(|c: char| c.is_ascii_digit()) && table_name.contains('_') {
|
profile_name: &str,
|
||||||
format!("gen.\"{}\"", table_name)
|
table_name: &str,
|
||||||
|
) -> Result<String, Status> {
|
||||||
|
// Check if a definition exists for this table in the given profile.
|
||||||
|
let definition_exists = sqlx::query!(
|
||||||
|
r#"SELECT EXISTS (
|
||||||
|
SELECT 1 FROM table_definitions td
|
||||||
|
JOIN schemas s ON td.schema_id = s.id
|
||||||
|
WHERE s.name = $1 AND td.table_name = $2
|
||||||
|
)"#,
|
||||||
|
profile_name,
|
||||||
|
table_name
|
||||||
|
)
|
||||||
|
.fetch_one(db_pool)
|
||||||
|
.await
|
||||||
|
.map_err(|e| Status::internal(format!("Schema lookup failed: {}", e)))?
|
||||||
|
.exists
|
||||||
|
.unwrap_or(false);
|
||||||
|
|
||||||
|
if definition_exists {
|
||||||
|
Ok(format!("{}.\"{}\"", profile_name, table_name))
|
||||||
} else {
|
} else {
|
||||||
format!("\"{}\"", table_name)
|
// It's not a user-defined table, so it must be a system table in 'public.
|
||||||
|
Ok(format!("\"{}\"", table_name))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Qualifies table names for data operations
|
/// Qualifies table names for data operations
|
||||||
pub fn qualify_table_name_for_data(table_name: &str) -> Result<String, Status> {
|
pub async fn qualify_table_name_for_data(
|
||||||
Ok(qualify_table_name(table_name))
|
db_pool: &PgPool,
|
||||||
}
|
profile_name: &str,
|
||||||
|
table_name: &str,
|
||||||
#[cfg(test)]
|
) -> Result<String, Status> {
|
||||||
mod tests {
|
qualify_table_name(db_pool, profile_name, table_name).await
|
||||||
use super::*;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_qualify_table_name() {
|
|
||||||
assert_eq!(qualify_table_name("2025_test_schema3"), "gen.\"2025_test_schema3\"");
|
|
||||||
assert_eq!(qualify_table_name("users"), "\"users\"");
|
|
||||||
assert_eq!(qualify_table_name("profiles"), "\"profiles\"");
|
|
||||||
assert_eq!(qualify_table_name("adresar"), "\"adresar\"");
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -21,7 +21,8 @@ pub enum FunctionError {
|
|||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub struct SteelContext {
|
pub struct SteelContext {
|
||||||
pub current_table: String,
|
pub current_table: String,
|
||||||
pub profile_id: i64,
|
pub schema_id: i64,
|
||||||
|
pub schema_name: String,
|
||||||
pub row_data: HashMap<String, String>,
|
pub row_data: HashMap<String, String>,
|
||||||
pub db_pool: Arc<PgPool>,
|
pub db_pool: Arc<PgPool>,
|
||||||
}
|
}
|
||||||
@@ -30,8 +31,8 @@ impl SteelContext {
|
|||||||
pub async fn get_related_table_name(&self, base_name: &str) -> Result<String, FunctionError> {
|
pub async fn get_related_table_name(&self, base_name: &str) -> Result<String, FunctionError> {
|
||||||
let table_def = sqlx::query!(
|
let table_def = sqlx::query!(
|
||||||
r#"SELECT table_name FROM table_definitions
|
r#"SELECT table_name FROM table_definitions
|
||||||
WHERE profile_id = $1 AND table_name LIKE $2"#,
|
WHERE schema_id = $1 AND table_name LIKE $2"#,
|
||||||
self.profile_id,
|
self.schema_id,
|
||||||
format!("%_{}", base_name)
|
format!("%_{}", base_name)
|
||||||
)
|
)
|
||||||
.fetch_optional(&*self.db_pool)
|
.fetch_optional(&*self.db_pool)
|
||||||
@@ -66,7 +67,7 @@ impl SteelContext {
|
|||||||
|
|
||||||
// Add quotes around the table name
|
// Add quotes around the table name
|
||||||
sqlx::query_scalar::<_, String>(
|
sqlx::query_scalar::<_, String>(
|
||||||
&format!("SELECT {} FROM \"{}\" WHERE id = $1", column, actual_table)
|
&format!("SELECT {} FROM \"{}\".\"{}\" WHERE id = $1", column, self.schema_name, actual_table)
|
||||||
)
|
)
|
||||||
.bind(fk_value.parse::<i64>().map_err(|_|
|
.bind(fk_value.parse::<i64>().map_err(|_|
|
||||||
SteelVal::StringV("Invalid foreign key format".into()))?)
|
SteelVal::StringV("Invalid foreign key format".into()))?)
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
// server/src/table_definition/handlers/delete_table.rs
|
// src/table_definition/handlers/delete_table.rs
|
||||||
use tonic::Status;
|
use tonic::Status;
|
||||||
use sqlx::PgPool;
|
use sqlx::PgPool;
|
||||||
use common::proto::multieko2::table_definition::{DeleteTableRequest, DeleteTableResponse};
|
use common::proto::multieko2::table_definition::{DeleteTableRequest, DeleteTableResponse};
|
||||||
@@ -10,25 +10,25 @@ pub async fn delete_table(
|
|||||||
let mut transaction = db_pool.begin().await
|
let mut transaction = db_pool.begin().await
|
||||||
.map_err(|e| Status::internal(format!("Failed to start transaction: {}", e)))?;
|
.map_err(|e| Status::internal(format!("Failed to start transaction: {}", e)))?;
|
||||||
|
|
||||||
// Step 1: Get profile and validate existence
|
// Step 1: Get schema and validate existence
|
||||||
let profile = sqlx::query!(
|
let schema = sqlx::query!(
|
||||||
"SELECT id FROM profiles WHERE name = $1",
|
"SELECT id, name FROM schemas WHERE name = $1",
|
||||||
request.profile_name
|
request.profile_name
|
||||||
)
|
)
|
||||||
.fetch_optional(&mut *transaction)
|
.fetch_optional(&mut *transaction)
|
||||||
.await
|
.await
|
||||||
.map_err(|e| Status::internal(format!("Profile lookup failed: {}", e)))?;
|
.map_err(|e| Status::internal(format!("Schema lookup failed: {}", e)))?;
|
||||||
|
|
||||||
let profile_id = match profile {
|
let (schema_id, schema_name) = match schema {
|
||||||
Some(p) => p.id,
|
Some(s) => (s.id, s.name),
|
||||||
None => return Err(Status::not_found("Profile not found")),
|
None => return Err(Status::not_found("Profile not found")),
|
||||||
};
|
};
|
||||||
|
|
||||||
// Step 2: Get table definition and validate existence
|
// Step 2: Get table definition and validate existence
|
||||||
let table_def = sqlx::query!(
|
let table_def = sqlx::query!(
|
||||||
"SELECT id FROM table_definitions
|
"SELECT id FROM table_definitions
|
||||||
WHERE profile_id = $1 AND table_name = $2",
|
WHERE schema_id = $1 AND table_name = $2",
|
||||||
profile_id,
|
schema_id,
|
||||||
request.table_name
|
request.table_name
|
||||||
)
|
)
|
||||||
.fetch_optional(&mut *transaction)
|
.fetch_optional(&mut *transaction)
|
||||||
@@ -40,8 +40,9 @@ pub async fn delete_table(
|
|||||||
None => return Err(Status::not_found("Table not found in profile")),
|
None => return Err(Status::not_found("Table not found in profile")),
|
||||||
};
|
};
|
||||||
|
|
||||||
// Step 3: Drop the actual PostgreSQL table with CASCADE
|
// Step 3: Drop the actual PostgreSQL table with CASCADE (schema-qualified)
|
||||||
sqlx::query(&format!(r#"DROP TABLE IF EXISTS "{}" CASCADE"#, request.table_name))
|
let drop_table_sql = format!(r#"DROP TABLE IF EXISTS "{}"."{}" CASCADE"#, schema_name, request.table_name);
|
||||||
|
sqlx::query(&drop_table_sql)
|
||||||
.execute(&mut *transaction)
|
.execute(&mut *transaction)
|
||||||
.await
|
.await
|
||||||
.map_err(|e| Status::internal(format!("Table drop failed: {}", e)))?;
|
.map_err(|e| Status::internal(format!("Table drop failed: {}", e)))?;
|
||||||
@@ -55,23 +56,31 @@ pub async fn delete_table(
|
|||||||
.await
|
.await
|
||||||
.map_err(|e| Status::internal(format!("Definition deletion failed: {}", e)))?;
|
.map_err(|e| Status::internal(format!("Definition deletion failed: {}", e)))?;
|
||||||
|
|
||||||
// Step 5: Check and clean up profile if empty
|
// Step 5: Check and clean up schema if empty
|
||||||
let remaining = sqlx::query!(
|
let remaining = sqlx::query!(
|
||||||
"SELECT COUNT(*) as count FROM table_definitions WHERE profile_id = $1",
|
"SELECT COUNT(*) as count FROM table_definitions WHERE schema_id = $1",
|
||||||
profile_id
|
schema_id
|
||||||
)
|
)
|
||||||
.fetch_one(&mut *transaction)
|
.fetch_one(&mut *transaction)
|
||||||
.await
|
.await
|
||||||
.map_err(|e| Status::internal(format!("Count query failed: {}", e)))?;
|
.map_err(|e| Status::internal(format!("Count query failed: {}", e)))?;
|
||||||
|
|
||||||
if remaining.count.unwrap_or(1) == 0 {
|
if remaining.count.unwrap_or(1) == 0 {
|
||||||
|
// Drop the PostgreSQL schema if empty
|
||||||
|
let drop_schema_sql = format!(r#"DROP SCHEMA IF EXISTS "{}" CASCADE"#, schema_name);
|
||||||
|
sqlx::query(&drop_schema_sql)
|
||||||
|
.execute(&mut *transaction)
|
||||||
|
.await
|
||||||
|
.map_err(|e| Status::internal(format!("Schema drop failed: {}", e)))?;
|
||||||
|
|
||||||
|
// Delete the schema record
|
||||||
sqlx::query!(
|
sqlx::query!(
|
||||||
"DELETE FROM profiles WHERE id = $1",
|
"DELETE FROM schemas WHERE id = $1",
|
||||||
profile_id
|
schema_id
|
||||||
)
|
)
|
||||||
.execute(&mut *transaction)
|
.execute(&mut *transaction)
|
||||||
.await
|
.await
|
||||||
.map_err(|e| Status::internal(format!("Profile cleanup failed: {}", e)))?;
|
.map_err(|e| Status::internal(format!("Schema cleanup failed: {}", e)))?;
|
||||||
}
|
}
|
||||||
|
|
||||||
transaction.commit().await
|
transaction.commit().await
|
||||||
|
|||||||
@@ -15,13 +15,15 @@ pub async fn get_profile_tree(
|
|||||||
) -> Result<Response<ProfileTreeResponse>, Status> {
|
) -> Result<Response<ProfileTreeResponse>, Status> {
|
||||||
let mut profiles = Vec::new();
|
let mut profiles = Vec::new();
|
||||||
|
|
||||||
// Get all profiles
|
// Get all schemas (internally changed from profiles to schemas)
|
||||||
let profile_records = sqlx::query!("SELECT id, name FROM profiles")
|
let schema_records = sqlx::query!(
|
||||||
.fetch_all(db_pool)
|
"SELECT id, name FROM schemas ORDER BY name"
|
||||||
.await
|
)
|
||||||
.map_err(|e| Status::internal(format!("Failed to fetch profiles: {}", e)))?;
|
.fetch_all(db_pool)
|
||||||
|
.await
|
||||||
|
.map_err(|e| Status::internal(format!("Failed to fetch schemas: {}", e)))?;
|
||||||
|
|
||||||
for profile in profile_records {
|
for schema in schema_records {
|
||||||
// Get all tables with their dependencies from the links table
|
// Get all tables with their dependencies from the links table
|
||||||
let tables = sqlx::query!(
|
let tables = sqlx::query!(
|
||||||
r#"
|
r#"
|
||||||
@@ -35,15 +37,16 @@ pub async fn get_profile_tree(
|
|||||||
'required', tdl.is_required
|
'required', tdl.is_required
|
||||||
)
|
)
|
||||||
) FILTER (WHERE ltd.id IS NOT NULL),
|
) FILTER (WHERE ltd.id IS NOT NULL),
|
||||||
'[]'
|
'[]'::json
|
||||||
) as dependencies
|
) as dependencies
|
||||||
FROM table_definitions td
|
FROM table_definitions td
|
||||||
LEFT JOIN table_definition_links tdl ON td.id = tdl.source_table_id
|
LEFT JOIN table_definition_links tdl ON td.id = tdl.source_table_id
|
||||||
LEFT JOIN table_definitions ltd ON tdl.linked_table_id = ltd.id
|
LEFT JOIN table_definitions ltd ON tdl.linked_table_id = ltd.id
|
||||||
WHERE td.profile_id = $1
|
WHERE td.schema_id = $1
|
||||||
GROUP BY td.id, td.table_name
|
GROUP BY td.id, td.table_name
|
||||||
|
ORDER BY td.table_name
|
||||||
"#,
|
"#,
|
||||||
profile.id
|
schema.id
|
||||||
)
|
)
|
||||||
.fetch_all(db_pool)
|
.fetch_all(db_pool)
|
||||||
.await
|
.await
|
||||||
@@ -70,8 +73,9 @@ pub async fn get_profile_tree(
|
|||||||
})
|
})
|
||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
|
// External API still returns "profiles" for compatibility
|
||||||
profiles.push(Profile {
|
profiles.push(Profile {
|
||||||
name: profile.name,
|
name: schema.name,
|
||||||
tables: proto_tables
|
tables: proto_tables
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,48 +1,170 @@
|
|||||||
|
// src/table_definition/handlers/post_table_definition.rs
|
||||||
|
|
||||||
use tonic::Status;
|
use tonic::Status;
|
||||||
use sqlx::{PgPool, Transaction, Postgres};
|
use sqlx::{PgPool, Transaction, Postgres};
|
||||||
use serde_json::json;
|
use serde_json::json;
|
||||||
use time::OffsetDateTime;
|
|
||||||
use common::proto::multieko2::table_definition::{PostTableDefinitionRequest, TableDefinitionResponse};
|
use common::proto::multieko2::table_definition::{PostTableDefinitionRequest, TableDefinitionResponse};
|
||||||
|
|
||||||
const GENERATED_SCHEMA_NAME: &str = "gen";
|
|
||||||
|
|
||||||
const PREDEFINED_FIELD_TYPES: &[(&str, &str)] = &[
|
const PREDEFINED_FIELD_TYPES: &[(&str, &str)] = &[
|
||||||
("text", "TEXT"),
|
("text", "TEXT"),
|
||||||
("psc", "TEXT"),
|
("string", "TEXT"),
|
||||||
("phone", "VARCHAR(15)"),
|
|
||||||
("address", "TEXT"),
|
|
||||||
("email", "VARCHAR(255)"),
|
|
||||||
("boolean", "BOOLEAN"),
|
("boolean", "BOOLEAN"),
|
||||||
("timestamp", "TIMESTAMPTZ"),
|
("timestamp", "TIMESTAMPTZ"),
|
||||||
|
("timestamptz", "TIMESTAMPTZ"),
|
||||||
|
("time", "TIMESTAMPTZ"),
|
||||||
|
("money", "NUMERIC(14, 4)"),
|
||||||
|
("integer", "INTEGER"),
|
||||||
|
("int", "INTEGER"),
|
||||||
|
("biginteger", "BIGINT"),
|
||||||
|
("bigint", "BIGINT"),
|
||||||
|
("date", "DATE"),
|
||||||
];
|
];
|
||||||
|
|
||||||
fn is_valid_identifier(s: &str) -> bool {
|
// NEW: Helper function to provide detailed error messages
|
||||||
!s.is_empty() &&
|
fn validate_identifier_format(s: &str, identifier_type: &str) -> Result<(), Status> {
|
||||||
s.chars().all(|c| c.is_ascii_alphanumeric() || c == '_') &&
|
if s.is_empty() {
|
||||||
!s.starts_with('_') &&
|
return Err(Status::invalid_argument(format!("{} cannot be empty", identifier_type)));
|
||||||
!s.chars().next().unwrap().is_ascii_digit()
|
}
|
||||||
|
|
||||||
|
if s.starts_with('_') {
|
||||||
|
return Err(Status::invalid_argument(format!("{} cannot start with underscore", identifier_type)));
|
||||||
|
}
|
||||||
|
|
||||||
|
if s.chars().next().unwrap().is_ascii_digit() {
|
||||||
|
return Err(Status::invalid_argument(format!("{} cannot start with a number", identifier_type)));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for invalid characters
|
||||||
|
let invalid_chars: Vec<char> = s.chars()
|
||||||
|
.filter(|c| !c.is_ascii_lowercase() && !c.is_ascii_digit() && *c != '_')
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
if !invalid_chars.is_empty() {
|
||||||
|
return Err(Status::invalid_argument(format!(
|
||||||
|
"{} contains invalid characters: {:?}. Only lowercase letters, numbers, and underscores are allowed",
|
||||||
|
identifier_type, invalid_chars
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for uppercase letters specifically to give a helpful message
|
||||||
|
if s.chars().any(|c| c.is_ascii_uppercase()) {
|
||||||
|
return Err(Status::invalid_argument(format!(
|
||||||
|
"{} contains uppercase letters. Only lowercase letters are allowed",
|
||||||
|
identifier_type
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn sanitize_table_name(s: &str) -> String {
|
fn validate_decimal_number_format(num_str: &str, param_name: &str) -> Result<(), Status> {
|
||||||
let year = OffsetDateTime::now_utc().year();
|
if num_str.is_empty() {
|
||||||
let cleaned = s.replace(|c: char| !c.is_ascii_alphanumeric() && c != '_', "")
|
return Err(Status::invalid_argument(format!(
|
||||||
.trim()
|
"{} cannot be empty",
|
||||||
.to_lowercase();
|
param_name
|
||||||
format!("{}_{}", year, cleaned)
|
)));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for explicit signs
|
||||||
|
if num_str.starts_with('+') || num_str.starts_with('-') {
|
||||||
|
return Err(Status::invalid_argument(format!(
|
||||||
|
"{} cannot have explicit positive or negative signs",
|
||||||
|
param_name
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for decimal points
|
||||||
|
if num_str.contains('.') {
|
||||||
|
return Err(Status::invalid_argument(format!(
|
||||||
|
"{} must be a whole number (no decimal points)",
|
||||||
|
param_name
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for leading zeros (but allow "0" itself)
|
||||||
|
if num_str.len() > 1 && num_str.starts_with('0') {
|
||||||
|
let trimmed = num_str.trim_start_matches('0');
|
||||||
|
let suggestion = if trimmed.is_empty() { "0" } else { trimmed };
|
||||||
|
return Err(Status::invalid_argument(format!(
|
||||||
|
"{} cannot have leading zeros (use '{}' instead of '{}')",
|
||||||
|
param_name,
|
||||||
|
suggestion,
|
||||||
|
num_str
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check that all characters are digits
|
||||||
|
if !num_str.chars().all(|c| c.is_ascii_digit()) {
|
||||||
|
return Err(Status::invalid_argument(format!(
|
||||||
|
"{} contains invalid characters. Only digits 0-9 are allowed",
|
||||||
|
param_name
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn sanitize_identifier(s: &str) -> String {
|
fn map_field_type(field_type: &str) -> Result<String, Status> {
|
||||||
s.replace(|c: char| !c.is_ascii_alphanumeric() && c != '_', "")
|
let lower_field_type = field_type.to_lowercase();
|
||||||
.trim()
|
|
||||||
.to_lowercase()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn map_field_type(field_type: &str) -> Result<&str, Status> {
|
// Special handling for "decimal(precision, scale)"
|
||||||
|
if lower_field_type.starts_with("decimal(") && lower_field_type.ends_with(')') {
|
||||||
|
// Extract the part inside the parentheses, e.g., "10, 2"
|
||||||
|
let args = lower_field_type
|
||||||
|
.strip_prefix("decimal(")
|
||||||
|
.and_then(|s| s.strip_suffix(')'))
|
||||||
|
.unwrap_or(""); // Should always succeed due to the checks above
|
||||||
|
|
||||||
|
// Split into precision and scale parts
|
||||||
|
if let Some((p_str, s_str)) = args.split_once(',') {
|
||||||
|
let precision_str = p_str.trim();
|
||||||
|
let scale_str = s_str.trim();
|
||||||
|
|
||||||
|
// NEW: Validate format BEFORE parsing
|
||||||
|
validate_decimal_number_format(precision_str, "precision")?;
|
||||||
|
validate_decimal_number_format(scale_str, "scale")?;
|
||||||
|
|
||||||
|
// Parse precision, returning an error if it's not a valid number
|
||||||
|
let precision = precision_str.parse::<u32>().map_err(|_| {
|
||||||
|
Status::invalid_argument("Invalid precision in decimal type")
|
||||||
|
})?;
|
||||||
|
|
||||||
|
// Parse scale, returning an error if it's not a valid number
|
||||||
|
let scale = scale_str.parse::<u32>().map_err(|_| {
|
||||||
|
Status::invalid_argument("Invalid scale in decimal type")
|
||||||
|
})?;
|
||||||
|
|
||||||
|
// Add validation based on PostgreSQL rules
|
||||||
|
if precision < 1 {
|
||||||
|
return Err(Status::invalid_argument("Precision must be at least 1"));
|
||||||
|
}
|
||||||
|
if scale > precision {
|
||||||
|
return Err(Status::invalid_argument(
|
||||||
|
"Scale cannot be greater than precision",
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
// If everything is valid, build and return the NUMERIC type string
|
||||||
|
return Ok(format!("NUMERIC({}, {})", precision, scale));
|
||||||
|
} else {
|
||||||
|
// The format was wrong, e.g., "decimal(10)" or "decimal()"
|
||||||
|
return Err(Status::invalid_argument(
|
||||||
|
"Invalid decimal format. Expected: decimal(precision, scale)",
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// If not a decimal, fall back to the predefined list
|
||||||
PREDEFINED_FIELD_TYPES
|
PREDEFINED_FIELD_TYPES
|
||||||
.iter()
|
.iter()
|
||||||
.find(|(key, _)| *key == field_type.to_lowercase().as_str())
|
.find(|(key, _)| *key == lower_field_type.as_str())
|
||||||
.map(|(_, sql_type)| *sql_type)
|
.map(|(_, sql_type)| sql_type.to_string()) // Convert to an owned String
|
||||||
.ok_or_else(|| Status::invalid_argument(format!("Invalid field type: {}", field_type)))
|
.ok_or_else(|| {
|
||||||
|
Status::invalid_argument(format!(
|
||||||
|
"Invalid field type: {}",
|
||||||
|
field_type
|
||||||
|
))
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
fn is_invalid_table_name(table_name: &str) -> bool {
|
fn is_invalid_table_name(table_name: &str) -> bool {
|
||||||
@@ -52,33 +174,65 @@ fn is_invalid_table_name(table_name: &str) -> bool {
|
|||||||
table_name == "created_at"
|
table_name == "created_at"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn is_reserved_schema(schema_name: &str) -> bool {
|
||||||
|
let lower = schema_name.to_lowercase();
|
||||||
|
lower == "public" ||
|
||||||
|
lower == "information_schema" ||
|
||||||
|
lower.starts_with("pg_")
|
||||||
|
}
|
||||||
|
|
||||||
pub async fn post_table_definition(
|
pub async fn post_table_definition(
|
||||||
db_pool: &PgPool,
|
db_pool: &PgPool,
|
||||||
request: PostTableDefinitionRequest,
|
request: PostTableDefinitionRequest,
|
||||||
) -> Result<TableDefinitionResponse, Status> {
|
) -> Result<TableDefinitionResponse, Status> {
|
||||||
let base_name = sanitize_table_name(&request.table_name);
|
// Create owned copies of the strings after validation
|
||||||
let user_part_cleaned = request.table_name
|
let profile_name = {
|
||||||
.replace(|c: char| !c.is_ascii_alphanumeric() && c != '_', "")
|
let trimmed = request.profile_name.trim();
|
||||||
.trim_matches('_')
|
validate_identifier_format(trimmed, "Profile name")?;
|
||||||
.to_lowercase();
|
trimmed.to_string()
|
||||||
|
};
|
||||||
|
|
||||||
// New validation check
|
// Add validation to prevent reserved schemas
|
||||||
if is_invalid_table_name(&user_part_cleaned) {
|
if is_reserved_schema(&profile_name) {
|
||||||
return Err(Status::invalid_argument(
|
return Err(Status::invalid_argument("Profile name is reserved and cannot be used"));
|
||||||
"Table name cannot be 'id', 'deleted', 'created_at' or end with '_id'"
|
|
||||||
));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if !user_part_cleaned.is_empty() && !is_valid_identifier(&user_part_cleaned) {
|
const MAX_IDENTIFIER_LENGTH: usize = 63;
|
||||||
return Err(Status::invalid_argument("Invalid table name"));
|
|
||||||
} else if user_part_cleaned.is_empty() {
|
if profile_name.len() > MAX_IDENTIFIER_LENGTH {
|
||||||
return Err(Status::invalid_argument("Table name cannot be empty"));
|
return Err(Status::invalid_argument(format!(
|
||||||
|
"Profile name '{}' exceeds the {} character limit.",
|
||||||
|
profile_name,
|
||||||
|
MAX_IDENTIFIER_LENGTH
|
||||||
|
)));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
let table_name = {
|
||||||
|
let trimmed = request.table_name.trim();
|
||||||
|
validate_identifier_format(trimmed, "Table name")?;
|
||||||
|
|
||||||
|
if trimmed.len() > MAX_IDENTIFIER_LENGTH {
|
||||||
|
return Err(Status::invalid_argument(format!(
|
||||||
|
"Table name '{}' exceeds the {} character limit.",
|
||||||
|
trimmed,
|
||||||
|
MAX_IDENTIFIER_LENGTH
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check invalid table names on the original input
|
||||||
|
if is_invalid_table_name(trimmed) {
|
||||||
|
return Err(Status::invalid_argument(
|
||||||
|
"Table name cannot be 'id', 'deleted', 'created_at' or end with '_id'"
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
trimmed.to_string()
|
||||||
|
};
|
||||||
|
|
||||||
let mut tx = db_pool.begin().await
|
let mut tx = db_pool.begin().await
|
||||||
.map_err(|e| Status::internal(format!("Failed to start transaction: {}", e)))?;
|
.map_err(|e| Status::internal(format!("Failed to start transaction: {}", e)))?;
|
||||||
|
|
||||||
match execute_table_definition(&mut tx, request, base_name).await {
|
match execute_table_definition(&mut tx, request, table_name, profile_name).await {
|
||||||
Ok(response) => {
|
Ok(response) => {
|
||||||
tx.commit().await
|
tx.commit().await
|
||||||
.map_err(|e| Status::internal(format!("Failed to commit transaction: {}", e)))?;
|
.map_err(|e| Status::internal(format!("Failed to commit transaction: {}", e)))?;
|
||||||
@@ -95,23 +249,42 @@ async fn execute_table_definition(
|
|||||||
tx: &mut Transaction<'_, Postgres>,
|
tx: &mut Transaction<'_, Postgres>,
|
||||||
mut request: PostTableDefinitionRequest,
|
mut request: PostTableDefinitionRequest,
|
||||||
table_name: String,
|
table_name: String,
|
||||||
|
profile_name: String,
|
||||||
) -> Result<TableDefinitionResponse, Status> {
|
) -> Result<TableDefinitionResponse, Status> {
|
||||||
let profile = sqlx::query!(
|
// Use the validated profile_name for schema insertion
|
||||||
"INSERT INTO profiles (name) VALUES ($1)
|
let schema = sqlx::query!(
|
||||||
|
"INSERT INTO schemas (name) VALUES ($1)
|
||||||
ON CONFLICT (name) DO UPDATE SET name = EXCLUDED.name
|
ON CONFLICT (name) DO UPDATE SET name = EXCLUDED.name
|
||||||
RETURNING id",
|
RETURNING id",
|
||||||
request.profile_name
|
profile_name // Use the validated profile name
|
||||||
)
|
)
|
||||||
.fetch_one(&mut **tx)
|
.fetch_one(&mut **tx)
|
||||||
.await
|
.await
|
||||||
.map_err(|e| Status::internal(format!("Profile error: {}", e)))?;
|
.map_err(|e| Status::internal(format!("Schema error: {}", e)))?;
|
||||||
|
|
||||||
|
// Create PostgreSQL schema if it doesn't exist
|
||||||
|
let create_schema_sql = format!("CREATE SCHEMA IF NOT EXISTS \"{}\"", profile_name);
|
||||||
|
sqlx::query(&create_schema_sql)
|
||||||
|
.execute(&mut **tx)
|
||||||
|
.await
|
||||||
|
.map_err(|e| Status::internal(format!("Schema creation failed: {}", e)))?;
|
||||||
|
|
||||||
let mut links = Vec::new();
|
let mut links = Vec::new();
|
||||||
|
let mut seen_tables = std::collections::HashSet::new();
|
||||||
|
|
||||||
for link in request.links.drain(..) {
|
for link in request.links.drain(..) {
|
||||||
|
// Check for duplicate link
|
||||||
|
if !seen_tables.insert(link.linked_table_name.clone()) {
|
||||||
|
return Err(Status::invalid_argument(format!(
|
||||||
|
"Duplicate link to table '{}'",
|
||||||
|
link.linked_table_name
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
|
||||||
let linked_table = sqlx::query!(
|
let linked_table = sqlx::query!(
|
||||||
"SELECT id FROM table_definitions
|
"SELECT id FROM table_definitions
|
||||||
WHERE profile_id = $1 AND table_name = $2",
|
WHERE schema_id = $1 AND table_name = $2",
|
||||||
profile.id,
|
schema.id,
|
||||||
link.linked_table_name
|
link.linked_table_name
|
||||||
)
|
)
|
||||||
.fetch_optional(&mut **tx)
|
.fetch_optional(&mut **tx)
|
||||||
@@ -127,34 +300,40 @@ async fn execute_table_definition(
|
|||||||
|
|
||||||
let mut columns = Vec::new();
|
let mut columns = Vec::new();
|
||||||
for col_def in request.columns.drain(..) {
|
for col_def in request.columns.drain(..) {
|
||||||
let col_name = sanitize_identifier(&col_def.name);
|
let col_name = col_def.name.trim().to_string();
|
||||||
if !is_valid_identifier(&col_def.name) {
|
validate_identifier_format(&col_name, "Column name")?;
|
||||||
return Err(Status::invalid_argument("Invalid column name"));
|
|
||||||
|
if col_name.ends_with("_id") || col_name == "id" || col_name == "deleted" || col_name == "created_at" {
|
||||||
|
return Err(Status::invalid_argument(format!(
|
||||||
|
"Column name '{}' cannot be 'id', 'deleted', 'created_at' or end with '_id'",
|
||||||
|
col_name
|
||||||
|
)));
|
||||||
}
|
}
|
||||||
|
|
||||||
let sql_type = map_field_type(&col_def.field_type)?;
|
let sql_type = map_field_type(&col_def.field_type)?;
|
||||||
columns.push(format!("\"{}\" {}", col_name, sql_type));
|
columns.push(format!("\"{}\" {}", col_name, sql_type));
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut indexes = Vec::new();
|
let mut indexes = Vec::new();
|
||||||
for idx in request.indexes.drain(..) {
|
for idx in request.indexes.drain(..) {
|
||||||
let idx_name = sanitize_identifier(&idx);
|
let idx_name = idx.trim().to_string();
|
||||||
if !is_valid_identifier(&idx) {
|
validate_identifier_format(&idx_name, "Index name")?;
|
||||||
return Err(Status::invalid_argument(format!("Invalid index name: {}", idx)));
|
|
||||||
}
|
|
||||||
if !columns.iter().any(|c| c.starts_with(&format!("\"{}\"", idx_name))) {
|
if !columns.iter().any(|c| c.starts_with(&format!("\"{}\"", idx_name))) {
|
||||||
return Err(Status::invalid_argument(format!("Index column {} not found", idx_name)));
|
return Err(Status::invalid_argument(format!("Index column '{}' not found", idx_name)));
|
||||||
}
|
}
|
||||||
indexes.push(idx_name);
|
indexes.push(idx_name);
|
||||||
}
|
}
|
||||||
|
|
||||||
let (create_sql, index_sql) = generate_table_sql(tx, &table_name, &columns, &indexes, &links).await?;
|
let (create_sql, index_sql) = generate_table_sql(tx, &profile_name, &table_name, &columns, &indexes, &links).await?;
|
||||||
|
|
||||||
|
// Use schema_id instead of profile_id
|
||||||
let table_def = sqlx::query!(
|
let table_def = sqlx::query!(
|
||||||
r#"INSERT INTO table_definitions
|
r#"INSERT INTO table_definitions
|
||||||
(profile_id, table_name, columns, indexes)
|
(schema_id, table_name, columns, indexes)
|
||||||
VALUES ($1, $2, $3, $4)
|
VALUES ($1, $2, $3, $4)
|
||||||
RETURNING id"#,
|
RETURNING id"#,
|
||||||
profile.id,
|
schema.id,
|
||||||
&table_name,
|
&table_name,
|
||||||
json!(columns),
|
json!(columns),
|
||||||
json!(indexes)
|
json!(indexes)
|
||||||
@@ -163,7 +342,8 @@ async fn execute_table_definition(
|
|||||||
.await
|
.await
|
||||||
.map_err(|e| {
|
.map_err(|e| {
|
||||||
if let Some(db_err) = e.as_database_error() {
|
if let Some(db_err) = e.as_database_error() {
|
||||||
if db_err.constraint() == Some("idx_table_definitions_profile_table") {
|
// Update constraint name to match new schema
|
||||||
|
if db_err.constraint() == Some("idx_table_definitions_schema_table") {
|
||||||
return Status::already_exists("Table already exists in this profile");
|
return Status::already_exists("Table already exists in this profile");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -204,13 +384,13 @@ async fn execute_table_definition(
|
|||||||
|
|
||||||
async fn generate_table_sql(
|
async fn generate_table_sql(
|
||||||
tx: &mut Transaction<'_, Postgres>,
|
tx: &mut Transaction<'_, Postgres>,
|
||||||
|
profile_name: &str,
|
||||||
table_name: &str,
|
table_name: &str,
|
||||||
columns: &[String],
|
columns: &[String],
|
||||||
indexes: &[String],
|
indexes: &[String],
|
||||||
links: &[(i64, bool)],
|
links: &[(i64, bool)],
|
||||||
) -> Result<(String, Vec<String>), Status> {
|
) -> Result<(String, Vec<String>), Status> {
|
||||||
let qualified_table = format!("{}.\"{}\"", GENERATED_SCHEMA_NAME, table_name);
|
let qualified_table = format!("\"{}\".\"{}\"", profile_name, table_name);
|
||||||
|
|
||||||
let mut system_columns = vec![
|
let mut system_columns = vec![
|
||||||
"id BIGSERIAL PRIMARY KEY".to_string(),
|
"id BIGSERIAL PRIMARY KEY".to_string(),
|
||||||
"deleted BOOLEAN NOT NULL DEFAULT FALSE".to_string(),
|
"deleted BOOLEAN NOT NULL DEFAULT FALSE".to_string(),
|
||||||
@@ -218,16 +398,13 @@ async fn generate_table_sql(
|
|||||||
|
|
||||||
for (linked_id, required) in links {
|
for (linked_id, required) in links {
|
||||||
let linked_table = get_table_name_by_id(tx, *linked_id).await?;
|
let linked_table = get_table_name_by_id(tx, *linked_id).await?;
|
||||||
let qualified_linked_table = format!("{}.\"{}\"", GENERATED_SCHEMA_NAME, linked_table);
|
let qualified_linked_table = format!("\"{}\".\"{}\"", profile_name, linked_table);
|
||||||
let base_name = linked_table.split_once('_')
|
|
||||||
.map(|(_, rest)| rest)
|
|
||||||
.unwrap_or(&linked_table)
|
|
||||||
.to_string();
|
|
||||||
let null_clause = if *required { "NOT NULL" } else { "" };
|
|
||||||
|
|
||||||
|
// Simply use the full table name - no truncation!
|
||||||
|
let null_clause = if *required { "NOT NULL" } else { "" };
|
||||||
system_columns.push(
|
system_columns.push(
|
||||||
format!("\"{0}_id\" BIGINT {1} REFERENCES {2}(id)",
|
format!("\"{}_id\" BIGINT {} REFERENCES {}(id)",
|
||||||
base_name, null_clause, qualified_linked_table
|
linked_table, null_clause, qualified_linked_table
|
||||||
)
|
)
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
@@ -247,13 +424,9 @@ async fn generate_table_sql(
|
|||||||
let mut all_indexes = Vec::new();
|
let mut all_indexes = Vec::new();
|
||||||
for (linked_id, _) in links {
|
for (linked_id, _) in links {
|
||||||
let linked_table = get_table_name_by_id(tx, *linked_id).await?;
|
let linked_table = get_table_name_by_id(tx, *linked_id).await?;
|
||||||
let base_name = linked_table.split_once('_')
|
|
||||||
.map(|(_, rest)| rest)
|
|
||||||
.unwrap_or(&linked_table)
|
|
||||||
.to_string();
|
|
||||||
all_indexes.push(format!(
|
all_indexes.push(format!(
|
||||||
"CREATE INDEX \"idx_{}_{}_fk\" ON {} (\"{}_id\")",
|
"CREATE INDEX \"idx_{}_{}_fk\" ON {} (\"{}_id\")",
|
||||||
table_name, base_name, qualified_table, base_name
|
table_name, linked_table, qualified_table, linked_table
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -49,7 +49,7 @@ pub async fn post_table_script(
|
|||||||
) -> Result<TableScriptResponse, Status> {
|
) -> Result<TableScriptResponse, Status> {
|
||||||
// Fetch the table definition
|
// Fetch the table definition
|
||||||
let table_def = sqlx::query!(
|
let table_def = sqlx::query!(
|
||||||
r#"SELECT id, table_name, columns, profile_id
|
r#"SELECT id, table_name, columns, schema_id
|
||||||
FROM table_definitions WHERE id = $1"#,
|
FROM table_definitions WHERE id = $1"#,
|
||||||
request.table_definition_id
|
request.table_definition_id
|
||||||
)
|
)
|
||||||
@@ -76,7 +76,7 @@ pub async fn post_table_script(
|
|||||||
let script_record = sqlx::query!(
|
let script_record = sqlx::query!(
|
||||||
r#"INSERT INTO table_scripts
|
r#"INSERT INTO table_scripts
|
||||||
(table_definitions_id, target_table, target_column,
|
(table_definitions_id, target_table, target_column,
|
||||||
target_column_type, script, description, profile_id)
|
target_column_type, script, description, schema_id)
|
||||||
VALUES ($1, $2, $3, $4, $5, $6, $7)
|
VALUES ($1, $2, $3, $4, $5, $6, $7)
|
||||||
RETURNING id"#,
|
RETURNING id"#,
|
||||||
request.table_definition_id,
|
request.table_definition_id,
|
||||||
@@ -85,7 +85,7 @@ pub async fn post_table_script(
|
|||||||
column_type,
|
column_type,
|
||||||
parsed_script,
|
parsed_script,
|
||||||
request.description,
|
request.description,
|
||||||
table_def.profile_id
|
table_def.schema_id
|
||||||
)
|
)
|
||||||
.fetch_one(db_pool)
|
.fetch_one(db_pool)
|
||||||
.await
|
.await
|
||||||
|
|||||||
@@ -20,11 +20,11 @@ pub async fn get_table_structure(
|
|||||||
) -> Result<TableStructureResponse, Status> {
|
) -> Result<TableStructureResponse, Status> {
|
||||||
let profile_name = request.profile_name;
|
let profile_name = request.profile_name;
|
||||||
let table_name = request.table_name;
|
let table_name = request.table_name;
|
||||||
let table_schema = "gen";
|
let table_schema = &profile_name;
|
||||||
|
|
||||||
// 1. Validate Profile
|
// 1. Validate Profile
|
||||||
let profile = sqlx::query!(
|
let schema = sqlx::query!(
|
||||||
"SELECT id FROM profiles WHERE name = $1",
|
"SELECT id FROM schemas WHERE name = $1",
|
||||||
profile_name
|
profile_name
|
||||||
)
|
)
|
||||||
.fetch_optional(db_pool)
|
.fetch_optional(db_pool)
|
||||||
@@ -36,8 +36,8 @@ pub async fn get_table_structure(
|
|||||||
))
|
))
|
||||||
})?;
|
})?;
|
||||||
|
|
||||||
let profile_id = match profile {
|
let schema_id = match schema {
|
||||||
Some(p) => p.id,
|
Some(s) => s.id,
|
||||||
None => {
|
None => {
|
||||||
return Err(Status::not_found(format!(
|
return Err(Status::not_found(format!(
|
||||||
"Profile '{}' not found",
|
"Profile '{}' not found",
|
||||||
@@ -48,8 +48,8 @@ pub async fn get_table_structure(
|
|||||||
|
|
||||||
// 2. Validate Table within Profile
|
// 2. Validate Table within Profile
|
||||||
sqlx::query!(
|
sqlx::query!(
|
||||||
"SELECT id FROM table_definitions WHERE profile_id = $1 AND table_name = $2",
|
"SELECT id FROM table_definitions WHERE schema_id = $1 AND table_name = $2",
|
||||||
profile_id,
|
schema_id,
|
||||||
table_name
|
table_name
|
||||||
)
|
)
|
||||||
.fetch_optional(db_pool)
|
.fetch_optional(db_pool)
|
||||||
|
|||||||
@@ -9,24 +9,24 @@ pub async fn delete_table_data(
|
|||||||
request: DeleteTableDataRequest,
|
request: DeleteTableDataRequest,
|
||||||
) -> Result<DeleteTableDataResponse, Status> {
|
) -> Result<DeleteTableDataResponse, Status> {
|
||||||
// Lookup profile
|
// Lookup profile
|
||||||
let profile = sqlx::query!(
|
let schema = sqlx::query!(
|
||||||
"SELECT id FROM profiles WHERE name = $1",
|
"SELECT id FROM schemas WHERE name = $1",
|
||||||
request.profile_name
|
request.profile_name
|
||||||
)
|
)
|
||||||
.fetch_optional(db_pool)
|
.fetch_optional(db_pool)
|
||||||
.await
|
.await
|
||||||
.map_err(|e| Status::internal(format!("Profile lookup error: {}", e)))?;
|
.map_err(|e| Status::internal(format!("Profile lookup error: {}", e)))?;
|
||||||
|
|
||||||
let profile_id = match profile {
|
let schema_id = match schema {
|
||||||
Some(p) => p.id,
|
Some(s) => s.id,
|
||||||
None => return Err(Status::not_found("Profile not found")),
|
None => return Err(Status::not_found("Profile not found")),
|
||||||
};
|
};
|
||||||
|
|
||||||
// Verify table exists in profile
|
// Verify table exists in profile
|
||||||
let table_exists = sqlx::query!(
|
let table_exists = sqlx::query!(
|
||||||
"SELECT 1 AS exists FROM table_definitions
|
"SELECT 1 AS exists FROM table_definitions
|
||||||
WHERE profile_id = $1 AND table_name = $2",
|
WHERE schema_id = $1 AND table_name = $2",
|
||||||
profile_id,
|
schema_id,
|
||||||
request.table_name
|
request.table_name
|
||||||
)
|
)
|
||||||
.fetch_optional(db_pool)
|
.fetch_optional(db_pool)
|
||||||
@@ -38,7 +38,12 @@ pub async fn delete_table_data(
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Qualify table name with schema
|
// Qualify table name with schema
|
||||||
let qualified_table = qualify_table_name_for_data(&request.table_name)?;
|
let qualified_table = qualify_table_name_for_data(
|
||||||
|
db_pool,
|
||||||
|
&request.profile_name,
|
||||||
|
&request.table_name,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
// Perform soft delete using qualified table name
|
// Perform soft delete using qualified table name
|
||||||
let query = format!(
|
let query = format!(
|
||||||
|
|||||||
@@ -1,9 +1,10 @@
|
|||||||
// src/tables_data/handlers/get_table_data.rs
|
// src/tables_data/handlers/get_table_data.rs
|
||||||
|
|
||||||
use tonic::Status;
|
use tonic::Status;
|
||||||
use sqlx::{PgPool, Row};
|
use sqlx::{PgPool, Row};
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use common::proto::multieko2::tables_data::{GetTableDataRequest, GetTableDataResponse};
|
use common::proto::multieko2::tables_data::{GetTableDataRequest, GetTableDataResponse};
|
||||||
use crate::shared::schema_qualifier::qualify_table_name_for_data; // Import schema qualifier
|
use crate::shared::schema_qualifier::qualify_table_name_for_data;
|
||||||
|
|
||||||
pub async fn get_table_data(
|
pub async fn get_table_data(
|
||||||
db_pool: &PgPool,
|
db_pool: &PgPool,
|
||||||
@@ -14,21 +15,21 @@ pub async fn get_table_data(
|
|||||||
let record_id = request.id;
|
let record_id = request.id;
|
||||||
|
|
||||||
// Lookup profile
|
// Lookup profile
|
||||||
let profile = sqlx::query!(
|
let schema = sqlx::query!(
|
||||||
"SELECT id FROM profiles WHERE name = $1",
|
"SELECT id FROM schemas WHERE name = $1",
|
||||||
profile_name
|
profile_name
|
||||||
)
|
)
|
||||||
.fetch_optional(db_pool)
|
.fetch_optional(db_pool)
|
||||||
.await
|
.await
|
||||||
.map_err(|e| Status::internal(format!("Profile lookup error: {}", e)))?;
|
.map_err(|e| Status::internal(format!("Profile lookup error: {}", e)))?;
|
||||||
|
|
||||||
let profile_id = profile.ok_or_else(|| Status::not_found("Profile not found"))?.id;
|
let schema_id = schema.ok_or_else(|| Status::not_found("Profile not found"))?.id;
|
||||||
|
|
||||||
// Lookup table_definition
|
// Lookup table_definition
|
||||||
let table_def = sqlx::query!(
|
let table_def = sqlx::query!(
|
||||||
r#"SELECT id, columns FROM table_definitions
|
r#"SELECT id, columns FROM table_definitions
|
||||||
WHERE profile_id = $1 AND table_name = $2"#,
|
WHERE schema_id = $1 AND table_name = $2"#,
|
||||||
profile_id,
|
schema_id,
|
||||||
table_name
|
table_name
|
||||||
)
|
)
|
||||||
.fetch_optional(db_pool)
|
.fetch_optional(db_pool)
|
||||||
@@ -48,29 +49,51 @@ pub async fn get_table_data(
|
|||||||
return Err(Status::internal("Invalid column format"));
|
return Err(Status::internal("Invalid column format"));
|
||||||
}
|
}
|
||||||
let name = parts[0].trim_matches('"').to_string();
|
let name = parts[0].trim_matches('"').to_string();
|
||||||
let sql_type = parts[1].to_string();
|
user_columns.push(name);
|
||||||
user_columns.push((name, sql_type));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Prepare all columns (system + user-defined)
|
// --- START OF FIX ---
|
||||||
let system_columns = vec![
|
|
||||||
("id".to_string(), "BIGINT".to_string()),
|
|
||||||
("deleted".to_string(), "BOOLEAN".to_string()),
|
|
||||||
];
|
|
||||||
let all_columns: Vec<(String, String)> = system_columns
|
|
||||||
.into_iter()
|
|
||||||
.chain(user_columns.into_iter())
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
// Build SELECT clause with COALESCE and type casting
|
// 1. Get all foreign key columns for this table
|
||||||
let columns_clause = all_columns
|
let fk_columns_query = sqlx::query!(
|
||||||
|
r#"SELECT ltd.table_name
|
||||||
|
FROM table_definition_links tdl
|
||||||
|
JOIN table_definitions ltd ON tdl.linked_table_id = ltd.id
|
||||||
|
WHERE tdl.source_table_id = $1"#,
|
||||||
|
table_def.id
|
||||||
|
)
|
||||||
|
.fetch_all(db_pool)
|
||||||
|
.await
|
||||||
|
.map_err(|e| Status::internal(format!("Foreign key lookup error: {}", e)))?;
|
||||||
|
|
||||||
|
// 2. Build the list of foreign key column names using full table names
|
||||||
|
let mut foreign_key_columns = Vec::new();
|
||||||
|
for fk in fk_columns_query {
|
||||||
|
// Use the full table name, not a stripped version
|
||||||
|
foreign_key_columns.push(format!("{}_id", fk.table_name));
|
||||||
|
}
|
||||||
|
|
||||||
|
// 3. Prepare a complete list of all columns to select
|
||||||
|
let mut all_column_names = vec!["id".to_string(), "deleted".to_string()];
|
||||||
|
all_column_names.extend(user_columns);
|
||||||
|
all_column_names.extend(foreign_key_columns);
|
||||||
|
|
||||||
|
// 4. Build the SELECT clause with all columns
|
||||||
|
let columns_clause = all_column_names
|
||||||
.iter()
|
.iter()
|
||||||
.map(|(name, _)| format!("COALESCE(\"{0}\"::TEXT, '') AS \"{0}\"", name))
|
.map(|name| format!("COALESCE(\"{0}\"::TEXT, '') AS \"{0}\"", name))
|
||||||
.collect::<Vec<_>>()
|
.collect::<Vec<_>>()
|
||||||
.join(", ");
|
.join(", ");
|
||||||
|
|
||||||
|
// --- END OF FIX ---
|
||||||
|
|
||||||
// Qualify table name with schema
|
// Qualify table name with schema
|
||||||
let qualified_table = qualify_table_name_for_data(&table_name)?;
|
let qualified_table = qualify_table_name_for_data(
|
||||||
|
db_pool,
|
||||||
|
&profile_name,
|
||||||
|
&table_name,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
let sql = format!(
|
let sql = format!(
|
||||||
"SELECT {} FROM {} WHERE id = $1 AND deleted = false",
|
"SELECT {} FROM {} WHERE id = $1 AND deleted = false",
|
||||||
@@ -87,7 +110,6 @@ pub async fn get_table_data(
|
|||||||
Ok(row) => row,
|
Ok(row) => row,
|
||||||
Err(sqlx::Error::RowNotFound) => return Err(Status::not_found("Record not found")),
|
Err(sqlx::Error::RowNotFound) => return Err(Status::not_found("Record not found")),
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
// Handle "relation does not exist" error specifically
|
|
||||||
if let Some(db_err) = e.as_database_error() {
|
if let Some(db_err) = e.as_database_error() {
|
||||||
if db_err.code() == Some(std::borrow::Cow::Borrowed("42P01")) {
|
if db_err.code() == Some(std::borrow::Cow::Borrowed("42P01")) {
|
||||||
return Err(Status::internal(format!(
|
return Err(Status::internal(format!(
|
||||||
@@ -100,9 +122,9 @@ pub async fn get_table_data(
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
// Build response data
|
// Build response data from the complete list of columns
|
||||||
let mut data = HashMap::new();
|
let mut data = HashMap::new();
|
||||||
for (column_name, _) in &all_columns {
|
for column_name in &all_column_names {
|
||||||
let value: String = row
|
let value: String = row
|
||||||
.try_get(column_name.as_str())
|
.try_get(column_name.as_str())
|
||||||
.map_err(|e| Status::internal(format!("Failed to get column {}: {}", column_name, e)))?;
|
.map_err(|e| Status::internal(format!("Failed to get column {}: {}", column_name, e)))?;
|
||||||
|
|||||||
@@ -18,22 +18,22 @@ pub async fn get_table_data_by_position(
|
|||||||
return Err(Status::invalid_argument("Position must be at least 1"));
|
return Err(Status::invalid_argument("Position must be at least 1"));
|
||||||
}
|
}
|
||||||
|
|
||||||
let profile = sqlx::query!(
|
let schema = sqlx::query!(
|
||||||
"SELECT id FROM profiles WHERE name = $1",
|
"SELECT id FROM schemas WHERE name = $1",
|
||||||
profile_name
|
profile_name
|
||||||
)
|
)
|
||||||
.fetch_optional(db_pool)
|
.fetch_optional(db_pool)
|
||||||
.await
|
.await
|
||||||
.map_err(|e| Status::internal(format!("Profile lookup error: {}", e)))?;
|
.map_err(|e| Status::internal(format!("Profile lookup error: {}", e)))?;
|
||||||
|
|
||||||
let profile_id = profile.ok_or_else(|| Status::not_found("Profile not found"))?.id;
|
let schema_id = schema.ok_or_else(|| Status::not_found("Profile not found"))?.id;
|
||||||
|
|
||||||
let table_exists = sqlx::query_scalar!(
|
let table_exists = sqlx::query_scalar!(
|
||||||
r#"SELECT EXISTS(
|
r#"SELECT EXISTS(
|
||||||
SELECT 1 FROM table_definitions
|
SELECT 1 FROM table_definitions
|
||||||
WHERE profile_id = $1 AND table_name = $2
|
WHERE schema_id = $1 AND table_name = $2
|
||||||
) AS "exists!""#,
|
) AS "exists!""#,
|
||||||
profile_id,
|
schema_id,
|
||||||
table_name
|
table_name
|
||||||
)
|
)
|
||||||
.fetch_one(db_pool)
|
.fetch_one(db_pool)
|
||||||
@@ -45,7 +45,12 @@ pub async fn get_table_data_by_position(
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Qualify table name with schema
|
// Qualify table name with schema
|
||||||
let qualified_table = qualify_table_name_for_data(&table_name)?;
|
let qualified_table = qualify_table_name_for_data(
|
||||||
|
db_pool,
|
||||||
|
&profile_name,
|
||||||
|
&table_name,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
let id_result = sqlx::query_scalar(
|
let id_result = sqlx::query_scalar(
|
||||||
&format!(
|
&format!(
|
||||||
|
|||||||
@@ -12,15 +12,15 @@ pub async fn get_table_data_count(
|
|||||||
// We still need to verify that the table is logically defined for the profile.
|
// We still need to verify that the table is logically defined for the profile.
|
||||||
// The schema qualifier handles *how* to access it physically, but this check
|
// The schema qualifier handles *how* to access it physically, but this check
|
||||||
// ensures the request is valid in the context of the application's definitions.
|
// ensures the request is valid in the context of the application's definitions.
|
||||||
let profile = sqlx::query!(
|
let schema = sqlx::query!(
|
||||||
"SELECT id FROM profiles WHERE name = $1",
|
"SELECT id FROM schemas WHERE name = $1",
|
||||||
request.profile_name
|
request.profile_name
|
||||||
)
|
)
|
||||||
.fetch_optional(db_pool)
|
.fetch_optional(db_pool)
|
||||||
.await
|
.await
|
||||||
.map_err(|e| Status::internal(format!("Profile lookup error for '{}': {}", request.profile_name, e)))?;
|
.map_err(|e| Status::internal(format!("Profile lookup error for '{}': {}", request.profile_name, e)))?;
|
||||||
|
|
||||||
let profile_id = match profile {
|
let schema_id = match schema {
|
||||||
Some(p) => p.id,
|
Some(p) => p.id,
|
||||||
None => return Err(Status::not_found(format!("Profile '{}' not found", request.profile_name))),
|
None => return Err(Status::not_found(format!("Profile '{}' not found", request.profile_name))),
|
||||||
};
|
};
|
||||||
@@ -28,9 +28,9 @@ pub async fn get_table_data_count(
|
|||||||
let table_defined_for_profile = sqlx::query_scalar!(
|
let table_defined_for_profile = sqlx::query_scalar!(
|
||||||
r#"SELECT EXISTS(
|
r#"SELECT EXISTS(
|
||||||
SELECT 1 FROM table_definitions
|
SELECT 1 FROM table_definitions
|
||||||
WHERE profile_id = $1 AND table_name = $2
|
WHERE schema_id = $1 AND table_name = $2
|
||||||
) AS "exists!" "#, // Added AS "exists!" for clarity with sqlx macro
|
) AS "exists!" "#,
|
||||||
profile_id,
|
schema_id,
|
||||||
request.table_name
|
request.table_name
|
||||||
)
|
)
|
||||||
.fetch_one(db_pool)
|
.fetch_one(db_pool)
|
||||||
@@ -47,7 +47,12 @@ pub async fn get_table_data_count(
|
|||||||
}
|
}
|
||||||
|
|
||||||
// 2. QUALIFY THE TABLE NAME using the imported function
|
// 2. QUALIFY THE TABLE NAME using the imported function
|
||||||
let qualified_table_name = qualify_table_name_for_data(&request.table_name)?;
|
let qualified_table = qualify_table_name_for_data(
|
||||||
|
db_pool,
|
||||||
|
&request.profile_name,
|
||||||
|
&request.table_name,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
// 3. USE THE QUALIFIED NAME in the SQL query
|
// 3. USE THE QUALIFIED NAME in the SQL query
|
||||||
let query_sql = format!(
|
let query_sql = format!(
|
||||||
@@ -56,7 +61,7 @@ pub async fn get_table_data_count(
|
|||||||
FROM {}
|
FROM {}
|
||||||
WHERE deleted = FALSE
|
WHERE deleted = FALSE
|
||||||
"#,
|
"#,
|
||||||
qualified_table_name // Use the schema-qualified name here
|
qualified_table
|
||||||
);
|
);
|
||||||
|
|
||||||
// The rest of the logic remains largely the same, but error messages can be more specific.
|
// The rest of the logic remains largely the same, but error messages can be more specific.
|
||||||
@@ -81,14 +86,14 @@ pub async fn get_table_data_count(
|
|||||||
// even though it was defined in table_definitions. This is an inconsistency.
|
// even though it was defined in table_definitions. This is an inconsistency.
|
||||||
return Err(Status::internal(format!(
|
return Err(Status::internal(format!(
|
||||||
"Table '{}' is defined but does not physically exist in the database as {}.",
|
"Table '{}' is defined but does not physically exist in the database as {}.",
|
||||||
request.table_name, qualified_table_name
|
request.table_name, qualified_table
|
||||||
)));
|
)));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// For other errors, provide a general message.
|
// For other errors, provide a general message.
|
||||||
Err(Status::internal(format!(
|
Err(Status::internal(format!(
|
||||||
"Count query failed for table {}: {}",
|
"Count query failed for table {}: {}",
|
||||||
qualified_table_name, e
|
qualified_table, e
|
||||||
)))
|
)))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
// src/tables_data/handlers/post_table_data.rs
|
// src/tables_data/handlers/post_table_data.rs
|
||||||
|
|
||||||
use tonic::Status;
|
use tonic::Status;
|
||||||
use sqlx::{PgPool, Arguments};
|
use sqlx::{PgPool, Arguments};
|
||||||
use sqlx::postgres::PgArguments;
|
use sqlx::postgres::PgArguments;
|
||||||
@@ -6,42 +7,39 @@ use chrono::{DateTime, Utc};
|
|||||||
use common::proto::multieko2::tables_data::{PostTableDataRequest, PostTableDataResponse};
|
use common::proto::multieko2::tables_data::{PostTableDataRequest, PostTableDataResponse};
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
use crate::shared::schema_qualifier::qualify_table_name_for_data; // Import schema qualifier
|
use prost_types::value::Kind;
|
||||||
|
use rust_decimal::Decimal;
|
||||||
|
use std::str::FromStr;
|
||||||
|
|
||||||
use crate::steel::server::execution::{self, Value};
|
use crate::steel::server::execution::{self, Value};
|
||||||
use crate::steel::server::functions::SteelContext;
|
use crate::steel::server::functions::SteelContext;
|
||||||
|
|
||||||
|
use crate::indexer::{IndexCommand, IndexCommandData};
|
||||||
|
use tokio::sync::mpsc;
|
||||||
|
use tracing::error;
|
||||||
|
|
||||||
pub async fn post_table_data(
|
pub async fn post_table_data(
|
||||||
db_pool: &PgPool,
|
db_pool: &PgPool,
|
||||||
request: PostTableDataRequest,
|
request: PostTableDataRequest,
|
||||||
|
indexer_tx: &mpsc::Sender<IndexCommand>,
|
||||||
) -> Result<PostTableDataResponse, Status> {
|
) -> Result<PostTableDataResponse, Status> {
|
||||||
let profile_name = request.profile_name;
|
let profile_name = request.profile_name;
|
||||||
let table_name = request.table_name;
|
let table_name = request.table_name;
|
||||||
let mut data = HashMap::new();
|
|
||||||
|
|
||||||
// CORRECTED: Process and trim all incoming data values.
|
let schema = sqlx::query!(
|
||||||
// We remove the hardcoded validation. We will let the database's
|
"SELECT id FROM schemas WHERE name = $1",
|
||||||
// NOT NULL constraints or Steel validation scripts handle required fields.
|
|
||||||
for (key, value) in request.data {
|
|
||||||
data.insert(key, value.trim().to_string());
|
|
||||||
}
|
|
||||||
|
|
||||||
// Lookup profile
|
|
||||||
let profile = sqlx::query!(
|
|
||||||
"SELECT id FROM profiles WHERE name = $1",
|
|
||||||
profile_name
|
profile_name
|
||||||
)
|
)
|
||||||
.fetch_optional(db_pool)
|
.fetch_optional(db_pool)
|
||||||
.await
|
.await
|
||||||
.map_err(|e| Status::internal(format!("Profile lookup error: {}", e)))?;
|
.map_err(|e| Status::internal(format!("Profile lookup error: {}", e)))?;
|
||||||
|
|
||||||
let profile_id = profile.ok_or_else(|| Status::not_found("Profile not found"))?.id;
|
let schema_id = schema.ok_or_else(|| Status::not_found("Profile not found"))?.id;
|
||||||
|
|
||||||
// Lookup table_definition
|
|
||||||
let table_def = sqlx::query!(
|
let table_def = sqlx::query!(
|
||||||
r#"SELECT id, columns FROM table_definitions
|
r#"SELECT id, columns FROM table_definitions
|
||||||
WHERE profile_id = $1 AND table_name = $2"#,
|
WHERE schema_id = $1 AND table_name = $2"#,
|
||||||
profile_id,
|
schema_id,
|
||||||
table_name
|
table_name
|
||||||
)
|
)
|
||||||
.fetch_optional(db_pool)
|
.fetch_optional(db_pool)
|
||||||
@@ -50,7 +48,6 @@ pub async fn post_table_data(
|
|||||||
|
|
||||||
let table_def = table_def.ok_or_else(|| Status::not_found("Table not found"))?;
|
let table_def = table_def.ok_or_else(|| Status::not_found("Table not found"))?;
|
||||||
|
|
||||||
// Parse columns from JSON
|
|
||||||
let columns_json: Vec<String> = serde_json::from_value(table_def.columns.clone())
|
let columns_json: Vec<String> = serde_json::from_value(table_def.columns.clone())
|
||||||
.map_err(|e| Status::internal(format!("Column parsing error: {}", e)))?;
|
.map_err(|e| Status::internal(format!("Column parsing error: {}", e)))?;
|
||||||
|
|
||||||
@@ -65,7 +62,6 @@ pub async fn post_table_data(
|
|||||||
columns.push((name, sql_type));
|
columns.push((name, sql_type));
|
||||||
}
|
}
|
||||||
|
|
||||||
// Get all foreign key columns for this table
|
|
||||||
let fk_columns = sqlx::query!(
|
let fk_columns = sqlx::query!(
|
||||||
r#"SELECT ltd.table_name
|
r#"SELECT ltd.table_name
|
||||||
FROM table_definition_links tdl
|
FROM table_definition_links tdl
|
||||||
@@ -77,26 +73,41 @@ pub async fn post_table_data(
|
|||||||
.await
|
.await
|
||||||
.map_err(|e| Status::internal(format!("Foreign key lookup error: {}", e)))?;
|
.map_err(|e| Status::internal(format!("Foreign key lookup error: {}", e)))?;
|
||||||
|
|
||||||
// Build system columns with foreign keys
|
|
||||||
let mut system_columns = vec!["deleted".to_string()];
|
let mut system_columns = vec!["deleted".to_string()];
|
||||||
for fk in fk_columns {
|
for fk in fk_columns {
|
||||||
let base_name = fk.table_name.split('_').last().unwrap_or(&fk.table_name);
|
system_columns.push(format!("{}_id", fk.table_name));
|
||||||
system_columns.push(format!("{}_id", base_name));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Convert to HashSet for faster lookups
|
|
||||||
let system_columns_set: std::collections::HashSet<_> = system_columns.iter().map(|s| s.as_str()).collect();
|
let system_columns_set: std::collections::HashSet<_> = system_columns.iter().map(|s| s.as_str()).collect();
|
||||||
|
|
||||||
// Validate all data columns
|
|
||||||
let user_columns: Vec<&String> = columns.iter().map(|(name, _)| name).collect();
|
let user_columns: Vec<&String> = columns.iter().map(|(name, _)| name).collect();
|
||||||
for key in data.keys() {
|
for key in request.data.keys() {
|
||||||
if !system_columns_set.contains(key.as_str()) &&
|
if !system_columns_set.contains(key.as_str()) &&
|
||||||
!user_columns.contains(&&key.to_string()) {
|
!user_columns.contains(&&key.to_string()) {
|
||||||
return Err(Status::invalid_argument(format!("Invalid column: {}", key)));
|
return Err(Status::invalid_argument(format!("Invalid column: {}", key)));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Validate Steel scripts
|
let mut string_data_for_scripts = HashMap::new();
|
||||||
|
for (key, proto_value) in &request.data {
|
||||||
|
let str_val = match &proto_value.kind {
|
||||||
|
Some(Kind::StringValue(s)) => {
|
||||||
|
let trimmed = s.trim();
|
||||||
|
if trimmed.is_empty() {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
trimmed.to_string()
|
||||||
|
},
|
||||||
|
Some(Kind::NumberValue(n)) => n.to_string(),
|
||||||
|
Some(Kind::BoolValue(b)) => b.to_string(),
|
||||||
|
Some(Kind::NullValue(_)) | None => continue,
|
||||||
|
Some(Kind::StructValue(_)) | Some(Kind::ListValue(_)) => {
|
||||||
|
return Err(Status::invalid_argument(format!("Unsupported type for script validation in column '{}'", key)));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
string_data_for_scripts.insert(key.clone(), str_val);
|
||||||
|
}
|
||||||
|
|
||||||
let scripts = sqlx::query!(
|
let scripts = sqlx::query!(
|
||||||
"SELECT target_column, script FROM table_scripts WHERE table_definitions_id = $1",
|
"SELECT target_column, script FROM table_scripts WHERE table_definitions_id = $1",
|
||||||
table_def.id
|
table_def.id
|
||||||
@@ -108,21 +119,19 @@ pub async fn post_table_data(
|
|||||||
for script_record in scripts {
|
for script_record in scripts {
|
||||||
let target_column = script_record.target_column;
|
let target_column = script_record.target_column;
|
||||||
|
|
||||||
// Ensure target column exists in submitted data
|
let user_value = string_data_for_scripts.get(&target_column)
|
||||||
let user_value = data.get(&target_column)
|
|
||||||
.ok_or_else(|| Status::invalid_argument(
|
.ok_or_else(|| Status::invalid_argument(
|
||||||
format!("Script target column '{}' is required", target_column)
|
format!("Script target column '{}' is required", target_column)
|
||||||
))?;
|
))?;
|
||||||
|
|
||||||
// Create execution context
|
|
||||||
let context = SteelContext {
|
let context = SteelContext {
|
||||||
current_table: table_name.clone(), // Keep base name for scripts
|
current_table: table_name.clone(),
|
||||||
profile_id,
|
schema_id,
|
||||||
row_data: data.clone(),
|
schema_name: profile_name.clone(),
|
||||||
|
row_data: string_data_for_scripts.clone(),
|
||||||
db_pool: Arc::new(db_pool.clone()),
|
db_pool: Arc::new(db_pool.clone()),
|
||||||
};
|
};
|
||||||
|
|
||||||
// Execute validation script
|
|
||||||
let script_result = execution::execute_script(
|
let script_result = execution::execute_script(
|
||||||
script_record.script,
|
script_record.script,
|
||||||
"STRINGS",
|
"STRINGS",
|
||||||
@@ -133,7 +142,6 @@ pub async fn post_table_data(
|
|||||||
format!("Script execution failed for '{}': {}", target_column, e)
|
format!("Script execution failed for '{}': {}", target_column, e)
|
||||||
))?;
|
))?;
|
||||||
|
|
||||||
// Validate script output
|
|
||||||
let Value::Strings(mut script_output) = script_result else {
|
let Value::Strings(mut script_output) = script_result else {
|
||||||
return Err(Status::internal("Script must return string values"));
|
return Err(Status::internal("Script must return string values"));
|
||||||
};
|
};
|
||||||
@@ -149,17 +157,16 @@ pub async fn post_table_data(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Prepare SQL parameters
|
|
||||||
let mut params = PgArguments::default();
|
let mut params = PgArguments::default();
|
||||||
let mut columns_list = Vec::new();
|
let mut columns_list = Vec::new();
|
||||||
let mut placeholders = Vec::new();
|
let mut placeholders = Vec::new();
|
||||||
let mut param_idx = 1;
|
let mut param_idx = 1;
|
||||||
|
|
||||||
for (col, value) in data {
|
for (col, proto_value) in request.data {
|
||||||
let sql_type = if system_columns_set.contains(col.as_str()) {
|
let sql_type = if system_columns_set.contains(col.as_str()) {
|
||||||
match col.as_str() {
|
match col.as_str() {
|
||||||
"deleted" => "BOOLEAN",
|
"deleted" => "BOOLEAN",
|
||||||
_ if col.ends_with("_id") => "BIGINT", // Handle foreign keys
|
_ if col.ends_with("_id") => "BIGINT",
|
||||||
_ => return Err(Status::invalid_argument("Invalid system column")),
|
_ => return Err(Status::invalid_argument("Invalid system column")),
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
@@ -169,38 +176,122 @@ pub async fn post_table_data(
|
|||||||
.ok_or_else(|| Status::invalid_argument(format!("Column not found: {}", col)))?
|
.ok_or_else(|| Status::invalid_argument(format!("Column not found: {}", col)))?
|
||||||
};
|
};
|
||||||
|
|
||||||
match sql_type {
|
let kind = match &proto_value.kind {
|
||||||
"TEXT" | "VARCHAR(15)" | "VARCHAR(255)" => {
|
None | Some(Kind::NullValue(_)) => {
|
||||||
if let Some(max_len) = sql_type.strip_prefix("VARCHAR(")
|
match sql_type {
|
||||||
.and_then(|s| s.strip_suffix(')'))
|
"BOOLEAN" => params.add(None::<bool>),
|
||||||
.and_then(|s| s.parse::<usize>().ok())
|
"TEXT" => params.add(None::<String>),
|
||||||
{
|
"TIMESTAMPTZ" => params.add(None::<DateTime<Utc>>),
|
||||||
if value.len() > max_len {
|
"BIGINT" => params.add(None::<i64>),
|
||||||
|
"INTEGER" => params.add(None::<i32>),
|
||||||
|
s if s.starts_with("NUMERIC") => params.add(None::<Decimal>),
|
||||||
|
_ => return Err(Status::invalid_argument(format!("Unsupported type for null value: {}", sql_type))),
|
||||||
|
}.map_err(|e| Status::internal(format!("Failed to add null parameter for {}: {}", col, e)))?;
|
||||||
|
|
||||||
|
columns_list.push(format!("\"{}\"", col));
|
||||||
|
placeholders.push(format!("${}", param_idx));
|
||||||
|
param_idx += 1;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
Some(k) => k,
|
||||||
|
};
|
||||||
|
|
||||||
|
if sql_type == "TEXT" {
|
||||||
|
if let Kind::StringValue(value) = kind {
|
||||||
|
let trimmed_value = value.trim();
|
||||||
|
|
||||||
|
if trimmed_value.is_empty() {
|
||||||
|
params.add(None::<String>).map_err(|e| Status::internal(format!("Failed to add null parameter for {}: {}", col, e)))?;
|
||||||
|
} else {
|
||||||
|
if col == "telefon" && trimmed_value.len() > 15 {
|
||||||
return Err(Status::internal(format!("Value too long for {}", col)));
|
return Err(Status::internal(format!("Value too long for {}", col)));
|
||||||
}
|
}
|
||||||
|
params.add(trimmed_value).map_err(|e| Status::invalid_argument(format!("Failed to add text parameter for {}: {}", col, e)))?;
|
||||||
}
|
}
|
||||||
params.add(value)
|
} else {
|
||||||
.map_err(|e| Status::invalid_argument(format!("Failed to add text parameter for {}: {}", col, e)))?;
|
return Err(Status::invalid_argument(format!("Expected string for column '{}'", col)));
|
||||||
},
|
}
|
||||||
"BOOLEAN" => {
|
} else if sql_type == "BOOLEAN" {
|
||||||
let val = value.parse::<bool>()
|
if let Kind::BoolValue(val) = kind {
|
||||||
.map_err(|_| Status::invalid_argument(format!("Invalid boolean for {}", col)))?;
|
params.add(val).map_err(|e| Status::invalid_argument(format!("Failed to add boolean parameter for {}: {}", col, e)))?;
|
||||||
params.add(val)
|
} else {
|
||||||
.map_err(|e| Status::invalid_argument(format!("Failed to add boolean parameter for {}: {}", col, e)))?;
|
return Err(Status::invalid_argument(format!("Expected boolean for column '{}'", col)));
|
||||||
},
|
}
|
||||||
"TIMESTAMPTZ" => {
|
} else if sql_type == "TIMESTAMPTZ" {
|
||||||
let dt = DateTime::parse_from_rfc3339(&value)
|
if let Kind::StringValue(value) = kind {
|
||||||
.map_err(|_| Status::invalid_argument(format!("Invalid timestamp for {}", col)))?;
|
let dt = DateTime::parse_from_rfc3339(value).map_err(|_| Status::invalid_argument(format!("Invalid timestamp for {}", col)))?;
|
||||||
params.add(dt.with_timezone(&Utc))
|
params.add(dt.with_timezone(&Utc)).map_err(|e| Status::invalid_argument(format!("Failed to add timestamp parameter for {}: {}", col, e)))?;
|
||||||
.map_err(|e| Status::invalid_argument(format!("Failed to add timestamp parameter for {}: {}", col, e)))?;
|
} else {
|
||||||
},
|
return Err(Status::invalid_argument(format!("Expected ISO 8601 string for column '{}'", col)));
|
||||||
"BIGINT" => {
|
}
|
||||||
let val = value.parse::<i64>()
|
} else if sql_type == "BIGINT" {
|
||||||
.map_err(|_| Status::invalid_argument(format!("Invalid integer for {}", col)))?;
|
if let Kind::NumberValue(val) = kind {
|
||||||
params.add(val)
|
if val.fract() != 0.0 {
|
||||||
.map_err(|e| Status::invalid_argument(format!("Failed to add integer parameter for {}: {}", col, e)))?;
|
return Err(Status::invalid_argument(format!("Expected integer for column '{}', but got a float", col)));
|
||||||
},
|
}
|
||||||
_ => return Err(Status::invalid_argument(format!("Unsupported type {}", sql_type))),
|
|
||||||
|
// Simple universal check: try the conversion and verify it's reversible
|
||||||
|
// This handles ALL edge cases: infinity, NaN, overflow, underflow, precision loss
|
||||||
|
let as_i64 = *val as i64;
|
||||||
|
if (as_i64 as f64) != *val {
|
||||||
|
return Err(Status::invalid_argument(format!("Integer value out of range for BIGINT column '{}'", col)));
|
||||||
|
}
|
||||||
|
|
||||||
|
params.add(as_i64).map_err(|e| Status::invalid_argument(format!("Failed to add bigint parameter for {}: {}", col, e)))?;
|
||||||
|
} else {
|
||||||
|
return Err(Status::invalid_argument(format!("Expected number for column '{}'", col)));
|
||||||
|
}
|
||||||
|
} else if sql_type == "INTEGER" {
|
||||||
|
if let Kind::NumberValue(val) = kind {
|
||||||
|
if val.fract() != 0.0 {
|
||||||
|
return Err(Status::invalid_argument(format!("Expected integer for column '{}', but got a float", col)));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Simple universal check: try the conversion and verify it's reversible
|
||||||
|
// This handles ALL edge cases: infinity, NaN, overflow, underflow, precision loss
|
||||||
|
let as_i32 = *val as i32;
|
||||||
|
if (as_i32 as f64) != *val {
|
||||||
|
return Err(Status::invalid_argument(format!("Integer value out of range for INTEGER column '{}'", col)));
|
||||||
|
}
|
||||||
|
|
||||||
|
params.add(as_i32).map_err(|e| Status::invalid_argument(format!("Failed to add integer parameter for {}: {}", col, e)))?;
|
||||||
|
} else {
|
||||||
|
return Err(Status::invalid_argument(format!("Expected number for column '{}'", col)));
|
||||||
|
}
|
||||||
|
} else if sql_type.starts_with("NUMERIC") {
|
||||||
|
// MODIFIED: This block is now stricter.
|
||||||
|
let decimal_val = match kind {
|
||||||
|
Kind::StringValue(s) => {
|
||||||
|
let trimmed = s.trim();
|
||||||
|
if trimmed.is_empty() {
|
||||||
|
None // Treat empty string as NULL
|
||||||
|
} else {
|
||||||
|
// This is the only valid path: parse from a string.
|
||||||
|
Some(Decimal::from_str(trimmed).map_err(|_| {
|
||||||
|
Status::invalid_argument(format!(
|
||||||
|
"Invalid decimal string format for column '{}': {}",
|
||||||
|
col, s
|
||||||
|
))
|
||||||
|
})?)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// CATCH-ALL: Reject NumberValue, BoolValue, etc. for NUMERIC fields.
|
||||||
|
_ => {
|
||||||
|
return Err(Status::invalid_argument(format!(
|
||||||
|
"Expected a string representation for decimal column '{}', but received a different type.",
|
||||||
|
col
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
params.add(decimal_val).map_err(|e| {
|
||||||
|
Status::invalid_argument(format!(
|
||||||
|
"Failed to add decimal parameter for {}: {}",
|
||||||
|
col, e
|
||||||
|
))
|
||||||
|
})?;
|
||||||
|
} else {
|
||||||
|
return Err(Status::invalid_argument(format!("Unsupported type {}", sql_type)));
|
||||||
}
|
}
|
||||||
|
|
||||||
columns_list.push(format!("\"{}\"", col));
|
columns_list.push(format!("\"{}\"", col));
|
||||||
@@ -212,8 +303,12 @@ pub async fn post_table_data(
|
|||||||
return Err(Status::invalid_argument("No valid columns to insert"));
|
return Err(Status::invalid_argument("No valid columns to insert"));
|
||||||
}
|
}
|
||||||
|
|
||||||
// Qualify table name with schema
|
let qualified_table = crate::shared::schema_qualifier::qualify_table_name_for_data(
|
||||||
let qualified_table = qualify_table_name_for_data(&table_name)?;
|
db_pool,
|
||||||
|
&profile_name,
|
||||||
|
&table_name,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
let sql = format!(
|
let sql = format!(
|
||||||
"INSERT INTO {} ({}) VALUES ({}) RETURNING id",
|
"INSERT INTO {} ({}) VALUES ({}) RETURNING id",
|
||||||
@@ -222,7 +317,6 @@ pub async fn post_table_data(
|
|||||||
placeholders.join(", ")
|
placeholders.join(", ")
|
||||||
);
|
);
|
||||||
|
|
||||||
// Execute query with enhanced error handling
|
|
||||||
let result = sqlx::query_scalar_with::<_, i64, _>(&sql, params)
|
let result = sqlx::query_scalar_with::<_, i64, _>(&sql, params)
|
||||||
.fetch_one(db_pool)
|
.fetch_one(db_pool)
|
||||||
.await;
|
.await;
|
||||||
@@ -230,8 +324,13 @@ pub async fn post_table_data(
|
|||||||
let inserted_id = match result {
|
let inserted_id = match result {
|
||||||
Ok(id) => id,
|
Ok(id) => id,
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
// Handle "relation does not exist" error specifically
|
|
||||||
if let Some(db_err) = e.as_database_error() {
|
if let Some(db_err) = e.as_database_error() {
|
||||||
|
if db_err.code() == Some(std::borrow::Cow::Borrowed("22P02")) ||
|
||||||
|
db_err.code() == Some(std::borrow::Cow::Borrowed("22003")) {
|
||||||
|
return Err(Status::invalid_argument(format!(
|
||||||
|
"Numeric field overflow or invalid format. Check precision and scale. Details: {}", db_err.message()
|
||||||
|
)));
|
||||||
|
}
|
||||||
if db_err.code() == Some(std::borrow::Cow::Borrowed("42P01")) {
|
if db_err.code() == Some(std::borrow::Cow::Borrowed("42P01")) {
|
||||||
return Err(Status::internal(format!(
|
return Err(Status::internal(format!(
|
||||||
"Table '{}' is defined but does not physically exist in the database as {}",
|
"Table '{}' is defined but does not physically exist in the database as {}",
|
||||||
@@ -243,6 +342,18 @@ pub async fn post_table_data(
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
let command = IndexCommand::AddOrUpdate(IndexCommandData {
|
||||||
|
table_name: table_name.clone(),
|
||||||
|
row_id: inserted_id,
|
||||||
|
});
|
||||||
|
|
||||||
|
if let Err(e) = indexer_tx.send(command).await {
|
||||||
|
error!(
|
||||||
|
"CRITICAL: DB insert for table '{}' (id: {}) succeeded but failed to queue for indexing: {}. Search index is now inconsistent.",
|
||||||
|
table_name, inserted_id, e
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
Ok(PostTableDataResponse {
|
Ok(PostTableDataResponse {
|
||||||
success: true,
|
success: true,
|
||||||
message: "Data inserted successfully".into(),
|
message: "Data inserted successfully".into(),
|
||||||
|
|||||||
@@ -1,52 +1,56 @@
|
|||||||
// src/tables_data/handlers/put_table_data.rs
|
// src/tables_data/handlers/put_table_data.rs
|
||||||
|
|
||||||
use tonic::Status;
|
use tonic::Status;
|
||||||
use sqlx::{PgPool, Arguments, Postgres};
|
use sqlx::{PgPool, Arguments};
|
||||||
use sqlx::postgres::PgArguments;
|
use sqlx::postgres::PgArguments;
|
||||||
use chrono::{DateTime, Utc};
|
use chrono::{DateTime, Utc};
|
||||||
use common::proto::multieko2::tables_data::{PutTableDataRequest, PutTableDataResponse};
|
use common::proto::multieko2::tables_data::{PutTableDataRequest, PutTableDataResponse};
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use crate::shared::schema_qualifier::qualify_table_name_for_data; // Import schema qualifier
|
use std::sync::Arc;
|
||||||
|
use prost_types::value::Kind;
|
||||||
|
use rust_decimal::Decimal;
|
||||||
|
use std::str::FromStr;
|
||||||
|
|
||||||
|
use crate::steel::server::execution::{self, Value};
|
||||||
|
use crate::steel::server::functions::SteelContext;
|
||||||
|
use crate::indexer::{IndexCommand, IndexCommandData};
|
||||||
|
use tokio::sync::mpsc;
|
||||||
|
use tracing::error;
|
||||||
|
|
||||||
pub async fn put_table_data(
|
pub async fn put_table_data(
|
||||||
db_pool: &PgPool,
|
db_pool: &PgPool,
|
||||||
request: PutTableDataRequest,
|
request: PutTableDataRequest,
|
||||||
|
indexer_tx: &mpsc::Sender<IndexCommand>,
|
||||||
) -> Result<PutTableDataResponse, Status> {
|
) -> Result<PutTableDataResponse, Status> {
|
||||||
let profile_name = request.profile_name;
|
let profile_name = request.profile_name;
|
||||||
let table_name = request.table_name;
|
let table_name = request.table_name;
|
||||||
let record_id = request.id;
|
let record_id = request.id;
|
||||||
|
|
||||||
// Preprocess and validate data
|
// An update with no fields is a no-op; we can return success early.
|
||||||
let mut processed_data = HashMap::new();
|
if request.data.is_empty() {
|
||||||
let mut null_fields = Vec::new();
|
return Ok(PutTableDataResponse {
|
||||||
|
success: true,
|
||||||
// CORRECTED: Generic handling for all fields.
|
message: "No fields to update.".into(),
|
||||||
// Any field with an empty string will be added to the null_fields list.
|
updated_id: record_id,
|
||||||
// The special, hardcoded logic for "firma" has been removed.
|
});
|
||||||
for (key, value) in request.data {
|
|
||||||
let trimmed = value.trim().to_string();
|
|
||||||
if trimmed.is_empty() {
|
|
||||||
null_fields.push(key);
|
|
||||||
} else {
|
|
||||||
processed_data.insert(key, trimmed);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Lookup profile
|
// --- Start of logic copied and adapted from post_table_data ---
|
||||||
let profile = sqlx::query!(
|
|
||||||
"SELECT id FROM profiles WHERE name = $1",
|
let schema = sqlx::query!(
|
||||||
|
"SELECT id FROM schemas WHERE name = $1",
|
||||||
profile_name
|
profile_name
|
||||||
)
|
)
|
||||||
.fetch_optional(db_pool)
|
.fetch_optional(db_pool)
|
||||||
.await
|
.await
|
||||||
.map_err(|e| Status::internal(format!("Profile lookup error: {}", e)))?;
|
.map_err(|e| Status::internal(format!("Profile lookup error: {}", e)))?;
|
||||||
|
|
||||||
let profile_id = profile.ok_or_else(|| Status::not_found("Profile not found"))?.id;
|
let schema_id = schema.ok_or_else(|| Status::not_found("Profile not found"))?.id;
|
||||||
|
|
||||||
// Lookup table_definition
|
|
||||||
let table_def = sqlx::query!(
|
let table_def = sqlx::query!(
|
||||||
r#"SELECT id, columns FROM table_definitions
|
r#"SELECT id, columns FROM table_definitions
|
||||||
WHERE profile_id = $1 AND table_name = $2"#,
|
WHERE schema_id = $1 AND table_name = $2"#,
|
||||||
profile_id,
|
schema_id,
|
||||||
table_name
|
table_name
|
||||||
)
|
)
|
||||||
.fetch_optional(db_pool)
|
.fetch_optional(db_pool)
|
||||||
@@ -55,7 +59,6 @@ pub async fn put_table_data(
|
|||||||
|
|
||||||
let table_def = table_def.ok_or_else(|| Status::not_found("Table not found"))?;
|
let table_def = table_def.ok_or_else(|| Status::not_found("Table not found"))?;
|
||||||
|
|
||||||
// Parse columns from JSON
|
|
||||||
let columns_json: Vec<String> = serde_json::from_value(table_def.columns.clone())
|
let columns_json: Vec<String> = serde_json::from_value(table_def.columns.clone())
|
||||||
.map_err(|e| Status::internal(format!("Column parsing error: {}", e)))?;
|
.map_err(|e| Status::internal(format!("Column parsing error: {}", e)))?;
|
||||||
|
|
||||||
@@ -70,130 +73,287 @@ pub async fn put_table_data(
|
|||||||
columns.push((name, sql_type));
|
columns.push((name, sql_type));
|
||||||
}
|
}
|
||||||
|
|
||||||
// CORRECTED: "firma" is not a system column.
|
let fk_columns = sqlx::query!(
|
||||||
// It should be treated as a user-defined column.
|
r#"SELECT ltd.table_name
|
||||||
let system_columns = ["deleted"];
|
FROM table_definition_links tdl
|
||||||
let user_columns: Vec<&String> = columns.iter().map(|(name, _)| name).collect();
|
JOIN table_definitions ltd ON tdl.linked_table_id = ltd.id
|
||||||
|
WHERE tdl.source_table_id = $1"#,
|
||||||
|
table_def.id
|
||||||
|
)
|
||||||
|
.fetch_all(db_pool)
|
||||||
|
.await
|
||||||
|
.map_err(|e| Status::internal(format!("Foreign key lookup error: {}", e)))?;
|
||||||
|
|
||||||
// Validate input columns
|
let mut system_columns = vec!["deleted".to_string()];
|
||||||
for key in processed_data.keys() {
|
for fk in fk_columns {
|
||||||
if !system_columns.contains(&key.as_str()) && !user_columns.contains(&key) {
|
system_columns.push(format!("{}_id", fk.table_name));
|
||||||
|
}
|
||||||
|
|
||||||
|
let system_columns_set: std::collections::HashSet<_> = system_columns.iter().map(|s| s.as_str()).collect();
|
||||||
|
|
||||||
|
let user_columns: Vec<&String> = columns.iter().map(|(name, _)| name).collect();
|
||||||
|
for key in request.data.keys() {
|
||||||
|
if !system_columns_set.contains(key.as_str()) &&
|
||||||
|
!user_columns.contains(&&key.to_string()) {
|
||||||
return Err(Status::invalid_argument(format!("Invalid column: {}", key)));
|
return Err(Status::invalid_argument(format!("Invalid column: {}", key)));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Prepare SQL parameters
|
let mut string_data_for_scripts = HashMap::new();
|
||||||
|
for (key, proto_value) in &request.data {
|
||||||
|
let str_val = match &proto_value.kind {
|
||||||
|
Some(Kind::StringValue(s)) => {
|
||||||
|
let trimmed = s.trim();
|
||||||
|
if trimmed.is_empty() {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
trimmed.to_string()
|
||||||
|
},
|
||||||
|
Some(Kind::NumberValue(n)) => n.to_string(),
|
||||||
|
Some(Kind::BoolValue(b)) => b.to_string(),
|
||||||
|
Some(Kind::NullValue(_)) | None => continue,
|
||||||
|
Some(Kind::StructValue(_)) | Some(Kind::ListValue(_)) => {
|
||||||
|
return Err(Status::invalid_argument(format!("Unsupported type for script validation in column '{}'", key)));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
string_data_for_scripts.insert(key.clone(), str_val);
|
||||||
|
}
|
||||||
|
|
||||||
|
let scripts = sqlx::query!(
|
||||||
|
"SELECT target_column, script FROM table_scripts WHERE table_definitions_id = $1",
|
||||||
|
table_def.id
|
||||||
|
)
|
||||||
|
.fetch_all(db_pool)
|
||||||
|
.await
|
||||||
|
.map_err(|e| Status::internal(format!("Failed to fetch scripts: {}", e)))?;
|
||||||
|
|
||||||
|
for script_record in scripts {
|
||||||
|
let target_column = script_record.target_column;
|
||||||
|
|
||||||
|
if let Some(user_value) = string_data_for_scripts.get(&target_column) {
|
||||||
|
let context = SteelContext {
|
||||||
|
current_table: table_name.clone(),
|
||||||
|
schema_id,
|
||||||
|
schema_name: profile_name.clone(),
|
||||||
|
row_data: string_data_for_scripts.clone(),
|
||||||
|
db_pool: Arc::new(db_pool.clone()),
|
||||||
|
};
|
||||||
|
|
||||||
|
let script_result = execution::execute_script(
|
||||||
|
script_record.script,
|
||||||
|
"STRINGS",
|
||||||
|
Arc::new(db_pool.clone()),
|
||||||
|
context,
|
||||||
|
)
|
||||||
|
.map_err(|e| Status::invalid_argument(
|
||||||
|
format!("Script execution failed for '{}': {}", target_column, e)
|
||||||
|
))?;
|
||||||
|
|
||||||
|
let Value::Strings(mut script_output) = script_result else {
|
||||||
|
return Err(Status::internal("Script must return string values"));
|
||||||
|
};
|
||||||
|
|
||||||
|
let expected_value = script_output.pop()
|
||||||
|
.ok_or_else(|| Status::internal("Script returned no values"))?;
|
||||||
|
|
||||||
|
if user_value != &expected_value {
|
||||||
|
return Err(Status::invalid_argument(format!(
|
||||||
|
"Validation failed for column '{}': Expected '{}', Got '{}'",
|
||||||
|
target_column, expected_value, user_value
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
let mut params = PgArguments::default();
|
let mut params = PgArguments::default();
|
||||||
let mut set_clauses = Vec::new();
|
let mut set_clauses = Vec::new();
|
||||||
let mut param_idx = 1;
|
let mut param_idx = 1;
|
||||||
|
|
||||||
// Add data parameters for non-empty fields
|
for (col, proto_value) in request.data {
|
||||||
for (col, value) in &processed_data {
|
let sql_type = if system_columns_set.contains(col.as_str()) {
|
||||||
// CORRECTED: The logic for "firma" is removed from this match.
|
|
||||||
// It will now fall through to the `else` block and have its type
|
|
||||||
// correctly looked up from the `columns` vector.
|
|
||||||
let sql_type = if system_columns.contains(&col.as_str()) {
|
|
||||||
match col.as_str() {
|
match col.as_str() {
|
||||||
"deleted" => "BOOLEAN",
|
"deleted" => "BOOLEAN",
|
||||||
|
_ if col.ends_with("_id") => "BIGINT",
|
||||||
_ => return Err(Status::invalid_argument("Invalid system column")),
|
_ => return Err(Status::invalid_argument("Invalid system column")),
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
columns.iter()
|
columns.iter()
|
||||||
.find(|(name, _)| name == col)
|
.find(|(name, _)| name == &col)
|
||||||
.map(|(_, sql_type)| sql_type.as_str())
|
.map(|(_, sql_type)| sql_type.as_str())
|
||||||
.ok_or_else(|| Status::invalid_argument(format!("Column not found: {}", col)))?
|
.ok_or_else(|| Status::invalid_argument(format!("Column not found: {}", col)))?
|
||||||
};
|
};
|
||||||
|
|
||||||
match sql_type {
|
let kind = match &proto_value.kind {
|
||||||
"TEXT" | "VARCHAR(15)" | "VARCHAR(255)" => {
|
None | Some(Kind::NullValue(_)) => {
|
||||||
if let Some(max_len) = sql_type.strip_prefix("VARCHAR(")
|
match sql_type {
|
||||||
.and_then(|s| s.strip_suffix(')'))
|
"BOOLEAN" => params.add(None::<bool>),
|
||||||
.and_then(|s| s.parse::<usize>().ok())
|
"TEXT" => params.add(None::<String>),
|
||||||
{
|
"TIMESTAMPTZ" => params.add(None::<DateTime<Utc>>),
|
||||||
if value.len() > max_len {
|
"BIGINT" => params.add(None::<i64>),
|
||||||
|
"INTEGER" => params.add(None::<i32>),
|
||||||
|
s if s.starts_with("NUMERIC") => params.add(None::<Decimal>),
|
||||||
|
_ => return Err(Status::invalid_argument(format!("Unsupported type for null value: {}", sql_type))),
|
||||||
|
}.map_err(|e| Status::internal(format!("Failed to add null parameter for {}: {}", col, e)))?;
|
||||||
|
|
||||||
|
set_clauses.push(format!("\"{}\" = ${}", col, param_idx));
|
||||||
|
param_idx += 1;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
Some(k) => k,
|
||||||
|
};
|
||||||
|
|
||||||
|
if sql_type == "TEXT" {
|
||||||
|
if let Kind::StringValue(value) = kind {
|
||||||
|
let trimmed_value = value.trim();
|
||||||
|
|
||||||
|
if trimmed_value.is_empty() {
|
||||||
|
params.add(None::<String>).map_err(|e| Status::internal(format!("Failed to add null parameter for {}: {}", col, e)))?;
|
||||||
|
} else {
|
||||||
|
if col == "telefon" && trimmed_value.len() > 15 {
|
||||||
return Err(Status::internal(format!("Value too long for {}", col)));
|
return Err(Status::internal(format!("Value too long for {}", col)));
|
||||||
}
|
}
|
||||||
|
params.add(trimmed_value).map_err(|e| Status::invalid_argument(format!("Failed to add text parameter for {}: {}", col, e)))?;
|
||||||
}
|
}
|
||||||
params.add(value)
|
} else {
|
||||||
.map_err(|e| Status::internal(format!("Failed to add text parameter for {}: {}", col, e)))?;
|
return Err(Status::invalid_argument(format!("Expected string for column '{}'", col)));
|
||||||
},
|
}
|
||||||
"BOOLEAN" => {
|
} else if sql_type == "BOOLEAN" {
|
||||||
let val = value.parse::<bool>()
|
if let Kind::BoolValue(val) = kind {
|
||||||
.map_err(|_| Status::invalid_argument(format!("Invalid boolean for {}", col)))?;
|
params.add(val).map_err(|e| Status::invalid_argument(format!("Failed to add boolean parameter for {}: {}", col, e)))?;
|
||||||
params.add(val)
|
} else {
|
||||||
.map_err(|e| Status::internal(format!("Failed to add boolean parameter for {}: {}", col, e)))?;
|
return Err(Status::invalid_argument(format!("Expected boolean for column '{}'", col)));
|
||||||
},
|
}
|
||||||
"TIMESTAMPTZ" => {
|
} else if sql_type == "TIMESTAMPTZ" {
|
||||||
let dt = DateTime::parse_from_rfc3339(value)
|
if let Kind::StringValue(value) = kind {
|
||||||
.map_err(|_| Status::invalid_argument(format!("Invalid timestamp for {}", col)))?;
|
let dt = DateTime::parse_from_rfc3339(value).map_err(|_| Status::invalid_argument(format!("Invalid timestamp for {}", col)))?;
|
||||||
params.add(dt.with_timezone(&Utc))
|
params.add(dt.with_timezone(&Utc)).map_err(|e| Status::invalid_argument(format!("Failed to add timestamp parameter for {}: {}", col, e)))?;
|
||||||
.map_err(|e| Status::internal(format!("Failed to add timestamp parameter for {}: {}", col, e)))?;
|
} else {
|
||||||
},
|
return Err(Status::invalid_argument(format!("Expected ISO 8601 string for column '{}'", col)));
|
||||||
// ADDED: BIGINT handling for completeness, if needed for other columns.
|
}
|
||||||
"BIGINT" => {
|
} else if sql_type == "BIGINT" {
|
||||||
let val = value.parse::<i64>()
|
if let Kind::NumberValue(val) = kind {
|
||||||
.map_err(|_| Status::invalid_argument(format!("Invalid integer for {}", col)))?;
|
if val.fract() != 0.0 {
|
||||||
params.add(val)
|
return Err(Status::invalid_argument(format!("Expected integer for column '{}', but got a float", col)));
|
||||||
.map_err(|e| Status::internal(format!("Failed to add integer parameter for {}: {}", col, e)))?;
|
}
|
||||||
},
|
let as_i64 = *val as i64;
|
||||||
_ => return Err(Status::invalid_argument(format!("Unsupported type {}", sql_type))),
|
if (as_i64 as f64) != *val {
|
||||||
|
return Err(Status::invalid_argument(format!("Integer value out of range for BIGINT column '{}'", col)));
|
||||||
|
}
|
||||||
|
params.add(as_i64).map_err(|e| Status::invalid_argument(format!("Failed to add bigint parameter for {}: {}", col, e)))?;
|
||||||
|
} else {
|
||||||
|
return Err(Status::invalid_argument(format!("Expected number for column '{}'", col)));
|
||||||
|
}
|
||||||
|
} else if sql_type == "INTEGER" {
|
||||||
|
if let Kind::NumberValue(val) = kind {
|
||||||
|
if val.fract() != 0.0 {
|
||||||
|
return Err(Status::invalid_argument(format!("Expected integer for column '{}', but got a float", col)));
|
||||||
|
}
|
||||||
|
let as_i32 = *val as i32;
|
||||||
|
if (as_i32 as f64) != *val {
|
||||||
|
return Err(Status::invalid_argument(format!("Integer value out of range for INTEGER column '{}'", col)));
|
||||||
|
}
|
||||||
|
params.add(as_i32).map_err(|e| Status::invalid_argument(format!("Failed to add integer parameter for {}: {}", col, e)))?;
|
||||||
|
} else {
|
||||||
|
return Err(Status::invalid_argument(format!("Expected number for column '{}'", col)));
|
||||||
|
}
|
||||||
|
} else if sql_type.starts_with("NUMERIC") {
|
||||||
|
let decimal_val = match kind {
|
||||||
|
Kind::StringValue(s) => {
|
||||||
|
let trimmed = s.trim();
|
||||||
|
if trimmed.is_empty() {
|
||||||
|
None
|
||||||
|
} else {
|
||||||
|
Some(Decimal::from_str(trimmed).map_err(|_| {
|
||||||
|
Status::invalid_argument(format!(
|
||||||
|
"Invalid decimal string format for column '{}': {}",
|
||||||
|
col, s
|
||||||
|
))
|
||||||
|
})?)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
return Err(Status::invalid_argument(format!(
|
||||||
|
"Expected a string representation for decimal column '{}', but received a different type.",
|
||||||
|
col
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
params.add(decimal_val).map_err(|e| {
|
||||||
|
Status::invalid_argument(format!(
|
||||||
|
"Failed to add decimal parameter for {}: {}",
|
||||||
|
col, e
|
||||||
|
))
|
||||||
|
})?;
|
||||||
|
} else {
|
||||||
|
return Err(Status::invalid_argument(format!("Unsupported type {}", sql_type)));
|
||||||
}
|
}
|
||||||
|
|
||||||
set_clauses.push(format!("\"{}\" = ${}", col, param_idx));
|
set_clauses.push(format!("\"{}\" = ${}", col, param_idx));
|
||||||
param_idx += 1;
|
param_idx += 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Add NULL clauses for empty fields
|
// --- End of copied logic ---
|
||||||
for field in null_fields {
|
|
||||||
// Make sure the field is valid
|
|
||||||
if !system_columns.contains(&field.as_str()) && !user_columns.contains(&&field) {
|
|
||||||
return Err(Status::invalid_argument(format!("Invalid column to set NULL: {}", field)));
|
|
||||||
}
|
|
||||||
set_clauses.push(format!("\"{}\" = NULL", field));
|
|
||||||
}
|
|
||||||
|
|
||||||
// Ensure we have at least one field to update
|
|
||||||
if set_clauses.is_empty() {
|
if set_clauses.is_empty() {
|
||||||
return Err(Status::invalid_argument("No valid fields to update"));
|
return Ok(PutTableDataResponse {
|
||||||
|
success: true,
|
||||||
|
message: "No valid fields to update after processing.".into(),
|
||||||
|
updated_id: record_id,
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
// Add ID parameter at the end
|
let qualified_table = crate::shared::schema_qualifier::qualify_table_name_for_data(
|
||||||
params.add(record_id)
|
db_pool,
|
||||||
.map_err(|e| Status::internal(format!("Failed to add record_id parameter: {}", e)))?;
|
&profile_name,
|
||||||
|
&table_name,
|
||||||
// Qualify table name with schema
|
)
|
||||||
let qualified_table = qualify_table_name_for_data(&table_name)?;
|
.await?;
|
||||||
|
|
||||||
let set_clause = set_clauses.join(", ");
|
let set_clause = set_clauses.join(", ");
|
||||||
let sql = format!(
|
let sql = format!(
|
||||||
"UPDATE {} SET {} WHERE id = ${} AND deleted = FALSE RETURNING id",
|
"UPDATE {} SET {} WHERE id = ${} RETURNING id",
|
||||||
qualified_table,
|
qualified_table,
|
||||||
set_clause,
|
set_clause,
|
||||||
param_idx
|
param_idx
|
||||||
);
|
);
|
||||||
|
|
||||||
let result = sqlx::query_scalar_with::<Postgres, i64, _>(&sql, params)
|
params.add(record_id).map_err(|e| Status::internal(format!("Failed to add record_id parameter: {}", e)))?;
|
||||||
|
|
||||||
|
let result = sqlx::query_scalar_with::<_, i64, _>(&sql, params)
|
||||||
.fetch_optional(db_pool)
|
.fetch_optional(db_pool)
|
||||||
.await;
|
.await;
|
||||||
|
|
||||||
match result {
|
let updated_id = match result {
|
||||||
Ok(Some(updated_id)) => Ok(PutTableDataResponse {
|
Ok(Some(id)) => id,
|
||||||
success: true,
|
Ok(None) => return Err(Status::not_found("Record not found")),
|
||||||
message: "Data updated successfully".into(),
|
|
||||||
updated_id,
|
|
||||||
}),
|
|
||||||
Ok(None) => Err(Status::not_found("Record not found or already deleted")),
|
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
// Handle "relation does not exist" error specifically
|
|
||||||
if let Some(db_err) = e.as_database_error() {
|
if let Some(db_err) = e.as_database_error() {
|
||||||
if db_err.code() == Some(std::borrow::Cow::Borrowed("42P01")) {
|
if db_err.code() == Some(std::borrow::Cow::Borrowed("22P02")) ||
|
||||||
return Err(Status::internal(format!(
|
db_err.code() == Some(std::borrow::Cow::Borrowed("22003")) {
|
||||||
"Table '{}' is defined but does not physically exist in the database as {}",
|
return Err(Status::invalid_argument(format!(
|
||||||
table_name, qualified_table
|
"Numeric field overflow or invalid format. Check precision and scale. Details: {}", db_err.message()
|
||||||
)));
|
)));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Err(Status::internal(format!("Update failed: {}", e)))
|
return Err(Status::internal(format!("Update failed: {}", e)));
|
||||||
}
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let command = IndexCommand::AddOrUpdate(IndexCommandData {
|
||||||
|
table_name: table_name.clone(),
|
||||||
|
row_id: updated_id,
|
||||||
|
});
|
||||||
|
|
||||||
|
if let Err(e) = indexer_tx.send(command).await {
|
||||||
|
error!(
|
||||||
|
"CRITICAL: DB update for table '{}' (id: {}) succeeded but failed to queue for indexing: {}. Search index is now inconsistent.",
|
||||||
|
table_name, updated_id, e
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
Ok(PutTableDataResponse {
|
||||||
|
success: true,
|
||||||
|
message: "Data updated successfully".into(),
|
||||||
|
updated_id,
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,58 +0,0 @@
|
|||||||
POST
|
|
||||||
❯ grpcurl -plaintext -d '{
|
|
||||||
"adresar_id": 1,
|
|
||||||
"c_dokladu": "DOC123",
|
|
||||||
"datum": "01:10:2023",
|
|
||||||
"c_faktury": "INV123",
|
|
||||||
"obsah": "Sample content",
|
|
||||||
"stredisko": "Center A",
|
|
||||||
"c_uctu": "ACC123",
|
|
||||||
"md": "MD123",
|
|
||||||
"identif": "ID123",
|
|
||||||
"poznanka": "Sample note",
|
|
||||||
"firma": "AAA"
|
|
||||||
}' localhost:50051 multieko2.uctovnictvo.Uctovnictvo/PostUctovnictvo
|
|
||||||
{
|
|
||||||
"id": "3",
|
|
||||||
"adresarId": "1",
|
|
||||||
"cDokladu": "DOC123",
|
|
||||||
"datum": "2023-10-01",
|
|
||||||
"cFaktury": "INV123",
|
|
||||||
"obsah": "Sample content",
|
|
||||||
"stredisko": "Center A",
|
|
||||||
"cUctu": "ACC123",
|
|
||||||
"md": "MD123",
|
|
||||||
"identif": "ID123",
|
|
||||||
"poznanka": "Sample note",
|
|
||||||
"firma": "AAA"
|
|
||||||
}
|
|
||||||
|
|
||||||
PUT
|
|
||||||
❯ grpcurl -plaintext -d '{
|
|
||||||
"id": '1',
|
|
||||||
"adresar_id": 1,
|
|
||||||
"c_dokladu": "UPDATED-DOC",
|
|
||||||
"datum": "15.11.2023",
|
|
||||||
"c_faktury": "UPDATED-INV",
|
|
||||||
"obsah": "Updated content",
|
|
||||||
"stredisko": "Updated Center",
|
|
||||||
"c_uctu": "UPD-ACC",
|
|
||||||
"md": "UPD-MD",
|
|
||||||
"identif": "UPD-ID",
|
|
||||||
"poznanka": "Updated note",
|
|
||||||
"firma": "UPD"
|
|
||||||
}' localhost:50051 multieko2.uctovnictvo.Uctovnictvo/PutUctovnictvo
|
|
||||||
{
|
|
||||||
"id": "1",
|
|
||||||
"adresarId": "1",
|
|
||||||
"cDokladu": "UPDATED-DOC",
|
|
||||||
"datum": "15.11.2023",
|
|
||||||
"cFaktury": "UPDATED-INV",
|
|
||||||
"obsah": "Updated content",
|
|
||||||
"stredisko": "Updated Center",
|
|
||||||
"cUctu": "UPD-ACC",
|
|
||||||
"md": "UPD-MD",
|
|
||||||
"identif": "UPD-ID",
|
|
||||||
"poznanka": "Updated note",
|
|
||||||
"firma": "UPD"
|
|
||||||
}
|
|
||||||
@@ -1,41 +0,0 @@
|
|||||||
❯ grpcurl -plaintext -d '{}' localhost:50051 multieko2.uctovnictvo.Uctovnictvo/GetUctovnictvoCount
|
|
||||||
|
|
||||||
{
|
|
||||||
"count": "4"
|
|
||||||
}
|
|
||||||
❯ grpcurl -plaintext -d '{
|
|
||||||
"position": 2
|
|
||||||
}' localhost:50051 multieko2.uctovnictvo.Uctovnictvo/GetUctovnictvoByPosition
|
|
||||||
|
|
||||||
{
|
|
||||||
"id": "2",
|
|
||||||
"adresarId": "1",
|
|
||||||
"cDokladu": "DOC123",
|
|
||||||
"datum": "01.10.2023",
|
|
||||||
"cFaktury": "INV123",
|
|
||||||
"obsah": "Sample content",
|
|
||||||
"stredisko": "Center A",
|
|
||||||
"cUctu": "ACC123",
|
|
||||||
"md": "MD123",
|
|
||||||
"identif": "ID123",
|
|
||||||
"poznanka": "Sample note",
|
|
||||||
"firma": "AAA"
|
|
||||||
}
|
|
||||||
❯ grpcurl -plaintext -d '{
|
|
||||||
"id": 1
|
|
||||||
}' localhost:50051 multieko2.uctovnictvo.Uctovnictvo/GetUctovnictvo
|
|
||||||
{
|
|
||||||
"id": "1",
|
|
||||||
"adresarId": "1",
|
|
||||||
"cDokladu": "DOC123",
|
|
||||||
"datum": "01.10.2023",
|
|
||||||
"cFaktury": "INV123",
|
|
||||||
"obsah": "Sample content",
|
|
||||||
"stredisko": "Center A",
|
|
||||||
"cUctu": "ACC123",
|
|
||||||
"md": "MD123",
|
|
||||||
"identif": "ID123",
|
|
||||||
"poznanka": "Sample note",
|
|
||||||
"firma": "AAA"
|
|
||||||
}
|
|
||||||
|
|
||||||
@@ -1,12 +0,0 @@
|
|||||||
// src/uctovnictvo/handlers.rs
|
|
||||||
pub mod post_uctovnictvo;
|
|
||||||
pub mod get_uctovnictvo;
|
|
||||||
pub mod get_uctovnictvo_count;
|
|
||||||
pub mod get_uctovnictvo_by_position;
|
|
||||||
pub mod put_uctovnictvo;
|
|
||||||
|
|
||||||
pub use post_uctovnictvo::post_uctovnictvo;
|
|
||||||
pub use get_uctovnictvo::get_uctovnictvo;
|
|
||||||
pub use get_uctovnictvo_count::get_uctovnictvo_count;
|
|
||||||
pub use get_uctovnictvo_by_position::get_uctovnictvo_by_position;
|
|
||||||
pub use put_uctovnictvo::put_uctovnictvo;
|
|
||||||
@@ -1,51 +0,0 @@
|
|||||||
// src/uctovnictvo/handlers/get_uctovnictvo.rs
|
|
||||||
use tonic::Status;
|
|
||||||
use sqlx::PgPool;
|
|
||||||
use crate::uctovnictvo::models::Uctovnictvo;
|
|
||||||
use common::proto::multieko2::uctovnictvo::{GetUctovnictvoRequest, UctovnictvoResponse};
|
|
||||||
|
|
||||||
pub async fn get_uctovnictvo(
|
|
||||||
db_pool: &PgPool,
|
|
||||||
request: GetUctovnictvoRequest,
|
|
||||||
) -> Result<UctovnictvoResponse, Status> {
|
|
||||||
let uctovnictvo = sqlx::query_as!(
|
|
||||||
Uctovnictvo,
|
|
||||||
r#"
|
|
||||||
SELECT
|
|
||||||
id,
|
|
||||||
deleted,
|
|
||||||
adresar_id,
|
|
||||||
c_dokladu,
|
|
||||||
datum as "datum: chrono::NaiveDate",
|
|
||||||
c_faktury,
|
|
||||||
obsah,
|
|
||||||
stredisko,
|
|
||||||
c_uctu,
|
|
||||||
md,
|
|
||||||
identif,
|
|
||||||
poznanka,
|
|
||||||
firma
|
|
||||||
FROM uctovnictvo
|
|
||||||
WHERE id = $1
|
|
||||||
"#,
|
|
||||||
request.id
|
|
||||||
)
|
|
||||||
.fetch_one(db_pool)
|
|
||||||
.await
|
|
||||||
.map_err(|e| Status::not_found(e.to_string()))?;
|
|
||||||
|
|
||||||
Ok(UctovnictvoResponse {
|
|
||||||
id: uctovnictvo.id,
|
|
||||||
adresar_id: uctovnictvo.adresar_id,
|
|
||||||
c_dokladu: uctovnictvo.c_dokladu,
|
|
||||||
datum: uctovnictvo.datum.format("%d.%m.%Y").to_string(),
|
|
||||||
c_faktury: uctovnictvo.c_faktury,
|
|
||||||
obsah: uctovnictvo.obsah.unwrap_or_default(),
|
|
||||||
stredisko: uctovnictvo.stredisko.unwrap_or_default(),
|
|
||||||
c_uctu: uctovnictvo.c_uctu.unwrap_or_default(),
|
|
||||||
md: uctovnictvo.md.unwrap_or_default(),
|
|
||||||
identif: uctovnictvo.identif.unwrap_or_default(),
|
|
||||||
poznanka: uctovnictvo.poznanka.unwrap_or_default(),
|
|
||||||
firma: uctovnictvo.firma,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
@@ -1,34 +0,0 @@
|
|||||||
// src/uctovnictvo/handlers/get_uctovnictvo_by_position.rs
|
|
||||||
use tonic::Status;
|
|
||||||
use sqlx::PgPool;
|
|
||||||
use common::proto::multieko2::common::PositionRequest;
|
|
||||||
use super::get_uctovnictvo;
|
|
||||||
|
|
||||||
pub async fn get_uctovnictvo_by_position(
|
|
||||||
db_pool: &PgPool,
|
|
||||||
request: PositionRequest,
|
|
||||||
) -> Result<common::proto::multieko2::uctovnictvo::UctovnictvoResponse, Status> {
|
|
||||||
if request.position < 1 {
|
|
||||||
return Err(Status::invalid_argument("Position must be at least 1"));
|
|
||||||
}
|
|
||||||
|
|
||||||
// Find the ID of the Nth non-deleted record
|
|
||||||
let id: i64 = sqlx::query_scalar!(
|
|
||||||
r#"
|
|
||||||
SELECT id
|
|
||||||
FROM uctovnictvo
|
|
||||||
WHERE deleted = FALSE
|
|
||||||
ORDER BY id ASC
|
|
||||||
OFFSET $1
|
|
||||||
LIMIT 1
|
|
||||||
"#,
|
|
||||||
request.position - 1
|
|
||||||
)
|
|
||||||
.fetch_optional(db_pool)
|
|
||||||
.await
|
|
||||||
.map_err(|e| Status::internal(e.to_string()))?
|
|
||||||
.ok_or_else(|| Status::not_found("Position out of bounds"))?;
|
|
||||||
|
|
||||||
// Now fetch the complete record using the existing get_uctovnictvo function
|
|
||||||
get_uctovnictvo(db_pool, common::proto::multieko2::uctovnictvo::GetUctovnictvoRequest { id }).await
|
|
||||||
}
|
|
||||||
@@ -1,23 +0,0 @@
|
|||||||
// src/uctovnictvo/handlers/get_uctovnictvo_count.rs
|
|
||||||
use tonic::Status;
|
|
||||||
use sqlx::PgPool;
|
|
||||||
use common::proto::multieko2::common::{CountResponse, Empty};
|
|
||||||
|
|
||||||
pub async fn get_uctovnictvo_count(
|
|
||||||
db_pool: &PgPool,
|
|
||||||
_request: Empty,
|
|
||||||
) -> Result<CountResponse, Status> {
|
|
||||||
let count: i64 = sqlx::query_scalar!(
|
|
||||||
r#"
|
|
||||||
SELECT COUNT(*) AS count
|
|
||||||
FROM uctovnictvo
|
|
||||||
WHERE deleted = FALSE
|
|
||||||
"#
|
|
||||||
)
|
|
||||||
.fetch_one(db_pool)
|
|
||||||
.await
|
|
||||||
.map_err(|e| Status::internal(e.to_string()))?
|
|
||||||
.unwrap_or(0);
|
|
||||||
|
|
||||||
Ok(CountResponse { count })
|
|
||||||
}
|
|
||||||
@@ -1,73 +0,0 @@
|
|||||||
// src/uctovnictvo/handlers/post_uctovnictvo.rs
|
|
||||||
use tonic::Status;
|
|
||||||
use sqlx::PgPool;
|
|
||||||
use crate::uctovnictvo::models::Uctovnictvo;
|
|
||||||
use common::proto::multieko2::uctovnictvo::{PostUctovnictvoRequest, UctovnictvoResponse};
|
|
||||||
use crate::shared::date_utils::parse_date_with_multiple_formats; // Import from shared module
|
|
||||||
|
|
||||||
pub async fn post_uctovnictvo(
|
|
||||||
db_pool: &PgPool,
|
|
||||||
request: PostUctovnictvoRequest,
|
|
||||||
) -> Result<UctovnictvoResponse, Status> {
|
|
||||||
let datum = parse_date_with_multiple_formats(&request.datum)
|
|
||||||
.ok_or_else(|| Status::invalid_argument(format!("Invalid date format: {}", request.datum)))?;
|
|
||||||
|
|
||||||
// Pass the NaiveDate value directly.
|
|
||||||
let uctovnictvo = sqlx::query_as!(
|
|
||||||
Uctovnictvo,
|
|
||||||
r#"
|
|
||||||
INSERT INTO uctovnictvo (
|
|
||||||
adresar_id, c_dokladu, datum, c_faktury, obsah, stredisko,
|
|
||||||
c_uctu, md, identif, poznanka, firma, deleted
|
|
||||||
)
|
|
||||||
VALUES (
|
|
||||||
$1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12
|
|
||||||
)
|
|
||||||
RETURNING
|
|
||||||
id,
|
|
||||||
deleted,
|
|
||||||
adresar_id,
|
|
||||||
c_dokladu,
|
|
||||||
datum as "datum: chrono::NaiveDate",
|
|
||||||
c_faktury,
|
|
||||||
obsah,
|
|
||||||
stredisko,
|
|
||||||
c_uctu,
|
|
||||||
md,
|
|
||||||
identif,
|
|
||||||
poznanka,
|
|
||||||
firma
|
|
||||||
"#,
|
|
||||||
request.adresar_id,
|
|
||||||
request.c_dokladu,
|
|
||||||
datum as chrono::NaiveDate,
|
|
||||||
request.c_faktury,
|
|
||||||
request.obsah,
|
|
||||||
request.stredisko,
|
|
||||||
request.c_uctu,
|
|
||||||
request.md,
|
|
||||||
request.identif,
|
|
||||||
request.poznanka,
|
|
||||||
request.firma,
|
|
||||||
false
|
|
||||||
)
|
|
||||||
.fetch_one(db_pool)
|
|
||||||
.await
|
|
||||||
.map_err(|e| Status::internal(e.to_string()))?;
|
|
||||||
|
|
||||||
// Return the response with formatted date
|
|
||||||
Ok(UctovnictvoResponse {
|
|
||||||
id: uctovnictvo.id,
|
|
||||||
adresar_id: uctovnictvo.adresar_id,
|
|
||||||
c_dokladu: uctovnictvo.c_dokladu,
|
|
||||||
datum: uctovnictvo.datum.format("%d.%m.%Y").to_string(), // Standard Slovak format
|
|
||||||
c_faktury: uctovnictvo.c_faktury,
|
|
||||||
obsah: uctovnictvo.obsah.unwrap_or_default(),
|
|
||||||
stredisko: uctovnictvo.stredisko.unwrap_or_default(),
|
|
||||||
c_uctu: uctovnictvo.c_uctu.unwrap_or_default(),
|
|
||||||
md: uctovnictvo.md.unwrap_or_default(),
|
|
||||||
identif: uctovnictvo.identif.unwrap_or_default(),
|
|
||||||
poznanka: uctovnictvo.poznanka.unwrap_or_default(),
|
|
||||||
firma: uctovnictvo.firma,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
@@ -1,78 +0,0 @@
|
|||||||
// src/uctovnictvo/handlers/put_uctovnictvo.rs
|
|
||||||
use tonic::Status;
|
|
||||||
use sqlx::PgPool;
|
|
||||||
use crate::uctovnictvo::models::Uctovnictvo;
|
|
||||||
use common::proto::multieko2::uctovnictvo::{PutUctovnictvoRequest, UctovnictvoResponse};
|
|
||||||
use crate::shared::date_utils::parse_date_with_multiple_formats; // Import from shared module
|
|
||||||
|
|
||||||
pub async fn put_uctovnictvo(
|
|
||||||
db_pool: &PgPool,
|
|
||||||
request: PutUctovnictvoRequest,
|
|
||||||
) -> Result<UctovnictvoResponse, Status> {
|
|
||||||
let datum = parse_date_with_multiple_formats(&request.datum)
|
|
||||||
.ok_or_else(|| Status::invalid_argument("Invalid date format"))?;
|
|
||||||
|
|
||||||
let uctovnictvo = sqlx::query_as!(
|
|
||||||
Uctovnictvo,
|
|
||||||
r#"
|
|
||||||
UPDATE uctovnictvo
|
|
||||||
SET
|
|
||||||
adresar_id = $2,
|
|
||||||
c_dokladu = $3,
|
|
||||||
datum = $4,
|
|
||||||
c_faktury = $5,
|
|
||||||
obsah = $6,
|
|
||||||
stredisko = $7,
|
|
||||||
c_uctu = $8,
|
|
||||||
md = $9,
|
|
||||||
identif = $10,
|
|
||||||
poznanka = $11,
|
|
||||||
firma = $12
|
|
||||||
WHERE id = $1 AND deleted = FALSE
|
|
||||||
RETURNING
|
|
||||||
id,
|
|
||||||
deleted,
|
|
||||||
adresar_id,
|
|
||||||
c_dokladu,
|
|
||||||
datum as "datum: chrono::NaiveDate",
|
|
||||||
c_faktury,
|
|
||||||
obsah,
|
|
||||||
stredisko,
|
|
||||||
c_uctu,
|
|
||||||
md,
|
|
||||||
identif,
|
|
||||||
poznanka,
|
|
||||||
firma
|
|
||||||
"#,
|
|
||||||
request.id,
|
|
||||||
request.adresar_id,
|
|
||||||
request.c_dokladu,
|
|
||||||
datum as chrono::NaiveDate,
|
|
||||||
request.c_faktury,
|
|
||||||
request.obsah,
|
|
||||||
request.stredisko,
|
|
||||||
request.c_uctu,
|
|
||||||
request.md,
|
|
||||||
request.identif,
|
|
||||||
request.poznanka,
|
|
||||||
request.firma
|
|
||||||
)
|
|
||||||
.fetch_one(db_pool)
|
|
||||||
.await
|
|
||||||
.map_err(|e| Status::internal(e.to_string()))?;
|
|
||||||
|
|
||||||
Ok(UctovnictvoResponse {
|
|
||||||
id: uctovnictvo.id,
|
|
||||||
adresar_id: uctovnictvo.adresar_id,
|
|
||||||
c_dokladu: uctovnictvo.c_dokladu,
|
|
||||||
datum: uctovnictvo.datum.format("%d.%m.%Y").to_string(),
|
|
||||||
c_faktury: uctovnictvo.c_faktury,
|
|
||||||
obsah: uctovnictvo.obsah.unwrap_or_default(),
|
|
||||||
stredisko: uctovnictvo.stredisko.unwrap_or_default(),
|
|
||||||
c_uctu: uctovnictvo.c_uctu.unwrap_or_default(),
|
|
||||||
md: uctovnictvo.md.unwrap_or_default(),
|
|
||||||
identif: uctovnictvo.identif.unwrap_or_default(),
|
|
||||||
poznanka: uctovnictvo.poznanka.unwrap_or_default(),
|
|
||||||
firma: uctovnictvo.firma,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
@@ -1,4 +0,0 @@
|
|||||||
// src/uctovnictvo/mod.rs
|
|
||||||
|
|
||||||
pub mod models;
|
|
||||||
pub mod handlers;
|
|
||||||
@@ -1,21 +0,0 @@
|
|||||||
// src/uctovnictvo/models.rs
|
|
||||||
use chrono::NaiveDate;
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
|
|
||||||
#[derive(Debug, sqlx::FromRow, Serialize, Deserialize)]
|
|
||||||
pub struct Uctovnictvo {
|
|
||||||
pub id: i64,
|
|
||||||
pub deleted: bool,
|
|
||||||
pub adresar_id: i64,
|
|
||||||
pub c_dokladu: String,
|
|
||||||
pub datum: NaiveDate,
|
|
||||||
pub c_faktury: String,
|
|
||||||
pub obsah: Option<String>,
|
|
||||||
pub stredisko: Option<String>,
|
|
||||||
pub c_uctu: Option<String>,
|
|
||||||
pub md: Option<String>,
|
|
||||||
pub identif: Option<String>,
|
|
||||||
pub poznanka: Option<String>,
|
|
||||||
pub firma: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
@@ -1,161 +0,0 @@
|
|||||||
// tests/adresar/delete_adresar_test.rs
|
|
||||||
use rstest::{fixture, rstest};
|
|
||||||
use server::adresar::handlers::delete_adresar;
|
|
||||||
use common::proto::multieko2::adresar::DeleteAdresarRequest;
|
|
||||||
use crate::common::setup_test_db;
|
|
||||||
use sqlx::PgPool;
|
|
||||||
use tonic;
|
|
||||||
use std::sync::Arc;
|
|
||||||
use tokio::sync::Mutex;
|
|
||||||
|
|
||||||
// Reuse the mutex from get_adresar_by_position_test or create a new one
|
|
||||||
lazy_static::lazy_static! {
|
|
||||||
static ref TEST_MUTEX: Arc<Mutex<()>> = Arc::new(Mutex::new(()));
|
|
||||||
}
|
|
||||||
|
|
||||||
// Fixtures
|
|
||||||
#[fixture]
|
|
||||||
async fn pool() -> PgPool {
|
|
||||||
setup_test_db().await
|
|
||||||
}
|
|
||||||
|
|
||||||
#[fixture]
|
|
||||||
async fn closed_pool(#[future] pool: PgPool) -> PgPool {
|
|
||||||
let pool = pool.await;
|
|
||||||
pool.close().await;
|
|
||||||
pool
|
|
||||||
}
|
|
||||||
|
|
||||||
#[fixture]
|
|
||||||
async fn existing_record(#[future] pool: PgPool) -> (PgPool, i64, String) {
|
|
||||||
let pool = pool.await;
|
|
||||||
// Use a unique prefix for test data
|
|
||||||
let prefix = format!("DeleteTest_{}", chrono::Utc::now().timestamp_nanos_opt().unwrap_or_default());
|
|
||||||
|
|
||||||
let record = sqlx::query!(
|
|
||||||
r#"
|
|
||||||
INSERT INTO adresar (firma, deleted)
|
|
||||||
VALUES ($1, false)
|
|
||||||
RETURNING id
|
|
||||||
"#,
|
|
||||||
format!("{}_Company", prefix)
|
|
||||||
)
|
|
||||||
.fetch_one(&pool)
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
(pool, record.id, prefix)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[fixture]
|
|
||||||
async fn existing_deleted_record(#[future] pool: PgPool) -> (PgPool, i64, String) {
|
|
||||||
let pool = pool.await;
|
|
||||||
// Use a unique prefix for test data
|
|
||||||
let prefix = format!("DeletedTest_{}", chrono::Utc::now().timestamp_nanos_opt().unwrap_or_default());
|
|
||||||
|
|
||||||
let record = sqlx::query!(
|
|
||||||
r#"
|
|
||||||
INSERT INTO adresar (firma, deleted)
|
|
||||||
VALUES ($1, true)
|
|
||||||
RETURNING id
|
|
||||||
"#,
|
|
||||||
format!("{}_Deleted", prefix)
|
|
||||||
)
|
|
||||||
.fetch_one(&pool)
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
(pool, record.id, prefix)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Helper to check if the record is deleted
|
|
||||||
async fn assert_record_deleted(pool: &PgPool, id: i64) {
|
|
||||||
let db_record = sqlx::query!("SELECT deleted FROM adresar WHERE id = $1", id)
|
|
||||||
.fetch_one(pool)
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
assert!(db_record.deleted);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Helper to clean up test records
|
|
||||||
async fn cleanup_test_records(pool: &PgPool, prefix: &str) {
|
|
||||||
if !prefix.is_empty() {
|
|
||||||
sqlx::query!(
|
|
||||||
"DELETE FROM adresar WHERE firma LIKE $1",
|
|
||||||
format!("{}%", prefix)
|
|
||||||
)
|
|
||||||
.execute(pool)
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Tests
|
|
||||||
#[rstest]
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_delete_adresar_success(
|
|
||||||
#[future] existing_record: (PgPool, i64, String),
|
|
||||||
) {
|
|
||||||
// Take a lock to prevent concurrent test execution
|
|
||||||
let _guard = TEST_MUTEX.lock().await;
|
|
||||||
|
|
||||||
let (pool, id, prefix) = existing_record.await;
|
|
||||||
let request = DeleteAdresarRequest { id };
|
|
||||||
let response = delete_adresar(&pool, request).await.unwrap();
|
|
||||||
|
|
||||||
assert!(response.success);
|
|
||||||
assert_record_deleted(&pool, id).await;
|
|
||||||
|
|
||||||
// Clean up
|
|
||||||
cleanup_test_records(&pool, &prefix).await;
|
|
||||||
}
|
|
||||||
|
|
||||||
#[rstest]
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_delete_adresar_nonexistent_id(
|
|
||||||
#[future] pool: PgPool,
|
|
||||||
) {
|
|
||||||
// Take a lock to prevent concurrent test execution
|
|
||||||
let _guard = TEST_MUTEX.lock().await;
|
|
||||||
|
|
||||||
let pool = pool.await;
|
|
||||||
let request = DeleteAdresarRequest { id: 9999 };
|
|
||||||
let response = delete_adresar(&pool, request).await.unwrap();
|
|
||||||
|
|
||||||
// Deleting a non-existent record should return success: false
|
|
||||||
assert!(!response.success);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[rstest]
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_delete_adresar_already_deleted(
|
|
||||||
#[future] existing_deleted_record: (PgPool, i64, String),
|
|
||||||
) {
|
|
||||||
// Take a lock to prevent concurrent test execution
|
|
||||||
let _guard = TEST_MUTEX.lock().await;
|
|
||||||
|
|
||||||
let (pool, id, prefix) = existing_deleted_record.await;
|
|
||||||
let request = DeleteAdresarRequest { id };
|
|
||||||
let response = delete_adresar(&pool, request).await.unwrap();
|
|
||||||
|
|
||||||
// Deleting an already deleted record should return success: false
|
|
||||||
assert!(!response.success);
|
|
||||||
|
|
||||||
// Clean up
|
|
||||||
cleanup_test_records(&pool, &prefix).await;
|
|
||||||
}
|
|
||||||
|
|
||||||
#[rstest]
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_delete_adresar_database_error(
|
|
||||||
#[future] closed_pool: PgPool,
|
|
||||||
) {
|
|
||||||
// No need for mutex here as we're not modifying the database
|
|
||||||
let closed_pool = closed_pool.await;
|
|
||||||
let request = DeleteAdresarRequest { id: 1 };
|
|
||||||
let result = delete_adresar(&closed_pool, request).await;
|
|
||||||
|
|
||||||
assert!(result.is_err());
|
|
||||||
assert_eq!(result.unwrap_err().code(), tonic::Code::Internal);
|
|
||||||
}
|
|
||||||
@@ -1,368 +0,0 @@
|
|||||||
// tests/adresar/get_adresar_by_position_test.rs
|
|
||||||
use rstest::{fixture, rstest};
|
|
||||||
use server::adresar::handlers::{get_adresar_by_position, get_adresar_count};
|
|
||||||
use common::proto::multieko2::common::{PositionRequest, Empty};
|
|
||||||
use crate::common::setup_test_db;
|
|
||||||
use sqlx::PgPool;
|
|
||||||
use tonic;
|
|
||||||
use std::sync::Arc;
|
|
||||||
use tokio::sync::Mutex;
|
|
||||||
|
|
||||||
// Use a global mutex to synchronize test execution
|
|
||||||
// This prevents tests from interfering with each other
|
|
||||||
lazy_static::lazy_static! {
|
|
||||||
static ref TEST_MUTEX: Arc<Mutex<()>> = Arc::new(Mutex::new(()));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[fixture]
|
|
||||||
async fn pool() -> PgPool {
|
|
||||||
setup_test_db().await
|
|
||||||
}
|
|
||||||
|
|
||||||
#[fixture]
|
|
||||||
async fn closed_pool(#[future] pool: PgPool) -> PgPool {
|
|
||||||
let pool = pool.await;
|
|
||||||
pool.close().await;
|
|
||||||
pool
|
|
||||||
}
|
|
||||||
|
|
||||||
// Create a test record with specific data and delete status
|
|
||||||
async fn create_test_record(pool: &PgPool, firma: &str, deleted: bool) -> i64 {
|
|
||||||
sqlx::query_scalar!(
|
|
||||||
"INSERT INTO adresar (firma, deleted) VALUES ($1, $2) RETURNING id",
|
|
||||||
firma,
|
|
||||||
deleted
|
|
||||||
)
|
|
||||||
.fetch_one(pool)
|
|
||||||
.await
|
|
||||||
.unwrap()
|
|
||||||
}
|
|
||||||
|
|
||||||
// Clean up test records after tests
|
|
||||||
async fn cleanup_test_records(pool: &PgPool, prefix: &str) {
|
|
||||||
sqlx::query!(
|
|
||||||
"DELETE FROM adresar WHERE firma LIKE $1",
|
|
||||||
format!("{}%", prefix)
|
|
||||||
)
|
|
||||||
.execute(pool)
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
}
|
|
||||||
|
|
||||||
// Find the position of a record in the database
|
|
||||||
async fn find_position_of_record(pool: &PgPool, id: i64) -> Option<i64> {
|
|
||||||
// Get all non-deleted records ordered by ID
|
|
||||||
let records = sqlx::query_scalar!(
|
|
||||||
"SELECT id FROM adresar WHERE deleted = FALSE ORDER BY id ASC"
|
|
||||||
)
|
|
||||||
.fetch_all(pool)
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
// Find the position of our record (1-based)
|
|
||||||
for (index, record_id) in records.iter().enumerate() {
|
|
||||||
if *record_id == id {
|
|
||||||
return Some((index + 1) as i64);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
None
|
|
||||||
}
|
|
||||||
|
|
||||||
// Test position validation
|
|
||||||
#[rstest]
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_position_zero(#[future] pool: PgPool) {
|
|
||||||
let pool = pool.await;
|
|
||||||
|
|
||||||
// Request position 0 (invalid)
|
|
||||||
let request = PositionRequest { position: 0 };
|
|
||||||
let result = get_adresar_by_position(&pool, request).await;
|
|
||||||
|
|
||||||
// Verify it returns an error
|
|
||||||
assert!(result.is_err());
|
|
||||||
assert_eq!(result.unwrap_err().code(), tonic::Code::InvalidArgument);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[rstest]
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_position_negative(#[future] pool: PgPool) {
|
|
||||||
let pool = pool.await;
|
|
||||||
|
|
||||||
// Request negative position (invalid)
|
|
||||||
let request = PositionRequest { position: -1 };
|
|
||||||
let result = get_adresar_by_position(&pool, request).await;
|
|
||||||
|
|
||||||
// Verify it returns an error
|
|
||||||
assert!(result.is_err());
|
|
||||||
assert_eq!(result.unwrap_err().code(), tonic::Code::InvalidArgument);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[rstest]
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_basic_position_retrieval(#[future] pool: PgPool) {
|
|
||||||
let pool = pool.await;
|
|
||||||
|
|
||||||
// Take a lock to prevent concurrent test execution
|
|
||||||
let _guard = TEST_MUTEX.lock().await;
|
|
||||||
|
|
||||||
// Use a unique prefix for test data to prevent conflicts
|
|
||||||
let prefix = "PosBasicTest";
|
|
||||||
|
|
||||||
// Clean up any existing test data
|
|
||||||
cleanup_test_records(&pool, prefix).await;
|
|
||||||
|
|
||||||
// Create test records
|
|
||||||
let id1 = create_test_record(&pool, &format!("{}_1", prefix), false).await;
|
|
||||||
let id2 = create_test_record(&pool, &format!("{}_2", prefix), false).await;
|
|
||||||
let id3 = create_test_record(&pool, &format!("{}_3", prefix), false).await;
|
|
||||||
|
|
||||||
// Find the positions of these records in the database
|
|
||||||
let pos1 = find_position_of_record(&pool, id1).await.unwrap();
|
|
||||||
let pos2 = find_position_of_record(&pool, id2).await.unwrap();
|
|
||||||
let pos3 = find_position_of_record(&pool, id3).await.unwrap();
|
|
||||||
|
|
||||||
// Test retrieving each position
|
|
||||||
let response1 = get_adresar_by_position(&pool, PositionRequest { position: pos1 }).await.unwrap();
|
|
||||||
assert_eq!(response1.id, id1);
|
|
||||||
|
|
||||||
let response2 = get_adresar_by_position(&pool, PositionRequest { position: pos2 }).await.unwrap();
|
|
||||||
assert_eq!(response2.id, id2);
|
|
||||||
|
|
||||||
let response3 = get_adresar_by_position(&pool, PositionRequest { position: pos3 }).await.unwrap();
|
|
||||||
assert_eq!(response3.id, id3);
|
|
||||||
|
|
||||||
// Clean up test data
|
|
||||||
cleanup_test_records(&pool, prefix).await;
|
|
||||||
}
|
|
||||||
|
|
||||||
#[rstest]
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_deleted_records_excluded(#[future] pool: PgPool) {
|
|
||||||
let pool = pool.await;
|
|
||||||
|
|
||||||
// Take a lock to prevent concurrent test execution
|
|
||||||
let _guard = TEST_MUTEX.lock().await;
|
|
||||||
|
|
||||||
// Use a unique prefix for test data
|
|
||||||
let prefix = "PosDeletedTest";
|
|
||||||
|
|
||||||
// Clean up any existing test data
|
|
||||||
cleanup_test_records(&pool, prefix).await;
|
|
||||||
|
|
||||||
// Create a mix of active and deleted records
|
|
||||||
let id1 = create_test_record(&pool, &format!("{}_1", prefix), false).await;
|
|
||||||
let _id_deleted = create_test_record(&pool, &format!("{}_del", prefix), true).await;
|
|
||||||
let id2 = create_test_record(&pool, &format!("{}_2", prefix), false).await;
|
|
||||||
|
|
||||||
// Find positions
|
|
||||||
let pos1 = find_position_of_record(&pool, id1).await.unwrap();
|
|
||||||
let pos2 = find_position_of_record(&pool, id2).await.unwrap();
|
|
||||||
|
|
||||||
// Verify positions are consecutive, which means the deleted record is excluded
|
|
||||||
assert_eq!(pos2, pos1 + 1);
|
|
||||||
|
|
||||||
// Retrieve by position and verify
|
|
||||||
let response1 = get_adresar_by_position(&pool, PositionRequest { position: pos1 }).await.unwrap();
|
|
||||||
assert_eq!(response1.id, id1);
|
|
||||||
|
|
||||||
let response2 = get_adresar_by_position(&pool, PositionRequest { position: pos2 }).await.unwrap();
|
|
||||||
assert_eq!(response2.id, id2);
|
|
||||||
|
|
||||||
// Clean up test data
|
|
||||||
cleanup_test_records(&pool, prefix).await;
|
|
||||||
}
|
|
||||||
|
|
||||||
#[rstest]
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_position_changes_after_deletion(#[future] pool: PgPool) {
|
|
||||||
let pool = pool.await;
|
|
||||||
|
|
||||||
// Take a lock to prevent concurrent test execution
|
|
||||||
let _guard = TEST_MUTEX.lock().await;
|
|
||||||
|
|
||||||
// Use a unique prefix for test data
|
|
||||||
let prefix = "PosChangeTest";
|
|
||||||
|
|
||||||
// Clean up any existing test data
|
|
||||||
cleanup_test_records(&pool, prefix).await;
|
|
||||||
|
|
||||||
// Create records
|
|
||||||
let id1 = create_test_record(&pool, &format!("{}_1", prefix), false).await;
|
|
||||||
let id2 = create_test_record(&pool, &format!("{}_2", prefix), false).await;
|
|
||||||
let id3 = create_test_record(&pool, &format!("{}_3", prefix), false).await;
|
|
||||||
|
|
||||||
// Find initial positions
|
|
||||||
let _pos1 = find_position_of_record(&pool, id1).await.unwrap();
|
|
||||||
let pos2 = find_position_of_record(&pool, id2).await.unwrap();
|
|
||||||
let pos3 = find_position_of_record(&pool, id3).await.unwrap();
|
|
||||||
|
|
||||||
// Mark the first record as deleted
|
|
||||||
sqlx::query!("UPDATE adresar SET deleted = TRUE WHERE id = $1", id1)
|
|
||||||
.execute(&pool)
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
// Find new positions
|
|
||||||
let pos2_after = find_position_of_record(&pool, id2).await.unwrap();
|
|
||||||
let pos3_after = find_position_of_record(&pool, id3).await.unwrap();
|
|
||||||
|
|
||||||
// Verify positions shifted
|
|
||||||
assert!(pos2_after < pos2);
|
|
||||||
assert!(pos3_after < pos3);
|
|
||||||
|
|
||||||
// Verify by retrieving records at new positions
|
|
||||||
let response_at_first = get_adresar_by_position(&pool, PositionRequest { position: pos2_after }).await.unwrap();
|
|
||||||
assert_eq!(response_at_first.id, id2);
|
|
||||||
|
|
||||||
// Clean up test data
|
|
||||||
cleanup_test_records(&pool, prefix).await;
|
|
||||||
}
|
|
||||||
|
|
||||||
#[rstest]
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_position_out_of_bounds(#[future] pool: PgPool) {
|
|
||||||
let pool = pool.await;
|
|
||||||
|
|
||||||
// Take a lock to prevent concurrent test execution
|
|
||||||
let _guard = TEST_MUTEX.lock().await;
|
|
||||||
|
|
||||||
// Get the total count of non-deleted records
|
|
||||||
let count = sqlx::query_scalar!(
|
|
||||||
"SELECT COUNT(*) FROM adresar WHERE deleted = FALSE"
|
|
||||||
)
|
|
||||||
.fetch_one(&pool)
|
|
||||||
.await
|
|
||||||
.unwrap()
|
|
||||||
.unwrap_or(0);
|
|
||||||
|
|
||||||
// Request a position beyond the count
|
|
||||||
let request = PositionRequest { position: count + 1 };
|
|
||||||
let result = get_adresar_by_position(&pool, request).await;
|
|
||||||
|
|
||||||
// Verify it returns an error
|
|
||||||
assert!(result.is_err());
|
|
||||||
assert_eq!(result.unwrap_err().code(), tonic::Code::NotFound);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[rstest]
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_database_error(#[future] closed_pool: PgPool) {
|
|
||||||
let closed_pool = closed_pool.await;
|
|
||||||
|
|
||||||
// Attempt to query with a closed pool
|
|
||||||
let request = PositionRequest { position: 1 };
|
|
||||||
let result = get_adresar_by_position(&closed_pool, request).await;
|
|
||||||
|
|
||||||
// Verify it returns an internal error
|
|
||||||
assert!(result.is_err());
|
|
||||||
assert_eq!(result.unwrap_err().code(), tonic::Code::Internal);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[rstest]
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_position_after_adding_record(#[future] pool: PgPool) {
|
|
||||||
let pool = pool.await;
|
|
||||||
|
|
||||||
// Take a lock to prevent concurrent test execution
|
|
||||||
let _guard = TEST_MUTEX.lock().await;
|
|
||||||
|
|
||||||
// Use a unique prefix for test data
|
|
||||||
let prefix = "PosAddTest";
|
|
||||||
|
|
||||||
// Clean up any existing test data
|
|
||||||
cleanup_test_records(&pool, prefix).await;
|
|
||||||
|
|
||||||
// Create records
|
|
||||||
let id1 = create_test_record(&pool, &format!("{}_1", prefix), false).await;
|
|
||||||
let id2 = create_test_record(&pool, &format!("{}_2", prefix), false).await;
|
|
||||||
|
|
||||||
// Find positions
|
|
||||||
let pos1 = find_position_of_record(&pool, id1).await.unwrap();
|
|
||||||
let pos2 = find_position_of_record(&pool, id2).await.unwrap();
|
|
||||||
|
|
||||||
// Add a new record
|
|
||||||
let id3 = create_test_record(&pool, &format!("{}_3", prefix), false).await;
|
|
||||||
|
|
||||||
// Find its position
|
|
||||||
let pos3 = find_position_of_record(&pool, id3).await.unwrap();
|
|
||||||
|
|
||||||
// Verify retrieval by position
|
|
||||||
let response3 = get_adresar_by_position(&pool, PositionRequest { position: pos3 }).await.unwrap();
|
|
||||||
assert_eq!(response3.id, id3);
|
|
||||||
|
|
||||||
// Verify original positions still work
|
|
||||||
let response1 = get_adresar_by_position(&pool, PositionRequest { position: pos1 }).await.unwrap();
|
|
||||||
assert_eq!(response1.id, id1);
|
|
||||||
|
|
||||||
let response2 = get_adresar_by_position(&pool, PositionRequest { position: pos2 }).await.unwrap();
|
|
||||||
assert_eq!(response2.id, id2);
|
|
||||||
|
|
||||||
// Clean up test data
|
|
||||||
cleanup_test_records(&pool, prefix).await;
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Test handler correctly excludes deleted records
|
|
||||||
#[rstest]
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_handler_excludes_deleted_records(#[future] pool: PgPool) {
|
|
||||||
let pool = pool.await;
|
|
||||||
|
|
||||||
// Take a lock to prevent concurrent test execution
|
|
||||||
let _guard = TEST_MUTEX.lock().await;
|
|
||||||
|
|
||||||
// Use a unique prefix for test data
|
|
||||||
let prefix = "CountTest";
|
|
||||||
|
|
||||||
// Clean up any existing test data
|
|
||||||
cleanup_test_records(&pool, prefix).await;
|
|
||||||
|
|
||||||
// Create active records
|
|
||||||
for i in 1..=3 {
|
|
||||||
create_test_record(&pool, &format!("{}_Active_{}", prefix, i), false).await;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Create deleted records
|
|
||||||
for i in 1..=2 {
|
|
||||||
create_test_record(&pool, &format!("{}_Deleted_{}", prefix, i), true).await;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Count our test records by deleted status
|
|
||||||
let active_test_count = sqlx::query_scalar!(
|
|
||||||
"SELECT COUNT(*) FROM adresar WHERE firma LIKE $1 AND deleted = FALSE",
|
|
||||||
format!("{}%", prefix)
|
|
||||||
)
|
|
||||||
.fetch_one(&pool)
|
|
||||||
.await
|
|
||||||
.unwrap()
|
|
||||||
.unwrap_or(0);
|
|
||||||
|
|
||||||
let deleted_test_count = sqlx::query_scalar!(
|
|
||||||
"SELECT COUNT(*) FROM adresar WHERE firma LIKE $1 AND deleted = TRUE",
|
|
||||||
format!("{}%", prefix)
|
|
||||||
)
|
|
||||||
.fetch_one(&pool)
|
|
||||||
.await
|
|
||||||
.unwrap()
|
|
||||||
.unwrap_or(0);
|
|
||||||
|
|
||||||
// Verify our test data was inserted correctly
|
|
||||||
assert_eq!(active_test_count, 3);
|
|
||||||
assert_eq!(deleted_test_count, 2);
|
|
||||||
|
|
||||||
// Get the total count of active records (including existing ones)
|
|
||||||
let total_active_count = sqlx::query_scalar!(
|
|
||||||
"SELECT COUNT(*) FROM adresar WHERE deleted = FALSE"
|
|
||||||
)
|
|
||||||
.fetch_one(&pool)
|
|
||||||
.await
|
|
||||||
.unwrap()
|
|
||||||
.unwrap_or(0);
|
|
||||||
|
|
||||||
// Now call our handler and verify it returns the same count
|
|
||||||
let response = get_adresar_count(&pool, Empty {}).await.unwrap();
|
|
||||||
assert_eq!(response.count, total_active_count);
|
|
||||||
|
|
||||||
// Clean up test data
|
|
||||||
cleanup_test_records(&pool, prefix).await;
|
|
||||||
}
|
|
||||||
@@ -1,284 +0,0 @@
|
|||||||
// tests/adresar/get_adresar_count_test.rs
|
|
||||||
use rstest::{fixture, rstest};
|
|
||||||
use server::adresar::handlers::get_adresar_count;
|
|
||||||
use common::proto::multieko2::common::Empty;
|
|
||||||
use crate::common::setup_test_db;
|
|
||||||
use sqlx::PgPool;
|
|
||||||
use tonic;
|
|
||||||
|
|
||||||
// For connection pooling
|
|
||||||
#[fixture]
|
|
||||||
async fn pool() -> PgPool {
|
|
||||||
setup_test_db().await
|
|
||||||
}
|
|
||||||
|
|
||||||
#[fixture]
|
|
||||||
async fn closed_pool(#[future] pool: PgPool) -> PgPool {
|
|
||||||
let pool = pool.await;
|
|
||||||
pool.close().await;
|
|
||||||
pool
|
|
||||||
}
|
|
||||||
|
|
||||||
// Create a self-contained test that runs in a transaction
|
|
||||||
// --------------------------------------------------------
|
|
||||||
// Instead of relying on table state and doing our own transaction management,
|
|
||||||
// we'll mock the database response to `get_adresar_count` and verify it behaves correctly
|
|
||||||
|
|
||||||
/// Test only that the handler returns the value from the database correctly
|
|
||||||
#[rstest]
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_handler_returns_count_from_database(#[future] pool: PgPool) {
|
|
||||||
let pool = pool.await;
|
|
||||||
|
|
||||||
// First, get whatever count the database currently has
|
|
||||||
let count_query = sqlx::query_scalar!(
|
|
||||||
"SELECT COUNT(*) FROM adresar WHERE deleted = FALSE"
|
|
||||||
)
|
|
||||||
.fetch_one(&pool)
|
|
||||||
.await
|
|
||||||
.unwrap()
|
|
||||||
.unwrap_or(0);
|
|
||||||
|
|
||||||
// Now call our handler and verify it returns the same count
|
|
||||||
let response = get_adresar_count(&pool, Empty {}).await.unwrap();
|
|
||||||
assert_eq!(response.count, count_query);
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Test handler correctly excludes deleted records
|
|
||||||
#[rstest]
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_handler_excludes_deleted_records(#[future] pool: PgPool) {
|
|
||||||
let pool = pool.await;
|
|
||||||
|
|
||||||
// Use a transaction to isolate this test completely
|
|
||||||
let mut tx = pool.begin().await.unwrap();
|
|
||||||
|
|
||||||
// Count records where deleted = TRUE
|
|
||||||
let deleted_count = sqlx::query_scalar!(
|
|
||||||
"SELECT COUNT(*) FROM adresar WHERE deleted = TRUE"
|
|
||||||
)
|
|
||||||
.fetch_one(&mut *tx)
|
|
||||||
.await
|
|
||||||
.unwrap()
|
|
||||||
.unwrap_or(0);
|
|
||||||
|
|
||||||
// Count records where deleted = FALSE
|
|
||||||
let active_count = sqlx::query_scalar!(
|
|
||||||
"SELECT COUNT(*) FROM adresar WHERE deleted = FALSE"
|
|
||||||
)
|
|
||||||
.fetch_one(&mut *tx)
|
|
||||||
.await
|
|
||||||
.unwrap()
|
|
||||||
.unwrap_or(0);
|
|
||||||
|
|
||||||
// Count all records
|
|
||||||
let total_count = sqlx::query_scalar!(
|
|
||||||
"SELECT COUNT(*) FROM adresar"
|
|
||||||
)
|
|
||||||
.fetch_one(&mut *tx)
|
|
||||||
.await
|
|
||||||
.unwrap()
|
|
||||||
.unwrap_or(0);
|
|
||||||
|
|
||||||
// Verify our counts are consistent
|
|
||||||
assert_eq!(total_count, active_count + deleted_count);
|
|
||||||
|
|
||||||
// Verify our handler returns only the active count
|
|
||||||
let response = get_adresar_count(&pool, Empty {}).await.unwrap();
|
|
||||||
assert_eq!(response.count, active_count);
|
|
||||||
|
|
||||||
// Rollback transaction
|
|
||||||
tx.rollback().await.unwrap();
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Test SQL query behavior with deleted flag
|
|
||||||
#[rstest]
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_deleted_flag_filters_records(#[future] pool: PgPool) {
|
|
||||||
let pool = pool.await;
|
|
||||||
|
|
||||||
// Use a transaction to isolate this test completely
|
|
||||||
let mut tx = pool.begin().await.unwrap();
|
|
||||||
|
|
||||||
// Insert test records inside this transaction
|
|
||||||
// They will be automatically rolled back at the end
|
|
||||||
|
|
||||||
sqlx::query!(
|
|
||||||
"INSERT INTO adresar (firma, deleted) VALUES ($1, FALSE)",
|
|
||||||
"Test Active Record"
|
|
||||||
)
|
|
||||||
.execute(&mut *tx)
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
sqlx::query!(
|
|
||||||
"INSERT INTO adresar (firma, deleted) VALUES ($1, TRUE)",
|
|
||||||
"Test Deleted Record"
|
|
||||||
)
|
|
||||||
.execute(&mut *tx)
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
// Count active records in the transaction
|
|
||||||
let active_count = sqlx::query_scalar!(
|
|
||||||
"SELECT COUNT(*) FROM adresar WHERE deleted = FALSE"
|
|
||||||
)
|
|
||||||
.fetch_one(&mut *tx)
|
|
||||||
.await
|
|
||||||
.unwrap()
|
|
||||||
.unwrap_or(0);
|
|
||||||
|
|
||||||
// Count deleted records in the transaction
|
|
||||||
let deleted_count = sqlx::query_scalar!(
|
|
||||||
"SELECT COUNT(*) FROM adresar WHERE deleted = TRUE"
|
|
||||||
)
|
|
||||||
.fetch_one(&mut *tx)
|
|
||||||
.await
|
|
||||||
.unwrap()
|
|
||||||
.unwrap_or(0);
|
|
||||||
|
|
||||||
// Verify at least one active and one deleted record
|
|
||||||
assert!(active_count > 0);
|
|
||||||
assert!(deleted_count > 0);
|
|
||||||
|
|
||||||
// Rollback transaction
|
|
||||||
tx.rollback().await.unwrap();
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Test the handler returns an error with a closed pool
|
|
||||||
#[rstest]
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_database_error(#[future] closed_pool: PgPool) {
|
|
||||||
let closed_pool = closed_pool.await;
|
|
||||||
let result = get_adresar_count(&closed_pool, Empty {}).await;
|
|
||||||
assert!(result.is_err());
|
|
||||||
assert_eq!(result.unwrap_err().code(), tonic::Code::Internal);
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Test the behavior of setting deleted to true and back
|
|
||||||
#[rstest]
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_update_of_deleted_flag(#[future] pool: PgPool) {
|
|
||||||
let pool = pool.await;
|
|
||||||
|
|
||||||
// Use a transaction for complete isolation
|
|
||||||
let mut tx = pool.begin().await.unwrap();
|
|
||||||
|
|
||||||
// Insert a test record
|
|
||||||
let id = sqlx::query_scalar!(
|
|
||||||
"INSERT INTO adresar (firma, deleted) VALUES ($1, FALSE) RETURNING id",
|
|
||||||
"Test Toggle Record"
|
|
||||||
)
|
|
||||||
.fetch_one(&mut *tx)
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
// Count active records with this new record
|
|
||||||
let active_count_before = sqlx::query_scalar!(
|
|
||||||
"SELECT COUNT(*) FROM adresar WHERE deleted = FALSE"
|
|
||||||
)
|
|
||||||
.fetch_one(&mut *tx)
|
|
||||||
.await
|
|
||||||
.unwrap()
|
|
||||||
.unwrap_or(0);
|
|
||||||
|
|
||||||
// Mark as deleted
|
|
||||||
sqlx::query!(
|
|
||||||
"UPDATE adresar SET deleted = TRUE WHERE id = $1",
|
|
||||||
id
|
|
||||||
)
|
|
||||||
.execute(&mut *tx)
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
// Count active records after marking as deleted
|
|
||||||
let active_count_after_delete = sqlx::query_scalar!(
|
|
||||||
"SELECT COUNT(*) FROM adresar WHERE deleted = FALSE"
|
|
||||||
)
|
|
||||||
.fetch_one(&mut *tx)
|
|
||||||
.await
|
|
||||||
.unwrap()
|
|
||||||
.unwrap_or(0);
|
|
||||||
|
|
||||||
// Verify count decreased by 1
|
|
||||||
assert_eq!(active_count_after_delete, active_count_before - 1);
|
|
||||||
|
|
||||||
// Mark as active again
|
|
||||||
sqlx::query!(
|
|
||||||
"UPDATE adresar SET deleted = FALSE WHERE id = $1",
|
|
||||||
id
|
|
||||||
)
|
|
||||||
.execute(&mut *tx)
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
// Count active records after marking as active
|
|
||||||
let active_count_after_restore = sqlx::query_scalar!(
|
|
||||||
"SELECT COUNT(*) FROM adresar WHERE deleted = FALSE"
|
|
||||||
)
|
|
||||||
.fetch_one(&mut *tx)
|
|
||||||
.await
|
|
||||||
.unwrap()
|
|
||||||
.unwrap_or(0);
|
|
||||||
|
|
||||||
// Verify count increased back to original
|
|
||||||
assert_eq!(active_count_after_restore, active_count_before);
|
|
||||||
|
|
||||||
// Rollback transaction
|
|
||||||
tx.rollback().await.unwrap();
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Test edge cases of an empty table
|
|
||||||
#[rstest]
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_edge_case_empty_table(#[future] pool: PgPool) {
|
|
||||||
let pool = pool.await;
|
|
||||||
|
|
||||||
// Not literally testing an empty table since we can't truncate due to FK constraints
|
|
||||||
// But we can verify the count response is never negative
|
|
||||||
let response = get_adresar_count(&pool, Empty {}).await.unwrap();
|
|
||||||
assert!(response.count >= 0);
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Test adding a record and verifying count increases
|
|
||||||
#[rstest]
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_count_increments_after_adding_record(#[future] pool: PgPool) {
|
|
||||||
let pool = pool.await;
|
|
||||||
|
|
||||||
// Use a transaction for complete isolation
|
|
||||||
let mut tx = pool.begin().await.unwrap();
|
|
||||||
|
|
||||||
// Get initial active count inside transaction
|
|
||||||
let initial_count = sqlx::query_scalar!(
|
|
||||||
"SELECT COUNT(*) FROM adresar WHERE deleted = FALSE"
|
|
||||||
)
|
|
||||||
.fetch_one(&mut *tx)
|
|
||||||
.await
|
|
||||||
.unwrap()
|
|
||||||
.unwrap_or(0);
|
|
||||||
|
|
||||||
// Add a record inside the transaction
|
|
||||||
sqlx::query!(
|
|
||||||
"INSERT INTO adresar (firma, deleted) VALUES ($1, FALSE)",
|
|
||||||
"Test Increment Record"
|
|
||||||
)
|
|
||||||
.execute(&mut *tx)
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
// Get new count inside transaction
|
|
||||||
let new_count = sqlx::query_scalar!(
|
|
||||||
"SELECT COUNT(*) FROM adresar WHERE deleted = FALSE"
|
|
||||||
)
|
|
||||||
.fetch_one(&mut *tx)
|
|
||||||
.await
|
|
||||||
.unwrap()
|
|
||||||
.unwrap_or(0);
|
|
||||||
|
|
||||||
// Verify count increased by exactly 1
|
|
||||||
assert_eq!(new_count, initial_count + 1);
|
|
||||||
|
|
||||||
// Rollback transaction
|
|
||||||
tx.rollback().await.unwrap();
|
|
||||||
}
|
|
||||||
@@ -1,238 +0,0 @@
|
|||||||
// tests/adresar/get_adresar_test.rs
|
|
||||||
use rstest::{fixture, rstest};
|
|
||||||
use server::adresar::handlers::get_adresar;
|
|
||||||
use common::proto::multieko2::adresar::{GetAdresarRequest, AdresarResponse};
|
|
||||||
use crate::common::setup_test_db;
|
|
||||||
use sqlx::PgPool;
|
|
||||||
use tonic;
|
|
||||||
|
|
||||||
#[fixture]
|
|
||||||
async fn pool() -> PgPool {
|
|
||||||
setup_test_db().await
|
|
||||||
}
|
|
||||||
|
|
||||||
#[fixture]
|
|
||||||
async fn closed_pool(#[future] pool: PgPool) -> PgPool {
|
|
||||||
let pool = pool.await;
|
|
||||||
pool.close().await;
|
|
||||||
pool
|
|
||||||
}
|
|
||||||
|
|
||||||
#[fixture]
|
|
||||||
async fn existing_record(#[future] pool: PgPool) -> (PgPool, i64) {
|
|
||||||
let pool = pool.await;
|
|
||||||
let record = sqlx::query!(
|
|
||||||
r#"
|
|
||||||
INSERT INTO adresar (
|
|
||||||
firma, kz, drc, ulica, psc, mesto, stat, banka, ucet,
|
|
||||||
skladm, ico, kontakt, telefon, skladu, fax, deleted
|
|
||||||
)
|
|
||||||
VALUES (
|
|
||||||
'Test Company', 'KZ', 'DRC', 'Street', '12345', 'City',
|
|
||||||
'Country', 'Bank', 'Account', 'SkladM', 'ICO', 'Contact',
|
|
||||||
'+421123456789', 'SkladU', 'Fax', false
|
|
||||||
)
|
|
||||||
RETURNING id
|
|
||||||
"#
|
|
||||||
)
|
|
||||||
.fetch_one(&pool)
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
(pool, record.id)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Fixture: inserts a soft-deleted row (`deleted = true`) and returns the
// pool together with the row's id. Handlers are expected to treat such rows
// as absent.
#[fixture]
async fn existing_deleted_record(#[future] pool: PgPool) -> (PgPool, i64) {
    let pool = pool.await;
    let record = sqlx::query!(
        r#"
        INSERT INTO adresar (firma, deleted)
        VALUES ('Deleted Company', true)
        RETURNING id
        "#
    )
    .fetch_one(&pool)
    .await
    .unwrap();

    (pool, record.id)
}
|
|
||||||
|
|
||||||
// Fixture: inserts a row where only the required `firma` column is set,
// leaving every optional column NULL, so tests can verify NULL -> "" mapping
// in the response.
#[fixture]
async fn existing_record_with_nulls(#[future] pool: PgPool) -> (PgPool, i64) {
    let pool = pool.await;
    let record = sqlx::query!(
        r#"
        INSERT INTO adresar (firma)
        VALUES ('Null Fields Company')
        RETURNING id
        "#
    )
    .fetch_one(&pool)
    .await
    .unwrap();

    (pool, record.id)
}
|
|
||||||
|
|
||||||
async fn assert_response_matches(pool: &PgPool, id: i64, response: &AdresarResponse) {
|
|
||||||
let db_record = sqlx::query!("SELECT * FROM adresar WHERE id = $1", id)
|
|
||||||
.fetch_one(pool)
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
assert_eq!(db_record.firma, response.firma);
|
|
||||||
assert_eq!(db_record.kz.unwrap_or_default(), response.kz);
|
|
||||||
assert_eq!(db_record.drc.unwrap_or_default(), response.drc);
|
|
||||||
assert_eq!(db_record.ulica.unwrap_or_default(), response.ulica);
|
|
||||||
assert_eq!(db_record.psc.unwrap_or_default(), response.psc);
|
|
||||||
assert_eq!(db_record.mesto.unwrap_or_default(), response.mesto);
|
|
||||||
assert_eq!(db_record.stat.unwrap_or_default(), response.stat);
|
|
||||||
assert_eq!(db_record.banka.unwrap_or_default(), response.banka);
|
|
||||||
assert_eq!(db_record.ucet.unwrap_or_default(), response.ucet);
|
|
||||||
assert_eq!(db_record.skladm.unwrap_or_default(), response.skladm);
|
|
||||||
assert_eq!(db_record.ico.unwrap_or_default(), response.ico);
|
|
||||||
assert_eq!(db_record.kontakt.unwrap_or_default(), response.kontakt);
|
|
||||||
assert_eq!(db_record.telefon.unwrap_or_default(), response.telefon);
|
|
||||||
assert_eq!(db_record.skladu.unwrap_or_default(), response.skladu);
|
|
||||||
assert_eq!(db_record.fax.unwrap_or_default(), response.fax);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[rstest]
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_get_adresar_success(
|
|
||||||
#[future] existing_record: (PgPool, i64),
|
|
||||||
) {
|
|
||||||
let (pool, id) = existing_record.await;
|
|
||||||
let request = GetAdresarRequest { id };
|
|
||||||
let response = get_adresar(&pool, request).await.unwrap();
|
|
||||||
|
|
||||||
assert_eq!(response.id, id);
|
|
||||||
assert_response_matches(&pool, id, &response).await;
|
|
||||||
}
|
|
||||||
|
|
||||||
#[rstest]
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_get_optional_fields_null(
|
|
||||||
#[future] existing_record_with_nulls: (PgPool, i64),
|
|
||||||
) {
|
|
||||||
let (pool, id) = existing_record_with_nulls.await;
|
|
||||||
let request = GetAdresarRequest { id };
|
|
||||||
let response = get_adresar(&pool, request).await.unwrap();
|
|
||||||
|
|
||||||
assert_eq!(response.kz, "");
|
|
||||||
assert_eq!(response.drc, "");
|
|
||||||
assert_eq!(response.ulica, "");
|
|
||||||
assert_eq!(response.psc, "");
|
|
||||||
assert_eq!(response.mesto, "");
|
|
||||||
assert_eq!(response.stat, "");
|
|
||||||
assert_eq!(response.banka, "");
|
|
||||||
assert_eq!(response.ucet, "");
|
|
||||||
assert_eq!(response.skladm, "");
|
|
||||||
assert_eq!(response.ico, "");
|
|
||||||
assert_eq!(response.kontakt, "");
|
|
||||||
assert_eq!(response.telefon, "");
|
|
||||||
assert_eq!(response.skladu, "");
|
|
||||||
assert_eq!(response.fax, "");
|
|
||||||
}
|
|
||||||
|
|
||||||
#[rstest]
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_get_nonexistent_id(
|
|
||||||
#[future] pool: PgPool,
|
|
||||||
) {
|
|
||||||
let pool = pool.await;
|
|
||||||
let request = GetAdresarRequest { id: 9999 };
|
|
||||||
let result = get_adresar(&pool, request).await;
|
|
||||||
|
|
||||||
assert!(result.is_err());
|
|
||||||
assert_eq!(result.unwrap_err().code(), tonic::Code::NotFound);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[rstest]
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_get_deleted_record(
|
|
||||||
#[future] existing_deleted_record: (PgPool, i64),
|
|
||||||
) {
|
|
||||||
let (pool, id) = existing_deleted_record.await;
|
|
||||||
let request = GetAdresarRequest { id };
|
|
||||||
let result = get_adresar(&pool, request).await;
|
|
||||||
|
|
||||||
assert!(result.is_err());
|
|
||||||
assert_eq!(result.unwrap_err().code(), tonic::Code::NotFound);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[rstest]
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_database_error(
|
|
||||||
#[future] closed_pool: PgPool,
|
|
||||||
) {
|
|
||||||
let closed_pool = closed_pool.await;
|
|
||||||
let request = GetAdresarRequest { id: 1 };
|
|
||||||
let result = get_adresar(&closed_pool, request).await;
|
|
||||||
|
|
||||||
assert!(result.is_err());
|
|
||||||
assert_eq!(result.unwrap_err().code(), tonic::Code::Internal);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Round-trip test: values containing diacritics and punctuation must survive
// storage in Postgres and the gRPC response unchanged.
#[rstest]
#[tokio::test]
async fn test_get_special_characters(
    #[future] pool: PgPool,
) {
    let pool = pool.await;
    let firma = "Náměstí ČR";
    let telefon = "+420 123-456.789";
    let ulica = "Křižíkova 123";

    let record = sqlx::query!(
        r#"
        INSERT INTO adresar (firma, telefon, ulica)
        VALUES ($1, $2, $3)
        RETURNING id
        "#,
        firma,
        telefon,
        ulica
    )
    .fetch_one(&pool)
    .await
    .unwrap();

    let request = GetAdresarRequest { id: record.id };
    let response = get_adresar(&pool, request).await.unwrap();

    // The handler must return the stored values byte-for-byte.
    assert_eq!(response.firma, firma);
    assert_eq!(response.telefon, telefon);
    assert_eq!(response.ulica, ulica);
}
|
|
||||||
|
|
||||||
// Boundary test: long values are stored and returned untruncated.
// NOTE(review): 255 / 20 are presumably the column limits of `firma` /
// `telefon` — TODO confirm against the migration schema.
#[rstest]
#[tokio::test]
async fn test_get_max_length_fields(
    #[future] pool: PgPool,
) {
    let pool = pool.await;
    let firma = "a".repeat(255);
    let telefon = "1".repeat(20);

    let record = sqlx::query!(
        r#"
        INSERT INTO adresar (firma, telefon)
        VALUES ($1, $2)
        RETURNING id
        "#,
        firma,
        telefon
    )
    .fetch_one(&pool)
    .await
    .unwrap();

    let request = GetAdresarRequest { id: record.id };
    let response = get_adresar(&pool, request).await.unwrap();

    // Lengths must match exactly — no truncation on read-back.
    assert_eq!(response.firma.len(), 255);
    assert_eq!(response.telefon.len(), 20);
}
|
|
||||||
@@ -1,8 +0,0 @@
|
|||||||
// server/tests/adresar/mod.rs
|
|
||||||
|
|
||||||
pub mod post_adresar_test;
|
|
||||||
pub mod put_adresar_test;
|
|
||||||
pub mod get_adresar_test;
|
|
||||||
pub mod get_adresar_count_test;
|
|
||||||
pub mod get_adresar_by_position_test;
|
|
||||||
pub mod delete_adresar_test;
|
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user