Compare commits

...

101 Commits

Author SHA1 Message Date
filipriec
8127c7bb1b renamed again and fixed some minor stuff 2025-07-25 18:18:00 +02:00
filipriec
7437908baf removal of @ syntax as reference from the backend 2025-07-25 12:13:51 +02:00
filipriec
9eb46cb5d3 kompAC 2025-07-25 11:59:18 +02:00
filipriec
38a70128b0 prod code 2, time for new functionality 2025-07-25 00:08:16 +02:00
filipriec
c58ce52b33 fixing warnings and making prod code 2025-07-24 23:57:21 +02:00
filipriec
c82813185f code cleanup in post endpoint 2025-07-24 22:18:36 +02:00
filipriec
a96681e9d6 cleaned up code 2025-07-24 22:04:55 +02:00
filipriec
4df6c40034 removal of hardcoded fix, now working in general 2025-07-24 21:56:07 +02:00
filipriec
089d728cc7 passers 2025-07-24 10:55:03 +02:00
filipriec
aca3d718b5 CHECK THIS COMMIT I HAVE NO CLUE IF ITS CORRECT 2025-07-23 23:30:56 +02:00
filipriec
8a6a584cf3 compiletime error in test fixed 2025-07-23 00:43:11 +02:00
filipriec
00ed0cf796 test is now passing 2025-07-22 18:31:26 +02:00
filipriec
7e54b2fe43 we got a full passer now 2025-07-20 14:58:39 +02:00
filipriec
84871faad4 another passer, 2 more to go 2025-07-20 11:57:46 +02:00
filipriec
bcb433d7b2 fixing post table script 2025-07-20 11:46:00 +02:00
filipriec
7d1b130b68 we have a passer 2025-07-17 22:55:54 +02:00
filipriec
24c2376ea1 crucial self reference allowed 2025-07-17 22:06:53 +02:00
filipriec
810ef5fc10 post table script is now aware of a type in the database 2025-07-17 21:16:49 +02:00
filipriec
fe246b1fe6 reject boolean and text in math functions at the post table script 2025-07-16 22:31:55 +02:00
filipriec
de42bb48aa deprecated function removed, no need for backup 2025-07-13 08:51:47 +02:00
filipriec
17495c49ac steel scripts now have far better logic than before 2025-07-12 23:06:21 +02:00
filipriec
0e3a7a06a3 put needs more adjustements 2025-07-12 11:06:11 +02:00
filipriec
e0ee48eb9c put endpoint is now stronq boiii 2025-07-11 23:24:28 +02:00
filipriec
d2053b1d5a SCRIPTS only scripts reference to a linked table from this commit 2025-07-11 08:55:32 +02:00
filipriec
fbe8e53858 circular dependency fix in post script logic 2025-07-11 08:50:10 +02:00
filipriec
8fe2581b3f put request at halt until script fixes 2025-07-10 21:11:42 +02:00
filipriec
60cc0e562e adjusted tests for the put request 2025-07-09 22:24:33 +02:00
filipriec
26898d474f tests for steel in post request is fixed with all the errors found in the codebase 2025-07-08 22:04:11 +02:00
filipriec
2311fbaa3b post steel tests 2025-07-08 20:26:48 +02:00
filipriec
be99cd9423 forgotten lock 2025-07-08 18:07:28 +02:00
filipriec
a3dd6fa95b now fully functional without a steel decimal crate, we are only importing it via cargo, because steel decimal is a separate published crate now 2025-07-08 18:02:32 +02:00
filipriec
433d87c96d stuff around publishing crate successfuly done 2025-07-07 22:08:38 +02:00
filipriec
aff4383671 tests are passing well now 2025-07-07 20:29:51 +02:00
filipriec
b7c8f6b1a2 tests in the steel decimal crate with serious issue fixed 2025-07-07 19:24:08 +02:00
filipriec
3443839ba4 placing them properly 2025-07-07 18:47:50 +02:00
filipriec
6c31d48f3b steel decimal needs readme before being published to the crates io 2025-07-07 18:08:45 +02:00
filipriec
1770292fd8 all the tests are now passing perfectly well 2025-07-07 15:35:33 +02:00
filipriec
afdd5c5740 parser finally fixed 2025-07-07 15:07:08 +02:00
filipriec
11487f0833 more fixes, still not done yet 2025-07-07 13:32:06 +02:00
filipriec
4d5d22d0c2 precision to steel decimal crate implemented 2025-07-07 00:31:13 +02:00
filipriec
314a957922 fixes, now .0 from rust decimal is being discussed 2025-07-06 20:53:40 +02:00
filipriec
4c57b562e6 more fixes, not there yet tho 2025-07-05 10:00:04 +02:00
filipriec
a757acf51c we are now fully using steel decimal crate inside of the server crate 2025-07-04 13:11:47 +02:00
filipriec
f4a23be1a2 steel decimal crate with a proper setup, needs so much work to be done to be finished 2025-07-04 11:56:21 +02:00
filipriec
93c67ffa14 steel decimal crate implemented 2025-07-02 16:31:15 +02:00
filipriec
d1ebe4732f steel with decimal math, saving before separating steel to a separate crate 2025-07-02 14:44:37 +02:00
filipriec
7b7f3ca05a more tests for the frontend 2025-06-26 20:25:59 +02:00
filipriec
234613f831 more frontend tests 2025-06-26 20:03:47 +02:00
filipriec
f6d84e70cc testing frontend to connect to the backend in the form page 2025-06-26 19:19:08 +02:00
filipriec
5cd324b6ae client tests now have a proper structure 2025-06-26 11:56:18 +02:00
filipriec
a7457f5749 frontend tui tests 2025-06-25 23:00:51 +02:00
filipriec
a5afc75099 crit bug fixed 2025-06-25 17:33:37 +02:00
filipriec
625c9b3e09 adresar and uctovnictvo are now wiped out of the existence 2025-06-25 16:14:43 +02:00
filipriec
e20623ed53 removing adresar and uctovnictvo hardcoded way of doing things from the project entirely 2025-06-25 13:52:00 +02:00
filipriec
aa9adf7348 removed unused tests 2025-06-25 13:50:08 +02:00
filipriec
2e82aba0d1 full passer on the tables data now 2025-06-25 13:46:35 +02:00
filipriec
b7a3f0f8d9 count is now fixed and working properly 2025-06-25 12:40:27 +02:00
filipriec
38c82389f7 count gets a full passer in tests 2025-06-25 12:37:37 +02:00
filipriec
cb0a2bee17 get by count well tested 2025-06-25 11:47:25 +02:00
filipriec
dc99131794 ordering of the tests for tables data 2025-06-25 10:34:58 +02:00
filipriec
5c23f61a10 get method passing without any problem 2025-06-25 09:44:38 +02:00
filipriec
f87e3c03cb get test updated, working now 2025-06-25 09:16:32 +02:00
filipriec
d346670839 tests for delete endpoint are passing all the tests 2025-06-25 09:04:58 +02:00
filipriec
560d8b7234 delete tests robustness not yet fully working 2025-06-25 08:44:36 +02:00
filipriec
b297c2b311 working full passer on put request 2025-06-24 20:06:39 +02:00
filipriec
d390c567d5 more tests 2025-06-24 00:46:51 +02:00
filipriec
029e614b9c more put tests 2025-06-24 00:45:37 +02:00
filipriec
f9a78e4eec the tests for the put endpoint is now being tested and passing but its not what i would love 2025-06-23 23:25:45 +02:00
filipriec
d8758f7531 we are passing all the tests now properly with the table definition and the post tables data now 2025-06-23 13:52:29 +02:00
filipriec
4e86ecff84 its now passing all the tests 2025-06-22 23:05:38 +02:00
filipriec
070d091e07 robustness, one test still failing, will fix it 2025-06-22 23:02:41 +02:00
filipriec
7403b3c3f8 4 tests are failing 2025-06-22 22:15:08 +02:00
filipriec
1b1e7b7205 robust decimal solution to push tables data to the backend 2025-06-22 22:08:22 +02:00
filipriec
1b8f19f1ce tables data tests are now generalized, needs a bit more fixes, 6/6 are passing 2025-06-22 16:10:24 +02:00
filipriec
2a14eadf34 fixed compatibility layer to old tests git status REMOVE IN THE FUTURE 2025-06-22 14:00:49 +02:00
filipriec
fd36cd5795 tests are now passing fully 2025-06-22 13:13:20 +02:00
filipriec
f4286ac3c9 more changes and more fixes, 3 more tests to go 2025-06-22 12:48:36 +02:00
filipriec
92d5eb4844 needs last one to be fixed, otherwise its getting perfect 2025-06-21 23:57:52 +02:00
filipriec
87b9f6ab87 more fixes 2025-06-21 21:43:39 +02:00
filipriec
06d98aab5c 5 more tests to go 2025-06-21 21:01:49 +02:00
filipriec
298f56a53c tests are passing better than ever before, its looking decent actually nowc 2025-06-21 16:18:32 +02:00
filipriec
714a5f2f1c tests compiled 2025-06-21 15:11:27 +02:00
filipriec
4e29d0084f compiled with the profile to be schemas 2025-06-21 10:37:37 +02:00
filipriec
63f1b4da2e changing profile id to schema in the whole project 2025-06-21 09:57:14 +02:00
filipriec
9477f53432 big change in the schema, its profile names now and not gen 2025-06-20 22:31:49 +02:00
filipriec
ed786f087c changing test for a huge change in a project 2025-06-20 20:07:07 +02:00
filipriec
8e22ea05ff improvements and fixing of the tests 2025-06-20 19:59:42 +02:00
filipriec
8414657224 gen isolated tables 2025-06-18 23:19:19 +02:00
filipriec
e25213ed1b tests are robusts running in parallel 2025-06-18 22:38:00 +02:00
filipriec
4843b0778c robust testing of the table definitions 2025-06-18 21:37:30 +02:00
filipriec
f5fae98c69 tests now working via make file 2025-06-18 14:44:38 +02:00
filipriec
6faf0a4a31 tests for table definitions 2025-06-17 22:46:04 +02:00
filipriec
011fafc0ff now working proper types 2025-06-17 17:31:11 +02:00
filipriec
8ebe74484c now not creating tables with the year_ prefix and living in the gen schema by default 2025-06-17 11:45:55 +02:00
filipriec
3eb9523103 you are going to kill me but please dont, i just cleaned up migration file and its 100% valid, do not use any version before this version and after this version so many things needs to be changed so haha... im ashamed but i love it at the same time 2025-06-17 11:21:33 +02:00
filipriec
3dfa922b9e unimportant change 2025-06-17 10:27:22 +02:00
filipriec
248d54a30f accpeting now null in the post table data as nothing 2025-06-16 22:51:05 +02:00
filipriec
b30fef4ccd post doesnt work, but refactored code displays the autocomplete at least, needs fix 2025-06-16 16:42:25 +02:00
filipriec
a9c4527318 complete redesign oh how client is displaying data 2025-06-16 16:10:24 +02:00
filipriec
c31f08d5b8 fixing post with links 2025-06-16 14:42:49 +02:00
filipriec
9e0fa9ddb1 autocomplete now autocompleting data not just id 2025-06-16 11:54:54 +02:00
206 changed files with 27931 additions and 5348 deletions

1
.gitignore vendored

@@ -2,3 +2,4 @@
.env
/tantivy_indexes
server/tantivy_indexes
steel_decimal/tests/property_tests.proptest-regressions

1153
Cargo.lock generated

File diff suppressed because it is too large

Cargo.toml

@@ -4,14 +4,14 @@ resolver = "2"
[workspace.package]
# TODO: idk how to do the name, fix later
# name = "Multieko2"
# name = "komp_ac"
version = "0.3.13"
edition = "2021"
license = "GPL-3.0-or-later"
authors = ["Filip Priečinský <filippriec@gmail.com>"]
description = "Poriadny uctovnicky software."
readme = "README.md"
repository = "https://gitlab.com/filipriec/multieko2"
repository = "https://gitlab.com/filipriec/komp_ac"
categories = ["command-line-interface"]
# [workspace.metadata]
@@ -24,6 +24,7 @@ tokio = { version = "1.44.2", features = ["full"] }
tonic = "0.13.0"
prost = "0.13.5"
async-trait = "0.1.88"
prost-types = "0.13.0"
# Data Handling & Serialization
serde = { version = "1.0.219", features = ["derive"] }
@@ -39,4 +40,10 @@ tracing = "0.1.41"
# Search crate
tantivy = "0.24.1"
# Steel_decimal crate
rust_decimal = { version = "1.37.2", features = ["maths", "serde"] }
rust_decimal_macros = "1.37.1"
thiserror = "2.0.12"
regex = "1.11.1"
common = { path = "./common" }

client/Cargo.toml

@@ -9,6 +9,7 @@ anyhow = "1.0.98"
async-trait = "0.1.88"
common = { path = "../common" }
prost-types = { workspace = true }
crossterm = "0.28.1"
dirs = "6.0.0"
dotenvy = "0.15.7"
@@ -30,3 +31,9 @@ unicode-width = "0.2.0"
[features]
default = []
ui-debug = []
[dev-dependencies]
rstest = "0.25.0"
tokio-test = "0.4.4"
uuid = { version = "1.17.0", features = ["v4"] }
futures = "0.3.31"

View File

@@ -4,7 +4,7 @@ use crate::config::colors::themes::Theme;
use crate::state::pages::auth::AuthState;
use crate::state::app::state::AppState;
use crate::state::pages::admin::AdminState;
use common::proto::multieko2::table_definition::ProfileTreeResponse;
use common::proto::komp_ac::table_definition::ProfileTreeResponse;
use ratatui::{
layout::{Constraint, Direction, Layout, Rect},
style::Style,

src/components/common/autocomplete.rs

@@ -1,30 +1,18 @@
// src/components/common/autocomplete.rs
use common::proto::multieko2::search::search_response::Hit;
use crate::config::colors::themes::Theme;
use crate::state::pages::form::FormState;
use common::proto::komp_ac::search::search_response::Hit;
use ratatui::{
layout::Rect,
style::{Color, Modifier, Style},
widgets::{Block, List, ListItem, ListState},
Frame,
};
use std::collections::HashMap;
use unicode_width::UnicodeWidthStr;
/// Converts a serde_json::Value into a displayable String.
/// Handles String, Number, and Bool variants. Returns an empty string for Null and others.
fn json_value_to_string(value: &serde_json::Value) -> String {
match value {
serde_json::Value::String(s) => s.clone(),
serde_json::Value::Number(n) => n.to_string(),
serde_json::Value::Bool(b) => b.to_string(),
// Return an empty string for Null, Array, or Object so we can filter them out.
_ => String::new(),
}
}
/// Renders an opaque dropdown list for simple string-based suggestions.
/// This function remains unchanged.
/// THIS IS THE RESTORED FUNCTION.
pub fn render_autocomplete_dropdown(
f: &mut Frame,
input_rect: Rect,
@@ -84,22 +72,22 @@ pub fn render_autocomplete_dropdown(
.collect();
let list = List::new(items);
let mut profile_list_state = ListState::default();
profile_list_state.select(selected_index);
let mut list_state = ListState::default();
list_state.select(selected_index);
f.render_stateful_widget(list, dropdown_area, &mut profile_list_state);
f.render_stateful_widget(list, dropdown_area, &mut list_state);
}
// --- MODIFIED FUNCTION FOR RICH SUGGESTIONS ---
/// Renders an opaque dropdown list for rich `Hit`-based suggestions.
/// Displays the value of the first meaningful column, followed by the Hit ID.
pub fn render_rich_autocomplete_dropdown(
/// RENAMED from render_rich_autocomplete_dropdown
pub fn render_hit_autocomplete_dropdown(
f: &mut Frame,
input_rect: Rect,
frame_area: Rect,
theme: &Theme,
suggestions: &[Hit],
selected_index: Option<usize>,
form_state: &FormState,
) {
if suggestions.is_empty() {
return;
@@ -107,50 +95,9 @@ pub fn render_rich_autocomplete_dropdown(
let display_names: Vec<String> = suggestions
.iter()
.map(|hit| {
// Use serde_json::Value to handle mixed types (string, null, etc.)
if let Ok(content_map) =
serde_json::from_str::<HashMap<String, serde_json::Value>>(
&hit.content_json,
)
{
// Define keys to ignore for a cleaner display
const IGNORED_KEYS: &[&str] = &["id", "deleted", "created_at"];
// Get keys, filter out ignored ones, and sort for consistency
let mut keys: Vec<_> = content_map
.keys()
.filter(|k| !IGNORED_KEYS.contains(&k.as_str()))
.cloned()
.collect();
keys.sort();
// Get only the first non-empty value from the sorted keys
let values: Vec<_> = keys
.iter()
.map(|key| {
content_map
.get(key)
.map(json_value_to_string)
.unwrap_or_default()
})
.filter(|s| !s.is_empty()) // Filter out null/empty values
.take(1) // Changed from take(2) to take(1)
.collect();
let display_part = values.first().cloned().unwrap_or_default(); // Get the first value
if display_part.is_empty() {
format!("ID: {}", hit.id)
} else {
format!("{} | ID: {}", display_part, hit.id) // ID at the end
}
} else {
format!("ID: {} (parse error)", hit.id)
}
})
.map(|hit| form_state.get_display_name_for_hit(hit))
.collect();
// --- Calculate Dropdown Size & Position ---
let max_suggestion_width =
display_names.iter().map(|s| s.width()).max().unwrap_or(0) as u16;
let horizontal_padding: u16 = 2;
@@ -164,7 +111,6 @@ pub fn render_rich_autocomplete_dropdown(
height: dropdown_height,
};
// --- Clamping Logic ---
if dropdown_area.bottom() > frame_area.height {
dropdown_area.y = input_rect.y.saturating_sub(dropdown_height);
}
@@ -174,7 +120,6 @@ pub fn render_rich_autocomplete_dropdown(
dropdown_area.x = dropdown_area.x.max(0);
dropdown_area.y = dropdown_area.y.max(0);
// --- Rendering Logic ---
let background_block =
Block::default().style(Style::default().bg(Color::DarkGray));
f.render_widget(background_block, dropdown_area);

View File

@@ -47,7 +47,7 @@ pub fn render_status_line(
}
// --- The normal status line rendering logic (unchanged) ---
let program_info = format!("multieko2 v{}", env!("CARGO_PKG_VERSION"));
let program_info = format!("komp_ac v{}", env!("CARGO_PKG_VERSION"));
let mode_text = if is_edit_mode { "[EDIT]" } else { "[READ-ONLY]" };
let home_dir = dirs::home_dir()

src/components/form/form.rs

@@ -1,10 +1,10 @@
// src/components/form/form.rs
use crate::components::common::autocomplete; // <--- ADD THIS IMPORT
use crate::components::common::autocomplete;
use crate::components::handlers::canvas::render_canvas;
use crate::config::colors::themes::Theme;
use crate::state::app::highlight::HighlightState;
use crate::state::pages::canvas_state::CanvasState;
use crate::state::pages::form::FormState; // <--- CHANGE THIS IMPORT
use crate::state::pages::form::FormState;
use ratatui::{
layout::{Alignment, Constraint, Direction, Layout, Margin, Rect},
style::Style,
@@ -78,25 +78,25 @@ pub fn render_form(
// --- NEW: RENDER AUTOCOMPLETE ---
if form_state.autocomplete_active {
// Use the Rect of the active field that render_canvas found for us.
if let Some(active_rect) = active_field_rect {
let selected_index = form_state.get_selected_suggestion_index();
// THE DECIDER LOGIC:
// 1. Check for rich suggestions first.
if let Some(rich_suggestions) = form_state.get_rich_suggestions() {
if !rich_suggestions.is_empty() {
autocomplete::render_rich_autocomplete_dropdown(
// CHANGE THIS to call the renamed function
autocomplete::render_hit_autocomplete_dropdown(
f,
active_rect,
f.area(), // Use f.area() for clamping, not f.size()
f.area(),
theme,
rich_suggestions,
selected_index,
form_state,
);
}
}
// 2. Fallback to simple suggestions if rich ones aren't available.
// The fallback to simple suggestions is now correctly handled
// because the original render_autocomplete_dropdown exists again.
else if let Some(simple_suggestions) = form_state.get_suggestions() {
if !simple_suggestions.is_empty() {
autocomplete::render_autocomplete_dropdown(
@@ -112,3 +112,4 @@ pub fn render_form(
}
}
}

src/components/handlers/canvas.rs

@@ -1,16 +1,16 @@
// src/components/handlers/canvas.rs
use ratatui::{
widgets::{Paragraph, Block, Borders},
layout::{Layout, Constraint, Direction, Rect},
style::{Style, Modifier},
layout::{Alignment, Constraint, Direction, Layout, Rect},
style::{Modifier, Style},
text::{Line, Span},
widgets::{Block, Borders, Paragraph},
Frame,
prelude::Alignment,
};
use crate::config::colors::themes::Theme;
use crate::state::app::highlight::HighlightState;
use crate::state::pages::canvas_state::CanvasState;
use crate::state::app::highlight::HighlightState; // Ensure correct import path
use std::cmp::{min, max};
use std::cmp::{max, min};
pub fn render_canvas(
f: &mut Frame,
@@ -21,9 +21,8 @@ pub fn render_canvas(
inputs: &[&String],
theme: &Theme,
is_edit_mode: bool,
highlight_state: &HighlightState, // Using the enum state
highlight_state: &HighlightState,
) -> Option<Rect> {
// ... (setup code remains the same) ...
let columns = Layout::default()
.direction(Direction::Horizontal)
.constraints([Constraint::Percentage(30), Constraint::Percentage(70)])
@@ -58,46 +57,47 @@ pub fn render_canvas(
let mut active_field_input_rect = None;
// Render labels
for (i, field) in fields.iter().enumerate() {
let label = Paragraph::new(Line::from(Span::styled(
format!("{}:", field),
Style::default().fg(theme.fg)),
));
f.render_widget(label, Rect {
x: columns[0].x,
y: input_block.y + 1 + i as u16,
width: columns[0].width,
height: 1,
});
Style::default().fg(theme.fg),
)));
f.render_widget(
label,
Rect {
x: columns[0].x,
y: input_block.y + 1 + i as u16,
width: columns[0].width,
height: 1,
},
);
}
// Render inputs and cursor
for (i, input) in inputs.iter().enumerate() {
for (i, _input) in inputs.iter().enumerate() {
let is_active = i == *current_field_idx;
let current_cursor_pos = form_state.current_cursor_pos();
let text = input.as_str();
let text_len = text.chars().count();
// Use the trait method to get display value
let text = form_state.get_display_value_for_field(i);
let text_len = text.chars().count();
let line: Line;
// --- Use match on the highlight_state enum ---
match highlight_state {
HighlightState::Off => {
// Not in highlight mode, render normally
line = Line::from(Span::styled(
text,
if is_active { Style::default().fg(theme.highlight) } else { Style::default().fg(theme.fg) }
if is_active {
Style::default().fg(theme.highlight)
} else {
Style::default().fg(theme.fg)
},
));
}
HighlightState::Characterwise { anchor } => {
// --- Character-wise Highlight Logic ---
let (anchor_field, anchor_char) = *anchor;
let start_field = min(anchor_field, *current_field_idx);
let end_field = max(anchor_field, *current_field_idx);
// Use start_char and end_char consistently
let (start_char, end_char) = if anchor_field == *current_field_idx {
(min(anchor_char, current_cursor_pos), max(anchor_char, current_cursor_pos))
} else if anchor_field < *current_field_idx {
@@ -111,24 +111,20 @@ pub fn render_canvas(
let normal_style_outside = Style::default().fg(theme.fg);
if i >= start_field && i <= end_field {
// This line is within the character-wise highlight range
if start_field == end_field { // Case 1: Single Line Highlight
// Use start_char and end_char here
if start_field == end_field {
let clamped_start = start_char.min(text_len);
let clamped_end = end_char.min(text_len); // Use text_len for slicing logic
let clamped_end = end_char.min(text_len);
let before: String = text.chars().take(clamped_start).collect();
let highlighted: String = text.chars().skip(clamped_start).take(clamped_end.saturating_sub(clamped_start) + 1).collect();
// Define 'after' here
let after: String = text.chars().skip(clamped_end + 1).collect();
line = Line::from(vec![
Span::styled(before, normal_style_in_highlight),
Span::styled(highlighted, highlight_style),
Span::styled(after, normal_style_in_highlight), // Use defined 'after'
Span::styled(after, normal_style_in_highlight),
]);
} else if i == start_field { // Case 2: Multi-Line Highlight - Start Line
// Use start_char here
} else if i == start_field {
let safe_start = start_char.min(text_len);
let before: String = text.chars().take(safe_start).collect();
let highlighted: String = text.chars().skip(safe_start).collect();
@@ -136,8 +132,7 @@ pub fn render_canvas(
Span::styled(before, normal_style_in_highlight),
Span::styled(highlighted, highlight_style),
]);
} else if i == end_field { // Case 3: Multi-Line Highlight - End Line (Corrected index)
// Use end_char here
} else if i == end_field {
let safe_end_inclusive = if text_len > 0 { end_char.min(text_len - 1) } else { 0 };
let highlighted: String = text.chars().take(safe_end_inclusive + 1).collect();
let after: String = text.chars().skip(safe_end_inclusive + 1).collect();
@@ -145,19 +140,17 @@ pub fn render_canvas(
Span::styled(highlighted, highlight_style),
Span::styled(after, normal_style_in_highlight),
]);
} else { // Case 4: Multi-Line Highlight - Middle Line (Corrected index)
line = Line::from(Span::styled(text, highlight_style)); // Highlight whole line
} else {
line = Line::from(Span::styled(text, highlight_style));
}
} else { // Case 5: Line Outside Character-wise Highlight Range
} else {
line = Line::from(Span::styled(
text,
// Use normal styling (active or inactive)
if is_active { normal_style_in_highlight } else { normal_style_outside }
));
}
}
HighlightState::Linewise { anchor_line } => {
// --- Linewise Highlight Logic ---
let start_field = min(*anchor_line, *current_field_idx);
let end_field = max(*anchor_line, *current_field_idx);
let highlight_style = Style::default().fg(theme.highlight).bg(theme.highlight_bg).add_modifier(Modifier::BOLD);
@@ -165,25 +158,31 @@ pub fn render_canvas(
let normal_style_outside = Style::default().fg(theme.fg);
if i >= start_field && i <= end_field {
// Highlight the entire line
line = Line::from(Span::styled(text, highlight_style));
} else {
// Line outside linewise highlight range
line = Line::from(Span::styled(
text,
// Use normal styling (active or inactive)
if is_active { normal_style_in_highlight } else { normal_style_outside }
));
}
}
} // End match highlight_state
}
let input_display = Paragraph::new(line).alignment(Alignment::Left);
f.render_widget(input_display, input_rows[i]);
if is_active {
active_field_input_rect = Some(input_rows[i]);
let cursor_x = input_rows[i].x + form_state.current_cursor_pos() as u16;
// --- CORRECTED CURSOR POSITIONING LOGIC ---
// Use the new generic trait method to check for an override.
let cursor_x = if form_state.has_display_override(i) {
// If an override exists, place the cursor at the end.
input_rows[i].x + text.chars().count() as u16
} else {
// Otherwise, use the real cursor position.
input_rows[i].x + form_state.current_cursor_pos() as u16
};
let cursor_y = input_rows[i].y;
f.set_cursor_position((cursor_x, cursor_y));
}
@@ -191,4 +190,3 @@ pub fn render_canvas(
active_field_input_rect
}

View File

@@ -6,7 +6,7 @@ use ratatui::{
Frame,
};
use crate::config::colors::themes::Theme;
use common::proto::multieko2::table_definition::{ProfileTreeResponse};
use common::proto::komp_ac::table_definition::{ProfileTreeResponse};
use ratatui::text::{Span, Line};
use crate::components::utils::text::truncate_string;

View File

@@ -32,7 +32,7 @@ pub fn render_intro(f: &mut Frame, intro_state: &IntroState, area: Rect, theme:
// Title
let title = Line::from(vec![
Span::styled("multieko2", Style::default().fg(theme.highlight)),
Span::styled("komp_ac", Style::default().fg(theme.highlight)),
Span::styled(" v", Style::default().fg(theme.fg)),
Span::styled(env!("CARGO_PKG_VERSION"), Style::default().fg(theme.secondary)),
]);

View File

@@ -9,7 +9,7 @@ use tracing::{error, info};
#[cfg(unix)]
use std::os::unix::fs::PermissionsExt;
pub const APP_NAME: &str = "multieko2_client";
pub const APP_NAME: &str = "komp_ac_client";
pub const TOKEN_FILE_NAME: &str = "auth.token";
#[derive(Serialize, Deserialize, Debug, Clone)]

View File

@@ -4,6 +4,7 @@ use crate::services::grpc_client::GrpcClient;
use crate::state::pages::canvas_state::CanvasState;
use crate::state::pages::form::FormState;
use crate::state::pages::auth::RegisterState;
use crate::state::app::state::AppState;
use crate::tui::functions::common::form::{revert, save};
use crossterm::event::{KeyCode, KeyEvent};
use std::any::Any;
@@ -13,6 +14,7 @@ pub async fn execute_common_action<S: CanvasState + Any>(
action: &str,
state: &mut S,
grpc_client: &mut GrpcClient,
app_state: &AppState,
current_position: &mut u64,
total_count: u64,
) -> Result<String> {
@@ -27,6 +29,7 @@ pub async fn execute_common_action<S: CanvasState + Any>(
match action {
"save" => {
let outcome = save(
app_state,
form_state,
grpc_client,
)

View File

@@ -3,6 +3,7 @@
use crate::services::grpc_client::GrpcClient;
use crate::state::pages::canvas_state::CanvasState;
use crate::state::pages::form::FormState;
use crate::state::app::state::AppState;
use crate::tui::functions::common::form::{revert, save};
use crate::tui::functions::common::form::SaveOutcome;
use crate::modes::handlers::event::EventOutcome;
@@ -14,6 +15,7 @@ pub async fn execute_common_action<S: CanvasState + Any>(
action: &str,
state: &mut S,
grpc_client: &mut GrpcClient,
app_state: &AppState,
) -> Result<EventOutcome> {
match action {
"save" | "revert" => {
@@ -26,6 +28,7 @@ pub async fn execute_common_action<S: CanvasState + Any>(
match action {
"save" => {
let save_result = save(
app_state,
form_state,
grpc_client,
).await;

View File

@@ -32,6 +32,7 @@ pub async fn handle_core_action(
Ok(EventOutcome::Ok(message))
} else {
let save_outcome = form_save(
app_state,
form_state,
grpc_client,
).await.context("Register save action failed")?;
@@ -52,6 +53,7 @@ pub async fn handle_core_action(
login_save(auth_state, login_state, auth_client, app_state).await.context("Login save n quit action failed")?
} else {
let save_outcome = form_save(
app_state,
form_state,
grpc_client,
).await?;

View File

@@ -13,7 +13,7 @@ use crate::state::pages::{
form::FormState,
};
use anyhow::Result;
use common::proto::multieko2::search::search_response::Hit;
use common::proto::komp_ac::search::search_response::Hit;
use crossterm::event::{KeyCode, KeyEvent};
use tokio::sync::mpsc;
use tracing::{debug, info};
@@ -132,13 +132,24 @@ pub async fn handle_edit_event(
.get(selected_idx)
.cloned()
{
// --- THIS IS THE CORE LOGIC CHANGE ---
// 1. Get the friendly display name for the UI
let display_name =
form_state.get_display_name_for_hit(&selection);
// 2. Store the REAL ID in the form's values
let current_input =
form_state.get_current_input_mut();
*current_input = selection.id.to_string();
let new_cursor_pos = current_input.len();
form_state.set_current_cursor_pos(new_cursor_pos);
// FIX: Access ideal_cursor_column through event_handler
event_handler.ideal_cursor_column = new_cursor_pos;
// 3. Set the persistent display override in the map
form_state.link_display_map.insert(
form_state.current_field,
display_name,
);
// 4. Finalize state
form_state.deactivate_autocomplete();
form_state.set_has_unsaved_changes(true);
return Ok(EditEventOutcome::Message(

View File

@@ -15,7 +15,7 @@ use anyhow::Result;
pub async fn handle_command_event(
key: KeyEvent,
config: &Config,
app_state: &AppState,
app_state: &mut AppState,
login_state: &LoginState,
register_state: &RegisterState,
form_state: &mut FormState,
@@ -74,7 +74,7 @@ pub async fn handle_command_event(
async fn process_command(
config: &Config,
form_state: &mut FormState,
app_state: &AppState,
app_state: &mut AppState,
login_state: &LoginState,
register_state: &RegisterState,
command_input: &mut String,
@@ -117,6 +117,7 @@ async fn process_command(
},
"save" => {
let outcome = save(
app_state,
form_state,
grpc_client,
).await?;

View File

@@ -2,7 +2,7 @@
use crate::config::binds::config::Config;
use crate::modes::handlers::event::EventOutcome;
use anyhow::Result;
use common::proto::multieko2::table_definition::ProfileTreeResponse;
use common::proto::komp_ac::table_definition::ProfileTreeResponse;
use crossterm::event::{KeyCode, KeyEvent};
use std::collections::{HashMap, HashSet};

View File

@@ -42,7 +42,7 @@ use crate::tui::{
use crate::ui::handlers::context::UiContext;
use crate::ui::handlers::rat_state::UiStateHandler;
use anyhow::Result;
use common::proto::multieko2::search::search_response::Hit;
use common::proto::komp_ac::search::search_response::Hit;
use crossterm::cursor::SetCursorStyle;
use crossterm::event::{Event, KeyCode, KeyEvent};
use tokio::sync::mpsc;

src/services/auth.rs

@@ -1,6 +1,6 @@
// src/services/auth.rs
use tonic::transport::Channel;
use common::proto::multieko2::auth::{
use common::proto::komp_ac::auth::{
auth_service_client::AuthServiceClient,
LoginRequest, LoginResponse,
RegisterRequest, AuthResponse,

src/services/grpc_client.rs

@@ -1,30 +1,34 @@
// src/services/grpc_client.rs
use tonic::transport::Channel;
use common::proto::multieko2::common::{CountResponse, Empty};
use common::proto::multieko2::table_structure::table_structure_service_client::TableStructureServiceClient;
use common::proto::multieko2::table_structure::{GetTableStructureRequest, TableStructureResponse};
use common::proto::multieko2::table_definition::{
use common::proto::komp_ac::common::Empty;
use common::proto::komp_ac::table_structure::table_structure_service_client::TableStructureServiceClient;
use common::proto::komp_ac::table_structure::{GetTableStructureRequest, TableStructureResponse};
use common::proto::komp_ac::table_definition::{
table_definition_client::TableDefinitionClient,
PostTableDefinitionRequest, ProfileTreeResponse, TableDefinitionResponse,
};
use common::proto::multieko2::table_script::{
use common::proto::komp_ac::table_script::{
table_script_client::TableScriptClient,
PostTableScriptRequest, TableScriptResponse,
};
use common::proto::multieko2::tables_data::{
use common::proto::komp_ac::tables_data::{
tables_data_client::TablesDataClient,
GetTableDataByPositionRequest,
GetTableDataRequest, // ADD THIS
GetTableDataResponse,
DeleteTableDataRequest, // ADD THIS
DeleteTableDataResponse, // ADD THIS
GetTableDataCountRequest,
PostTableDataRequest, PostTableDataResponse, PutTableDataRequest,
PutTableDataResponse,
};
use common::proto::multieko2::search::{
use common::proto::komp_ac::search::{
searcher_client::SearcherClient, SearchRequest, SearchResponse,
};
use anyhow::{Context, Result}; // Added Context
use std::collections::HashMap; // NEW
use anyhow::{Context, Result};
use std::collections::HashMap;
use tonic::transport::Channel;
use prost_types::Value;
#[derive(Clone)]
pub struct GrpcClient {
@@ -48,7 +52,6 @@ impl GrpcClient {
TableDefinitionClient::new(channel.clone());
let table_script_client = TableScriptClient::new(channel.clone());
let tables_data_client = TablesDataClient::new(channel.clone());
// NEW: Instantiate the search client
let search_client = SearcherClient::new(channel.clone());
Ok(Self {
@@ -56,7 +59,7 @@ impl GrpcClient {
table_definition_client,
table_script_client,
tables_data_client,
search_client, // NEW
search_client,
})
}
@@ -116,7 +119,7 @@ impl GrpcClient {
Ok(response.into_inner())
}
// NEW Methods for TablesData service
// Existing TablesData methods
pub async fn get_table_data_count(
&mut self,
profile_name: String,
@@ -155,11 +158,53 @@ impl GrpcClient {
Ok(response.into_inner())
}
// ADD THIS: Missing get_table_data method
pub async fn get_table_data(
&mut self,
profile_name: String,
table_name: String,
id: i64,
) -> Result<GetTableDataResponse> {
let grpc_request = GetTableDataRequest {
profile_name,
table_name,
id,
};
let request = tonic::Request::new(grpc_request);
let response = self
.tables_data_client
.get_table_data(request)
.await
.context("gRPC GetTableData call failed")?;
Ok(response.into_inner())
}
// ADD THIS: Missing delete_table_data method
pub async fn delete_table_data(
&mut self,
profile_name: String,
table_name: String,
record_id: i64,
) -> Result<DeleteTableDataResponse> {
let grpc_request = DeleteTableDataRequest {
profile_name,
table_name,
record_id,
};
let request = tonic::Request::new(grpc_request);
let response = self
.tables_data_client
.delete_table_data(request)
.await
.context("gRPC DeleteTableData call failed")?;
Ok(response.into_inner())
}
pub async fn post_table_data(
&mut self,
profile_name: String,
table_name: String,
data: HashMap<String, String>,
data: HashMap<String, Value>,
) -> Result<PostTableDataResponse> {
let grpc_request = PostTableDataRequest {
profile_name,
@@ -180,7 +225,7 @@ impl GrpcClient {
profile_name: String,
table_name: String,
id: i64,
data: HashMap<String, String>,
data: HashMap<String, Value>,
) -> Result<PutTableDataResponse> {
let grpc_request = PutTableDataRequest {
profile_name,

src/services/ui_service.rs

@@ -1,20 +1,104 @@
// src/services/ui_service.rs
use crate::services::grpc_client::GrpcClient;
use crate::state::pages::form::FormState;
use crate::tui::functions::common::form::SaveOutcome;
use crate::state::pages::add_logic::AddLogicState;
use crate::state::app::state::AppState;
use crate::state::pages::add_logic::AddLogicState;
use crate::state::pages::form::{FieldDefinition, FormState};
use crate::tui::functions::common::form::SaveOutcome;
use crate::utils::columns::filter_user_columns;
use anyhow::{Context, Result};
use anyhow::{anyhow, Context, Result};
use std::sync::Arc;
pub struct UiService;
impl UiService {
pub async fn load_table_view(
grpc_client: &mut GrpcClient,
app_state: &mut AppState,
profile_name: &str,
table_name: &str,
) -> Result<FormState> {
// 1. & 2. Fetch and Cache Schema - UNCHANGED
let table_structure = grpc_client
.get_table_structure(profile_name.to_string(), table_name.to_string())
.await
.context(format!(
"Failed to get table structure for {}.{}",
profile_name, table_name
))?;
let cache_key = format!("{}.{}", profile_name, table_name);
app_state
.schema_cache
.insert(cache_key, Arc::new(table_structure.clone()));
tracing::info!("Schema for '{}.{}' cached.", profile_name, table_name);
// --- START: FINAL, SIMPLIFIED, CORRECT LOGIC ---
// 3a. Create definitions for REGULAR fields first.
let mut fields: Vec<FieldDefinition> = table_structure
.columns
.iter()
.filter(|col| {
!col.is_primary_key
&& col.name != "deleted"
&& col.name != "created_at"
&& !col.name.ends_with("_id") // Filter out ALL potential links
})
.map(|col| FieldDefinition {
display_name: col.name.clone(),
data_key: col.name.clone(),
is_link: false,
link_target_table: None,
})
.collect();
// 3b. Now, find and APPEND definitions for LINK fields based on the `_id` convention.
let link_fields: Vec<FieldDefinition> = table_structure
.columns
.iter()
.filter(|col| col.name.ends_with("_id")) // Find all foreign key columns
.map(|col| {
// The table we link to is derived from the column name.
// e.g., "test_diacritics_id" -> "test_diacritics"
let target_table_base = col
.name
.strip_suffix("_id")
.unwrap_or(&col.name);
// Find the full table name from the profile tree for display.
// e.g., "test_diacritics" -> "2025_test_diacritics"
let full_target_table_name = app_state
.profile_tree
.profiles
.iter()
.find(|p| p.name == profile_name)
.and_then(|p| p.tables.iter().find(|t| t.name.ends_with(target_table_base)))
.map_or(target_table_base.to_string(), |t| t.name.clone());
FieldDefinition {
display_name: full_target_table_name.clone(),
data_key: col.name.clone(), // The actual FK column name
is_link: true,
link_target_table: Some(full_target_table_name),
}
})
.collect();
fields.extend(link_fields); // Append the link fields to the end
// --- END: FINAL, SIMPLIFIED, CORRECT LOGIC ---
Ok(FormState::new(
profile_name.to_string(),
table_name.to_string(),
fields,
))
}
pub async fn initialize_add_logic_table_data(
grpc_client: &mut GrpcClient,
add_logic_state: &mut AddLogicState,
profile_tree: &common::proto::multieko2::table_definition::ProfileTreeResponse,
profile_tree: &common::proto::komp_ac::table_definition::ProfileTreeResponse,
) -> Result<String> {
let profile_name_clone_opt = Some(add_logic_state.profile_name.clone());
let table_name_opt_clone = add_logic_state.selected_table_name.clone();
@@ -92,6 +176,7 @@ impl UiService {
}
}
// REFACTOR THIS FUNCTION
pub async fn initialize_app_state_and_form(
grpc_client: &mut GrpcClient,
app_state: &mut AppState,
@@ -102,7 +187,6 @@ impl UiService {
.context("Failed to get profile tree")?;
app_state.profile_tree = profile_tree;
// Determine initial table to load (e.g., first table of first profile, or a default)
let initial_profile_name = app_state
.profile_tree
.profiles
@@ -115,33 +199,26 @@ impl UiService {
.profiles
.first()
.and_then(|p| p.tables.first().map(|t| t.name.clone()))
.unwrap_or_else(|| "2025_company_data1".to_string()); // Fallback if no tables
.unwrap_or_else(|| "2025_company_data1".to_string());
app_state.set_current_view_table(
initial_profile_name.clone(),
initial_table_name.clone(),
);
let table_structure = grpc_client
.get_table_structure(
initial_profile_name.clone(),
initial_table_name.clone(),
)
.await
.context(format!(
"Failed to get initial table structure for {}.{}",
initial_profile_name, initial_table_name
))?;
// NOW, just call our new central function. This avoids code duplication.
let form_state = Self::load_table_view(
grpc_client,
app_state,
&initial_profile_name,
&initial_table_name,
)
.await?;
let column_names: Vec<String> = table_structure
.columns
.iter()
.map(|col| col.name.clone())
.collect();
// The field names for the UI are derived from the new form_state
let field_names = form_state.fields.iter().map(|f| f.display_name.clone()).collect();
let filtered_columns = filter_user_columns(column_names);
Ok((initial_profile_name, initial_table_name, filtered_columns))
Ok((initial_profile_name, initial_table_name, field_names))
}
pub async fn fetch_and_set_table_count(

src/state/app/search.rs

@@ -1,6 +1,6 @@
// src/state/app/search.rs
use common::proto::multieko2::search::search_response::Hit;
use common::proto::komp_ac::search::search_response::Hit;
/// Holds the complete state for the search palette.
pub struct SearchState {

src/state/app/state.rs

@@ -1,15 +1,19 @@
// src/state/app/state.rs
use std::env;
use common::proto::multieko2::table_definition::ProfileTreeResponse;
use crate::modes::handlers::mode_manager::AppMode;
use crate::ui::handlers::context::DialogPurpose;
use crate::state::app::search::SearchState; // ADDED
use anyhow::Result;
use common::proto::komp_ac::table_definition::ProfileTreeResponse;
// NEW: Import the types we need for the cache
use common::proto::komp_ac::table_structure::TableStructureResponse;
use crate::modes::handlers::mode_manager::AppMode;
use crate::state::app::search::SearchState;
use crate::ui::handlers::context::DialogPurpose;
use std::collections::HashMap;
use std::env;
use std::sync::Arc;
#[cfg(feature = "ui-debug")]
use std::time::Instant;
// --- YOUR EXISTING DIALOGSTATE IS UNTOUCHED ---
// --- DialogState and UiState are unchanged ---
pub struct DialogState {
pub dialog_show: bool,
pub dialog_title: String,
@@ -30,7 +34,7 @@ pub struct UiState {
pub show_form: bool,
pub show_login: bool,
pub show_register: bool,
pub show_search_palette: bool, // ADDED
pub show_search_palette: bool,
pub focus_outside_canvas: bool,
pub dialog: DialogState,
}
@@ -52,10 +56,12 @@ pub struct AppState {
pub current_view_profile_name: Option<String>,
pub current_view_table_name: Option<String>,
// NEW: The "Rulebook" cache. We use Arc for efficient sharing.
pub schema_cache: HashMap<String, Arc<TableStructureResponse>>,
pub focused_button_index: usize,
pub pending_table_structure_fetch: Option<(String, String)>,
// ADDED: State for the search palette
pub search_state: Option<SearchState>,
// UI preferences
@@ -67,9 +73,7 @@ pub struct AppState {
impl AppState {
pub fn new() -> Result<Self> {
let current_dir = env::current_dir()?
.to_string_lossy()
.to_string();
let current_dir = env::current_dir()?.to_string_lossy().to_string();
Ok(AppState {
current_dir,
profile_tree: ProfileTreeResponse::default(),
@@ -77,9 +81,10 @@ impl AppState {
current_view_profile_name: None,
current_view_table_name: None,
current_mode: AppMode::General,
schema_cache: HashMap::new(), // NEW: Initialize the cache
focused_button_index: 0,
pending_table_structure_fetch: None,
search_state: None, // ADDED
search_state: None,
ui: UiState::default(),
#[cfg(feature = "ui-debug")]

src/state/pages/canvas_state.rs

@@ -1,8 +1,9 @@
// src/state/pages/canvas_state.rs
use common::proto::multieko2::search::search_response::Hit;
use common::proto::komp_ac::search::search_response::Hit;
pub trait CanvasState {
// --- Existing methods (unchanged) ---
fn current_field(&self) -> usize;
fn current_cursor_pos(&self) -> usize;
fn has_unsaved_changes(&self) -> bool;
@@ -10,15 +11,22 @@ pub trait CanvasState {
fn get_current_input(&self) -> &str;
fn get_current_input_mut(&mut self) -> &mut String;
fn fields(&self) -> Vec<&str>;
fn set_current_field(&mut self, index: usize);
fn set_current_cursor_pos(&mut self, pos: usize);
fn set_has_unsaved_changes(&mut self, changed: bool);
// --- Autocomplete Support ---
fn get_suggestions(&self) -> Option<&[String]>;
fn get_selected_suggestion_index(&self) -> Option<usize>;
fn get_rich_suggestions(&self) -> Option<&[Hit]> {
None
}
fn get_display_value_for_field(&self, index: usize) -> &str {
self.inputs()
.get(index)
.map(|s| s.as_str())
.unwrap_or("")
}
fn has_display_override(&self, _index: usize) -> bool {
false
}
}

src/state/pages/form.rs

@@ -3,18 +3,26 @@
use crate::config::colors::themes::Theme;
use crate::state::app::highlight::HighlightState;
use crate::state::pages::canvas_state::CanvasState;
use common::proto::multieko2::search::search_response::Hit; // Import Hit
use common::proto::komp_ac::search::search_response::Hit;
use ratatui::layout::Rect;
use ratatui::Frame;
use std::collections::HashMap;
// A struct to bridge the display name (label) to the data key from the server.
fn json_value_to_string(value: &serde_json::Value) -> String {
match value {
serde_json::Value::String(s) => s.clone(),
serde_json::Value::Number(n) => n.to_string(),
serde_json::Value::Bool(b) => b.to_string(),
_ => String::new(),
}
}
#[derive(Debug, Clone)]
pub struct FieldDefinition {
pub display_name: String,
pub data_key: String,
pub is_link: bool,
pub link_target_table: Option<String>,
pub link_target_table: Option<String>,
}
#[derive(Clone)]
@@ -29,12 +37,11 @@ pub struct FormState {
pub current_field: usize,
pub has_unsaved_changes: bool,
pub current_cursor_pos: usize,
// --- MODIFIED AUTOCOMPLETE STATE ---
pub autocomplete_active: bool,
pub autocomplete_suggestions: Vec<Hit>, // Changed to use the Hit struct
pub autocomplete_suggestions: Vec<Hit>,
pub selected_suggestion_index: Option<usize>,
pub autocomplete_loading: bool, // To show a loading indicator
pub autocomplete_loading: bool,
pub link_display_map: HashMap<usize, String>,
}
impl FormState {
@@ -55,11 +62,48 @@ impl FormState {
current_field: 0,
has_unsaved_changes: false,
current_cursor_pos: 0,
// --- INITIALIZE NEW STATE ---
autocomplete_active: false,
autocomplete_suggestions: Vec::new(),
selected_suggestion_index: None,
autocomplete_loading: false, // Initialize loading state
autocomplete_loading: false,
link_display_map: HashMap::new(),
}
}
pub fn get_display_name_for_hit(&self, hit: &Hit) -> String {
if let Ok(content_map) =
serde_json::from_str::<HashMap<String, serde_json::Value>>(
&hit.content_json,
)
{
const IGNORED_KEYS: &[&str] = &["id", "deleted", "created_at"];
let mut keys: Vec<_> = content_map
.keys()
.filter(|k| !IGNORED_KEYS.contains(&k.as_str()))
.cloned()
.collect();
keys.sort();
let values: Vec<_> = keys
.iter()
.map(|key| {
content_map
.get(key)
.map(json_value_to_string)
.unwrap_or_default()
})
.filter(|s| !s.is_empty())
.take(1)
.collect();
let display_part = values.first().cloned().unwrap_or_default();
if display_part.is_empty() {
format!("ID: {}", hit.id)
} else {
format!("{} | ID: {}", display_part, hit.id)
}
} else {
format!("ID: {} (parse error)", hit.id)
}
}
@@ -78,7 +122,7 @@ impl FormState {
crate::components::form::form::render_form(
f,
area,
self, // <--- This now correctly passes the concrete &FormState
self,
&fields_str_slice,
&self.current_field,
&values_str_slice,
@@ -102,7 +146,8 @@ impl FormState {
} else {
self.current_position = 1;
}
self.deactivate_autocomplete(); // Deactivate on reset
self.deactivate_autocomplete();
self.link_display_map.clear();
}
pub fn get_current_input(&self) -> &str {
@@ -113,6 +158,7 @@ impl FormState {
}
pub fn get_current_input_mut(&mut self) -> &mut String {
self.link_display_map.remove(&self.current_field);
self.values
.get_mut(self.current_field)
.expect("Invalid current_field index")
@@ -159,11 +205,10 @@ impl FormState {
self.has_unsaved_changes = false;
self.current_field = 0;
self.current_cursor_pos = 0;
self.deactivate_autocomplete(); // Deactivate on update
self.deactivate_autocomplete();
self.link_display_map.clear();
}
// --- NEW HELPER METHOD ---
/// Deactivates autocomplete and clears its state.
pub fn deactivate_autocomplete(&mut self) {
self.autocomplete_active = false;
self.autocomplete_suggestions.clear();
@@ -176,58 +221,42 @@ impl CanvasState for FormState {
fn current_field(&self) -> usize {
self.current_field
}
fn current_cursor_pos(&self) -> usize {
self.current_cursor_pos
}
fn has_unsaved_changes(&self) -> bool {
self.has_unsaved_changes
}
fn inputs(&self) -> Vec<&String> {
self.values.iter().collect()
}
fn get_current_input(&self) -> &str {
FormState::get_current_input(self)
}
fn get_current_input_mut(&mut self) -> &mut String {
FormState::get_current_input_mut(self)
}
fn fields(&self) -> Vec<&str> {
self.fields
.iter()
.map(|f| f.display_name.as_str())
.collect()
}
fn set_current_field(&mut self, index: usize) {
if index < self.fields.len() {
self.current_field = index;
}
// Deactivate autocomplete when changing fields
self.deactivate_autocomplete();
}
fn set_current_cursor_pos(&mut self, pos: usize) {
self.current_cursor_pos = pos;
}
fn set_has_unsaved_changes(&mut self, changed: bool) {
self.has_unsaved_changes = changed;
}
// --- MODIFIED: Implement autocomplete trait methods ---
/// Returns None because this state uses rich suggestions.
fn get_suggestions(&self) -> Option<&[String]> {
None
}
/// Returns rich suggestions.
fn get_rich_suggestions(&self) -> Option<&[Hit]> {
if self.autocomplete_active {
Some(&self.autocomplete_suggestions)
@@ -235,7 +264,6 @@ impl CanvasState for FormState {
None
}
}
fn get_selected_suggestion_index(&self) -> Option<usize> {
if self.autocomplete_active {
self.selected_suggestion_index
@@ -243,4 +271,19 @@ impl CanvasState for FormState {
None
}
}
fn get_display_value_for_field(&self, index: usize) -> &str {
if let Some(display_text) = self.link_display_map.get(&index) {
return display_text.as_str();
}
self.inputs()
.get(index)
.map(|s| s.as_str())
.unwrap_or("")
}
// --- IMPLEMENT THE NEW TRAIT METHOD ---
fn has_display_override(&self, index: usize) -> bool {
self.link_display_map.contains_key(&index)
}
}
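
For illustration, a minimal, self-contained sketch of the display-name rule that get_display_name_for_hit implements: ignore bookkeeping keys, sort the remaining keys, take the first non-empty value, and append the record ID. The JSON field names used here are invented for the example and are not taken from a real table.

use std::collections::HashMap;

// Hypothetical stand-alone version of the formatting rule (the real method
// lives on FormState and takes a search Hit).
fn display_name(id: i64, content_json: &str) -> String {
    const IGNORED_KEYS: &[&str] = &["id", "deleted", "created_at"];
    let Ok(map) =
        serde_json::from_str::<HashMap<String, serde_json::Value>>(content_json)
    else {
        return format!("ID: {} (parse error)", id);
    };
    // Sort the remaining keys so the chosen display column is stable.
    let mut keys: Vec<_> = map
        .keys()
        .filter(|k| !IGNORED_KEYS.contains(&k.as_str()))
        .cloned()
        .collect();
    keys.sort();
    // First non-empty scalar value wins.
    let first = keys
        .iter()
        .filter_map(|k| map.get(k))
        .map(|v| match v {
            serde_json::Value::String(s) => s.clone(),
            serde_json::Value::Number(n) => n.to_string(),
            serde_json::Value::Bool(b) => b.to_string(),
            _ => String::new(),
        })
        .find(|s| !s.is_empty());
    match first {
        Some(v) => format!("{} | ID: {}", v, id),
        None => format!("ID: {}", id),
    }
}

fn main() {
    // "firma" and "mesto" are made-up field names for this example.
    let json = r#"{"created_at":"2025-06-25","firma":"Acme s.r.o.","mesto":"Bratislava"}"#;
    assert_eq!(display_name(7, json), "Acme s.r.o. | ID: 7");
}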

View File

@@ -4,7 +4,7 @@ use crate::state::pages::add_table::{
};
use crate::services::GrpcClient;
use anyhow::{anyhow, Result};
use common::proto::multieko2::table_definition::{
use common::proto::komp_ac::table_definition::{
PostTableDefinitionRequest,
ColumnDefinition as ProtoColumnDefinition,
TableLink as ProtoTableLink,

src/tui/functions/common/form.rs

@@ -1,19 +1,22 @@
// src/tui/functions/common/form.rs
use crate::services::grpc_client::GrpcClient;
use crate::state::app::state::AppState; // NEW: Import AppState
use crate::state::pages::form::FormState;
use anyhow::{Context, Result}; // Added Context
use std::collections::HashMap; // NEW
use crate::utils::data_converter; // NEW: Import our translator
use anyhow::{anyhow, Context, Result};
use std::collections::HashMap;
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum SaveOutcome {
NoChange,
UpdatedExisting,
CreatedNew(i64), // Keep the ID
CreatedNew(i64),
}
// MODIFIED save function
// MODIFIED save function signature and logic
pub async fn save(
app_state: &AppState, // NEW: Pass in AppState
form_state: &mut FormState,
grpc_client: &mut GrpcClient,
) -> Result<SaveOutcome> {
@@ -21,42 +24,64 @@ pub async fn save(
return Ok(SaveOutcome::NoChange);
}
let data_map: HashMap<String, String> = form_state.fields.iter()
// --- NEW: VALIDATION & CONVERSION STEP ---
let cache_key =
format!("{}.{}", form_state.profile_name, form_state.table_name);
let schema = match app_state.schema_cache.get(&cache_key) {
Some(s) => s,
None => {
return Err(anyhow!(
"Schema for table '{}' not found in cache. Cannot save.",
form_state.table_name
));
}
};
let data_map: HashMap<String, String> = form_state
.fields
.iter()
.zip(form_state.values.iter())
.map(|(field_def, value)| (field_def.data_key.clone(), value.clone()))
.collect();
// Use our new translator. It returns a user-friendly error on failure.
let converted_data =
match data_converter::convert_and_validate_data(&data_map, schema) {
Ok(data) => data,
Err(user_error) => return Err(anyhow!(user_error)),
};
// --- END OF NEW STEP ---
let outcome: SaveOutcome;
let is_new_entry = form_state.id == 0 || (form_state.total_count > 0 && form_state.current_position > form_state.total_count) || (form_state.total_count == 0 && form_state.current_position == 1) ;
let is_new_entry = form_state.id == 0
|| (form_state.total_count > 0
&& form_state.current_position > form_state.total_count)
|| (form_state.total_count == 0 && form_state.current_position == 1);
if is_new_entry {
let response = grpc_client
.post_table_data(
form_state.profile_name.clone(),
form_state.table_name.clone(),
data_map,
converted_data, // Use the validated & converted data
)
.await
.context("Failed to post new table data")?;
if response.success {
form_state.id = response.inserted_id;
// After creating a new entry, total_count increases, and current_position becomes this new total_count
form_state.total_count += 1;
form_state.current_position = form_state.total_count;
outcome = SaveOutcome::CreatedNew(response.inserted_id);
} else {
return Err(anyhow::anyhow!(
return Err(anyhow!(
"Server failed to insert data: {}",
response.message
));
}
} else {
// This assumes form_state.id is valid for an existing record
if form_state.id == 0 {
return Err(anyhow::anyhow!(
return Err(anyhow!(
"Cannot update record: ID is 0, but not classified as new entry."
));
}
@@ -65,7 +90,7 @@ pub async fn save(
form_state.profile_name.clone(),
form_state.table_name.clone(),
form_state.id,
data_map,
converted_data, // Use the validated & converted data
)
.await
.context("Failed to put (update) table data")?;
@@ -73,7 +98,7 @@ pub async fn save(
if response.success {
outcome = SaveOutcome::UpdatedExisting;
} else {
return Err(anyhow::anyhow!(
return Err(anyhow!(
"Server failed to update data: {}",
response.message
));

View File

@@ -8,7 +8,7 @@ use crate::state::app::buffer::{AppView, BufferState};
use crate::config::storage::storage::{StoredAuthData, save_auth_data};
use crate::state::pages::canvas_state::CanvasState;
use crate::ui::handlers::context::DialogPurpose;
use common::proto::multieko2::auth::LoginResponse;
use common::proto::komp_ac::auth::LoginResponse;
use anyhow::{Context, Result};
use tokio::spawn;
use tokio::sync::mpsc;

View File

@@ -8,7 +8,7 @@ use crate::state::{
};
use crate::ui::handlers::context::DialogPurpose;
use crate::state::app::buffer::{AppView, BufferState};
use common::proto::multieko2::auth::AuthResponse;
use common::proto::komp_ac::auth::AuthResponse;
use anyhow::Context;
use tokio::spawn;
use tokio::sync::mpsc;

View File

@@ -350,123 +350,91 @@ pub async fn run_ui() -> Result<()> {
let current_view_profile = app_state.current_view_profile_name.clone();
let current_view_table = app_state.current_view_table_name.clone();
// This condition correctly detects a table switch.
if prev_view_profile_name != current_view_profile
|| prev_view_table_name != current_view_table
{
if let (Some(prof_name), Some(tbl_name)) =
(current_view_profile.as_ref(), current_view_table.as_ref())
{
// --- START OF REFACTORED LOGIC ---
app_state.show_loading_dialog(
"Loading Table",
&format!("Fetching data for {}.{}...", prof_name, tbl_name),
);
needs_redraw = true;
match grpc_client
.get_table_structure(prof_name.clone(), tbl_name.clone())
.await
// 1. Call our new, central function. It handles fetching AND caching.
match UiService::load_table_view(
&mut grpc_client,
&mut app_state,
prof_name,
tbl_name,
)
.await
{
Ok(structure_response) => {
// --- START OF MODIFIED LOGIC ---
let all_columns: Vec<String> = structure_response
.columns
.iter()
.map(|c| c.name.clone())
.collect();
let mut field_definitions: Vec<FieldDefinition> =
filter_user_columns(all_columns)
.into_iter()
.filter(|col_name| !col_name.ends_with("_id"))
.map(|col_name| FieldDefinition {
display_name: col_name.clone(),
data_key: col_name,
is_link: false,
link_target_table: None, // Regular fields have no target
})
.collect();
let linked_tables: Vec<String> = app_state
.profile_tree
.profiles
.iter()
.find(|p| p.name == *prof_name)
.and_then(|profile| {
profile.tables.iter().find(|t| t.name == *tbl_name)
})
.map_or(vec![], |table| table.depends_on.clone());
for linked_table_name in linked_tables {
let base_name = linked_table_name
.split_once('_')
.map_or(linked_table_name.as_str(), |(_, rest)| rest);
let data_key = format!("{}_id", base_name);
let display_name = linked_table_name.clone(); // Clone for use below
field_definitions.push(FieldDefinition {
display_name,
data_key,
is_link: true,
// --- POPULATE THE NEW FIELD ---
link_target_table: Some(linked_table_name),
});
}
// --- END OF MODIFIED LOGIC ---
form_state = FormState::new(
prof_name.clone(),
tbl_name.clone(),
field_definitions, // This now contains the complete definitions
);
Ok(mut new_form_state) => {
// 2. The function succeeded, we have a new FormState.
// Now, fetch its data.
if let Err(e) = UiService::fetch_and_set_table_count(
&mut grpc_client,
&mut form_state,
&mut new_form_state,
)
.await
{
// Handle count fetching error
app_state.update_dialog_content(
&format!("Error fetching count: {}", e),
vec!["OK".to_string()],
DialogPurpose::LoginFailed,
DialogPurpose::LoginFailed, // Or a more appropriate purpose
);
} else if form_state.total_count > 0 {
} else if new_form_state.total_count > 0 {
// If there are records, load the first/last one
if let Err(e) = UiService::load_table_data_by_position(
&mut grpc_client,
&mut form_state,
&mut new_form_state,
)
.await
{
// Handle data loading error
app_state.update_dialog_content(
&format!("Error loading data: {}", e),
vec!["OK".to_string()],
DialogPurpose::LoginFailed,
DialogPurpose::LoginFailed, // Or a more appropriate purpose
);
} else {
// Success! Hide the loading dialog.
app_state.hide_dialog();
}
} else {
form_state.reset_to_empty();
// No records, so just reset to an empty form.
new_form_state.reset_to_empty();
app_state.hide_dialog();
}
// 3. CRITICAL: Replace the old form_state with the new one.
form_state = new_form_state;
// 4. Update our tracking variables.
prev_view_profile_name = current_view_profile;
prev_view_table_name = current_view_table;
table_just_switched = true;
}
Err(e) => {
// This handles errors from load_table_view (e.g., schema fetch failed)
app_state.update_dialog_content(
&format!("Error fetching table structure: {}", e),
&format!("Error loading table: {}", e),
vec!["OK".to_string()],
DialogPurpose::LoginFailed,
DialogPurpose::LoginFailed, // Or a more appropriate purpose
);
// Revert the view change in app_state to avoid a loop
app_state.current_view_profile_name =
prev_view_profile_name.clone();
app_state.current_view_table_name =
prev_view_table_name.clone();
}
}
// --- END OF REFACTORED LOGIC ---
}
needs_redraw = true;
}

View File

@@ -0,0 +1,50 @@
// src/utils/data_converter.rs
use common::proto::komp_ac::table_structure::TableStructureResponse;
use prost_types::{value::Kind, NullValue, Value};
use std::collections::HashMap;
pub fn convert_and_validate_data(
data: &HashMap<String, String>,
schema: &TableStructureResponse,
) -> Result<HashMap<String, Value>, String> {
let type_map: HashMap<_, _> = schema
.columns
.iter()
.map(|col| (col.name.as_str(), col.data_type.as_str()))
.collect();
data.iter()
.map(|(key, str_value)| {
let expected_type = type_map.get(key.as_str()).unwrap_or(&"TEXT");
let kind = if str_value.is_empty() {
// Empty strings are sent as explicit protobuf NULL values
Kind::NullValue(NullValue::NullValue.into())
} else {
// Attempt to parse the string based on the expected type
match *expected_type {
"BOOL" => match str_value.to_lowercase().parse::<bool>() {
Ok(v) => Kind::BoolValue(v),
Err(_) => return Err(format!("Invalid boolean for '{}': must be 'true' or 'false'", key)),
},
"INT8" | "INT4" | "INT2" | "SERIAL" | "BIGSERIAL" => {
// Parse as an integer first so fractional input is rejected, then widen to f64 for the protobuf NumberValue
match str_value.parse::<i64>() {
Ok(v) => Kind::NumberValue(v as f64),
Err(_) => return Err(format!("Invalid number for '{}': must be a whole number", key)),
}
}
"NUMERIC" | "FLOAT4" | "FLOAT8" => match str_value.parse::<f64>() {
Ok(v) => Kind::NumberValue(v),
Err(_) => return Err(format!("Invalid decimal for '{}': must be a number", key)),
},
"TIMESTAMPTZ" | "DATE" | "TIME" | "TEXT" | "VARCHAR" | "UUID" => {
Kind::StringValue(str_value.clone())
}
_ => Kind::StringValue(str_value.clone()),
}
};
Ok((key.clone(), Value { kind: Some(kind) }))
})
.collect()
}
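
For orientation, a small usage sketch of the new converter follows. It is only a sketch: prepare_payload, the module path, and the literal field names are hypothetical, and the schema is assumed to come from an earlier get_table_structure call; only convert_and_validate_data itself is part of this change set.

// Hypothetical call site for the new converter (not part of this change set).
use std::collections::HashMap;

use common::proto::komp_ac::table_structure::TableStructureResponse;
use prost_types::Value;

use client::utils::data_converter::convert_and_validate_data;

fn prepare_payload(
    schema: &TableStructureResponse,
) -> Result<HashMap<String, Value>, String> {
    let mut inputs: HashMap<String, String> = HashMap::new();
    inputs.insert("firma".to_string(), "Test Company".to_string());
    // An empty string becomes a protobuf NULL value.
    inputs.insert("telefon".to_string(), String::new());
    // If "amount" is a NUMERIC/INT column in the schema, this input is rejected
    // with a per-field error message before anything reaches the backend.
    inputs.insert("amount".to_string(), "not-a-number".to_string());

    // On success this is the map<string, google.protobuf.Value> payload that the
    // updated PostTableData/PutTableData requests expect.
    convert_and_validate_data(&inputs, schema)
}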

View File

@@ -2,5 +2,8 @@
pub mod columns;
pub mod debug_logger;
pub mod data_converter;
pub use columns::*;
pub use debug_logger::*;
pub use data_converter::*;

View File

@@ -0,0 +1,262 @@
// client/tests/form_tests.rs
use rstest::{fixture, rstest};
use std::collections::HashMap;
use client::state::pages::form::{FormState, FieldDefinition};
use client::state::pages::canvas_state::CanvasState;
#[fixture]
fn test_form_state() -> FormState {
let fields = vec![
FieldDefinition {
display_name: "Company".to_string(),
data_key: "firma".to_string(),
is_link: false,
link_target_table: None,
},
FieldDefinition {
display_name: "Phone".to_string(),
data_key: "telefon".to_string(),
is_link: false,
link_target_table: None,
},
FieldDefinition {
display_name: "Email".to_string(),
data_key: "email".to_string(),
is_link: false,
link_target_table: None,
},
];
FormState::new("test_profile".to_string(), "test_table".to_string(), fields)
}
#[fixture]
fn test_form_data() -> HashMap<String, String> {
let mut data = HashMap::new();
data.insert("firma".to_string(), "Test Company".to_string());
data.insert("telefon".to_string(), "+421123456789".to_string());
data.insert("email".to_string(), "test@example.com".to_string());
data
}
#[rstest]
fn test_form_state_creation(test_form_state: FormState) {
assert_eq!(test_form_state.profile_name, "test_profile");
assert_eq!(test_form_state.table_name, "test_table");
assert_eq!(test_form_state.fields.len(), 3);
assert_eq!(test_form_state.current_field(), 0);
assert!(!test_form_state.has_unsaved_changes());
}
#[rstest]
fn test_form_field_navigation(mut test_form_state: FormState) {
// Test initial field
assert_eq!(test_form_state.current_field(), 0);
// Test navigation to next field
test_form_state.set_current_field(1);
assert_eq!(test_form_state.current_field(), 1);
// Test navigation to last field
test_form_state.set_current_field(2);
assert_eq!(test_form_state.current_field(), 2);
// Test invalid field (should not crash)
test_form_state.set_current_field(999);
assert_eq!(test_form_state.current_field(), 2); // Should stay at valid field
}
#[rstest]
fn test_form_data_entry(mut test_form_state: FormState) {
// Test entering data in first field
*test_form_state.get_current_input_mut() = "Test Company".to_string();
test_form_state.set_has_unsaved_changes(true);
assert_eq!(test_form_state.get_current_input(), "Test Company");
assert!(test_form_state.has_unsaved_changes());
}
#[rstest]
fn test_form_field_switching_with_data(mut test_form_state: FormState) {
// Enter data in first field
*test_form_state.get_current_input_mut() = "Company Name".to_string();
// Switch to second field
test_form_state.set_current_field(1);
*test_form_state.get_current_input_mut() = "+421123456789".to_string();
// Switch back to first field
test_form_state.set_current_field(0);
assert_eq!(test_form_state.get_current_input(), "Company Name");
// Switch to second field again
test_form_state.set_current_field(1);
assert_eq!(test_form_state.get_current_input(), "+421123456789");
}
#[rstest]
fn test_form_reset_functionality(mut test_form_state: FormState) {
// Add some data
test_form_state.set_current_field(0);
*test_form_state.get_current_input_mut() = "Test Company".to_string();
test_form_state.set_current_field(1);
*test_form_state.get_current_input_mut() = "+421123456789".to_string();
test_form_state.set_has_unsaved_changes(true);
test_form_state.id = 123;
test_form_state.current_position = 5;
// Reset the form
test_form_state.reset_to_empty();
// Verify reset
assert_eq!(test_form_state.id, 0);
assert!(!test_form_state.has_unsaved_changes());
assert_eq!(test_form_state.current_field(), 0);
// Check all fields are empty
for i in 0..test_form_state.fields.len() {
test_form_state.set_current_field(i);
assert!(test_form_state.get_current_input().is_empty());
}
}
#[rstest]
fn test_form_update_from_response(mut test_form_state: FormState, test_form_data: HashMap<String, String>) {
let position = 3;
// Update form with response data
test_form_state.update_from_response(&test_form_data, position);
// Verify data was loaded
assert_eq!(test_form_state.current_position, position);
assert!(!test_form_state.has_unsaved_changes());
assert_eq!(test_form_state.current_field(), 0);
// Check field values
test_form_state.set_current_field(0);
assert_eq!(test_form_state.get_current_input(), "Test Company");
test_form_state.set_current_field(1);
assert_eq!(test_form_state.get_current_input(), "+421123456789");
test_form_state.set_current_field(2);
assert_eq!(test_form_state.get_current_input(), "test@example.com");
}
#[rstest]
fn test_form_cursor_position(mut test_form_state: FormState) {
// Test initial cursor position
assert_eq!(test_form_state.current_cursor_pos(), 0);
// Add some text
*test_form_state.get_current_input_mut() = "Test Company".to_string();
// Test cursor positioning
test_form_state.set_current_cursor_pos(5);
assert_eq!(test_form_state.current_cursor_pos(), 5);
// Test cursor bounds
test_form_state.set_current_cursor_pos(999);
// Should be clamped to text length
assert!(test_form_state.current_cursor_pos() <= "Test Company".len());
}
#[rstest]
fn test_form_field_display_names(test_form_state: FormState) {
let field_names = test_form_state.fields();
assert_eq!(field_names.len(), 3);
assert_eq!(field_names[0], "Company");
assert_eq!(field_names[1], "Phone");
assert_eq!(field_names[2], "Email");
}
#[rstest]
fn test_form_inputs_vector(mut test_form_state: FormState) {
// Add data to fields
test_form_state.set_current_field(0);
*test_form_state.get_current_input_mut() = "Company A".to_string();
test_form_state.set_current_field(1);
*test_form_state.get_current_input_mut() = "123456789".to_string();
test_form_state.set_current_field(2);
*test_form_state.get_current_input_mut() = "test@test.com".to_string();
// Get inputs vector
let inputs = test_form_state.inputs();
assert_eq!(inputs.len(), 3);
assert_eq!(inputs[0], "Company A");
assert_eq!(inputs[1], "123456789");
assert_eq!(inputs[2], "test@test.com");
}
#[rstest]
fn test_form_position_management(mut test_form_state: FormState) {
// Test initial position
assert_eq!(test_form_state.current_position, 1);
assert_eq!(test_form_state.total_count, 0);
// Set some values
test_form_state.total_count = 10;
test_form_state.current_position = 5;
assert_eq!(test_form_state.current_position, 5);
assert_eq!(test_form_state.total_count, 10);
// Test reset affects position
test_form_state.reset_to_empty();
assert_eq!(test_form_state.current_position, 11); // total_count + 1
}
#[rstest]
fn test_form_autocomplete_state(mut test_form_state: FormState) {
// Test initial autocomplete state
assert!(!test_form_state.autocomplete_active);
assert!(test_form_state.autocomplete_suggestions.is_empty());
assert!(test_form_state.selected_suggestion_index.is_none());
// Test deactivating autocomplete
test_form_state.autocomplete_active = true;
test_form_state.deactivate_autocomplete();
assert!(!test_form_state.autocomplete_active);
assert!(test_form_state.autocomplete_suggestions.is_empty());
assert!(test_form_state.selected_suggestion_index.is_none());
assert!(!test_form_state.autocomplete_loading);
}
#[rstest]
fn test_form_empty_data_handling(mut test_form_state: FormState) {
let empty_data = HashMap::new();
// Update with empty data
test_form_state.update_from_response(&empty_data, 1);
// All fields should be empty
for i in 0..test_form_state.fields.len() {
test_form_state.set_current_field(i);
assert!(test_form_state.get_current_input().is_empty());
}
}
#[rstest]
fn test_form_partial_data_handling(mut test_form_state: FormState) {
let mut partial_data = HashMap::new();
partial_data.insert("firma".to_string(), "Partial Company".to_string());
// Intentionally missing telefon and email
test_form_state.update_from_response(&partial_data, 1);
// First field should have data
test_form_state.set_current_field(0);
assert_eq!(test_form_state.get_current_input(), "Partial Company");
// Other fields should be empty
test_form_state.set_current_field(1);
assert!(test_form_state.get_current_input().is_empty());
test_form_state.set_current_field(2);
assert!(test_form_state.get_current_input().is_empty());
}

View File

@@ -0,0 +1 @@
pub mod form_tests;

client/tests/form/mod.rs Normal file
View File

@@ -0,0 +1,2 @@
pub mod gui;
pub mod requests;

File diff suppressed because it is too large

View File

@@ -0,0 +1,267 @@
// ========================================================================
// ROBUST WORKFLOW AND INTEGRATION TESTS
// ========================================================================
#[rstest]
#[tokio::test]
async fn test_partial_update_preserves_other_fields(
#[future] populated_test_context: FormTestContext,
) {
let mut context = populated_test_context.await;
skip_if_backend_unavailable!();
// 1. Create a record with multiple fields
let mut initial_data = context.create_test_form_data();
let original_email = "preserve.this@email.com";
initial_data.insert(
"email".to_string(),
create_string_value(original_email),
);
let post_res = context
.client
.post_table_data(
context.profile_name.clone(),
context.table_name.clone(),
initial_data,
)
.await
.expect("Setup: Failed to create record for partial update test");
let created_id = post_res.inserted_id;
println!("Partial Update Test: Created record ID {}", created_id);
// 2. Update only ONE field
let mut partial_update = HashMap::new();
let updated_firma = "Partially Updated Inc.";
partial_update.insert(
"firma".to_string(),
create_string_value(updated_firma),
);
context
.client
.put_table_data(
context.profile_name.clone(),
context.table_name.clone(),
created_id,
partial_update,
)
.await
.expect("Partial update failed");
println!("Partial Update Test: Updated only 'firma' field");
// 3. Get the record back and verify ALL fields
let get_res = context
.client
.get_table_data(
context.profile_name.clone(),
context.table_name.clone(),
created_id,
)
.await
.expect("Failed to get record after partial update");
let final_data = get_res.data;
assert_eq!(
final_data.get("firma").unwrap(),
updated_firma,
"The 'firma' field should be updated"
);
assert_eq!(
final_data.get("email").unwrap(),
original_email,
"The 'email' field should have been preserved"
);
println!("Partial Update Test: Verified other fields were preserved. OK.");
}
#[rstest]
#[tokio::test]
async fn test_data_edge_cases_and_unicode(
#[future] form_test_context: FormTestContext,
) {
let mut context = form_test_context.await;
skip_if_backend_unavailable!();
let edge_case_strings = vec![
("Unicode", "José María González, Москва, 北京市"),
("Emoji", "🚀 Tech Company 🌟"),
("Quotes", "Quote\"Test'Apostrophe"),
("Symbols", "Price: $1,000.50 (50% off!)"),
("Empty", ""),
("Whitespace", " "),
];
for (case_name, test_string) in edge_case_strings {
let mut data = HashMap::new();
data.insert("firma".to_string(), create_string_value(test_string));
data.insert(
"kz".to_string(),
create_string_value(&format!("EDGE-{}", case_name)),
);
let post_res = context
.client
.post_table_data(
context.profile_name.clone(),
context.table_name.clone(),
data,
)
.await
.expect(&format!("POST should succeed for case: {}", case_name));
let created_id = post_res.inserted_id;
let get_res = context
.client
.get_table_data(
context.profile_name.clone(),
context.table_name.clone(),
created_id,
)
.await
.expect(&format!(
"GET should succeed for case: {}",
case_name
));
assert_eq!(
get_res.data.get("firma").unwrap(),
test_string,
"Data should be identical after round-trip for case: {}",
case_name
);
println!("Edge Case Test: '{}' passed.", case_name);
}
}
#[rstest]
#[tokio::test]
async fn test_numeric_and_null_edge_cases(
#[future] form_test_context: FormTestContext,
) {
let mut context = form_test_context.await;
skip_if_backend_unavailable!();
// 1. Test NULL value
let mut null_data = HashMap::new();
null_data.insert(
"firma".to_string(),
create_string_value("Company With Null Phone"),
);
null_data.insert("telefon".to_string(), create_null_value());
let post_res_null = context
.client
.post_table_data(
context.profile_name.clone(),
context.table_name.clone(),
null_data,
)
.await
.expect("POST with NULL value should succeed");
let get_res_null = context
.client
.get_table_data(
context.profile_name.clone(),
context.table_name.clone(),
post_res_null.inserted_id,
)
.await
.unwrap();
// Depending on DB, NULL may come back as empty string or be absent.
// The important part is that the operation doesn't fail.
assert!(
get_res_null.data.get("telefon").unwrap_or(&"".to_string()).is_empty(),
"NULL value should result in an empty or absent field"
);
println!("Edge Case Test: NULL value handled correctly. OK.");
// 2. Test Zero value for a numeric field (assuming 'age' is numeric)
let mut zero_data = HashMap::new();
zero_data.insert(
"firma".to_string(),
create_string_value("Newborn Company"),
);
// Assuming 'age' is a field in your actual table definition
// zero_data.insert("age".to_string(), create_number_value(0.0));
// let post_res_zero = context.client.post_table_data(...).await.expect("POST with zero should succeed");
// ... then get and verify it's "0"
println!("Edge Case Test: Zero value test skipped (uncomment if 'age' field exists).");
}
#[rstest]
#[tokio::test]
async fn test_concurrent_updates_on_same_record(
#[future] populated_test_context: FormTestContext,
) {
let mut context = populated_test_context.await;
skip_if_backend_unavailable!();
// 1. Create a single record to be updated by all tasks
let initial_data = context.create_minimal_form_data();
let post_res = context
.client
.post_table_data(
context.profile_name.clone(),
context.table_name.clone(),
initial_data,
)
.await
.expect("Setup: Failed to create record for concurrency test");
let record_id = post_res.inserted_id;
println!("Concurrency Test: Target record ID is {}", record_id);
// 2. Spawn multiple concurrent UPDATE operations
let mut handles = Vec::new();
let num_concurrent_tasks = 5;
let mut final_values = Vec::new();
for i in 0..num_concurrent_tasks {
let mut client_clone = context.client.clone();
let profile_name = context.profile_name.clone();
let table_name = context.table_name.clone();
let final_value = format!("Concurrent Update {}", i);
final_values.push(final_value.clone());
let handle = tokio::spawn(async move {
let mut update_data = HashMap::new();
update_data.insert(
"firma".to_string(),
create_string_value(&final_value),
);
client_clone
.put_table_data(profile_name, table_name, record_id, update_data)
.await
});
handles.push(handle);
}
// 3. Wait for all tasks to complete and check for panics
let results = futures::future::join_all(handles).await;
assert!(
results.iter().all(|r| r.is_ok()),
"No concurrent task should panic"
);
println!("Concurrency Test: All update tasks completed without panicking.");
// 4. Get the final state of the record
let final_get_res = context
.client
.get_table_data(
context.profile_name.clone(),
context.table_name.clone(),
record_id,
)
.await
.expect("Should be able to get the record after concurrent updates");
let final_firma = final_get_res.data.get("firma").unwrap();
assert!(
final_values.contains(final_firma),
"The final state '{}' must be one of the states set by the tasks",
final_firma
);
println!(
"Concurrency Test: Final state is '{}', which is a valid outcome. OK.",
final_firma
);
}

View File

@@ -0,0 +1,727 @@
// form_request_tests3.rs - Comprehensive and Robust Testing
// ========================================================================
// STEEL SCRIPT VALIDATION TESTS (HIGHEST PRIORITY)
// ========================================================================
#[rstest]
#[tokio::test]
async fn test_steel_script_validation_success(#[future] form_test_context: FormTestContext) {
let mut context = form_test_context.await;
skip_if_backend_unavailable!();
// Test with data that should pass script validation
// Assuming there's a script that validates 'kz' field to start with "KZ" and be 5 chars
let mut valid_data = HashMap::new();
valid_data.insert("firma".to_string(), create_string_value("Script Test Company"));
valid_data.insert("kz".to_string(), create_string_value("KZ123"));
valid_data.insert("telefon".to_string(), create_string_value("+421123456789"));
let result = context.client.post_table_data(
context.profile_name.clone(),
context.table_name.clone(),
valid_data,
).await;
match result {
Ok(response) => {
assert!(response.success, "Valid data should pass script validation");
println!("Script Validation Test: Valid data passed - ID {}", response.inserted_id);
}
Err(e) => {
if let Some(status) = e.downcast_ref::<Status>() {
if status.code() == tonic::Code::Unavailable {
println!("Script validation test skipped - backend not available");
return;
}
// If there are no scripts configured, this might still work
println!("Script validation test: {}", status.message());
}
}
}
}
#[rstest]
#[tokio::test]
async fn test_steel_script_validation_failure(#[future] form_test_context: FormTestContext) {
let mut context = form_test_context.await;
skip_if_backend_unavailable!();
// Test with data that should fail script validation
let invalid_script_data = vec![
("TooShort", "KZ12"), // Too short
("TooLong", "KZ12345"), // Too long
("WrongPrefix", "AB123"), // Wrong prefix
("NoPrefix", "12345"), // No prefix
("Empty", ""), // Empty
];
for (test_case, invalid_kz) in invalid_script_data {
let mut invalid_data = HashMap::new();
invalid_data.insert("firma".to_string(), create_string_value("Script Fail Company"));
invalid_data.insert("kz".to_string(), create_string_value(invalid_kz));
let result = context.client.post_table_data(
context.profile_name.clone(),
context.table_name.clone(),
invalid_data,
).await;
match result {
Ok(_) => {
println!("Script Validation Test: {} passed (no validation script configured)", test_case);
}
Err(e) => {
if let Some(status) = e.downcast_ref::<Status>() {
assert_eq!(status.code(), tonic::Code::InvalidArgument,
"Script validation failure should return InvalidArgument for case: {}", test_case);
println!("Script Validation Test: {} correctly failed - {}", test_case, status.message());
}
}
}
}
}
#[rstest]
#[tokio::test]
async fn test_steel_script_validation_on_update(#[future] form_test_context: FormTestContext) {
let mut context = form_test_context.await;
skip_if_backend_unavailable!();
// 1. Create a valid record first
let mut initial_data = HashMap::new();
initial_data.insert("firma".to_string(), create_string_value("Update Script Test"));
initial_data.insert("kz".to_string(), create_string_value("KZ123"));
let post_result = context.client.post_table_data(
context.profile_name.clone(),
context.table_name.clone(),
initial_data,
).await;
if let Ok(post_response) = post_result {
let record_id = post_response.inserted_id;
// 2. Try to update with invalid data
let mut invalid_update = HashMap::new();
invalid_update.insert("kz".to_string(), create_string_value("INVALID"));
let update_result = context.client.put_table_data(
context.profile_name.clone(),
context.table_name.clone(),
record_id,
invalid_update,
).await;
match update_result {
Ok(_) => {
println!("Script Validation on Update: No validation script configured for updates");
}
Err(e) => {
if let Some(status) = e.downcast_ref::<Status>() {
assert_eq!(status.code(), tonic::Code::InvalidArgument,
"Update with invalid data should fail script validation");
println!("Script Validation on Update: Correctly rejected invalid update");
}
}
}
}
}
// ========================================================================
// COMPREHENSIVE DATA TYPE TESTS
// ========================================================================
#[rstest]
#[tokio::test]
async fn test_boolean_data_type(#[future] form_test_context: FormTestContext) {
let mut context = form_test_context.await;
skip_if_backend_unavailable!();
// Test valid boolean values
let boolean_test_cases = vec![
("true", true),
("false", false),
];
for (case_name, bool_value) in boolean_test_cases {
let mut data = HashMap::new();
data.insert("firma".to_string(), create_string_value("Boolean Test Company"));
// Assuming there's a boolean field called 'active'
data.insert("active".to_string(), create_bool_value(bool_value));
let result = context.client.post_table_data(
context.profile_name.clone(),
context.table_name.clone(),
data,
).await;
match result {
Ok(response) => {
println!("Boolean Test: {} value succeeded", case_name);
// Verify the value round-trip
if let Ok(get_response) = context.client.get_table_data(
context.profile_name.clone(),
context.table_name.clone(),
response.inserted_id,
).await {
if let Some(retrieved_value) = get_response.data.get("active") {
println!("Boolean Test: {} round-trip value: {}", case_name, retrieved_value);
}
}
}
Err(e) => {
println!("Boolean Test: {} failed (field may not exist): {}", case_name, e);
}
}
}
}
#[rstest]
#[tokio::test]
async fn test_numeric_data_types(#[future] form_test_context: FormTestContext) {
let mut context = form_test_context.await;
skip_if_backend_unavailable!();
// Test various numeric values
let numeric_test_cases = vec![
("Zero", 0.0),
("Positive", 123.45),
("Negative", -67.89),
("Large", 999999.99),
("SmallDecimal", 0.01),
];
for (case_name, numeric_value) in numeric_test_cases {
let mut data = HashMap::new();
data.insert("firma".to_string(), create_string_value("Numeric Test Company"));
// Assuming there's a numeric field called 'price' or 'amount'
data.insert("amount".to_string(), create_number_value(numeric_value));
let result = context.client.post_table_data(
context.profile_name.clone(),
context.table_name.clone(),
data,
).await;
match result {
Ok(response) => {
println!("Numeric Test: {} ({}) succeeded", case_name, numeric_value);
// Verify round-trip
if let Ok(get_response) = context.client.get_table_data(
context.profile_name.clone(),
context.table_name.clone(),
response.inserted_id,
).await {
if let Some(retrieved_value) = get_response.data.get("amount") {
println!("Numeric Test: {} round-trip value: {}", case_name, retrieved_value);
}
}
}
Err(e) => {
println!("Numeric Test: {} failed (field may not exist): {}", case_name, e);
}
}
}
}
#[rstest]
#[tokio::test]
async fn test_timestamp_data_type(#[future] form_test_context: FormTestContext) {
let mut context = form_test_context.await;
skip_if_backend_unavailable!();
// Test various timestamp formats
let timestamp_test_cases = vec![
("ISO8601", "2024-01-15T10:30:00Z"),
("WithTimezone", "2024-01-15T10:30:00+01:00"),
("WithMilliseconds", "2024-01-15T10:30:00.123Z"),
];
for (case_name, timestamp_str) in timestamp_test_cases {
let mut data = HashMap::new();
data.insert("firma".to_string(), create_string_value("Timestamp Test Company"));
// Assuming there's a timestamp field called 'created_at'
data.insert("created_at".to_string(), create_string_value(timestamp_str));
let result = context.client.post_table_data(
context.profile_name.clone(),
context.table_name.clone(),
data,
).await;
match result {
Ok(response) => {
println!("Timestamp Test: {} succeeded", case_name);
// Verify round-trip
if let Ok(get_response) = context.client.get_table_data(
context.profile_name.clone(),
context.table_name.clone(),
response.inserted_id,
).await {
if let Some(retrieved_value) = get_response.data.get("created_at") {
println!("Timestamp Test: {} round-trip value: {}", case_name, retrieved_value);
}
}
}
Err(e) => {
println!("Timestamp Test: {} failed (field may not exist): {}", case_name, e);
}
}
}
}
#[rstest]
#[tokio::test]
async fn test_invalid_data_types(#[future] form_test_context: FormTestContext) {
let mut context = form_test_context.await;
skip_if_backend_unavailable!();
// Test invalid data type combinations
let invalid_type_cases = vec![
("StringForNumber", "amount", create_string_value("not-a-number")),
("NumberForBoolean", "active", create_number_value(123.0)),
("StringForBoolean", "active", create_string_value("maybe")),
("InvalidTimestamp", "created_at", create_string_value("not-a-date")),
];
for (case_name, field_name, invalid_value) in invalid_type_cases {
let mut data = HashMap::new();
data.insert("firma".to_string(), create_string_value("Invalid Type Test"));
data.insert(field_name.to_string(), invalid_value);
let result = context.client.post_table_data(
context.profile_name.clone(),
context.table_name.clone(),
data,
).await;
match result {
Ok(_) => {
println!("Invalid Type Test: {} passed (no type validation or field doesn't exist)", case_name);
}
Err(e) => {
if let Some(status) = e.downcast_ref::<Status>() {
assert_eq!(status.code(), tonic::Code::InvalidArgument,
"Invalid data type should return InvalidArgument for case: {}", case_name);
println!("Invalid Type Test: {} correctly rejected - {}", case_name, status.message());
}
}
}
}
}
// ========================================================================
// FOREIGN KEY RELATIONSHIP TESTS
// ========================================================================
#[rstest]
#[tokio::test]
async fn test_foreign_key_valid_relationship(#[future] form_test_context: FormTestContext) {
let mut context = form_test_context.await;
skip_if_backend_unavailable!();
// 1. Create a parent record first (e.g., company)
let mut parent_data = HashMap::new();
parent_data.insert("firma".to_string(), create_string_value("Parent Company"));
let parent_result = context.client.post_table_data(
context.profile_name.clone(),
"companies".to_string(), // Assuming companies table exists
parent_data,
).await;
if let Ok(parent_response) = parent_result {
let parent_id = parent_response.inserted_id;
// 2. Create a child record that references the parent
let mut child_data = HashMap::new();
child_data.insert("name".to_string(), create_string_value("Child Record"));
child_data.insert("company_id".to_string(), create_number_value(parent_id as f64));
let child_result = context.client.post_table_data(
context.profile_name.clone(),
"contacts".to_string(), // Assuming contacts table exists
child_data,
).await;
match child_result {
Ok(child_response) => {
assert!(child_response.success, "Valid foreign key relationship should succeed");
println!("Foreign Key Test: Valid relationship created - Parent ID: {}, Child ID: {}",
parent_id, child_response.inserted_id);
}
Err(e) => {
println!("Foreign Key Test: Failed (tables may not exist or no FK constraint): {}", e);
}
}
} else {
println!("Foreign Key Test: Could not create parent record");
}
}
#[rstest]
#[tokio::test]
async fn test_foreign_key_invalid_relationship(#[future] form_test_context: FormTestContext) {
let mut context = form_test_context.await;
skip_if_backend_unavailable!();
// Try to create a child record with non-existent parent ID
let mut invalid_child_data = HashMap::new();
invalid_child_data.insert("name".to_string(), create_string_value("Orphan Record"));
invalid_child_data.insert("company_id".to_string(), create_number_value(99999.0)); // Non-existent ID
let result = context.client.post_table_data(
context.profile_name.clone(),
"contacts".to_string(),
invalid_child_data,
).await;
match result {
Ok(_) => {
println!("Foreign Key Test: Invalid relationship passed (no FK constraint configured)");
}
Err(e) => {
if let Some(status) = e.downcast_ref::<Status>() {
// Could be InvalidArgument or NotFound depending on implementation
assert!(matches!(status.code(), tonic::Code::InvalidArgument | tonic::Code::NotFound),
"Invalid foreign key should return InvalidArgument or NotFound");
println!("Foreign Key Test: Invalid relationship correctly rejected - {}", status.message());
}
}
}
}
// ========================================================================
// DELETED RECORD INTERACTION TESTS
// ========================================================================
#[rstest]
#[tokio::test]
async fn test_update_deleted_record_behavior(#[future] form_test_context: FormTestContext) {
let mut context = form_test_context.await;
skip_if_backend_unavailable!();
// 1. Create a record
let initial_data = context.create_test_form_data();
let post_result = context.client.post_table_data(
context.profile_name.clone(),
context.table_name.clone(),
initial_data,
).await;
if let Ok(post_response) = post_result {
let record_id = post_response.inserted_id;
println!("Deleted Record Test: Created record ID {}", record_id);
// 2. Delete the record (soft delete)
let delete_result = context.client.delete_table_data(
context.profile_name.clone(),
context.table_name.clone(),
record_id,
).await;
assert!(delete_result.is_ok(), "Delete should succeed");
println!("Deleted Record Test: Soft-deleted record {}", record_id);
// 3. Try to UPDATE the deleted record
let mut update_data = HashMap::new();
update_data.insert("firma".to_string(), create_string_value("Updated Deleted Record"));
let update_result = context.client.put_table_data(
context.profile_name.clone(),
context.table_name.clone(),
record_id,
update_data,
).await;
match update_result {
Ok(_) => {
// This might be a bug - updating deleted records should probably fail
println!("Deleted Record Test: UPDATE on deleted record succeeded (potential bug?)");
// Check if the record is still considered deleted
let get_result = context.client.get_table_data(
context.profile_name.clone(),
context.table_name.clone(),
record_id,
).await;
if get_result.is_err() {
println!("Deleted Record Test: Record still appears deleted after update");
} else {
println!("Deleted Record Test: Record appears to be undeleted after update");
}
}
Err(e) => {
if let Some(status) = e.downcast_ref::<Status>() {
assert_eq!(status.code(), tonic::Code::NotFound,
"UPDATE on deleted record should return NotFound");
println!("Deleted Record Test: UPDATE correctly rejected on deleted record");
}
}
}
}
}
#[rstest]
#[tokio::test]
async fn test_delete_already_deleted_record(#[future] form_test_context: FormTestContext) {
let mut context = form_test_context.await;
skip_if_backend_unavailable!();
// 1. Create and delete a record
let initial_data = context.create_test_form_data();
let post_result = context.client.post_table_data(
context.profile_name.clone(),
context.table_name.clone(),
initial_data,
).await;
if let Ok(post_response) = post_result {
let record_id = post_response.inserted_id;
// First deletion
let delete_result1 = context.client.delete_table_data(
context.profile_name.clone(),
context.table_name.clone(),
record_id,
).await;
assert!(delete_result1.is_ok(), "First delete should succeed");
// Second deletion (idempotent)
let delete_result2 = context.client.delete_table_data(
context.profile_name.clone(),
context.table_name.clone(),
record_id,
).await;
assert!(delete_result2.is_ok(), "Second delete should succeed (idempotent)");
if let Ok(response) = delete_result2 {
assert!(response.success, "Delete should report success even for already-deleted record");
}
println!("Double Delete Test: Both deletions succeeded (idempotent behavior)");
}
}
// ========================================================================
// VALIDATION AND BOUNDARY TESTS
// ========================================================================
#[rstest]
#[tokio::test]
async fn test_large_data_handling(#[future] form_test_context: FormTestContext) {
let mut context = form_test_context.await;
skip_if_backend_unavailable!();
// Test with very large string values
let large_string = "A".repeat(10000); // 10KB string
let very_large_string = "B".repeat(100000); // 100KB string
let test_cases = vec![
("Large", large_string),
("VeryLarge", very_large_string),
];
for (case_name, large_value) in test_cases {
let mut data = HashMap::new();
data.insert("firma".to_string(), create_string_value(&large_value));
let result = context.client.post_table_data(
context.profile_name.clone(),
context.table_name.clone(),
data,
).await;
match result {
Ok(response) => {
println!("Large Data Test: {} string handled successfully", case_name);
// Verify round-trip
if let Ok(get_response) = context.client.get_table_data(
context.profile_name.clone(),
context.table_name.clone(),
response.inserted_id,
).await {
if let Some(retrieved_value) = get_response.data.get("firma") {
assert_eq!(retrieved_value.len(), large_value.len(),
"Large string should survive round-trip for case: {}", case_name);
}
}
}
Err(e) => {
println!("Large Data Test: {} failed (may hit size limits): {}", case_name, e);
}
}
}
}
#[rstest]
#[tokio::test]
async fn test_sql_injection_attempts(#[future] form_test_context: FormTestContext) {
let mut context = form_test_context.await;
skip_if_backend_unavailable!();
// Test potential SQL injection strings
let injection_attempts = vec![
("SingleQuote", "'; DROP TABLE users; --"),
("DoubleQuote", "\"; DROP TABLE users; --"),
("Union", "' UNION SELECT * FROM users --"),
("Comment", "/* malicious comment */"),
("Semicolon", "; DELETE FROM users;"),
];
for (case_name, injection_string) in injection_attempts {
let mut data = HashMap::new();
data.insert("firma".to_string(), create_string_value(injection_string));
data.insert("kz".to_string(), create_string_value("KZ123"));
let result = context.client.post_table_data(
context.profile_name.clone(),
context.table_name.clone(),
data,
).await;
match result {
Ok(response) => {
println!("SQL Injection Test: {} handled safely (parameterized queries)", case_name);
// Verify the malicious string was stored as-is (not executed)
if let Ok(get_response) = context.client.get_table_data(
context.profile_name.clone(),
context.table_name.clone(),
response.inserted_id,
).await {
if let Some(retrieved_value) = get_response.data.get("firma") {
assert_eq!(retrieved_value, injection_string,
"Injection string should be stored literally for case: {}", case_name);
}
}
}
Err(e) => {
println!("SQL Injection Test: {} rejected: {}", case_name, e);
}
}
}
}
#[rstest]
#[tokio::test]
async fn test_concurrent_operations_with_same_data(#[future] form_test_context: FormTestContext) {
let context = form_test_context.await;
skip_if_backend_unavailable!();
// Test multiple concurrent operations with identical data
let mut handles = Vec::new();
let num_tasks = 10;
for i in 0..num_tasks {
let mut context_clone = context.clone();
let handle = tokio::spawn(async move {
let mut data = HashMap::new();
data.insert("firma".to_string(), create_string_value("Concurrent Identical"));
data.insert("kz".to_string(), create_string_value(&format!("SAME{:02}", i)));
context_clone.client.post_table_data(
context_clone.profile_name,
context_clone.table_name,
data,
).await
});
handles.push(handle);
}
// Wait for all to complete
let mut success_count = 0;
let mut inserted_ids = Vec::new();
for (i, handle) in handles.into_iter().enumerate() {
match handle.await {
Ok(Ok(response)) => {
success_count += 1;
inserted_ids.push(response.inserted_id);
println!("Concurrent Identical Data: Task {} succeeded with ID {}", i, response.inserted_id);
}
Ok(Err(e)) => {
println!("Concurrent Identical Data: Task {} failed: {}", i, e);
}
Err(e) => {
println!("Concurrent Identical Data: Task {} panicked: {}", i, e);
}
}
}
assert!(success_count > 0, "At least some concurrent operations should succeed");
// Verify all IDs are unique
let unique_ids: std::collections::HashSet<_> = inserted_ids.iter().collect();
assert_eq!(unique_ids.len(), inserted_ids.len(), "All inserted IDs should be unique");
println!("Concurrent Identical Data: {}/{} operations succeeded with unique IDs",
success_count, num_tasks);
}
// ========================================================================
// PERFORMANCE AND STRESS TESTS
// ========================================================================
#[rstest]
#[tokio::test]
async fn test_bulk_operations_performance(#[future] form_test_context: FormTestContext) {
let mut context = form_test_context.await;
skip_if_backend_unavailable!();
let operation_count = 50;
let start_time = std::time::Instant::now();
let mut successful_operations = 0;
let mut created_ids = Vec::new();
// Bulk create
for i in 0..operation_count {
let mut data = HashMap::new();
data.insert("firma".to_string(), create_string_value(&format!("Bulk Company {}", i)));
data.insert("kz".to_string(), create_string_value(&format!("BLK{:02}", i)));
if let Ok(response) = context.client.post_table_data(
context.profile_name.clone(),
context.table_name.clone(),
data,
).await {
successful_operations += 1;
created_ids.push(response.inserted_id);
}
}
let create_duration = start_time.elapsed();
println!("Bulk Performance: Created {} records in {:?}", successful_operations, create_duration);
// Bulk read
let read_start = std::time::Instant::now();
let mut successful_reads = 0;
for &record_id in &created_ids {
if context.client.get_table_data(
context.profile_name.clone(),
context.table_name.clone(),
record_id,
).await.is_ok() {
successful_reads += 1;
}
}
let read_duration = read_start.elapsed();
println!("Bulk Performance: Read {} records in {:?}", successful_reads, read_duration);
// Performance assertions
assert!(successful_operations > operation_count * 8 / 10,
"At least 80% of operations should succeed");
assert!(create_duration.as_secs() < 60,
"Bulk operations should complete in reasonable time");
println!("Bulk Performance Test: {}/{} creates, {}/{} reads successful",
successful_operations, operation_count, successful_reads, created_ids.len());
}

View File

@@ -0,0 +1 @@
pub mod form_request_tests;

client/tests/mod.rs Normal file
View File

@@ -0,0 +1,3 @@
// tests/mod.rs
pub mod form;

View File

@@ -5,6 +5,8 @@ edition.workspace = true
license.workspace = true
[dependencies]
prost-types = { workspace = true }
tonic = "0.13.0"
prost = "0.13.5"
serde = { version = "1.0.219", features = ["derive"] }

View File

@@ -1,6 +1,6 @@
// proto/adresar.proto
syntax = "proto3";
package multieko2.adresar;
package komp_ac.adresar;
import "common.proto";
// import "table_structure.proto";

View File

@@ -1,6 +1,6 @@
// proto/auth.proto
syntax = "proto3";
package multieko2.auth;
package komp_ac.auth;
import "common.proto";

View File

@@ -1,6 +1,6 @@
// proto/common.proto
syntax = "proto3";
package multieko2.common;
package komp_ac.common;
message Empty {}
message CountResponse { int64 count = 1; }

View File

@@ -1,6 +1,6 @@
// In common/proto/search.proto
syntax = "proto3";
package multieko2.search;
package komp_ac.search;
service Searcher {
rpc SearchTable(SearchRequest) returns (SearchResponse);

View File

@@ -1,12 +1,12 @@
// common/proto/table_definition.proto
syntax = "proto3";
package multieko2.table_definition;
package komp_ac.table_definition;
import "common.proto";
service TableDefinition {
rpc PostTableDefinition (PostTableDefinitionRequest) returns (TableDefinitionResponse);
rpc GetProfileTree (multieko2.common.Empty) returns (ProfileTreeResponse);
rpc GetProfileTree (komp_ac.common.Empty) returns (ProfileTreeResponse);
rpc DeleteTable (DeleteTableRequest) returns (DeleteTableResponse);
}

View File

@@ -1,5 +1,5 @@
syntax = "proto3";
package multieko2.table_script;
package komp_ac.table_script;
service TableScript {
rpc PostTableScript(PostTableScriptRequest) returns (TableScriptResponse);

View File

@@ -1,6 +1,6 @@
// proto/table_structure.proto
syntax = "proto3";
package multieko2.table_structure;
package komp_ac.table_structure;
import "common.proto";

View File

@@ -1,22 +1,23 @@
// common/proto/tables_data.proto
syntax = "proto3";
package multieko2.tables_data;
package komp_ac.tables_data;
import "common.proto";
import "google/protobuf/struct.proto";
service TablesData {
rpc PostTableData (PostTableDataRequest) returns (PostTableDataResponse);
rpc PutTableData (PutTableDataRequest) returns (PutTableDataResponse);
rpc DeleteTableData (DeleteTableDataRequest) returns (DeleteTableDataResponse);
rpc GetTableData(GetTableDataRequest) returns (GetTableDataResponse);
rpc GetTableDataCount(GetTableDataCountRequest) returns (multieko2.common.CountResponse);
rpc GetTableDataCount(GetTableDataCountRequest) returns (komp_ac.common.CountResponse);
rpc GetTableDataByPosition(GetTableDataByPositionRequest) returns (GetTableDataResponse);
}
message PostTableDataRequest {
string profile_name = 1;
string table_name = 2;
map<string, string> data = 3;
map<string, google.protobuf.Value> data = 3;
}
message PostTableDataResponse {
@@ -29,7 +30,7 @@ message PutTableDataRequest {
string profile_name = 1;
string table_name = 2;
int64 id = 3;
map<string, string> data = 4;
map<string, google.protobuf.Value> data = 4;
}
message PutTableDataResponse {

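Since the data maps now carry google.protobuf.Value instead of plain strings, callers build typed values. Below is a hedged sketch of what a request could look like with the regenerated Rust types; the module path and field names follow the proto and lib.rs changes in this compare, and nothing here is taken from the actual client code.

use std::collections::HashMap;

use common::proto::komp_ac::tables_data::PostTableDataRequest;
use prost_types::{value::Kind, Value};

// Illustrative only: shows the shape of the new request payload.
fn example_request() -> PostTableDataRequest {
    let mut data: HashMap<String, Value> = HashMap::new();
    data.insert(
        "firma".to_string(),
        Value { kind: Some(Kind::StringValue("Test Company".to_string())) },
    );
    // Numeric and boolean columns are no longer stringly typed.
    data.insert(
        "amount".to_string(),
        Value { kind: Some(Kind::NumberValue(123.45)) },
    );
    data.insert(
        "active".to_string(),
        Value { kind: Some(Kind::BoolValue(true)) },
    );

    PostTableDataRequest {
        profile_name: "test_profile".to_string(),
        table_name: "test_table".to_string(),
        data,
    }
}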
View File

@@ -1,6 +1,6 @@
// proto/uctovnictvo.proto
syntax = "proto3";
package multieko2.uctovnictvo;
package komp_ac.uctovnictvo;
import "common.proto";

View File

@@ -3,33 +3,33 @@
pub mod search;
pub mod proto {
pub mod multieko2 {
pub mod komp_ac {
pub mod adresar {
include!("proto/multieko2.adresar.rs");
include!("proto/komp_ac.adresar.rs");
}
pub mod auth {
include!("proto/multieko2.auth.rs");
include!("proto/komp_ac.auth.rs");
}
pub mod common {
include!("proto/multieko2.common.rs");
include!("proto/komp_ac.common.rs");
}
pub mod table_structure {
include!("proto/multieko2.table_structure.rs");
include!("proto/komp_ac.table_structure.rs");
}
pub mod uctovnictvo {
include!("proto/multieko2.uctovnictvo.rs");
include!("proto/komp_ac.uctovnictvo.rs");
}
pub mod table_definition {
include!("proto/multieko2.table_definition.rs");
include!("proto/komp_ac.table_definition.rs");
}
pub mod tables_data {
include!("proto/multieko2.tables_data.rs");
include!("proto/komp_ac.tables_data.rs");
}
pub mod table_script {
include!("proto/multieko2.table_script.rs");
include!("proto/komp_ac.table_script.rs");
}
pub mod search {
include!("proto/multieko2.search.rs");
include!("proto/komp_ac.search.rs");
}
pub const FILE_DESCRIPTOR_SET: &[u8] =
include_bytes!("proto/descriptor.bin");

Binary file not shown.

View File

@@ -0,0 +1,791 @@
// This file is @generated by prost-build.
#[derive(Clone, Copy, PartialEq, ::prost::Message)]
pub struct GetAdresarRequest {
#[prost(int64, tag = "1")]
pub id: i64,
}
#[derive(Clone, Copy, PartialEq, ::prost::Message)]
pub struct DeleteAdresarRequest {
#[prost(int64, tag = "1")]
pub id: i64,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct PostAdresarRequest {
#[prost(string, tag = "1")]
pub firma: ::prost::alloc::string::String,
#[prost(string, tag = "2")]
pub kz: ::prost::alloc::string::String,
#[prost(string, tag = "3")]
pub drc: ::prost::alloc::string::String,
#[prost(string, tag = "4")]
pub ulica: ::prost::alloc::string::String,
#[prost(string, tag = "5")]
pub psc: ::prost::alloc::string::String,
#[prost(string, tag = "6")]
pub mesto: ::prost::alloc::string::String,
#[prost(string, tag = "7")]
pub stat: ::prost::alloc::string::String,
#[prost(string, tag = "8")]
pub banka: ::prost::alloc::string::String,
#[prost(string, tag = "9")]
pub ucet: ::prost::alloc::string::String,
#[prost(string, tag = "10")]
pub skladm: ::prost::alloc::string::String,
#[prost(string, tag = "11")]
pub ico: ::prost::alloc::string::String,
#[prost(string, tag = "12")]
pub kontakt: ::prost::alloc::string::String,
#[prost(string, tag = "13")]
pub telefon: ::prost::alloc::string::String,
#[prost(string, tag = "14")]
pub skladu: ::prost::alloc::string::String,
#[prost(string, tag = "15")]
pub fax: ::prost::alloc::string::String,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct AdresarResponse {
#[prost(int64, tag = "1")]
pub id: i64,
#[prost(string, tag = "2")]
pub firma: ::prost::alloc::string::String,
#[prost(string, tag = "3")]
pub kz: ::prost::alloc::string::String,
#[prost(string, tag = "4")]
pub drc: ::prost::alloc::string::String,
#[prost(string, tag = "5")]
pub ulica: ::prost::alloc::string::String,
#[prost(string, tag = "6")]
pub psc: ::prost::alloc::string::String,
#[prost(string, tag = "7")]
pub mesto: ::prost::alloc::string::String,
#[prost(string, tag = "8")]
pub stat: ::prost::alloc::string::String,
#[prost(string, tag = "9")]
pub banka: ::prost::alloc::string::String,
#[prost(string, tag = "10")]
pub ucet: ::prost::alloc::string::String,
#[prost(string, tag = "11")]
pub skladm: ::prost::alloc::string::String,
#[prost(string, tag = "12")]
pub ico: ::prost::alloc::string::String,
#[prost(string, tag = "13")]
pub kontakt: ::prost::alloc::string::String,
#[prost(string, tag = "14")]
pub telefon: ::prost::alloc::string::String,
#[prost(string, tag = "15")]
pub skladu: ::prost::alloc::string::String,
#[prost(string, tag = "16")]
pub fax: ::prost::alloc::string::String,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct PutAdresarRequest {
#[prost(int64, tag = "1")]
pub id: i64,
#[prost(string, tag = "2")]
pub firma: ::prost::alloc::string::String,
#[prost(string, tag = "3")]
pub kz: ::prost::alloc::string::String,
#[prost(string, tag = "4")]
pub drc: ::prost::alloc::string::String,
#[prost(string, tag = "5")]
pub ulica: ::prost::alloc::string::String,
#[prost(string, tag = "6")]
pub psc: ::prost::alloc::string::String,
#[prost(string, tag = "7")]
pub mesto: ::prost::alloc::string::String,
#[prost(string, tag = "8")]
pub stat: ::prost::alloc::string::String,
#[prost(string, tag = "9")]
pub banka: ::prost::alloc::string::String,
#[prost(string, tag = "10")]
pub ucet: ::prost::alloc::string::String,
#[prost(string, tag = "11")]
pub skladm: ::prost::alloc::string::String,
#[prost(string, tag = "12")]
pub ico: ::prost::alloc::string::String,
#[prost(string, tag = "13")]
pub kontakt: ::prost::alloc::string::String,
#[prost(string, tag = "14")]
pub telefon: ::prost::alloc::string::String,
#[prost(string, tag = "15")]
pub skladu: ::prost::alloc::string::String,
#[prost(string, tag = "16")]
pub fax: ::prost::alloc::string::String,
}
#[derive(Clone, Copy, PartialEq, ::prost::Message)]
pub struct DeleteAdresarResponse {
#[prost(bool, tag = "1")]
pub success: bool,
}
/// Generated client implementations.
pub mod adresar_client {
#![allow(
unused_variables,
dead_code,
missing_docs,
clippy::wildcard_imports,
clippy::let_unit_value,
)]
use tonic::codegen::*;
use tonic::codegen::http::Uri;
#[derive(Debug, Clone)]
pub struct AdresarClient<T> {
inner: tonic::client::Grpc<T>,
}
impl AdresarClient<tonic::transport::Channel> {
/// Attempt to create a new client by connecting to a given endpoint.
pub async fn connect<D>(dst: D) -> Result<Self, tonic::transport::Error>
where
D: TryInto<tonic::transport::Endpoint>,
D::Error: Into<StdError>,
{
let conn = tonic::transport::Endpoint::new(dst)?.connect().await?;
Ok(Self::new(conn))
}
}
impl<T> AdresarClient<T>
where
T: tonic::client::GrpcService<tonic::body::Body>,
T::Error: Into<StdError>,
T::ResponseBody: Body<Data = Bytes> + std::marker::Send + 'static,
<T::ResponseBody as Body>::Error: Into<StdError> + std::marker::Send,
{
pub fn new(inner: T) -> Self {
let inner = tonic::client::Grpc::new(inner);
Self { inner }
}
pub fn with_origin(inner: T, origin: Uri) -> Self {
let inner = tonic::client::Grpc::with_origin(inner, origin);
Self { inner }
}
pub fn with_interceptor<F>(
inner: T,
interceptor: F,
) -> AdresarClient<InterceptedService<T, F>>
where
F: tonic::service::Interceptor,
T::ResponseBody: Default,
T: tonic::codegen::Service<
http::Request<tonic::body::Body>,
Response = http::Response<
<T as tonic::client::GrpcService<tonic::body::Body>>::ResponseBody,
>,
>,
<T as tonic::codegen::Service<
http::Request<tonic::body::Body>,
>>::Error: Into<StdError> + std::marker::Send + std::marker::Sync,
{
AdresarClient::new(InterceptedService::new(inner, interceptor))
}
/// Compress requests with the given encoding.
///
/// This requires the server to support it otherwise it might respond with an
/// error.
#[must_use]
pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self {
self.inner = self.inner.send_compressed(encoding);
self
}
/// Enable decompressing responses.
#[must_use]
pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self {
self.inner = self.inner.accept_compressed(encoding);
self
}
/// Limits the maximum size of a decoded message.
///
/// Default: `4MB`
#[must_use]
pub fn max_decoding_message_size(mut self, limit: usize) -> Self {
self.inner = self.inner.max_decoding_message_size(limit);
self
}
/// Limits the maximum size of an encoded message.
///
/// Default: `usize::MAX`
#[must_use]
pub fn max_encoding_message_size(mut self, limit: usize) -> Self {
self.inner = self.inner.max_encoding_message_size(limit);
self
}
pub async fn post_adresar(
&mut self,
request: impl tonic::IntoRequest<super::PostAdresarRequest>,
) -> std::result::Result<
tonic::Response<super::AdresarResponse>,
tonic::Status,
> {
self.inner
.ready()
.await
.map_err(|e| {
tonic::Status::unknown(
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/komp_ac.adresar.Adresar/PostAdresar",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(GrpcMethod::new("komp_ac.adresar.Adresar", "PostAdresar"));
self.inner.unary(req, path, codec).await
}
pub async fn get_adresar(
&mut self,
request: impl tonic::IntoRequest<super::GetAdresarRequest>,
) -> std::result::Result<
tonic::Response<super::AdresarResponse>,
tonic::Status,
> {
self.inner
.ready()
.await
.map_err(|e| {
tonic::Status::unknown(
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/komp_ac.adresar.Adresar/GetAdresar",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(GrpcMethod::new("komp_ac.adresar.Adresar", "GetAdresar"));
self.inner.unary(req, path, codec).await
}
pub async fn put_adresar(
&mut self,
request: impl tonic::IntoRequest<super::PutAdresarRequest>,
) -> std::result::Result<
tonic::Response<super::AdresarResponse>,
tonic::Status,
> {
self.inner
.ready()
.await
.map_err(|e| {
tonic::Status::unknown(
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/komp_ac.adresar.Adresar/PutAdresar",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(GrpcMethod::new("komp_ac.adresar.Adresar", "PutAdresar"));
self.inner.unary(req, path, codec).await
}
pub async fn delete_adresar(
&mut self,
request: impl tonic::IntoRequest<super::DeleteAdresarRequest>,
) -> std::result::Result<
tonic::Response<super::DeleteAdresarResponse>,
tonic::Status,
> {
self.inner
.ready()
.await
.map_err(|e| {
tonic::Status::unknown(
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/komp_ac.adresar.Adresar/DeleteAdresar",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(GrpcMethod::new("komp_ac.adresar.Adresar", "DeleteAdresar"));
self.inner.unary(req, path, codec).await
}
pub async fn get_adresar_count(
&mut self,
request: impl tonic::IntoRequest<super::super::common::Empty>,
) -> std::result::Result<
tonic::Response<super::super::common::CountResponse>,
tonic::Status,
> {
self.inner
.ready()
.await
.map_err(|e| {
tonic::Status::unknown(
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/komp_ac.adresar.Adresar/GetAdresarCount",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(GrpcMethod::new("komp_ac.adresar.Adresar", "GetAdresarCount"));
self.inner.unary(req, path, codec).await
}
pub async fn get_adresar_by_position(
&mut self,
request: impl tonic::IntoRequest<super::super::common::PositionRequest>,
) -> std::result::Result<
tonic::Response<super::AdresarResponse>,
tonic::Status,
> {
self.inner
.ready()
.await
.map_err(|e| {
tonic::Status::unknown(
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/komp_ac.adresar.Adresar/GetAdresarByPosition",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(
GrpcMethod::new("komp_ac.adresar.Adresar", "GetAdresarByPosition"),
);
self.inner.unary(req, path, codec).await
}
}
}
/// Generated server implementations.
pub mod adresar_server {
#![allow(
unused_variables,
dead_code,
missing_docs,
clippy::wildcard_imports,
clippy::let_unit_value,
)]
use tonic::codegen::*;
/// Generated trait containing gRPC methods that should be implemented for use with AdresarServer.
#[async_trait]
pub trait Adresar: std::marker::Send + std::marker::Sync + 'static {
async fn post_adresar(
&self,
request: tonic::Request<super::PostAdresarRequest>,
) -> std::result::Result<tonic::Response<super::AdresarResponse>, tonic::Status>;
async fn get_adresar(
&self,
request: tonic::Request<super::GetAdresarRequest>,
) -> std::result::Result<tonic::Response<super::AdresarResponse>, tonic::Status>;
async fn put_adresar(
&self,
request: tonic::Request<super::PutAdresarRequest>,
) -> std::result::Result<tonic::Response<super::AdresarResponse>, tonic::Status>;
async fn delete_adresar(
&self,
request: tonic::Request<super::DeleteAdresarRequest>,
) -> std::result::Result<
tonic::Response<super::DeleteAdresarResponse>,
tonic::Status,
>;
async fn get_adresar_count(
&self,
request: tonic::Request<super::super::common::Empty>,
) -> std::result::Result<
tonic::Response<super::super::common::CountResponse>,
tonic::Status,
>;
async fn get_adresar_by_position(
&self,
request: tonic::Request<super::super::common::PositionRequest>,
) -> std::result::Result<tonic::Response<super::AdresarResponse>, tonic::Status>;
}
#[derive(Debug)]
pub struct AdresarServer<T> {
inner: Arc<T>,
accept_compression_encodings: EnabledCompressionEncodings,
send_compression_encodings: EnabledCompressionEncodings,
max_decoding_message_size: Option<usize>,
max_encoding_message_size: Option<usize>,
}
impl<T> AdresarServer<T> {
pub fn new(inner: T) -> Self {
Self::from_arc(Arc::new(inner))
}
pub fn from_arc(inner: Arc<T>) -> Self {
Self {
inner,
accept_compression_encodings: Default::default(),
send_compression_encodings: Default::default(),
max_decoding_message_size: None,
max_encoding_message_size: None,
}
}
pub fn with_interceptor<F>(
inner: T,
interceptor: F,
) -> InterceptedService<Self, F>
where
F: tonic::service::Interceptor,
{
InterceptedService::new(Self::new(inner), interceptor)
}
/// Enable decompressing requests with the given encoding.
#[must_use]
pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self {
self.accept_compression_encodings.enable(encoding);
self
}
/// Compress responses with the given encoding, if the client supports it.
#[must_use]
pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self {
self.send_compression_encodings.enable(encoding);
self
}
/// Limits the maximum size of a decoded message.
///
/// Default: `4MB`
#[must_use]
pub fn max_decoding_message_size(mut self, limit: usize) -> Self {
self.max_decoding_message_size = Some(limit);
self
}
/// Limits the maximum size of an encoded message.
///
/// Default: `usize::MAX`
#[must_use]
pub fn max_encoding_message_size(mut self, limit: usize) -> Self {
self.max_encoding_message_size = Some(limit);
self
}
}
impl<T, B> tonic::codegen::Service<http::Request<B>> for AdresarServer<T>
where
T: Adresar,
B: Body + std::marker::Send + 'static,
B::Error: Into<StdError> + std::marker::Send + 'static,
{
type Response = http::Response<tonic::body::Body>;
type Error = std::convert::Infallible;
type Future = BoxFuture<Self::Response, Self::Error>;
fn poll_ready(
&mut self,
_cx: &mut Context<'_>,
) -> Poll<std::result::Result<(), Self::Error>> {
Poll::Ready(Ok(()))
}
fn call(&mut self, req: http::Request<B>) -> Self::Future {
match req.uri().path() {
"/komp_ac.adresar.Adresar/PostAdresar" => {
#[allow(non_camel_case_types)]
struct PostAdresarSvc<T: Adresar>(pub Arc<T>);
impl<
T: Adresar,
> tonic::server::UnaryService<super::PostAdresarRequest>
for PostAdresarSvc<T> {
type Response = super::AdresarResponse;
type Future = BoxFuture<
tonic::Response<Self::Response>,
tonic::Status,
>;
fn call(
&mut self,
request: tonic::Request<super::PostAdresarRequest>,
) -> Self::Future {
let inner = Arc::clone(&self.0);
let fut = async move {
<T as Adresar>::post_adresar(&inner, request).await
};
Box::pin(fut)
}
}
let accept_compression_encodings = self.accept_compression_encodings;
let send_compression_encodings = self.send_compression_encodings;
let max_decoding_message_size = self.max_decoding_message_size;
let max_encoding_message_size = self.max_encoding_message_size;
let inner = self.inner.clone();
let fut = async move {
let method = PostAdresarSvc(inner);
let codec = tonic::codec::ProstCodec::default();
let mut grpc = tonic::server::Grpc::new(codec)
.apply_compression_config(
accept_compression_encodings,
send_compression_encodings,
)
.apply_max_message_size_config(
max_decoding_message_size,
max_encoding_message_size,
);
let res = grpc.unary(method, req).await;
Ok(res)
};
Box::pin(fut)
}
"/komp_ac.adresar.Adresar/GetAdresar" => {
#[allow(non_camel_case_types)]
struct GetAdresarSvc<T: Adresar>(pub Arc<T>);
impl<
T: Adresar,
> tonic::server::UnaryService<super::GetAdresarRequest>
for GetAdresarSvc<T> {
type Response = super::AdresarResponse;
type Future = BoxFuture<
tonic::Response<Self::Response>,
tonic::Status,
>;
fn call(
&mut self,
request: tonic::Request<super::GetAdresarRequest>,
) -> Self::Future {
let inner = Arc::clone(&self.0);
let fut = async move {
<T as Adresar>::get_adresar(&inner, request).await
};
Box::pin(fut)
}
}
let accept_compression_encodings = self.accept_compression_encodings;
let send_compression_encodings = self.send_compression_encodings;
let max_decoding_message_size = self.max_decoding_message_size;
let max_encoding_message_size = self.max_encoding_message_size;
let inner = self.inner.clone();
let fut = async move {
let method = GetAdresarSvc(inner);
let codec = tonic::codec::ProstCodec::default();
let mut grpc = tonic::server::Grpc::new(codec)
.apply_compression_config(
accept_compression_encodings,
send_compression_encodings,
)
.apply_max_message_size_config(
max_decoding_message_size,
max_encoding_message_size,
);
let res = grpc.unary(method, req).await;
Ok(res)
};
Box::pin(fut)
}
"/komp_ac.adresar.Adresar/PutAdresar" => {
#[allow(non_camel_case_types)]
struct PutAdresarSvc<T: Adresar>(pub Arc<T>);
impl<
T: Adresar,
> tonic::server::UnaryService<super::PutAdresarRequest>
for PutAdresarSvc<T> {
type Response = super::AdresarResponse;
type Future = BoxFuture<
tonic::Response<Self::Response>,
tonic::Status,
>;
fn call(
&mut self,
request: tonic::Request<super::PutAdresarRequest>,
) -> Self::Future {
let inner = Arc::clone(&self.0);
let fut = async move {
<T as Adresar>::put_adresar(&inner, request).await
};
Box::pin(fut)
}
}
let accept_compression_encodings = self.accept_compression_encodings;
let send_compression_encodings = self.send_compression_encodings;
let max_decoding_message_size = self.max_decoding_message_size;
let max_encoding_message_size = self.max_encoding_message_size;
let inner = self.inner.clone();
let fut = async move {
let method = PutAdresarSvc(inner);
let codec = tonic::codec::ProstCodec::default();
let mut grpc = tonic::server::Grpc::new(codec)
.apply_compression_config(
accept_compression_encodings,
send_compression_encodings,
)
.apply_max_message_size_config(
max_decoding_message_size,
max_encoding_message_size,
);
let res = grpc.unary(method, req).await;
Ok(res)
};
Box::pin(fut)
}
"/komp_ac.adresar.Adresar/DeleteAdresar" => {
#[allow(non_camel_case_types)]
struct DeleteAdresarSvc<T: Adresar>(pub Arc<T>);
impl<
T: Adresar,
> tonic::server::UnaryService<super::DeleteAdresarRequest>
for DeleteAdresarSvc<T> {
type Response = super::DeleteAdresarResponse;
type Future = BoxFuture<
tonic::Response<Self::Response>,
tonic::Status,
>;
fn call(
&mut self,
request: tonic::Request<super::DeleteAdresarRequest>,
) -> Self::Future {
let inner = Arc::clone(&self.0);
let fut = async move {
<T as Adresar>::delete_adresar(&inner, request).await
};
Box::pin(fut)
}
}
let accept_compression_encodings = self.accept_compression_encodings;
let send_compression_encodings = self.send_compression_encodings;
let max_decoding_message_size = self.max_decoding_message_size;
let max_encoding_message_size = self.max_encoding_message_size;
let inner = self.inner.clone();
let fut = async move {
let method = DeleteAdresarSvc(inner);
let codec = tonic::codec::ProstCodec::default();
let mut grpc = tonic::server::Grpc::new(codec)
.apply_compression_config(
accept_compression_encodings,
send_compression_encodings,
)
.apply_max_message_size_config(
max_decoding_message_size,
max_encoding_message_size,
);
let res = grpc.unary(method, req).await;
Ok(res)
};
Box::pin(fut)
}
"/komp_ac.adresar.Adresar/GetAdresarCount" => {
#[allow(non_camel_case_types)]
struct GetAdresarCountSvc<T: Adresar>(pub Arc<T>);
impl<
T: Adresar,
> tonic::server::UnaryService<super::super::common::Empty>
for GetAdresarCountSvc<T> {
type Response = super::super::common::CountResponse;
type Future = BoxFuture<
tonic::Response<Self::Response>,
tonic::Status,
>;
fn call(
&mut self,
request: tonic::Request<super::super::common::Empty>,
) -> Self::Future {
let inner = Arc::clone(&self.0);
let fut = async move {
<T as Adresar>::get_adresar_count(&inner, request).await
};
Box::pin(fut)
}
}
let accept_compression_encodings = self.accept_compression_encodings;
let send_compression_encodings = self.send_compression_encodings;
let max_decoding_message_size = self.max_decoding_message_size;
let max_encoding_message_size = self.max_encoding_message_size;
let inner = self.inner.clone();
let fut = async move {
let method = GetAdresarCountSvc(inner);
let codec = tonic::codec::ProstCodec::default();
let mut grpc = tonic::server::Grpc::new(codec)
.apply_compression_config(
accept_compression_encodings,
send_compression_encodings,
)
.apply_max_message_size_config(
max_decoding_message_size,
max_encoding_message_size,
);
let res = grpc.unary(method, req).await;
Ok(res)
};
Box::pin(fut)
}
"/komp_ac.adresar.Adresar/GetAdresarByPosition" => {
#[allow(non_camel_case_types)]
struct GetAdresarByPositionSvc<T: Adresar>(pub Arc<T>);
impl<
T: Adresar,
> tonic::server::UnaryService<super::super::common::PositionRequest>
for GetAdresarByPositionSvc<T> {
type Response = super::AdresarResponse;
type Future = BoxFuture<
tonic::Response<Self::Response>,
tonic::Status,
>;
fn call(
&mut self,
request: tonic::Request<
super::super::common::PositionRequest,
>,
) -> Self::Future {
let inner = Arc::clone(&self.0);
let fut = async move {
<T as Adresar>::get_adresar_by_position(&inner, request)
.await
};
Box::pin(fut)
}
}
let accept_compression_encodings = self.accept_compression_encodings;
let send_compression_encodings = self.send_compression_encodings;
let max_decoding_message_size = self.max_decoding_message_size;
let max_encoding_message_size = self.max_encoding_message_size;
let inner = self.inner.clone();
let fut = async move {
let method = GetAdresarByPositionSvc(inner);
let codec = tonic::codec::ProstCodec::default();
let mut grpc = tonic::server::Grpc::new(codec)
.apply_compression_config(
accept_compression_encodings,
send_compression_encodings,
)
.apply_max_message_size_config(
max_decoding_message_size,
max_encoding_message_size,
);
let res = grpc.unary(method, req).await;
Ok(res)
};
Box::pin(fut)
}
_ => {
Box::pin(async move {
let mut response = http::Response::new(
tonic::body::Body::default(),
);
let headers = response.headers_mut();
headers
.insert(
tonic::Status::GRPC_STATUS,
(tonic::Code::Unimplemented as i32).into(),
);
headers
.insert(
http::header::CONTENT_TYPE,
tonic::metadata::GRPC_CONTENT_TYPE,
);
Ok(response)
})
}
}
}
}
impl<T> Clone for AdresarServer<T> {
fn clone(&self) -> Self {
let inner = self.inner.clone();
Self {
inner,
accept_compression_encodings: self.accept_compression_encodings,
send_compression_encodings: self.send_compression_encodings,
max_decoding_message_size: self.max_decoding_message_size,
max_encoding_message_size: self.max_encoding_message_size,
}
}
}
/// Generated gRPC service name
pub const SERVICE_NAME: &str = "komp_ac.adresar.Adresar";
impl<T> tonic::server::NamedService for AdresarServer<T> {
const NAME: &'static str = SERVICE_NAME;
}
}
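The generated `AdresarServer` above is only a transport wrapper: an application still has to supply an `Adresar` trait implementation and mount the wrapper on a tonic transport. The following is a minimal serving sketch and is not part of the generated file; the socket address is a placeholder, `svc` stands for any type implementing the generated trait, and the compression calls assume tonic's `gzip` feature is enabled.

use tonic::codec::CompressionEncoding;
use tonic::transport::Server;

// `svc` is any implementation of the generated `Adresar` trait.
async fn serve_adresar(
    svc: impl adresar_server::Adresar,
) -> Result<(), Box<dyn std::error::Error>> {
    let service = adresar_server::AdresarServer::new(svc)
        // Optional tuning via the generated builder methods shown above.
        .accept_compressed(CompressionEncoding::Gzip)
        .send_compressed(CompressionEncoding::Gzip)
        .max_decoding_message_size(8 * 1024 * 1024);

    Server::builder()
        .add_service(service)
        .serve("127.0.0.1:50051".parse()?)
        .await?;
    Ok(())
}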


@@ -0,0 +1,418 @@
// This file is @generated by prost-build.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct RegisterRequest {
#[prost(string, tag = "1")]
pub username: ::prost::alloc::string::String,
#[prost(string, tag = "2")]
pub email: ::prost::alloc::string::String,
#[prost(string, tag = "3")]
pub password: ::prost::alloc::string::String,
#[prost(string, tag = "4")]
pub password_confirmation: ::prost::alloc::string::String,
#[prost(string, tag = "5")]
pub role: ::prost::alloc::string::String,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct AuthResponse {
/// UUID in string format
#[prost(string, tag = "1")]
pub id: ::prost::alloc::string::String,
/// Registered username
#[prost(string, tag = "2")]
pub username: ::prost::alloc::string::String,
/// Registered email (if provided)
#[prost(string, tag = "3")]
pub email: ::prost::alloc::string::String,
/// Default role: 'accountant'
#[prost(string, tag = "4")]
pub role: ::prost::alloc::string::String,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct LoginRequest {
/// Can be username or email
#[prost(string, tag = "1")]
pub identifier: ::prost::alloc::string::String,
#[prost(string, tag = "2")]
pub password: ::prost::alloc::string::String,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct LoginResponse {
/// JWT token
#[prost(string, tag = "1")]
pub access_token: ::prost::alloc::string::String,
/// Usually "Bearer"
#[prost(string, tag = "2")]
pub token_type: ::prost::alloc::string::String,
/// Expiration in seconds (86400 for 24 hours)
#[prost(int32, tag = "3")]
pub expires_in: i32,
/// User's UUID in string format
#[prost(string, tag = "4")]
pub user_id: ::prost::alloc::string::String,
/// User's role
#[prost(string, tag = "5")]
pub role: ::prost::alloc::string::String,
#[prost(string, tag = "6")]
pub username: ::prost::alloc::string::String,
}
/// Generated client implementations.
pub mod auth_service_client {
#![allow(
unused_variables,
dead_code,
missing_docs,
clippy::wildcard_imports,
clippy::let_unit_value,
)]
use tonic::codegen::*;
use tonic::codegen::http::Uri;
#[derive(Debug, Clone)]
pub struct AuthServiceClient<T> {
inner: tonic::client::Grpc<T>,
}
impl AuthServiceClient<tonic::transport::Channel> {
/// Attempt to create a new client by connecting to a given endpoint.
pub async fn connect<D>(dst: D) -> Result<Self, tonic::transport::Error>
where
D: TryInto<tonic::transport::Endpoint>,
D::Error: Into<StdError>,
{
let conn = tonic::transport::Endpoint::new(dst)?.connect().await?;
Ok(Self::new(conn))
}
}
impl<T> AuthServiceClient<T>
where
T: tonic::client::GrpcService<tonic::body::Body>,
T::Error: Into<StdError>,
T::ResponseBody: Body<Data = Bytes> + std::marker::Send + 'static,
<T::ResponseBody as Body>::Error: Into<StdError> + std::marker::Send,
{
pub fn new(inner: T) -> Self {
let inner = tonic::client::Grpc::new(inner);
Self { inner }
}
pub fn with_origin(inner: T, origin: Uri) -> Self {
let inner = tonic::client::Grpc::with_origin(inner, origin);
Self { inner }
}
pub fn with_interceptor<F>(
inner: T,
interceptor: F,
) -> AuthServiceClient<InterceptedService<T, F>>
where
F: tonic::service::Interceptor,
T::ResponseBody: Default,
T: tonic::codegen::Service<
http::Request<tonic::body::Body>,
Response = http::Response<
<T as tonic::client::GrpcService<tonic::body::Body>>::ResponseBody,
>,
>,
<T as tonic::codegen::Service<
http::Request<tonic::body::Body>,
>>::Error: Into<StdError> + std::marker::Send + std::marker::Sync,
{
AuthServiceClient::new(InterceptedService::new(inner, interceptor))
}
/// Compress requests with the given encoding.
///
        /// This requires the server to support it; otherwise it might respond with an
/// error.
#[must_use]
pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self {
self.inner = self.inner.send_compressed(encoding);
self
}
/// Enable decompressing responses.
#[must_use]
pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self {
self.inner = self.inner.accept_compressed(encoding);
self
}
/// Limits the maximum size of a decoded message.
///
/// Default: `4MB`
#[must_use]
pub fn max_decoding_message_size(mut self, limit: usize) -> Self {
self.inner = self.inner.max_decoding_message_size(limit);
self
}
/// Limits the maximum size of an encoded message.
///
/// Default: `usize::MAX`
#[must_use]
pub fn max_encoding_message_size(mut self, limit: usize) -> Self {
self.inner = self.inner.max_encoding_message_size(limit);
self
}
pub async fn register(
&mut self,
request: impl tonic::IntoRequest<super::RegisterRequest>,
) -> std::result::Result<tonic::Response<super::AuthResponse>, tonic::Status> {
self.inner
.ready()
.await
.map_err(|e| {
tonic::Status::unknown(
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/komp_ac.auth.AuthService/Register",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(GrpcMethod::new("komp_ac.auth.AuthService", "Register"));
self.inner.unary(req, path, codec).await
}
pub async fn login(
&mut self,
request: impl tonic::IntoRequest<super::LoginRequest>,
) -> std::result::Result<tonic::Response<super::LoginResponse>, tonic::Status> {
self.inner
.ready()
.await
.map_err(|e| {
tonic::Status::unknown(
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/komp_ac.auth.AuthService/Login",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(GrpcMethod::new("komp_ac.auth.AuthService", "Login"));
self.inner.unary(req, path, codec).await
}
}
}
/// Generated server implementations.
pub mod auth_service_server {
#![allow(
unused_variables,
dead_code,
missing_docs,
clippy::wildcard_imports,
clippy::let_unit_value,
)]
use tonic::codegen::*;
/// Generated trait containing gRPC methods that should be implemented for use with AuthServiceServer.
#[async_trait]
pub trait AuthService: std::marker::Send + std::marker::Sync + 'static {
async fn register(
&self,
request: tonic::Request<super::RegisterRequest>,
) -> std::result::Result<tonic::Response<super::AuthResponse>, tonic::Status>;
async fn login(
&self,
request: tonic::Request<super::LoginRequest>,
) -> std::result::Result<tonic::Response<super::LoginResponse>, tonic::Status>;
}
#[derive(Debug)]
pub struct AuthServiceServer<T> {
inner: Arc<T>,
accept_compression_encodings: EnabledCompressionEncodings,
send_compression_encodings: EnabledCompressionEncodings,
max_decoding_message_size: Option<usize>,
max_encoding_message_size: Option<usize>,
}
impl<T> AuthServiceServer<T> {
pub fn new(inner: T) -> Self {
Self::from_arc(Arc::new(inner))
}
pub fn from_arc(inner: Arc<T>) -> Self {
Self {
inner,
accept_compression_encodings: Default::default(),
send_compression_encodings: Default::default(),
max_decoding_message_size: None,
max_encoding_message_size: None,
}
}
pub fn with_interceptor<F>(
inner: T,
interceptor: F,
) -> InterceptedService<Self, F>
where
F: tonic::service::Interceptor,
{
InterceptedService::new(Self::new(inner), interceptor)
}
/// Enable decompressing requests with the given encoding.
#[must_use]
pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self {
self.accept_compression_encodings.enable(encoding);
self
}
/// Compress responses with the given encoding, if the client supports it.
#[must_use]
pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self {
self.send_compression_encodings.enable(encoding);
self
}
/// Limits the maximum size of a decoded message.
///
/// Default: `4MB`
#[must_use]
pub fn max_decoding_message_size(mut self, limit: usize) -> Self {
self.max_decoding_message_size = Some(limit);
self
}
/// Limits the maximum size of an encoded message.
///
/// Default: `usize::MAX`
#[must_use]
pub fn max_encoding_message_size(mut self, limit: usize) -> Self {
self.max_encoding_message_size = Some(limit);
self
}
}
impl<T, B> tonic::codegen::Service<http::Request<B>> for AuthServiceServer<T>
where
T: AuthService,
B: Body + std::marker::Send + 'static,
B::Error: Into<StdError> + std::marker::Send + 'static,
{
type Response = http::Response<tonic::body::Body>;
type Error = std::convert::Infallible;
type Future = BoxFuture<Self::Response, Self::Error>;
fn poll_ready(
&mut self,
_cx: &mut Context<'_>,
) -> Poll<std::result::Result<(), Self::Error>> {
Poll::Ready(Ok(()))
}
fn call(&mut self, req: http::Request<B>) -> Self::Future {
match req.uri().path() {
"/komp_ac.auth.AuthService/Register" => {
#[allow(non_camel_case_types)]
struct RegisterSvc<T: AuthService>(pub Arc<T>);
impl<
T: AuthService,
> tonic::server::UnaryService<super::RegisterRequest>
for RegisterSvc<T> {
type Response = super::AuthResponse;
type Future = BoxFuture<
tonic::Response<Self::Response>,
tonic::Status,
>;
fn call(
&mut self,
request: tonic::Request<super::RegisterRequest>,
) -> Self::Future {
let inner = Arc::clone(&self.0);
let fut = async move {
<T as AuthService>::register(&inner, request).await
};
Box::pin(fut)
}
}
let accept_compression_encodings = self.accept_compression_encodings;
let send_compression_encodings = self.send_compression_encodings;
let max_decoding_message_size = self.max_decoding_message_size;
let max_encoding_message_size = self.max_encoding_message_size;
let inner = self.inner.clone();
let fut = async move {
let method = RegisterSvc(inner);
let codec = tonic::codec::ProstCodec::default();
let mut grpc = tonic::server::Grpc::new(codec)
.apply_compression_config(
accept_compression_encodings,
send_compression_encodings,
)
.apply_max_message_size_config(
max_decoding_message_size,
max_encoding_message_size,
);
let res = grpc.unary(method, req).await;
Ok(res)
};
Box::pin(fut)
}
"/komp_ac.auth.AuthService/Login" => {
#[allow(non_camel_case_types)]
struct LoginSvc<T: AuthService>(pub Arc<T>);
impl<T: AuthService> tonic::server::UnaryService<super::LoginRequest>
for LoginSvc<T> {
type Response = super::LoginResponse;
type Future = BoxFuture<
tonic::Response<Self::Response>,
tonic::Status,
>;
fn call(
&mut self,
request: tonic::Request<super::LoginRequest>,
) -> Self::Future {
let inner = Arc::clone(&self.0);
let fut = async move {
<T as AuthService>::login(&inner, request).await
};
Box::pin(fut)
}
}
let accept_compression_encodings = self.accept_compression_encodings;
let send_compression_encodings = self.send_compression_encodings;
let max_decoding_message_size = self.max_decoding_message_size;
let max_encoding_message_size = self.max_encoding_message_size;
let inner = self.inner.clone();
let fut = async move {
let method = LoginSvc(inner);
let codec = tonic::codec::ProstCodec::default();
let mut grpc = tonic::server::Grpc::new(codec)
.apply_compression_config(
accept_compression_encodings,
send_compression_encodings,
)
.apply_max_message_size_config(
max_decoding_message_size,
max_encoding_message_size,
);
let res = grpc.unary(method, req).await;
Ok(res)
};
Box::pin(fut)
}
_ => {
Box::pin(async move {
let mut response = http::Response::new(
tonic::body::Body::default(),
);
let headers = response.headers_mut();
headers
.insert(
tonic::Status::GRPC_STATUS,
(tonic::Code::Unimplemented as i32).into(),
);
headers
.insert(
http::header::CONTENT_TYPE,
tonic::metadata::GRPC_CONTENT_TYPE,
);
Ok(response)
})
}
}
}
}
impl<T> Clone for AuthServiceServer<T> {
fn clone(&self) -> Self {
let inner = self.inner.clone();
Self {
inner,
accept_compression_encodings: self.accept_compression_encodings,
send_compression_encodings: self.send_compression_encodings,
max_decoding_message_size: self.max_decoding_message_size,
max_encoding_message_size: self.max_encoding_message_size,
}
}
}
/// Generated gRPC service name
pub const SERVICE_NAME: &str = "komp_ac.auth.AuthService";
impl<T> tonic::server::NamedService for AuthServiceServer<T> {
const NAME: &'static str = SERVICE_NAME;
}
}
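The auth trait has only two methods, so a complete (if useless) implementation is short. The sketch below is illustrative only: the struct name, the zero UUID and the unconditional login failure are placeholders rather than behaviour from this repository, and it assumes it lives in the same module as the generated `RegisterRequest`/`AuthResponse` types.

use tonic::{Request, Response, Status};

#[derive(Default)]
struct StubAuth;

#[tonic::async_trait]
impl auth_service_server::AuthService for StubAuth {
    async fn register(
        &self,
        request: Request<RegisterRequest>,
    ) -> Result<Response<AuthResponse>, Status> {
        let req = request.into_inner();
        // Echo the submitted data back with a placeholder id.
        Ok(Response::new(AuthResponse {
            id: "00000000-0000-0000-0000-000000000000".into(),
            username: req.username,
            email: req.email,
            role: req.role,
        }))
    }

    async fn login(
        &self,
        _request: Request<LoginRequest>,
    ) -> Result<Response<LoginResponse>, Status> {
        // A stub that always rejects; a real handler would verify credentials
        // and issue a JWT as described by the LoginResponse fields above.
        Err(Status::unauthenticated("stub implementation"))
    }
}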


@@ -0,0 +1,13 @@
// This file is @generated by prost-build.
#[derive(Clone, Copy, PartialEq, ::prost::Message)]
pub struct Empty {}
#[derive(Clone, Copy, PartialEq, ::prost::Message)]
pub struct CountResponse {
#[prost(int64, tag = "1")]
pub count: i64,
}
#[derive(Clone, Copy, PartialEq, ::prost::Message)]
pub struct PositionRequest {
#[prost(int64, tag = "1")]
pub position: i64,
}


@@ -0,0 +1,317 @@
// This file is @generated by prost-build.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SearchRequest {
#[prost(string, tag = "1")]
pub table_name: ::prost::alloc::string::String,
#[prost(string, tag = "2")]
pub query: ::prost::alloc::string::String,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SearchResponse {
#[prost(message, repeated, tag = "1")]
pub hits: ::prost::alloc::vec::Vec<search_response::Hit>,
}
/// Nested message and enum types in `SearchResponse`.
pub mod search_response {
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Hit {
/// PostgreSQL row ID
#[prost(int64, tag = "1")]
pub id: i64,
#[prost(float, tag = "2")]
pub score: f32,
#[prost(string, tag = "3")]
pub content_json: ::prost::alloc::string::String,
}
}
/// Generated client implementations.
pub mod searcher_client {
#![allow(
unused_variables,
dead_code,
missing_docs,
clippy::wildcard_imports,
clippy::let_unit_value,
)]
use tonic::codegen::*;
use tonic::codegen::http::Uri;
#[derive(Debug, Clone)]
pub struct SearcherClient<T> {
inner: tonic::client::Grpc<T>,
}
impl SearcherClient<tonic::transport::Channel> {
/// Attempt to create a new client by connecting to a given endpoint.
pub async fn connect<D>(dst: D) -> Result<Self, tonic::transport::Error>
where
D: TryInto<tonic::transport::Endpoint>,
D::Error: Into<StdError>,
{
let conn = tonic::transport::Endpoint::new(dst)?.connect().await?;
Ok(Self::new(conn))
}
}
impl<T> SearcherClient<T>
where
T: tonic::client::GrpcService<tonic::body::Body>,
T::Error: Into<StdError>,
T::ResponseBody: Body<Data = Bytes> + std::marker::Send + 'static,
<T::ResponseBody as Body>::Error: Into<StdError> + std::marker::Send,
{
pub fn new(inner: T) -> Self {
let inner = tonic::client::Grpc::new(inner);
Self { inner }
}
pub fn with_origin(inner: T, origin: Uri) -> Self {
let inner = tonic::client::Grpc::with_origin(inner, origin);
Self { inner }
}
pub fn with_interceptor<F>(
inner: T,
interceptor: F,
) -> SearcherClient<InterceptedService<T, F>>
where
F: tonic::service::Interceptor,
T::ResponseBody: Default,
T: tonic::codegen::Service<
http::Request<tonic::body::Body>,
Response = http::Response<
<T as tonic::client::GrpcService<tonic::body::Body>>::ResponseBody,
>,
>,
<T as tonic::codegen::Service<
http::Request<tonic::body::Body>,
>>::Error: Into<StdError> + std::marker::Send + std::marker::Sync,
{
SearcherClient::new(InterceptedService::new(inner, interceptor))
}
/// Compress requests with the given encoding.
///
        /// This requires the server to support it; otherwise it might respond with an
/// error.
#[must_use]
pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self {
self.inner = self.inner.send_compressed(encoding);
self
}
/// Enable decompressing responses.
#[must_use]
pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self {
self.inner = self.inner.accept_compressed(encoding);
self
}
/// Limits the maximum size of a decoded message.
///
/// Default: `4MB`
#[must_use]
pub fn max_decoding_message_size(mut self, limit: usize) -> Self {
self.inner = self.inner.max_decoding_message_size(limit);
self
}
/// Limits the maximum size of an encoded message.
///
/// Default: `usize::MAX`
#[must_use]
pub fn max_encoding_message_size(mut self, limit: usize) -> Self {
self.inner = self.inner.max_encoding_message_size(limit);
self
}
pub async fn search_table(
&mut self,
request: impl tonic::IntoRequest<super::SearchRequest>,
) -> std::result::Result<tonic::Response<super::SearchResponse>, tonic::Status> {
self.inner
.ready()
.await
.map_err(|e| {
tonic::Status::unknown(
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/komp_ac.search.Searcher/SearchTable",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(GrpcMethod::new("komp_ac.search.Searcher", "SearchTable"));
self.inner.unary(req, path, codec).await
}
}
}
/// Generated server implementations.
pub mod searcher_server {
#![allow(
unused_variables,
dead_code,
missing_docs,
clippy::wildcard_imports,
clippy::let_unit_value,
)]
use tonic::codegen::*;
/// Generated trait containing gRPC methods that should be implemented for use with SearcherServer.
#[async_trait]
pub trait Searcher: std::marker::Send + std::marker::Sync + 'static {
async fn search_table(
&self,
request: tonic::Request<super::SearchRequest>,
) -> std::result::Result<tonic::Response<super::SearchResponse>, tonic::Status>;
}
#[derive(Debug)]
pub struct SearcherServer<T> {
inner: Arc<T>,
accept_compression_encodings: EnabledCompressionEncodings,
send_compression_encodings: EnabledCompressionEncodings,
max_decoding_message_size: Option<usize>,
max_encoding_message_size: Option<usize>,
}
impl<T> SearcherServer<T> {
pub fn new(inner: T) -> Self {
Self::from_arc(Arc::new(inner))
}
pub fn from_arc(inner: Arc<T>) -> Self {
Self {
inner,
accept_compression_encodings: Default::default(),
send_compression_encodings: Default::default(),
max_decoding_message_size: None,
max_encoding_message_size: None,
}
}
pub fn with_interceptor<F>(
inner: T,
interceptor: F,
) -> InterceptedService<Self, F>
where
F: tonic::service::Interceptor,
{
InterceptedService::new(Self::new(inner), interceptor)
}
/// Enable decompressing requests with the given encoding.
#[must_use]
pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self {
self.accept_compression_encodings.enable(encoding);
self
}
/// Compress responses with the given encoding, if the client supports it.
#[must_use]
pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self {
self.send_compression_encodings.enable(encoding);
self
}
/// Limits the maximum size of a decoded message.
///
/// Default: `4MB`
#[must_use]
pub fn max_decoding_message_size(mut self, limit: usize) -> Self {
self.max_decoding_message_size = Some(limit);
self
}
/// Limits the maximum size of an encoded message.
///
/// Default: `usize::MAX`
#[must_use]
pub fn max_encoding_message_size(mut self, limit: usize) -> Self {
self.max_encoding_message_size = Some(limit);
self
}
}
impl<T, B> tonic::codegen::Service<http::Request<B>> for SearcherServer<T>
where
T: Searcher,
B: Body + std::marker::Send + 'static,
B::Error: Into<StdError> + std::marker::Send + 'static,
{
type Response = http::Response<tonic::body::Body>;
type Error = std::convert::Infallible;
type Future = BoxFuture<Self::Response, Self::Error>;
fn poll_ready(
&mut self,
_cx: &mut Context<'_>,
) -> Poll<std::result::Result<(), Self::Error>> {
Poll::Ready(Ok(()))
}
fn call(&mut self, req: http::Request<B>) -> Self::Future {
match req.uri().path() {
"/komp_ac.search.Searcher/SearchTable" => {
#[allow(non_camel_case_types)]
struct SearchTableSvc<T: Searcher>(pub Arc<T>);
impl<T: Searcher> tonic::server::UnaryService<super::SearchRequest>
for SearchTableSvc<T> {
type Response = super::SearchResponse;
type Future = BoxFuture<
tonic::Response<Self::Response>,
tonic::Status,
>;
fn call(
&mut self,
request: tonic::Request<super::SearchRequest>,
) -> Self::Future {
let inner = Arc::clone(&self.0);
let fut = async move {
<T as Searcher>::search_table(&inner, request).await
};
Box::pin(fut)
}
}
let accept_compression_encodings = self.accept_compression_encodings;
let send_compression_encodings = self.send_compression_encodings;
let max_decoding_message_size = self.max_decoding_message_size;
let max_encoding_message_size = self.max_encoding_message_size;
let inner = self.inner.clone();
let fut = async move {
let method = SearchTableSvc(inner);
let codec = tonic::codec::ProstCodec::default();
let mut grpc = tonic::server::Grpc::new(codec)
.apply_compression_config(
accept_compression_encodings,
send_compression_encodings,
)
.apply_max_message_size_config(
max_decoding_message_size,
max_encoding_message_size,
);
let res = grpc.unary(method, req).await;
Ok(res)
};
Box::pin(fut)
}
_ => {
Box::pin(async move {
let mut response = http::Response::new(
tonic::body::Body::default(),
);
let headers = response.headers_mut();
headers
.insert(
tonic::Status::GRPC_STATUS,
(tonic::Code::Unimplemented as i32).into(),
);
headers
.insert(
http::header::CONTENT_TYPE,
tonic::metadata::GRPC_CONTENT_TYPE,
);
Ok(response)
})
}
}
}
}
impl<T> Clone for SearcherServer<T> {
fn clone(&self) -> Self {
let inner = self.inner.clone();
Self {
inner,
accept_compression_encodings: self.accept_compression_encodings,
send_compression_encodings: self.send_compression_encodings,
max_decoding_message_size: self.max_decoding_message_size,
max_encoding_message_size: self.max_encoding_message_size,
}
}
}
/// Generated gRPC service name
pub const SERVICE_NAME: &str = "komp_ac.search.Searcher";
impl<T> tonic::server::NamedService for SearcherServer<T> {
const NAME: &'static str = SERVICE_NAME;
}
}
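A hedged client-side sketch for the search service follows; the endpoint URL, table name and query string are placeholders, and the generated `searcher_client` module and `SearchRequest` type are assumed to be in scope.

async fn demo_search() -> Result<(), Box<dyn std::error::Error>> {
    let mut client =
        searcher_client::SearcherClient::connect("http://127.0.0.1:50051").await?;
    let response = client
        .search_table(SearchRequest {
            table_name: "2025_adresar6".into(),
            query: "novak".into(),
        })
        .await?;
    // Each hit carries the PostgreSQL row id, a relevance score and the row as JSON.
    for hit in &response.get_ref().hits {
        println!("id={} score={} {}", hit.id, hit.score, hit.content_json);
    }
    Ok(())
}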


@@ -0,0 +1,544 @@
// This file is @generated by prost-build.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct TableLink {
#[prost(string, tag = "1")]
pub linked_table_name: ::prost::alloc::string::String,
#[prost(bool, tag = "2")]
pub required: bool,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct PostTableDefinitionRequest {
#[prost(string, tag = "1")]
pub table_name: ::prost::alloc::string::String,
#[prost(message, repeated, tag = "2")]
pub links: ::prost::alloc::vec::Vec<TableLink>,
#[prost(message, repeated, tag = "3")]
pub columns: ::prost::alloc::vec::Vec<ColumnDefinition>,
#[prost(string, repeated, tag = "4")]
pub indexes: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
#[prost(string, tag = "5")]
pub profile_name: ::prost::alloc::string::String,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ColumnDefinition {
#[prost(string, tag = "1")]
pub name: ::prost::alloc::string::String,
#[prost(string, tag = "2")]
pub field_type: ::prost::alloc::string::String,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct TableDefinitionResponse {
#[prost(bool, tag = "1")]
pub success: bool,
#[prost(string, tag = "2")]
pub sql: ::prost::alloc::string::String,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ProfileTreeResponse {
#[prost(message, repeated, tag = "1")]
pub profiles: ::prost::alloc::vec::Vec<profile_tree_response::Profile>,
}
/// Nested message and enum types in `ProfileTreeResponse`.
pub mod profile_tree_response {
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Table {
#[prost(int64, tag = "1")]
pub id: i64,
#[prost(string, tag = "2")]
pub name: ::prost::alloc::string::String,
#[prost(string, repeated, tag = "3")]
pub depends_on: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Profile {
#[prost(string, tag = "1")]
pub name: ::prost::alloc::string::String,
#[prost(message, repeated, tag = "2")]
pub tables: ::prost::alloc::vec::Vec<Table>,
}
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DeleteTableRequest {
#[prost(string, tag = "1")]
pub profile_name: ::prost::alloc::string::String,
#[prost(string, tag = "2")]
pub table_name: ::prost::alloc::string::String,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DeleteTableResponse {
#[prost(bool, tag = "1")]
pub success: bool,
#[prost(string, tag = "2")]
pub message: ::prost::alloc::string::String,
}
/// Generated client implementations.
pub mod table_definition_client {
#![allow(
unused_variables,
dead_code,
missing_docs,
clippy::wildcard_imports,
clippy::let_unit_value,
)]
use tonic::codegen::*;
use tonic::codegen::http::Uri;
#[derive(Debug, Clone)]
pub struct TableDefinitionClient<T> {
inner: tonic::client::Grpc<T>,
}
impl TableDefinitionClient<tonic::transport::Channel> {
/// Attempt to create a new client by connecting to a given endpoint.
pub async fn connect<D>(dst: D) -> Result<Self, tonic::transport::Error>
where
D: TryInto<tonic::transport::Endpoint>,
D::Error: Into<StdError>,
{
let conn = tonic::transport::Endpoint::new(dst)?.connect().await?;
Ok(Self::new(conn))
}
}
impl<T> TableDefinitionClient<T>
where
T: tonic::client::GrpcService<tonic::body::Body>,
T::Error: Into<StdError>,
T::ResponseBody: Body<Data = Bytes> + std::marker::Send + 'static,
<T::ResponseBody as Body>::Error: Into<StdError> + std::marker::Send,
{
pub fn new(inner: T) -> Self {
let inner = tonic::client::Grpc::new(inner);
Self { inner }
}
pub fn with_origin(inner: T, origin: Uri) -> Self {
let inner = tonic::client::Grpc::with_origin(inner, origin);
Self { inner }
}
pub fn with_interceptor<F>(
inner: T,
interceptor: F,
) -> TableDefinitionClient<InterceptedService<T, F>>
where
F: tonic::service::Interceptor,
T::ResponseBody: Default,
T: tonic::codegen::Service<
http::Request<tonic::body::Body>,
Response = http::Response<
<T as tonic::client::GrpcService<tonic::body::Body>>::ResponseBody,
>,
>,
<T as tonic::codegen::Service<
http::Request<tonic::body::Body>,
>>::Error: Into<StdError> + std::marker::Send + std::marker::Sync,
{
TableDefinitionClient::new(InterceptedService::new(inner, interceptor))
}
/// Compress requests with the given encoding.
///
        /// This requires the server to support it; otherwise it might respond with an
/// error.
#[must_use]
pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self {
self.inner = self.inner.send_compressed(encoding);
self
}
/// Enable decompressing responses.
#[must_use]
pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self {
self.inner = self.inner.accept_compressed(encoding);
self
}
/// Limits the maximum size of a decoded message.
///
/// Default: `4MB`
#[must_use]
pub fn max_decoding_message_size(mut self, limit: usize) -> Self {
self.inner = self.inner.max_decoding_message_size(limit);
self
}
/// Limits the maximum size of an encoded message.
///
/// Default: `usize::MAX`
#[must_use]
pub fn max_encoding_message_size(mut self, limit: usize) -> Self {
self.inner = self.inner.max_encoding_message_size(limit);
self
}
pub async fn post_table_definition(
&mut self,
request: impl tonic::IntoRequest<super::PostTableDefinitionRequest>,
) -> std::result::Result<
tonic::Response<super::TableDefinitionResponse>,
tonic::Status,
> {
self.inner
.ready()
.await
.map_err(|e| {
tonic::Status::unknown(
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/komp_ac.table_definition.TableDefinition/PostTableDefinition",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(
GrpcMethod::new(
"komp_ac.table_definition.TableDefinition",
"PostTableDefinition",
),
);
self.inner.unary(req, path, codec).await
}
pub async fn get_profile_tree(
&mut self,
request: impl tonic::IntoRequest<super::super::common::Empty>,
) -> std::result::Result<
tonic::Response<super::ProfileTreeResponse>,
tonic::Status,
> {
self.inner
.ready()
.await
.map_err(|e| {
tonic::Status::unknown(
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/komp_ac.table_definition.TableDefinition/GetProfileTree",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(
GrpcMethod::new(
"komp_ac.table_definition.TableDefinition",
"GetProfileTree",
),
);
self.inner.unary(req, path, codec).await
}
pub async fn delete_table(
&mut self,
request: impl tonic::IntoRequest<super::DeleteTableRequest>,
) -> std::result::Result<
tonic::Response<super::DeleteTableResponse>,
tonic::Status,
> {
self.inner
.ready()
.await
.map_err(|e| {
tonic::Status::unknown(
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/komp_ac.table_definition.TableDefinition/DeleteTable",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(
GrpcMethod::new(
"komp_ac.table_definition.TableDefinition",
"DeleteTable",
),
);
self.inner.unary(req, path, codec).await
}
}
}
/// Generated server implementations.
pub mod table_definition_server {
#![allow(
unused_variables,
dead_code,
missing_docs,
clippy::wildcard_imports,
clippy::let_unit_value,
)]
use tonic::codegen::*;
/// Generated trait containing gRPC methods that should be implemented for use with TableDefinitionServer.
#[async_trait]
pub trait TableDefinition: std::marker::Send + std::marker::Sync + 'static {
async fn post_table_definition(
&self,
request: tonic::Request<super::PostTableDefinitionRequest>,
) -> std::result::Result<
tonic::Response<super::TableDefinitionResponse>,
tonic::Status,
>;
async fn get_profile_tree(
&self,
request: tonic::Request<super::super::common::Empty>,
) -> std::result::Result<
tonic::Response<super::ProfileTreeResponse>,
tonic::Status,
>;
async fn delete_table(
&self,
request: tonic::Request<super::DeleteTableRequest>,
) -> std::result::Result<
tonic::Response<super::DeleteTableResponse>,
tonic::Status,
>;
}
#[derive(Debug)]
pub struct TableDefinitionServer<T> {
inner: Arc<T>,
accept_compression_encodings: EnabledCompressionEncodings,
send_compression_encodings: EnabledCompressionEncodings,
max_decoding_message_size: Option<usize>,
max_encoding_message_size: Option<usize>,
}
impl<T> TableDefinitionServer<T> {
pub fn new(inner: T) -> Self {
Self::from_arc(Arc::new(inner))
}
pub fn from_arc(inner: Arc<T>) -> Self {
Self {
inner,
accept_compression_encodings: Default::default(),
send_compression_encodings: Default::default(),
max_decoding_message_size: None,
max_encoding_message_size: None,
}
}
pub fn with_interceptor<F>(
inner: T,
interceptor: F,
) -> InterceptedService<Self, F>
where
F: tonic::service::Interceptor,
{
InterceptedService::new(Self::new(inner), interceptor)
}
/// Enable decompressing requests with the given encoding.
#[must_use]
pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self {
self.accept_compression_encodings.enable(encoding);
self
}
/// Compress responses with the given encoding, if the client supports it.
#[must_use]
pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self {
self.send_compression_encodings.enable(encoding);
self
}
/// Limits the maximum size of a decoded message.
///
/// Default: `4MB`
#[must_use]
pub fn max_decoding_message_size(mut self, limit: usize) -> Self {
self.max_decoding_message_size = Some(limit);
self
}
/// Limits the maximum size of an encoded message.
///
/// Default: `usize::MAX`
#[must_use]
pub fn max_encoding_message_size(mut self, limit: usize) -> Self {
self.max_encoding_message_size = Some(limit);
self
}
}
impl<T, B> tonic::codegen::Service<http::Request<B>> for TableDefinitionServer<T>
where
T: TableDefinition,
B: Body + std::marker::Send + 'static,
B::Error: Into<StdError> + std::marker::Send + 'static,
{
type Response = http::Response<tonic::body::Body>;
type Error = std::convert::Infallible;
type Future = BoxFuture<Self::Response, Self::Error>;
fn poll_ready(
&mut self,
_cx: &mut Context<'_>,
) -> Poll<std::result::Result<(), Self::Error>> {
Poll::Ready(Ok(()))
}
fn call(&mut self, req: http::Request<B>) -> Self::Future {
match req.uri().path() {
"/komp_ac.table_definition.TableDefinition/PostTableDefinition" => {
#[allow(non_camel_case_types)]
struct PostTableDefinitionSvc<T: TableDefinition>(pub Arc<T>);
impl<
T: TableDefinition,
> tonic::server::UnaryService<super::PostTableDefinitionRequest>
for PostTableDefinitionSvc<T> {
type Response = super::TableDefinitionResponse;
type Future = BoxFuture<
tonic::Response<Self::Response>,
tonic::Status,
>;
fn call(
&mut self,
request: tonic::Request<super::PostTableDefinitionRequest>,
) -> Self::Future {
let inner = Arc::clone(&self.0);
let fut = async move {
<T as TableDefinition>::post_table_definition(
&inner,
request,
)
.await
};
Box::pin(fut)
}
}
let accept_compression_encodings = self.accept_compression_encodings;
let send_compression_encodings = self.send_compression_encodings;
let max_decoding_message_size = self.max_decoding_message_size;
let max_encoding_message_size = self.max_encoding_message_size;
let inner = self.inner.clone();
let fut = async move {
let method = PostTableDefinitionSvc(inner);
let codec = tonic::codec::ProstCodec::default();
let mut grpc = tonic::server::Grpc::new(codec)
.apply_compression_config(
accept_compression_encodings,
send_compression_encodings,
)
.apply_max_message_size_config(
max_decoding_message_size,
max_encoding_message_size,
);
let res = grpc.unary(method, req).await;
Ok(res)
};
Box::pin(fut)
}
"/komp_ac.table_definition.TableDefinition/GetProfileTree" => {
#[allow(non_camel_case_types)]
struct GetProfileTreeSvc<T: TableDefinition>(pub Arc<T>);
impl<
T: TableDefinition,
> tonic::server::UnaryService<super::super::common::Empty>
for GetProfileTreeSvc<T> {
type Response = super::ProfileTreeResponse;
type Future = BoxFuture<
tonic::Response<Self::Response>,
tonic::Status,
>;
fn call(
&mut self,
request: tonic::Request<super::super::common::Empty>,
) -> Self::Future {
let inner = Arc::clone(&self.0);
let fut = async move {
<T as TableDefinition>::get_profile_tree(&inner, request)
.await
};
Box::pin(fut)
}
}
let accept_compression_encodings = self.accept_compression_encodings;
let send_compression_encodings = self.send_compression_encodings;
let max_decoding_message_size = self.max_decoding_message_size;
let max_encoding_message_size = self.max_encoding_message_size;
let inner = self.inner.clone();
let fut = async move {
let method = GetProfileTreeSvc(inner);
let codec = tonic::codec::ProstCodec::default();
let mut grpc = tonic::server::Grpc::new(codec)
.apply_compression_config(
accept_compression_encodings,
send_compression_encodings,
)
.apply_max_message_size_config(
max_decoding_message_size,
max_encoding_message_size,
);
let res = grpc.unary(method, req).await;
Ok(res)
};
Box::pin(fut)
}
"/komp_ac.table_definition.TableDefinition/DeleteTable" => {
#[allow(non_camel_case_types)]
struct DeleteTableSvc<T: TableDefinition>(pub Arc<T>);
impl<
T: TableDefinition,
> tonic::server::UnaryService<super::DeleteTableRequest>
for DeleteTableSvc<T> {
type Response = super::DeleteTableResponse;
type Future = BoxFuture<
tonic::Response<Self::Response>,
tonic::Status,
>;
fn call(
&mut self,
request: tonic::Request<super::DeleteTableRequest>,
) -> Self::Future {
let inner = Arc::clone(&self.0);
let fut = async move {
<T as TableDefinition>::delete_table(&inner, request).await
};
Box::pin(fut)
}
}
let accept_compression_encodings = self.accept_compression_encodings;
let send_compression_encodings = self.send_compression_encodings;
let max_decoding_message_size = self.max_decoding_message_size;
let max_encoding_message_size = self.max_encoding_message_size;
let inner = self.inner.clone();
let fut = async move {
let method = DeleteTableSvc(inner);
let codec = tonic::codec::ProstCodec::default();
let mut grpc = tonic::server::Grpc::new(codec)
.apply_compression_config(
accept_compression_encodings,
send_compression_encodings,
)
.apply_max_message_size_config(
max_decoding_message_size,
max_encoding_message_size,
);
let res = grpc.unary(method, req).await;
Ok(res)
};
Box::pin(fut)
}
_ => {
Box::pin(async move {
let mut response = http::Response::new(
tonic::body::Body::default(),
);
let headers = response.headers_mut();
headers
.insert(
tonic::Status::GRPC_STATUS,
(tonic::Code::Unimplemented as i32).into(),
);
headers
.insert(
http::header::CONTENT_TYPE,
tonic::metadata::GRPC_CONTENT_TYPE,
);
Ok(response)
})
}
}
}
}
impl<T> Clone for TableDefinitionServer<T> {
fn clone(&self) -> Self {
let inner = self.inner.clone();
Self {
inner,
accept_compression_encodings: self.accept_compression_encodings,
send_compression_encodings: self.send_compression_encodings,
max_decoding_message_size: self.max_decoding_message_size,
max_encoding_message_size: self.max_encoding_message_size,
}
}
}
/// Generated gRPC service name
pub const SERVICE_NAME: &str = "komp_ac.table_definition.TableDefinition";
impl<T> tonic::server::NamedService for TableDefinitionServer<T> {
const NAME: &'static str = SERVICE_NAME;
}
}
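How a `PostTableDefinitionRequest` is assembled follows directly from the message fields above. The concrete profile, table, column and link names in this sketch are invented for illustration and are not taken from the repository.

fn example_table_definition() -> PostTableDefinitionRequest {
    PostTableDefinitionRequest {
        profile_name: "default".into(),
        table_name: "faktury".into(),
        // A required link to another table in the same profile.
        links: vec![TableLink {
            linked_table_name: "adresar".into(),
            required: true,
        }],
        columns: vec![ColumnDefinition {
            name: "total".into(),
            field_type: "DECIMAL".into(),
        }],
        // Column names that should receive an index.
        indexes: vec!["total".into()],
    }
}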


@@ -0,0 +1,323 @@
// This file is @generated by prost-build.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct PostTableScriptRequest {
#[prost(int64, tag = "1")]
pub table_definition_id: i64,
#[prost(string, tag = "2")]
pub target_column: ::prost::alloc::string::String,
#[prost(string, tag = "3")]
pub script: ::prost::alloc::string::String,
#[prost(string, tag = "4")]
pub description: ::prost::alloc::string::String,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct TableScriptResponse {
#[prost(int64, tag = "1")]
pub id: i64,
#[prost(string, tag = "2")]
pub warnings: ::prost::alloc::string::String,
}
/// Generated client implementations.
pub mod table_script_client {
#![allow(
unused_variables,
dead_code,
missing_docs,
clippy::wildcard_imports,
clippy::let_unit_value,
)]
use tonic::codegen::*;
use tonic::codegen::http::Uri;
#[derive(Debug, Clone)]
pub struct TableScriptClient<T> {
inner: tonic::client::Grpc<T>,
}
impl TableScriptClient<tonic::transport::Channel> {
/// Attempt to create a new client by connecting to a given endpoint.
pub async fn connect<D>(dst: D) -> Result<Self, tonic::transport::Error>
where
D: TryInto<tonic::transport::Endpoint>,
D::Error: Into<StdError>,
{
let conn = tonic::transport::Endpoint::new(dst)?.connect().await?;
Ok(Self::new(conn))
}
}
impl<T> TableScriptClient<T>
where
T: tonic::client::GrpcService<tonic::body::Body>,
T::Error: Into<StdError>,
T::ResponseBody: Body<Data = Bytes> + std::marker::Send + 'static,
<T::ResponseBody as Body>::Error: Into<StdError> + std::marker::Send,
{
pub fn new(inner: T) -> Self {
let inner = tonic::client::Grpc::new(inner);
Self { inner }
}
pub fn with_origin(inner: T, origin: Uri) -> Self {
let inner = tonic::client::Grpc::with_origin(inner, origin);
Self { inner }
}
pub fn with_interceptor<F>(
inner: T,
interceptor: F,
) -> TableScriptClient<InterceptedService<T, F>>
where
F: tonic::service::Interceptor,
T::ResponseBody: Default,
T: tonic::codegen::Service<
http::Request<tonic::body::Body>,
Response = http::Response<
<T as tonic::client::GrpcService<tonic::body::Body>>::ResponseBody,
>,
>,
<T as tonic::codegen::Service<
http::Request<tonic::body::Body>,
>>::Error: Into<StdError> + std::marker::Send + std::marker::Sync,
{
TableScriptClient::new(InterceptedService::new(inner, interceptor))
}
/// Compress requests with the given encoding.
///
        /// This requires the server to support it; otherwise it might respond with an
/// error.
#[must_use]
pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self {
self.inner = self.inner.send_compressed(encoding);
self
}
/// Enable decompressing responses.
#[must_use]
pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self {
self.inner = self.inner.accept_compressed(encoding);
self
}
/// Limits the maximum size of a decoded message.
///
/// Default: `4MB`
#[must_use]
pub fn max_decoding_message_size(mut self, limit: usize) -> Self {
self.inner = self.inner.max_decoding_message_size(limit);
self
}
/// Limits the maximum size of an encoded message.
///
/// Default: `usize::MAX`
#[must_use]
pub fn max_encoding_message_size(mut self, limit: usize) -> Self {
self.inner = self.inner.max_encoding_message_size(limit);
self
}
pub async fn post_table_script(
&mut self,
request: impl tonic::IntoRequest<super::PostTableScriptRequest>,
) -> std::result::Result<
tonic::Response<super::TableScriptResponse>,
tonic::Status,
> {
self.inner
.ready()
.await
.map_err(|e| {
tonic::Status::unknown(
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/komp_ac.table_script.TableScript/PostTableScript",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(
GrpcMethod::new(
"komp_ac.table_script.TableScript",
"PostTableScript",
),
);
self.inner.unary(req, path, codec).await
}
}
}
/// Generated server implementations.
pub mod table_script_server {
#![allow(
unused_variables,
dead_code,
missing_docs,
clippy::wildcard_imports,
clippy::let_unit_value,
)]
use tonic::codegen::*;
/// Generated trait containing gRPC methods that should be implemented for use with TableScriptServer.
#[async_trait]
pub trait TableScript: std::marker::Send + std::marker::Sync + 'static {
async fn post_table_script(
&self,
request: tonic::Request<super::PostTableScriptRequest>,
) -> std::result::Result<
tonic::Response<super::TableScriptResponse>,
tonic::Status,
>;
}
#[derive(Debug)]
pub struct TableScriptServer<T> {
inner: Arc<T>,
accept_compression_encodings: EnabledCompressionEncodings,
send_compression_encodings: EnabledCompressionEncodings,
max_decoding_message_size: Option<usize>,
max_encoding_message_size: Option<usize>,
}
impl<T> TableScriptServer<T> {
pub fn new(inner: T) -> Self {
Self::from_arc(Arc::new(inner))
}
pub fn from_arc(inner: Arc<T>) -> Self {
Self {
inner,
accept_compression_encodings: Default::default(),
send_compression_encodings: Default::default(),
max_decoding_message_size: None,
max_encoding_message_size: None,
}
}
pub fn with_interceptor<F>(
inner: T,
interceptor: F,
) -> InterceptedService<Self, F>
where
F: tonic::service::Interceptor,
{
InterceptedService::new(Self::new(inner), interceptor)
}
/// Enable decompressing requests with the given encoding.
#[must_use]
pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self {
self.accept_compression_encodings.enable(encoding);
self
}
/// Compress responses with the given encoding, if the client supports it.
#[must_use]
pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self {
self.send_compression_encodings.enable(encoding);
self
}
/// Limits the maximum size of a decoded message.
///
/// Default: `4MB`
#[must_use]
pub fn max_decoding_message_size(mut self, limit: usize) -> Self {
self.max_decoding_message_size = Some(limit);
self
}
/// Limits the maximum size of an encoded message.
///
/// Default: `usize::MAX`
#[must_use]
pub fn max_encoding_message_size(mut self, limit: usize) -> Self {
self.max_encoding_message_size = Some(limit);
self
}
}
impl<T, B> tonic::codegen::Service<http::Request<B>> for TableScriptServer<T>
where
T: TableScript,
B: Body + std::marker::Send + 'static,
B::Error: Into<StdError> + std::marker::Send + 'static,
{
type Response = http::Response<tonic::body::Body>;
type Error = std::convert::Infallible;
type Future = BoxFuture<Self::Response, Self::Error>;
fn poll_ready(
&mut self,
_cx: &mut Context<'_>,
) -> Poll<std::result::Result<(), Self::Error>> {
Poll::Ready(Ok(()))
}
fn call(&mut self, req: http::Request<B>) -> Self::Future {
match req.uri().path() {
"/komp_ac.table_script.TableScript/PostTableScript" => {
#[allow(non_camel_case_types)]
struct PostTableScriptSvc<T: TableScript>(pub Arc<T>);
impl<
T: TableScript,
> tonic::server::UnaryService<super::PostTableScriptRequest>
for PostTableScriptSvc<T> {
type Response = super::TableScriptResponse;
type Future = BoxFuture<
tonic::Response<Self::Response>,
tonic::Status,
>;
fn call(
&mut self,
request: tonic::Request<super::PostTableScriptRequest>,
) -> Self::Future {
let inner = Arc::clone(&self.0);
let fut = async move {
<T as TableScript>::post_table_script(&inner, request).await
};
Box::pin(fut)
}
}
let accept_compression_encodings = self.accept_compression_encodings;
let send_compression_encodings = self.send_compression_encodings;
let max_decoding_message_size = self.max_decoding_message_size;
let max_encoding_message_size = self.max_encoding_message_size;
let inner = self.inner.clone();
let fut = async move {
let method = PostTableScriptSvc(inner);
let codec = tonic::codec::ProstCodec::default();
let mut grpc = tonic::server::Grpc::new(codec)
.apply_compression_config(
accept_compression_encodings,
send_compression_encodings,
)
.apply_max_message_size_config(
max_decoding_message_size,
max_encoding_message_size,
);
let res = grpc.unary(method, req).await;
Ok(res)
};
Box::pin(fut)
}
_ => {
Box::pin(async move {
let mut response = http::Response::new(
tonic::body::Body::default(),
);
let headers = response.headers_mut();
headers
.insert(
tonic::Status::GRPC_STATUS,
(tonic::Code::Unimplemented as i32).into(),
);
headers
.insert(
http::header::CONTENT_TYPE,
tonic::metadata::GRPC_CONTENT_TYPE,
);
Ok(response)
})
}
}
}
}
impl<T> Clone for TableScriptServer<T> {
fn clone(&self) -> Self {
let inner = self.inner.clone();
Self {
inner,
accept_compression_encodings: self.accept_compression_encodings,
send_compression_encodings: self.send_compression_encodings,
max_decoding_message_size: self.max_decoding_message_size,
max_encoding_message_size: self.max_encoding_message_size,
}
}
}
/// Generated gRPC service name
pub const SERVICE_NAME: &str = "komp_ac.table_script.TableScript";
impl<T> tonic::server::NamedService for TableScriptServer<T> {
const NAME: &'static str = SERVICE_NAME;
}
}
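The table-script service exposes a single unary call; a minimal client sketch is shown below. The endpoint, target column, description and script body are placeholders (the script text is an arbitrary example, not a statement about the scripting language the backend expects).

async fn post_example_script() -> Result<(), Box<dyn std::error::Error>> {
    let mut client = table_script_client::TableScriptClient::connect(
        "http://127.0.0.1:50051",
    )
    .await?;
    let response = client
        .post_table_script(PostTableScriptRequest {
            table_definition_id: 1,
            target_column: "total".into(),
            script: "(+ 1 2)".into(),
            description: "example computed column".into(),
        })
        .await?;
    let reply = response.into_inner();
    println!("script id={} warnings={}", reply.id, reply.warnings);
    Ok(())
}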


@@ -0,0 +1,336 @@
// This file is @generated by prost-build.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GetTableStructureRequest {
/// e.g., "default"
#[prost(string, tag = "1")]
pub profile_name: ::prost::alloc::string::String,
/// e.g., "2025_adresar6"
#[prost(string, tag = "2")]
pub table_name: ::prost::alloc::string::String,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct TableStructureResponse {
#[prost(message, repeated, tag = "1")]
pub columns: ::prost::alloc::vec::Vec<TableColumn>,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct TableColumn {
#[prost(string, tag = "1")]
pub name: ::prost::alloc::string::String,
/// e.g., "TEXT", "BIGINT", "VARCHAR(255)", "TIMESTAMPTZ"
#[prost(string, tag = "2")]
pub data_type: ::prost::alloc::string::String,
#[prost(bool, tag = "3")]
pub is_nullable: bool,
#[prost(bool, tag = "4")]
pub is_primary_key: bool,
}
/// Generated client implementations.
pub mod table_structure_service_client {
#![allow(
unused_variables,
dead_code,
missing_docs,
clippy::wildcard_imports,
clippy::let_unit_value,
)]
use tonic::codegen::*;
use tonic::codegen::http::Uri;
#[derive(Debug, Clone)]
pub struct TableStructureServiceClient<T> {
inner: tonic::client::Grpc<T>,
}
impl TableStructureServiceClient<tonic::transport::Channel> {
/// Attempt to create a new client by connecting to a given endpoint.
pub async fn connect<D>(dst: D) -> Result<Self, tonic::transport::Error>
where
D: TryInto<tonic::transport::Endpoint>,
D::Error: Into<StdError>,
{
let conn = tonic::transport::Endpoint::new(dst)?.connect().await?;
Ok(Self::new(conn))
}
}
impl<T> TableStructureServiceClient<T>
where
T: tonic::client::GrpcService<tonic::body::Body>,
T::Error: Into<StdError>,
T::ResponseBody: Body<Data = Bytes> + std::marker::Send + 'static,
<T::ResponseBody as Body>::Error: Into<StdError> + std::marker::Send,
{
pub fn new(inner: T) -> Self {
let inner = tonic::client::Grpc::new(inner);
Self { inner }
}
pub fn with_origin(inner: T, origin: Uri) -> Self {
let inner = tonic::client::Grpc::with_origin(inner, origin);
Self { inner }
}
pub fn with_interceptor<F>(
inner: T,
interceptor: F,
) -> TableStructureServiceClient<InterceptedService<T, F>>
where
F: tonic::service::Interceptor,
T::ResponseBody: Default,
T: tonic::codegen::Service<
http::Request<tonic::body::Body>,
Response = http::Response<
<T as tonic::client::GrpcService<tonic::body::Body>>::ResponseBody,
>,
>,
<T as tonic::codegen::Service<
http::Request<tonic::body::Body>,
>>::Error: Into<StdError> + std::marker::Send + std::marker::Sync,
{
TableStructureServiceClient::new(InterceptedService::new(inner, interceptor))
}
/// Compress requests with the given encoding.
///
        /// This requires the server to support it; otherwise it might respond with an
/// error.
#[must_use]
pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self {
self.inner = self.inner.send_compressed(encoding);
self
}
/// Enable decompressing responses.
#[must_use]
pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self {
self.inner = self.inner.accept_compressed(encoding);
self
}
/// Limits the maximum size of a decoded message.
///
/// Default: `4MB`
#[must_use]
pub fn max_decoding_message_size(mut self, limit: usize) -> Self {
self.inner = self.inner.max_decoding_message_size(limit);
self
}
/// Limits the maximum size of an encoded message.
///
/// Default: `usize::MAX`
#[must_use]
pub fn max_encoding_message_size(mut self, limit: usize) -> Self {
self.inner = self.inner.max_encoding_message_size(limit);
self
}
pub async fn get_table_structure(
&mut self,
request: impl tonic::IntoRequest<super::GetTableStructureRequest>,
) -> std::result::Result<
tonic::Response<super::TableStructureResponse>,
tonic::Status,
> {
self.inner
.ready()
.await
.map_err(|e| {
tonic::Status::unknown(
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/komp_ac.table_structure.TableStructureService/GetTableStructure",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(
GrpcMethod::new(
"komp_ac.table_structure.TableStructureService",
"GetTableStructure",
),
);
self.inner.unary(req, path, codec).await
}
}
}
/// Generated server implementations.
pub mod table_structure_service_server {
#![allow(
unused_variables,
dead_code,
missing_docs,
clippy::wildcard_imports,
clippy::let_unit_value,
)]
use tonic::codegen::*;
/// Generated trait containing gRPC methods that should be implemented for use with TableStructureServiceServer.
#[async_trait]
pub trait TableStructureService: std::marker::Send + std::marker::Sync + 'static {
async fn get_table_structure(
&self,
request: tonic::Request<super::GetTableStructureRequest>,
) -> std::result::Result<
tonic::Response<super::TableStructureResponse>,
tonic::Status,
>;
}
#[derive(Debug)]
pub struct TableStructureServiceServer<T> {
inner: Arc<T>,
accept_compression_encodings: EnabledCompressionEncodings,
send_compression_encodings: EnabledCompressionEncodings,
max_decoding_message_size: Option<usize>,
max_encoding_message_size: Option<usize>,
}
impl<T> TableStructureServiceServer<T> {
pub fn new(inner: T) -> Self {
Self::from_arc(Arc::new(inner))
}
pub fn from_arc(inner: Arc<T>) -> Self {
Self {
inner,
accept_compression_encodings: Default::default(),
send_compression_encodings: Default::default(),
max_decoding_message_size: None,
max_encoding_message_size: None,
}
}
pub fn with_interceptor<F>(
inner: T,
interceptor: F,
) -> InterceptedService<Self, F>
where
F: tonic::service::Interceptor,
{
InterceptedService::new(Self::new(inner), interceptor)
}
/// Enable decompressing requests with the given encoding.
#[must_use]
pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self {
self.accept_compression_encodings.enable(encoding);
self
}
/// Compress responses with the given encoding, if the client supports it.
#[must_use]
pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self {
self.send_compression_encodings.enable(encoding);
self
}
/// Limits the maximum size of a decoded message.
///
/// Default: `4MB`
#[must_use]
pub fn max_decoding_message_size(mut self, limit: usize) -> Self {
self.max_decoding_message_size = Some(limit);
self
}
/// Limits the maximum size of an encoded message.
///
/// Default: `usize::MAX`
#[must_use]
pub fn max_encoding_message_size(mut self, limit: usize) -> Self {
self.max_encoding_message_size = Some(limit);
self
}
}
impl<T, B> tonic::codegen::Service<http::Request<B>>
for TableStructureServiceServer<T>
where
T: TableStructureService,
B: Body + std::marker::Send + 'static,
B::Error: Into<StdError> + std::marker::Send + 'static,
{
type Response = http::Response<tonic::body::Body>;
type Error = std::convert::Infallible;
type Future = BoxFuture<Self::Response, Self::Error>;
fn poll_ready(
&mut self,
_cx: &mut Context<'_>,
) -> Poll<std::result::Result<(), Self::Error>> {
Poll::Ready(Ok(()))
}
fn call(&mut self, req: http::Request<B>) -> Self::Future {
match req.uri().path() {
"/komp_ac.table_structure.TableStructureService/GetTableStructure" => {
#[allow(non_camel_case_types)]
struct GetTableStructureSvc<T: TableStructureService>(pub Arc<T>);
impl<
T: TableStructureService,
> tonic::server::UnaryService<super::GetTableStructureRequest>
for GetTableStructureSvc<T> {
type Response = super::TableStructureResponse;
type Future = BoxFuture<
tonic::Response<Self::Response>,
tonic::Status,
>;
fn call(
&mut self,
request: tonic::Request<super::GetTableStructureRequest>,
) -> Self::Future {
let inner = Arc::clone(&self.0);
let fut = async move {
<T as TableStructureService>::get_table_structure(
&inner,
request,
)
.await
};
Box::pin(fut)
}
}
let accept_compression_encodings = self.accept_compression_encodings;
let send_compression_encodings = self.send_compression_encodings;
let max_decoding_message_size = self.max_decoding_message_size;
let max_encoding_message_size = self.max_encoding_message_size;
let inner = self.inner.clone();
let fut = async move {
let method = GetTableStructureSvc(inner);
let codec = tonic::codec::ProstCodec::default();
let mut grpc = tonic::server::Grpc::new(codec)
.apply_compression_config(
accept_compression_encodings,
send_compression_encodings,
)
.apply_max_message_size_config(
max_decoding_message_size,
max_encoding_message_size,
);
let res = grpc.unary(method, req).await;
Ok(res)
};
Box::pin(fut)
}
_ => {
Box::pin(async move {
let mut response = http::Response::new(
tonic::body::Body::default(),
);
let headers = response.headers_mut();
headers
.insert(
tonic::Status::GRPC_STATUS,
(tonic::Code::Unimplemented as i32).into(),
);
headers
.insert(
http::header::CONTENT_TYPE,
tonic::metadata::GRPC_CONTENT_TYPE,
);
Ok(response)
})
}
}
}
}
impl<T> Clone for TableStructureServiceServer<T> {
fn clone(&self) -> Self {
let inner = self.inner.clone();
Self {
inner,
accept_compression_encodings: self.accept_compression_encodings,
send_compression_encodings: self.send_compression_encodings,
max_decoding_message_size: self.max_decoding_message_size,
max_encoding_message_size: self.max_encoding_message_size,
}
}
}
/// Generated gRPC service name
pub const SERVICE_NAME: &str = "komp_ac.table_structure.TableStructureService";
impl<T> tonic::server::NamedService for TableStructureServiceServer<T> {
const NAME: &'static str = SERVICE_NAME;
}
}
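
For orientation, here is a minimal client-side sketch of the new TableStructureService generated above. The Rust module path (komp_ac::table_structure), the endpoint address, and the tokio runtime are assumptions; the method name, request fields, and the example values "default" and "2025_adresar6" come from the generated code itself.

```rust
// Hypothetical usage sketch; module path and endpoint are assumptions, not part of the diff.
use komp_ac::table_structure::{
    table_structure_service_client::TableStructureServiceClient,
    GetTableStructureRequest,
};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Connect to a locally running backend (address is illustrative).
    let mut client =
        TableStructureServiceClient::connect("http://127.0.0.1:50051").await?;

    // Ask for the column layout of one dynamically created table.
    let request = GetTableStructureRequest {
        profile_name: "default".into(),
        table_name: "2025_adresar6".into(),
    };
    let columns = client
        .get_table_structure(request)
        .await?
        .into_inner()
        .columns;

    for column in columns {
        println!(
            "{} {} (nullable: {}, primary key: {})",
            column.name, column.data_type, column.is_nullable, column.is_primary_key
        );
    }
    Ok(())
}
```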

View File

@@ -0,0 +1,794 @@
// This file is @generated by prost-build.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct PostTableDataRequest {
#[prost(string, tag = "1")]
pub profile_name: ::prost::alloc::string::String,
#[prost(string, tag = "2")]
pub table_name: ::prost::alloc::string::String,
#[prost(map = "string, message", tag = "3")]
pub data: ::std::collections::HashMap<
::prost::alloc::string::String,
::prost_types::Value,
>,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct PostTableDataResponse {
#[prost(bool, tag = "1")]
pub success: bool,
#[prost(string, tag = "2")]
pub message: ::prost::alloc::string::String,
#[prost(int64, tag = "3")]
pub inserted_id: i64,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct PutTableDataRequest {
#[prost(string, tag = "1")]
pub profile_name: ::prost::alloc::string::String,
#[prost(string, tag = "2")]
pub table_name: ::prost::alloc::string::String,
#[prost(int64, tag = "3")]
pub id: i64,
#[prost(map = "string, message", tag = "4")]
pub data: ::std::collections::HashMap<
::prost::alloc::string::String,
::prost_types::Value,
>,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct PutTableDataResponse {
#[prost(bool, tag = "1")]
pub success: bool,
#[prost(string, tag = "2")]
pub message: ::prost::alloc::string::String,
#[prost(int64, tag = "3")]
pub updated_id: i64,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DeleteTableDataRequest {
#[prost(string, tag = "1")]
pub profile_name: ::prost::alloc::string::String,
#[prost(string, tag = "2")]
pub table_name: ::prost::alloc::string::String,
#[prost(int64, tag = "3")]
pub record_id: i64,
}
#[derive(Clone, Copy, PartialEq, ::prost::Message)]
pub struct DeleteTableDataResponse {
#[prost(bool, tag = "1")]
pub success: bool,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GetTableDataRequest {
#[prost(string, tag = "1")]
pub profile_name: ::prost::alloc::string::String,
#[prost(string, tag = "2")]
pub table_name: ::prost::alloc::string::String,
#[prost(int64, tag = "3")]
pub id: i64,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GetTableDataResponse {
#[prost(map = "string, string", tag = "1")]
pub data: ::std::collections::HashMap<
::prost::alloc::string::String,
::prost::alloc::string::String,
>,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GetTableDataCountRequest {
#[prost(string, tag = "1")]
pub profile_name: ::prost::alloc::string::String,
#[prost(string, tag = "2")]
pub table_name: ::prost::alloc::string::String,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GetTableDataByPositionRequest {
#[prost(string, tag = "1")]
pub profile_name: ::prost::alloc::string::String,
#[prost(string, tag = "2")]
pub table_name: ::prost::alloc::string::String,
#[prost(int32, tag = "3")]
pub position: i32,
}
/// Generated client implementations.
pub mod tables_data_client {
#![allow(
unused_variables,
dead_code,
missing_docs,
clippy::wildcard_imports,
clippy::let_unit_value,
)]
use tonic::codegen::*;
use tonic::codegen::http::Uri;
#[derive(Debug, Clone)]
pub struct TablesDataClient<T> {
inner: tonic::client::Grpc<T>,
}
impl TablesDataClient<tonic::transport::Channel> {
/// Attempt to create a new client by connecting to a given endpoint.
pub async fn connect<D>(dst: D) -> Result<Self, tonic::transport::Error>
where
D: TryInto<tonic::transport::Endpoint>,
D::Error: Into<StdError>,
{
let conn = tonic::transport::Endpoint::new(dst)?.connect().await?;
Ok(Self::new(conn))
}
}
impl<T> TablesDataClient<T>
where
T: tonic::client::GrpcService<tonic::body::Body>,
T::Error: Into<StdError>,
T::ResponseBody: Body<Data = Bytes> + std::marker::Send + 'static,
<T::ResponseBody as Body>::Error: Into<StdError> + std::marker::Send,
{
pub fn new(inner: T) -> Self {
let inner = tonic::client::Grpc::new(inner);
Self { inner }
}
pub fn with_origin(inner: T, origin: Uri) -> Self {
let inner = tonic::client::Grpc::with_origin(inner, origin);
Self { inner }
}
pub fn with_interceptor<F>(
inner: T,
interceptor: F,
) -> TablesDataClient<InterceptedService<T, F>>
where
F: tonic::service::Interceptor,
T::ResponseBody: Default,
T: tonic::codegen::Service<
http::Request<tonic::body::Body>,
Response = http::Response<
<T as tonic::client::GrpcService<tonic::body::Body>>::ResponseBody,
>,
>,
<T as tonic::codegen::Service<
http::Request<tonic::body::Body>,
>>::Error: Into<StdError> + std::marker::Send + std::marker::Sync,
{
TablesDataClient::new(InterceptedService::new(inner, interceptor))
}
/// Compress requests with the given encoding.
///
/// This requires the server to support it otherwise it might respond with an
/// error.
#[must_use]
pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self {
self.inner = self.inner.send_compressed(encoding);
self
}
/// Enable decompressing responses.
#[must_use]
pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self {
self.inner = self.inner.accept_compressed(encoding);
self
}
/// Limits the maximum size of a decoded message.
///
/// Default: `4MB`
#[must_use]
pub fn max_decoding_message_size(mut self, limit: usize) -> Self {
self.inner = self.inner.max_decoding_message_size(limit);
self
}
/// Limits the maximum size of an encoded message.
///
/// Default: `usize::MAX`
#[must_use]
pub fn max_encoding_message_size(mut self, limit: usize) -> Self {
self.inner = self.inner.max_encoding_message_size(limit);
self
}
pub async fn post_table_data(
&mut self,
request: impl tonic::IntoRequest<super::PostTableDataRequest>,
) -> std::result::Result<
tonic::Response<super::PostTableDataResponse>,
tonic::Status,
> {
self.inner
.ready()
.await
.map_err(|e| {
tonic::Status::unknown(
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/komp_ac.tables_data.TablesData/PostTableData",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(
GrpcMethod::new("komp_ac.tables_data.TablesData", "PostTableData"),
);
self.inner.unary(req, path, codec).await
}
pub async fn put_table_data(
&mut self,
request: impl tonic::IntoRequest<super::PutTableDataRequest>,
) -> std::result::Result<
tonic::Response<super::PutTableDataResponse>,
tonic::Status,
> {
self.inner
.ready()
.await
.map_err(|e| {
tonic::Status::unknown(
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/komp_ac.tables_data.TablesData/PutTableData",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(
GrpcMethod::new("komp_ac.tables_data.TablesData", "PutTableData"),
);
self.inner.unary(req, path, codec).await
}
pub async fn delete_table_data(
&mut self,
request: impl tonic::IntoRequest<super::DeleteTableDataRequest>,
) -> std::result::Result<
tonic::Response<super::DeleteTableDataResponse>,
tonic::Status,
> {
self.inner
.ready()
.await
.map_err(|e| {
tonic::Status::unknown(
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/komp_ac.tables_data.TablesData/DeleteTableData",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(
GrpcMethod::new("komp_ac.tables_data.TablesData", "DeleteTableData"),
);
self.inner.unary(req, path, codec).await
}
pub async fn get_table_data(
&mut self,
request: impl tonic::IntoRequest<super::GetTableDataRequest>,
) -> std::result::Result<
tonic::Response<super::GetTableDataResponse>,
tonic::Status,
> {
self.inner
.ready()
.await
.map_err(|e| {
tonic::Status::unknown(
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/komp_ac.tables_data.TablesData/GetTableData",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(
GrpcMethod::new("komp_ac.tables_data.TablesData", "GetTableData"),
);
self.inner.unary(req, path, codec).await
}
pub async fn get_table_data_count(
&mut self,
request: impl tonic::IntoRequest<super::GetTableDataCountRequest>,
) -> std::result::Result<
tonic::Response<super::super::common::CountResponse>,
tonic::Status,
> {
self.inner
.ready()
.await
.map_err(|e| {
tonic::Status::unknown(
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/komp_ac.tables_data.TablesData/GetTableDataCount",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(
GrpcMethod::new(
"komp_ac.tables_data.TablesData",
"GetTableDataCount",
),
);
self.inner.unary(req, path, codec).await
}
pub async fn get_table_data_by_position(
&mut self,
request: impl tonic::IntoRequest<super::GetTableDataByPositionRequest>,
) -> std::result::Result<
tonic::Response<super::GetTableDataResponse>,
tonic::Status,
> {
self.inner
.ready()
.await
.map_err(|e| {
tonic::Status::unknown(
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/komp_ac.tables_data.TablesData/GetTableDataByPosition",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(
GrpcMethod::new(
"komp_ac.tables_data.TablesData",
"GetTableDataByPosition",
),
);
self.inner.unary(req, path, codec).await
}
}
}
/// Generated server implementations.
pub mod tables_data_server {
#![allow(
unused_variables,
dead_code,
missing_docs,
clippy::wildcard_imports,
clippy::let_unit_value,
)]
use tonic::codegen::*;
/// Generated trait containing gRPC methods that should be implemented for use with TablesDataServer.
#[async_trait]
pub trait TablesData: std::marker::Send + std::marker::Sync + 'static {
async fn post_table_data(
&self,
request: tonic::Request<super::PostTableDataRequest>,
) -> std::result::Result<
tonic::Response<super::PostTableDataResponse>,
tonic::Status,
>;
async fn put_table_data(
&self,
request: tonic::Request<super::PutTableDataRequest>,
) -> std::result::Result<
tonic::Response<super::PutTableDataResponse>,
tonic::Status,
>;
async fn delete_table_data(
&self,
request: tonic::Request<super::DeleteTableDataRequest>,
) -> std::result::Result<
tonic::Response<super::DeleteTableDataResponse>,
tonic::Status,
>;
async fn get_table_data(
&self,
request: tonic::Request<super::GetTableDataRequest>,
) -> std::result::Result<
tonic::Response<super::GetTableDataResponse>,
tonic::Status,
>;
async fn get_table_data_count(
&self,
request: tonic::Request<super::GetTableDataCountRequest>,
) -> std::result::Result<
tonic::Response<super::super::common::CountResponse>,
tonic::Status,
>;
async fn get_table_data_by_position(
&self,
request: tonic::Request<super::GetTableDataByPositionRequest>,
) -> std::result::Result<
tonic::Response<super::GetTableDataResponse>,
tonic::Status,
>;
}
#[derive(Debug)]
pub struct TablesDataServer<T> {
inner: Arc<T>,
accept_compression_encodings: EnabledCompressionEncodings,
send_compression_encodings: EnabledCompressionEncodings,
max_decoding_message_size: Option<usize>,
max_encoding_message_size: Option<usize>,
}
impl<T> TablesDataServer<T> {
pub fn new(inner: T) -> Self {
Self::from_arc(Arc::new(inner))
}
pub fn from_arc(inner: Arc<T>) -> Self {
Self {
inner,
accept_compression_encodings: Default::default(),
send_compression_encodings: Default::default(),
max_decoding_message_size: None,
max_encoding_message_size: None,
}
}
pub fn with_interceptor<F>(
inner: T,
interceptor: F,
) -> InterceptedService<Self, F>
where
F: tonic::service::Interceptor,
{
InterceptedService::new(Self::new(inner), interceptor)
}
/// Enable decompressing requests with the given encoding.
#[must_use]
pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self {
self.accept_compression_encodings.enable(encoding);
self
}
/// Compress responses with the given encoding, if the client supports it.
#[must_use]
pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self {
self.send_compression_encodings.enable(encoding);
self
}
/// Limits the maximum size of a decoded message.
///
/// Default: `4MB`
#[must_use]
pub fn max_decoding_message_size(mut self, limit: usize) -> Self {
self.max_decoding_message_size = Some(limit);
self
}
/// Limits the maximum size of an encoded message.
///
/// Default: `usize::MAX`
#[must_use]
pub fn max_encoding_message_size(mut self, limit: usize) -> Self {
self.max_encoding_message_size = Some(limit);
self
}
}
impl<T, B> tonic::codegen::Service<http::Request<B>> for TablesDataServer<T>
where
T: TablesData,
B: Body + std::marker::Send + 'static,
B::Error: Into<StdError> + std::marker::Send + 'static,
{
type Response = http::Response<tonic::body::Body>;
type Error = std::convert::Infallible;
type Future = BoxFuture<Self::Response, Self::Error>;
fn poll_ready(
&mut self,
_cx: &mut Context<'_>,
) -> Poll<std::result::Result<(), Self::Error>> {
Poll::Ready(Ok(()))
}
fn call(&mut self, req: http::Request<B>) -> Self::Future {
match req.uri().path() {
"/komp_ac.tables_data.TablesData/PostTableData" => {
#[allow(non_camel_case_types)]
struct PostTableDataSvc<T: TablesData>(pub Arc<T>);
impl<
T: TablesData,
> tonic::server::UnaryService<super::PostTableDataRequest>
for PostTableDataSvc<T> {
type Response = super::PostTableDataResponse;
type Future = BoxFuture<
tonic::Response<Self::Response>,
tonic::Status,
>;
fn call(
&mut self,
request: tonic::Request<super::PostTableDataRequest>,
) -> Self::Future {
let inner = Arc::clone(&self.0);
let fut = async move {
<T as TablesData>::post_table_data(&inner, request).await
};
Box::pin(fut)
}
}
let accept_compression_encodings = self.accept_compression_encodings;
let send_compression_encodings = self.send_compression_encodings;
let max_decoding_message_size = self.max_decoding_message_size;
let max_encoding_message_size = self.max_encoding_message_size;
let inner = self.inner.clone();
let fut = async move {
let method = PostTableDataSvc(inner);
let codec = tonic::codec::ProstCodec::default();
let mut grpc = tonic::server::Grpc::new(codec)
.apply_compression_config(
accept_compression_encodings,
send_compression_encodings,
)
.apply_max_message_size_config(
max_decoding_message_size,
max_encoding_message_size,
);
let res = grpc.unary(method, req).await;
Ok(res)
};
Box::pin(fut)
}
"/komp_ac.tables_data.TablesData/PutTableData" => {
#[allow(non_camel_case_types)]
struct PutTableDataSvc<T: TablesData>(pub Arc<T>);
impl<
T: TablesData,
> tonic::server::UnaryService<super::PutTableDataRequest>
for PutTableDataSvc<T> {
type Response = super::PutTableDataResponse;
type Future = BoxFuture<
tonic::Response<Self::Response>,
tonic::Status,
>;
fn call(
&mut self,
request: tonic::Request<super::PutTableDataRequest>,
) -> Self::Future {
let inner = Arc::clone(&self.0);
let fut = async move {
<T as TablesData>::put_table_data(&inner, request).await
};
Box::pin(fut)
}
}
let accept_compression_encodings = self.accept_compression_encodings;
let send_compression_encodings = self.send_compression_encodings;
let max_decoding_message_size = self.max_decoding_message_size;
let max_encoding_message_size = self.max_encoding_message_size;
let inner = self.inner.clone();
let fut = async move {
let method = PutTableDataSvc(inner);
let codec = tonic::codec::ProstCodec::default();
let mut grpc = tonic::server::Grpc::new(codec)
.apply_compression_config(
accept_compression_encodings,
send_compression_encodings,
)
.apply_max_message_size_config(
max_decoding_message_size,
max_encoding_message_size,
);
let res = grpc.unary(method, req).await;
Ok(res)
};
Box::pin(fut)
}
"/komp_ac.tables_data.TablesData/DeleteTableData" => {
#[allow(non_camel_case_types)]
struct DeleteTableDataSvc<T: TablesData>(pub Arc<T>);
impl<
T: TablesData,
> tonic::server::UnaryService<super::DeleteTableDataRequest>
for DeleteTableDataSvc<T> {
type Response = super::DeleteTableDataResponse;
type Future = BoxFuture<
tonic::Response<Self::Response>,
tonic::Status,
>;
fn call(
&mut self,
request: tonic::Request<super::DeleteTableDataRequest>,
) -> Self::Future {
let inner = Arc::clone(&self.0);
let fut = async move {
<T as TablesData>::delete_table_data(&inner, request).await
};
Box::pin(fut)
}
}
let accept_compression_encodings = self.accept_compression_encodings;
let send_compression_encodings = self.send_compression_encodings;
let max_decoding_message_size = self.max_decoding_message_size;
let max_encoding_message_size = self.max_encoding_message_size;
let inner = self.inner.clone();
let fut = async move {
let method = DeleteTableDataSvc(inner);
let codec = tonic::codec::ProstCodec::default();
let mut grpc = tonic::server::Grpc::new(codec)
.apply_compression_config(
accept_compression_encodings,
send_compression_encodings,
)
.apply_max_message_size_config(
max_decoding_message_size,
max_encoding_message_size,
);
let res = grpc.unary(method, req).await;
Ok(res)
};
Box::pin(fut)
}
"/komp_ac.tables_data.TablesData/GetTableData" => {
#[allow(non_camel_case_types)]
struct GetTableDataSvc<T: TablesData>(pub Arc<T>);
impl<
T: TablesData,
> tonic::server::UnaryService<super::GetTableDataRequest>
for GetTableDataSvc<T> {
type Response = super::GetTableDataResponse;
type Future = BoxFuture<
tonic::Response<Self::Response>,
tonic::Status,
>;
fn call(
&mut self,
request: tonic::Request<super::GetTableDataRequest>,
) -> Self::Future {
let inner = Arc::clone(&self.0);
let fut = async move {
<T as TablesData>::get_table_data(&inner, request).await
};
Box::pin(fut)
}
}
let accept_compression_encodings = self.accept_compression_encodings;
let send_compression_encodings = self.send_compression_encodings;
let max_decoding_message_size = self.max_decoding_message_size;
let max_encoding_message_size = self.max_encoding_message_size;
let inner = self.inner.clone();
let fut = async move {
let method = GetTableDataSvc(inner);
let codec = tonic::codec::ProstCodec::default();
let mut grpc = tonic::server::Grpc::new(codec)
.apply_compression_config(
accept_compression_encodings,
send_compression_encodings,
)
.apply_max_message_size_config(
max_decoding_message_size,
max_encoding_message_size,
);
let res = grpc.unary(method, req).await;
Ok(res)
};
Box::pin(fut)
}
"/komp_ac.tables_data.TablesData/GetTableDataCount" => {
#[allow(non_camel_case_types)]
struct GetTableDataCountSvc<T: TablesData>(pub Arc<T>);
impl<
T: TablesData,
> tonic::server::UnaryService<super::GetTableDataCountRequest>
for GetTableDataCountSvc<T> {
type Response = super::super::common::CountResponse;
type Future = BoxFuture<
tonic::Response<Self::Response>,
tonic::Status,
>;
fn call(
&mut self,
request: tonic::Request<super::GetTableDataCountRequest>,
) -> Self::Future {
let inner = Arc::clone(&self.0);
let fut = async move {
<T as TablesData>::get_table_data_count(&inner, request)
.await
};
Box::pin(fut)
}
}
let accept_compression_encodings = self.accept_compression_encodings;
let send_compression_encodings = self.send_compression_encodings;
let max_decoding_message_size = self.max_decoding_message_size;
let max_encoding_message_size = self.max_encoding_message_size;
let inner = self.inner.clone();
let fut = async move {
let method = GetTableDataCountSvc(inner);
let codec = tonic::codec::ProstCodec::default();
let mut grpc = tonic::server::Grpc::new(codec)
.apply_compression_config(
accept_compression_encodings,
send_compression_encodings,
)
.apply_max_message_size_config(
max_decoding_message_size,
max_encoding_message_size,
);
let res = grpc.unary(method, req).await;
Ok(res)
};
Box::pin(fut)
}
"/komp_ac.tables_data.TablesData/GetTableDataByPosition" => {
#[allow(non_camel_case_types)]
struct GetTableDataByPositionSvc<T: TablesData>(pub Arc<T>);
impl<
T: TablesData,
> tonic::server::UnaryService<super::GetTableDataByPositionRequest>
for GetTableDataByPositionSvc<T> {
type Response = super::GetTableDataResponse;
type Future = BoxFuture<
tonic::Response<Self::Response>,
tonic::Status,
>;
fn call(
&mut self,
request: tonic::Request<super::GetTableDataByPositionRequest>,
) -> Self::Future {
let inner = Arc::clone(&self.0);
let fut = async move {
<T as TablesData>::get_table_data_by_position(
&inner,
request,
)
.await
};
Box::pin(fut)
}
}
let accept_compression_encodings = self.accept_compression_encodings;
let send_compression_encodings = self.send_compression_encodings;
let max_decoding_message_size = self.max_decoding_message_size;
let max_encoding_message_size = self.max_encoding_message_size;
let inner = self.inner.clone();
let fut = async move {
let method = GetTableDataByPositionSvc(inner);
let codec = tonic::codec::ProstCodec::default();
let mut grpc = tonic::server::Grpc::new(codec)
.apply_compression_config(
accept_compression_encodings,
send_compression_encodings,
)
.apply_max_message_size_config(
max_decoding_message_size,
max_encoding_message_size,
);
let res = grpc.unary(method, req).await;
Ok(res)
};
Box::pin(fut)
}
_ => {
Box::pin(async move {
let mut response = http::Response::new(
tonic::body::Body::default(),
);
let headers = response.headers_mut();
headers
.insert(
tonic::Status::GRPC_STATUS,
(tonic::Code::Unimplemented as i32).into(),
);
headers
.insert(
http::header::CONTENT_TYPE,
tonic::metadata::GRPC_CONTENT_TYPE,
);
Ok(response)
})
}
}
}
}
impl<T> Clone for TablesDataServer<T> {
fn clone(&self) -> Self {
let inner = self.inner.clone();
Self {
inner,
accept_compression_encodings: self.accept_compression_encodings,
send_compression_encodings: self.send_compression_encodings,
max_decoding_message_size: self.max_decoding_message_size,
max_encoding_message_size: self.max_encoding_message_size,
}
}
}
/// Generated gRPC service name
pub const SERVICE_NAME: &str = "komp_ac.tables_data.TablesData";
impl<T> tonic::server::NamedService for TablesDataServer<T> {
const NAME: &'static str = SERVICE_NAME;
}
}
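
The TablesData RPCs carry row values as map<string, google.protobuf.Value>, which prost exposes as HashMap<String, prost_types::Value>, so each column value has to be wrapped in a Kind variant. A minimal insert sketch, assuming the module path komp_ac::tables_data, an illustrative endpoint, and hypothetical column names:

```rust
// Hypothetical usage sketch; module path, endpoint, and column names are assumptions.
use std::collections::HashMap;

use komp_ac::tables_data::{tables_data_client::TablesDataClient, PostTableDataRequest};
use prost_types::{value::Kind, Value};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let mut client = TablesDataClient::connect("http://127.0.0.1:50051").await?;

    // Every column value travels as a protobuf Value; strings and numbers
    // use different Kind variants.
    let mut data = HashMap::new();
    data.insert(
        "firma".to_string(),
        Value { kind: Some(Kind::StringValue("ACME s.r.o.".to_string())) },
    );
    data.insert(
        "pocet".to_string(),
        Value { kind: Some(Kind::NumberValue(3.0)) },
    );

    let response = client
        .post_table_data(PostTableDataRequest {
            profile_name: "default".to_string(),
            table_name: "2025_adresar6".to_string(),
            data,
        })
        .await?
        .into_inner();

    println!(
        "success: {}, inserted id: {}, message: {}",
        response.success, response.inserted_id, response.message
    );
    Ok(())
}
```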

View File

@@ -0,0 +1,712 @@
// This file is @generated by prost-build.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct PostUctovnictvoRequest {
#[prost(int64, tag = "1")]
pub adresar_id: i64,
#[prost(string, tag = "2")]
pub c_dokladu: ::prost::alloc::string::String,
/// Use string for simplicity, or use google.protobuf.Timestamp for better date handling
#[prost(string, tag = "3")]
pub datum: ::prost::alloc::string::String,
#[prost(string, tag = "4")]
pub c_faktury: ::prost::alloc::string::String,
#[prost(string, tag = "5")]
pub obsah: ::prost::alloc::string::String,
#[prost(string, tag = "6")]
pub stredisko: ::prost::alloc::string::String,
#[prost(string, tag = "7")]
pub c_uctu: ::prost::alloc::string::String,
#[prost(string, tag = "8")]
pub md: ::prost::alloc::string::String,
#[prost(string, tag = "9")]
pub identif: ::prost::alloc::string::String,
#[prost(string, tag = "10")]
pub poznanka: ::prost::alloc::string::String,
#[prost(string, tag = "11")]
pub firma: ::prost::alloc::string::String,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UctovnictvoResponse {
#[prost(int64, tag = "1")]
pub id: i64,
#[prost(int64, tag = "2")]
pub adresar_id: i64,
#[prost(string, tag = "3")]
pub c_dokladu: ::prost::alloc::string::String,
#[prost(string, tag = "4")]
pub datum: ::prost::alloc::string::String,
#[prost(string, tag = "5")]
pub c_faktury: ::prost::alloc::string::String,
#[prost(string, tag = "6")]
pub obsah: ::prost::alloc::string::String,
#[prost(string, tag = "7")]
pub stredisko: ::prost::alloc::string::String,
#[prost(string, tag = "8")]
pub c_uctu: ::prost::alloc::string::String,
#[prost(string, tag = "9")]
pub md: ::prost::alloc::string::String,
#[prost(string, tag = "10")]
pub identif: ::prost::alloc::string::String,
#[prost(string, tag = "11")]
pub poznanka: ::prost::alloc::string::String,
#[prost(string, tag = "12")]
pub firma: ::prost::alloc::string::String,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct PutUctovnictvoRequest {
#[prost(int64, tag = "1")]
pub id: i64,
#[prost(int64, tag = "2")]
pub adresar_id: i64,
#[prost(string, tag = "3")]
pub c_dokladu: ::prost::alloc::string::String,
#[prost(string, tag = "4")]
pub datum: ::prost::alloc::string::String,
#[prost(string, tag = "5")]
pub c_faktury: ::prost::alloc::string::String,
#[prost(string, tag = "6")]
pub obsah: ::prost::alloc::string::String,
#[prost(string, tag = "7")]
pub stredisko: ::prost::alloc::string::String,
#[prost(string, tag = "8")]
pub c_uctu: ::prost::alloc::string::String,
#[prost(string, tag = "9")]
pub md: ::prost::alloc::string::String,
#[prost(string, tag = "10")]
pub identif: ::prost::alloc::string::String,
#[prost(string, tag = "11")]
pub poznanka: ::prost::alloc::string::String,
#[prost(string, tag = "12")]
pub firma: ::prost::alloc::string::String,
}
#[derive(Clone, Copy, PartialEq, ::prost::Message)]
pub struct GetUctovnictvoRequest {
#[prost(int64, tag = "1")]
pub id: i64,
}
/// Generated client implementations.
pub mod uctovnictvo_client {
#![allow(
unused_variables,
dead_code,
missing_docs,
clippy::wildcard_imports,
clippy::let_unit_value,
)]
use tonic::codegen::*;
use tonic::codegen::http::Uri;
#[derive(Debug, Clone)]
pub struct UctovnictvoClient<T> {
inner: tonic::client::Grpc<T>,
}
impl UctovnictvoClient<tonic::transport::Channel> {
/// Attempt to create a new client by connecting to a given endpoint.
pub async fn connect<D>(dst: D) -> Result<Self, tonic::transport::Error>
where
D: TryInto<tonic::transport::Endpoint>,
D::Error: Into<StdError>,
{
let conn = tonic::transport::Endpoint::new(dst)?.connect().await?;
Ok(Self::new(conn))
}
}
impl<T> UctovnictvoClient<T>
where
T: tonic::client::GrpcService<tonic::body::Body>,
T::Error: Into<StdError>,
T::ResponseBody: Body<Data = Bytes> + std::marker::Send + 'static,
<T::ResponseBody as Body>::Error: Into<StdError> + std::marker::Send,
{
pub fn new(inner: T) -> Self {
let inner = tonic::client::Grpc::new(inner);
Self { inner }
}
pub fn with_origin(inner: T, origin: Uri) -> Self {
let inner = tonic::client::Grpc::with_origin(inner, origin);
Self { inner }
}
pub fn with_interceptor<F>(
inner: T,
interceptor: F,
) -> UctovnictvoClient<InterceptedService<T, F>>
where
F: tonic::service::Interceptor,
T::ResponseBody: Default,
T: tonic::codegen::Service<
http::Request<tonic::body::Body>,
Response = http::Response<
<T as tonic::client::GrpcService<tonic::body::Body>>::ResponseBody,
>,
>,
<T as tonic::codegen::Service<
http::Request<tonic::body::Body>,
>>::Error: Into<StdError> + std::marker::Send + std::marker::Sync,
{
UctovnictvoClient::new(InterceptedService::new(inner, interceptor))
}
/// Compress requests with the given encoding.
///
/// This requires the server to support it otherwise it might respond with an
/// error.
#[must_use]
pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self {
self.inner = self.inner.send_compressed(encoding);
self
}
/// Enable decompressing responses.
#[must_use]
pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self {
self.inner = self.inner.accept_compressed(encoding);
self
}
/// Limits the maximum size of a decoded message.
///
/// Default: `4MB`
#[must_use]
pub fn max_decoding_message_size(mut self, limit: usize) -> Self {
self.inner = self.inner.max_decoding_message_size(limit);
self
}
/// Limits the maximum size of an encoded message.
///
/// Default: `usize::MAX`
#[must_use]
pub fn max_encoding_message_size(mut self, limit: usize) -> Self {
self.inner = self.inner.max_encoding_message_size(limit);
self
}
pub async fn post_uctovnictvo(
&mut self,
request: impl tonic::IntoRequest<super::PostUctovnictvoRequest>,
) -> std::result::Result<
tonic::Response<super::UctovnictvoResponse>,
tonic::Status,
> {
self.inner
.ready()
.await
.map_err(|e| {
tonic::Status::unknown(
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/komp_ac.uctovnictvo.Uctovnictvo/PostUctovnictvo",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(
GrpcMethod::new("komp_ac.uctovnictvo.Uctovnictvo", "PostUctovnictvo"),
);
self.inner.unary(req, path, codec).await
}
pub async fn get_uctovnictvo(
&mut self,
request: impl tonic::IntoRequest<super::GetUctovnictvoRequest>,
) -> std::result::Result<
tonic::Response<super::UctovnictvoResponse>,
tonic::Status,
> {
self.inner
.ready()
.await
.map_err(|e| {
tonic::Status::unknown(
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/komp_ac.uctovnictvo.Uctovnictvo/GetUctovnictvo",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(
GrpcMethod::new("komp_ac.uctovnictvo.Uctovnictvo", "GetUctovnictvo"),
);
self.inner.unary(req, path, codec).await
}
pub async fn get_uctovnictvo_count(
&mut self,
request: impl tonic::IntoRequest<super::super::common::Empty>,
) -> std::result::Result<
tonic::Response<super::super::common::CountResponse>,
tonic::Status,
> {
self.inner
.ready()
.await
.map_err(|e| {
tonic::Status::unknown(
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/komp_ac.uctovnictvo.Uctovnictvo/GetUctovnictvoCount",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(
GrpcMethod::new(
"komp_ac.uctovnictvo.Uctovnictvo",
"GetUctovnictvoCount",
),
);
self.inner.unary(req, path, codec).await
}
pub async fn get_uctovnictvo_by_position(
&mut self,
request: impl tonic::IntoRequest<super::super::common::PositionRequest>,
) -> std::result::Result<
tonic::Response<super::UctovnictvoResponse>,
tonic::Status,
> {
self.inner
.ready()
.await
.map_err(|e| {
tonic::Status::unknown(
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/komp_ac.uctovnictvo.Uctovnictvo/GetUctovnictvoByPosition",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(
GrpcMethod::new(
"komp_ac.uctovnictvo.Uctovnictvo",
"GetUctovnictvoByPosition",
),
);
self.inner.unary(req, path, codec).await
}
pub async fn put_uctovnictvo(
&mut self,
request: impl tonic::IntoRequest<super::PutUctovnictvoRequest>,
) -> std::result::Result<
tonic::Response<super::UctovnictvoResponse>,
tonic::Status,
> {
self.inner
.ready()
.await
.map_err(|e| {
tonic::Status::unknown(
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/komp_ac.uctovnictvo.Uctovnictvo/PutUctovnictvo",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(
GrpcMethod::new("komp_ac.uctovnictvo.Uctovnictvo", "PutUctovnictvo"),
);
self.inner.unary(req, path, codec).await
}
}
}
/// Generated server implementations.
pub mod uctovnictvo_server {
#![allow(
unused_variables,
dead_code,
missing_docs,
clippy::wildcard_imports,
clippy::let_unit_value,
)]
use tonic::codegen::*;
/// Generated trait containing gRPC methods that should be implemented for use with UctovnictvoServer.
#[async_trait]
pub trait Uctovnictvo: std::marker::Send + std::marker::Sync + 'static {
async fn post_uctovnictvo(
&self,
request: tonic::Request<super::PostUctovnictvoRequest>,
) -> std::result::Result<
tonic::Response<super::UctovnictvoResponse>,
tonic::Status,
>;
async fn get_uctovnictvo(
&self,
request: tonic::Request<super::GetUctovnictvoRequest>,
) -> std::result::Result<
tonic::Response<super::UctovnictvoResponse>,
tonic::Status,
>;
async fn get_uctovnictvo_count(
&self,
request: tonic::Request<super::super::common::Empty>,
) -> std::result::Result<
tonic::Response<super::super::common::CountResponse>,
tonic::Status,
>;
async fn get_uctovnictvo_by_position(
&self,
request: tonic::Request<super::super::common::PositionRequest>,
) -> std::result::Result<
tonic::Response<super::UctovnictvoResponse>,
tonic::Status,
>;
async fn put_uctovnictvo(
&self,
request: tonic::Request<super::PutUctovnictvoRequest>,
) -> std::result::Result<
tonic::Response<super::UctovnictvoResponse>,
tonic::Status,
>;
}
#[derive(Debug)]
pub struct UctovnictvoServer<T> {
inner: Arc<T>,
accept_compression_encodings: EnabledCompressionEncodings,
send_compression_encodings: EnabledCompressionEncodings,
max_decoding_message_size: Option<usize>,
max_encoding_message_size: Option<usize>,
}
impl<T> UctovnictvoServer<T> {
pub fn new(inner: T) -> Self {
Self::from_arc(Arc::new(inner))
}
pub fn from_arc(inner: Arc<T>) -> Self {
Self {
inner,
accept_compression_encodings: Default::default(),
send_compression_encodings: Default::default(),
max_decoding_message_size: None,
max_encoding_message_size: None,
}
}
pub fn with_interceptor<F>(
inner: T,
interceptor: F,
) -> InterceptedService<Self, F>
where
F: tonic::service::Interceptor,
{
InterceptedService::new(Self::new(inner), interceptor)
}
/// Enable decompressing requests with the given encoding.
#[must_use]
pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self {
self.accept_compression_encodings.enable(encoding);
self
}
/// Compress responses with the given encoding, if the client supports it.
#[must_use]
pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self {
self.send_compression_encodings.enable(encoding);
self
}
/// Limits the maximum size of a decoded message.
///
/// Default: `4MB`
#[must_use]
pub fn max_decoding_message_size(mut self, limit: usize) -> Self {
self.max_decoding_message_size = Some(limit);
self
}
/// Limits the maximum size of an encoded message.
///
/// Default: `usize::MAX`
#[must_use]
pub fn max_encoding_message_size(mut self, limit: usize) -> Self {
self.max_encoding_message_size = Some(limit);
self
}
}
impl<T, B> tonic::codegen::Service<http::Request<B>> for UctovnictvoServer<T>
where
T: Uctovnictvo,
B: Body + std::marker::Send + 'static,
B::Error: Into<StdError> + std::marker::Send + 'static,
{
type Response = http::Response<tonic::body::Body>;
type Error = std::convert::Infallible;
type Future = BoxFuture<Self::Response, Self::Error>;
fn poll_ready(
&mut self,
_cx: &mut Context<'_>,
) -> Poll<std::result::Result<(), Self::Error>> {
Poll::Ready(Ok(()))
}
fn call(&mut self, req: http::Request<B>) -> Self::Future {
match req.uri().path() {
"/komp_ac.uctovnictvo.Uctovnictvo/PostUctovnictvo" => {
#[allow(non_camel_case_types)]
struct PostUctovnictvoSvc<T: Uctovnictvo>(pub Arc<T>);
impl<
T: Uctovnictvo,
> tonic::server::UnaryService<super::PostUctovnictvoRequest>
for PostUctovnictvoSvc<T> {
type Response = super::UctovnictvoResponse;
type Future = BoxFuture<
tonic::Response<Self::Response>,
tonic::Status,
>;
fn call(
&mut self,
request: tonic::Request<super::PostUctovnictvoRequest>,
) -> Self::Future {
let inner = Arc::clone(&self.0);
let fut = async move {
<T as Uctovnictvo>::post_uctovnictvo(&inner, request).await
};
Box::pin(fut)
}
}
let accept_compression_encodings = self.accept_compression_encodings;
let send_compression_encodings = self.send_compression_encodings;
let max_decoding_message_size = self.max_decoding_message_size;
let max_encoding_message_size = self.max_encoding_message_size;
let inner = self.inner.clone();
let fut = async move {
let method = PostUctovnictvoSvc(inner);
let codec = tonic::codec::ProstCodec::default();
let mut grpc = tonic::server::Grpc::new(codec)
.apply_compression_config(
accept_compression_encodings,
send_compression_encodings,
)
.apply_max_message_size_config(
max_decoding_message_size,
max_encoding_message_size,
);
let res = grpc.unary(method, req).await;
Ok(res)
};
Box::pin(fut)
}
"/komp_ac.uctovnictvo.Uctovnictvo/GetUctovnictvo" => {
#[allow(non_camel_case_types)]
struct GetUctovnictvoSvc<T: Uctovnictvo>(pub Arc<T>);
impl<
T: Uctovnictvo,
> tonic::server::UnaryService<super::GetUctovnictvoRequest>
for GetUctovnictvoSvc<T> {
type Response = super::UctovnictvoResponse;
type Future = BoxFuture<
tonic::Response<Self::Response>,
tonic::Status,
>;
fn call(
&mut self,
request: tonic::Request<super::GetUctovnictvoRequest>,
) -> Self::Future {
let inner = Arc::clone(&self.0);
let fut = async move {
<T as Uctovnictvo>::get_uctovnictvo(&inner, request).await
};
Box::pin(fut)
}
}
let accept_compression_encodings = self.accept_compression_encodings;
let send_compression_encodings = self.send_compression_encodings;
let max_decoding_message_size = self.max_decoding_message_size;
let max_encoding_message_size = self.max_encoding_message_size;
let inner = self.inner.clone();
let fut = async move {
let method = GetUctovnictvoSvc(inner);
let codec = tonic::codec::ProstCodec::default();
let mut grpc = tonic::server::Grpc::new(codec)
.apply_compression_config(
accept_compression_encodings,
send_compression_encodings,
)
.apply_max_message_size_config(
max_decoding_message_size,
max_encoding_message_size,
);
let res = grpc.unary(method, req).await;
Ok(res)
};
Box::pin(fut)
}
"/komp_ac.uctovnictvo.Uctovnictvo/GetUctovnictvoCount" => {
#[allow(non_camel_case_types)]
struct GetUctovnictvoCountSvc<T: Uctovnictvo>(pub Arc<T>);
impl<
T: Uctovnictvo,
> tonic::server::UnaryService<super::super::common::Empty>
for GetUctovnictvoCountSvc<T> {
type Response = super::super::common::CountResponse;
type Future = BoxFuture<
tonic::Response<Self::Response>,
tonic::Status,
>;
fn call(
&mut self,
request: tonic::Request<super::super::common::Empty>,
) -> Self::Future {
let inner = Arc::clone(&self.0);
let fut = async move {
<T as Uctovnictvo>::get_uctovnictvo_count(&inner, request)
.await
};
Box::pin(fut)
}
}
let accept_compression_encodings = self.accept_compression_encodings;
let send_compression_encodings = self.send_compression_encodings;
let max_decoding_message_size = self.max_decoding_message_size;
let max_encoding_message_size = self.max_encoding_message_size;
let inner = self.inner.clone();
let fut = async move {
let method = GetUctovnictvoCountSvc(inner);
let codec = tonic::codec::ProstCodec::default();
let mut grpc = tonic::server::Grpc::new(codec)
.apply_compression_config(
accept_compression_encodings,
send_compression_encodings,
)
.apply_max_message_size_config(
max_decoding_message_size,
max_encoding_message_size,
);
let res = grpc.unary(method, req).await;
Ok(res)
};
Box::pin(fut)
}
"/komp_ac.uctovnictvo.Uctovnictvo/GetUctovnictvoByPosition" => {
#[allow(non_camel_case_types)]
struct GetUctovnictvoByPositionSvc<T: Uctovnictvo>(pub Arc<T>);
impl<
T: Uctovnictvo,
> tonic::server::UnaryService<super::super::common::PositionRequest>
for GetUctovnictvoByPositionSvc<T> {
type Response = super::UctovnictvoResponse;
type Future = BoxFuture<
tonic::Response<Self::Response>,
tonic::Status,
>;
fn call(
&mut self,
request: tonic::Request<
super::super::common::PositionRequest,
>,
) -> Self::Future {
let inner = Arc::clone(&self.0);
let fut = async move {
<T as Uctovnictvo>::get_uctovnictvo_by_position(
&inner,
request,
)
.await
};
Box::pin(fut)
}
}
let accept_compression_encodings = self.accept_compression_encodings;
let send_compression_encodings = self.send_compression_encodings;
let max_decoding_message_size = self.max_decoding_message_size;
let max_encoding_message_size = self.max_encoding_message_size;
let inner = self.inner.clone();
let fut = async move {
let method = GetUctovnictvoByPositionSvc(inner);
let codec = tonic::codec::ProstCodec::default();
let mut grpc = tonic::server::Grpc::new(codec)
.apply_compression_config(
accept_compression_encodings,
send_compression_encodings,
)
.apply_max_message_size_config(
max_decoding_message_size,
max_encoding_message_size,
);
let res = grpc.unary(method, req).await;
Ok(res)
};
Box::pin(fut)
}
"/komp_ac.uctovnictvo.Uctovnictvo/PutUctovnictvo" => {
#[allow(non_camel_case_types)]
struct PutUctovnictvoSvc<T: Uctovnictvo>(pub Arc<T>);
impl<
T: Uctovnictvo,
> tonic::server::UnaryService<super::PutUctovnictvoRequest>
for PutUctovnictvoSvc<T> {
type Response = super::UctovnictvoResponse;
type Future = BoxFuture<
tonic::Response<Self::Response>,
tonic::Status,
>;
fn call(
&mut self,
request: tonic::Request<super::PutUctovnictvoRequest>,
) -> Self::Future {
let inner = Arc::clone(&self.0);
let fut = async move {
<T as Uctovnictvo>::put_uctovnictvo(&inner, request).await
};
Box::pin(fut)
}
}
let accept_compression_encodings = self.accept_compression_encodings;
let send_compression_encodings = self.send_compression_encodings;
let max_decoding_message_size = self.max_decoding_message_size;
let max_encoding_message_size = self.max_encoding_message_size;
let inner = self.inner.clone();
let fut = async move {
let method = PutUctovnictvoSvc(inner);
let codec = tonic::codec::ProstCodec::default();
let mut grpc = tonic::server::Grpc::new(codec)
.apply_compression_config(
accept_compression_encodings,
send_compression_encodings,
)
.apply_max_message_size_config(
max_decoding_message_size,
max_encoding_message_size,
);
let res = grpc.unary(method, req).await;
Ok(res)
};
Box::pin(fut)
}
_ => {
Box::pin(async move {
let mut response = http::Response::new(
tonic::body::Body::default(),
);
let headers = response.headers_mut();
headers
.insert(
tonic::Status::GRPC_STATUS,
(tonic::Code::Unimplemented as i32).into(),
);
headers
.insert(
http::header::CONTENT_TYPE,
tonic::metadata::GRPC_CONTENT_TYPE,
);
Ok(response)
})
}
}
}
}
impl<T> Clone for UctovnictvoServer<T> {
fn clone(&self) -> Self {
let inner = self.inner.clone();
Self {
inner,
accept_compression_encodings: self.accept_compression_encodings,
send_compression_encodings: self.send_compression_encodings,
max_decoding_message_size: self.max_decoding_message_size,
max_encoding_message_size: self.max_encoding_message_size,
}
}
}
/// Generated gRPC service name
pub const SERVICE_NAME: &str = "komp_ac.uctovnictvo.Uctovnictvo";
impl<T> tonic::server::NamedService for UctovnictvoServer<T> {
const NAME: &'static str = SERVICE_NAME;
}
}
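
Unlike TablesData, the Uctovnictvo service keeps strongly typed messages rather than a dynamic Value map, and dates travel as plain strings, as the generated comment notes. A minimal sketch of creating one accounting record, assuming the module path komp_ac::uctovnictvo and illustrative field values:

```rust
// Hypothetical usage sketch; module path, endpoint, and field values are assumptions.
use komp_ac::uctovnictvo::{uctovnictvo_client::UctovnictvoClient, PostUctovnictvoRequest};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let mut client = UctovnictvoClient::connect("http://127.0.0.1:50051").await?;

    let response = client
        .post_uctovnictvo(PostUctovnictvoRequest {
            adresar_id: 1,
            c_dokladu: "DOK-2025-001".into(),
            datum: "2025-07-25".into(), // plain string date, per the generated comment
            c_faktury: "FA-2025-001".into(),
            obsah: "Nakup materialu".into(),
            stredisko: "001".into(),
            c_uctu: "501".into(),
            md: "D".into(),
            identif: "ID-1".into(),
            poznanka: String::new(),
            firma: "ACME s.r.o.".into(),
        })
        .await?
        .into_inner();

    println!("created record id {} for adresar {}", response.id, response.adresar_id);
    Ok(())
}
```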

View File

@@ -225,11 +225,11 @@ pub mod adresar_client {
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/multieko2.adresar.Adresar/PostAdresar",
"/komp_ac.adresar.Adresar/PostAdresar",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(GrpcMethod::new("multieko2.adresar.Adresar", "PostAdresar"));
.insert(GrpcMethod::new("komp_ac.adresar.Adresar", "PostAdresar"));
self.inner.unary(req, path, codec).await
}
pub async fn get_adresar(
@@ -249,11 +249,11 @@ pub mod adresar_client {
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/multieko2.adresar.Adresar/GetAdresar",
"/komp_ac.adresar.Adresar/GetAdresar",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(GrpcMethod::new("multieko2.adresar.Adresar", "GetAdresar"));
.insert(GrpcMethod::new("komp_ac.adresar.Adresar", "GetAdresar"));
self.inner.unary(req, path, codec).await
}
pub async fn put_adresar(
@@ -273,11 +273,11 @@ pub mod adresar_client {
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/multieko2.adresar.Adresar/PutAdresar",
"/komp_ac.adresar.Adresar/PutAdresar",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(GrpcMethod::new("multieko2.adresar.Adresar", "PutAdresar"));
.insert(GrpcMethod::new("komp_ac.adresar.Adresar", "PutAdresar"));
self.inner.unary(req, path, codec).await
}
pub async fn delete_adresar(
@@ -297,11 +297,11 @@ pub mod adresar_client {
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/multieko2.adresar.Adresar/DeleteAdresar",
"/komp_ac.adresar.Adresar/DeleteAdresar",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(GrpcMethod::new("multieko2.adresar.Adresar", "DeleteAdresar"));
.insert(GrpcMethod::new("komp_ac.adresar.Adresar", "DeleteAdresar"));
self.inner.unary(req, path, codec).await
}
pub async fn get_adresar_count(
@@ -321,11 +321,11 @@ pub mod adresar_client {
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/multieko2.adresar.Adresar/GetAdresarCount",
"/komp_ac.adresar.Adresar/GetAdresarCount",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(GrpcMethod::new("multieko2.adresar.Adresar", "GetAdresarCount"));
.insert(GrpcMethod::new("komp_ac.adresar.Adresar", "GetAdresarCount"));
self.inner.unary(req, path, codec).await
}
pub async fn get_adresar_by_position(
@@ -345,12 +345,12 @@ pub mod adresar_client {
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/multieko2.adresar.Adresar/GetAdresarByPosition",
"/komp_ac.adresar.Adresar/GetAdresarByPosition",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(
GrpcMethod::new("multieko2.adresar.Adresar", "GetAdresarByPosition"),
GrpcMethod::new("komp_ac.adresar.Adresar", "GetAdresarByPosition"),
);
self.inner.unary(req, path, codec).await
}
@@ -476,7 +476,7 @@ pub mod adresar_server {
}
fn call(&mut self, req: http::Request<B>) -> Self::Future {
match req.uri().path() {
"/multieko2.adresar.Adresar/PostAdresar" => {
"/komp_ac.adresar.Adresar/PostAdresar" => {
#[allow(non_camel_case_types)]
struct PostAdresarSvc<T: Adresar>(pub Arc<T>);
impl<
@@ -521,7 +521,7 @@ pub mod adresar_server {
};
Box::pin(fut)
}
"/multieko2.adresar.Adresar/GetAdresar" => {
"/komp_ac.adresar.Adresar/GetAdresar" => {
#[allow(non_camel_case_types)]
struct GetAdresarSvc<T: Adresar>(pub Arc<T>);
impl<
@@ -566,7 +566,7 @@ pub mod adresar_server {
};
Box::pin(fut)
}
"/multieko2.adresar.Adresar/PutAdresar" => {
"/komp_ac.adresar.Adresar/PutAdresar" => {
#[allow(non_camel_case_types)]
struct PutAdresarSvc<T: Adresar>(pub Arc<T>);
impl<
@@ -611,7 +611,7 @@ pub mod adresar_server {
};
Box::pin(fut)
}
"/multieko2.adresar.Adresar/DeleteAdresar" => {
"/komp_ac.adresar.Adresar/DeleteAdresar" => {
#[allow(non_camel_case_types)]
struct DeleteAdresarSvc<T: Adresar>(pub Arc<T>);
impl<
@@ -656,7 +656,7 @@ pub mod adresar_server {
};
Box::pin(fut)
}
"/multieko2.adresar.Adresar/GetAdresarCount" => {
"/komp_ac.adresar.Adresar/GetAdresarCount" => {
#[allow(non_camel_case_types)]
struct GetAdresarCountSvc<T: Adresar>(pub Arc<T>);
impl<
@@ -701,7 +701,7 @@ pub mod adresar_server {
};
Box::pin(fut)
}
"/multieko2.adresar.Adresar/GetAdresarByPosition" => {
"/komp_ac.adresar.Adresar/GetAdresarByPosition" => {
#[allow(non_camel_case_types)]
struct GetAdresarByPositionSvc<T: Adresar>(pub Arc<T>);
impl<
@@ -784,7 +784,7 @@ pub mod adresar_server {
}
}
/// Generated gRPC service name
pub const SERVICE_NAME: &str = "multieko2.adresar.Adresar";
pub const SERVICE_NAME: &str = "komp_ac.adresar.Adresar";
impl<T> tonic::server::NamedService for AdresarServer<T> {
const NAME: &'static str = SERVICE_NAME;
}
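
The remaining hunks are the package rename from multieko2 to komp_ac. Because tonic routes requests on the full method path, a client still compiled against the old multieko2.* descriptors will now fall through to the Unimplemented branch of each call routine. A small sanity test one could keep beside the regenerated code (the module path komp_ac::adresar is an assumption):

```rust
// Sketch of a regression test; the module path is an assumption.
use komp_ac::adresar::adresar_server::{AdresarServer, SERVICE_NAME};
use tonic::server::NamedService;

#[test]
fn adresar_service_uses_renamed_package() {
    // tonic matches incoming requests against this constant, so requests to
    // /multieko2.adresar.Adresar/... no longer reach the handlers.
    assert_eq!(SERVICE_NAME, "komp_ac.adresar.Adresar");
    assert_eq!(<AdresarServer<()> as NamedService>::NAME, SERVICE_NAME);
}
```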

View File

@@ -160,11 +160,11 @@ pub mod auth_service_client {
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/multieko2.auth.AuthService/Register",
"/komp_ac.auth.AuthService/Register",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(GrpcMethod::new("multieko2.auth.AuthService", "Register"));
.insert(GrpcMethod::new("komp_ac.auth.AuthService", "Register"));
self.inner.unary(req, path, codec).await
}
pub async fn login(
@@ -181,11 +181,11 @@ pub mod auth_service_client {
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/multieko2.auth.AuthService/Login",
"/komp_ac.auth.AuthService/Login",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(GrpcMethod::new("multieko2.auth.AuthService", "Login"));
.insert(GrpcMethod::new("komp_ac.auth.AuthService", "Login"));
self.inner.unary(req, path, codec).await
}
}
@@ -288,7 +288,7 @@ pub mod auth_service_server {
}
fn call(&mut self, req: http::Request<B>) -> Self::Future {
match req.uri().path() {
"/multieko2.auth.AuthService/Register" => {
"/komp_ac.auth.AuthService/Register" => {
#[allow(non_camel_case_types)]
struct RegisterSvc<T: AuthService>(pub Arc<T>);
impl<
@@ -333,7 +333,7 @@ pub mod auth_service_server {
};
Box::pin(fut)
}
"/multieko2.auth.AuthService/Login" => {
"/komp_ac.auth.AuthService/Login" => {
#[allow(non_camel_case_types)]
struct LoginSvc<T: AuthService>(pub Arc<T>);
impl<T: AuthService> tonic::server::UnaryService<super::LoginRequest>
@@ -411,7 +411,7 @@ pub mod auth_service_server {
}
}
/// Generated gRPC service name
pub const SERVICE_NAME: &str = "multieko2.auth.AuthService";
pub const SERVICE_NAME: &str = "komp_ac.auth.AuthService";
impl<T> tonic::server::NamedService for AuthServiceServer<T> {
const NAME: &'static str = SERVICE_NAME;
}

View File

@@ -129,11 +129,11 @@ pub mod searcher_client {
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/multieko2.search.Searcher/SearchTable",
"/komp_ac.search.Searcher/SearchTable",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(GrpcMethod::new("multieko2.search.Searcher", "SearchTable"));
.insert(GrpcMethod::new("komp_ac.search.Searcher", "SearchTable"));
self.inner.unary(req, path, codec).await
}
}
@@ -232,7 +232,7 @@ pub mod searcher_server {
}
fn call(&mut self, req: http::Request<B>) -> Self::Future {
match req.uri().path() {
"/multieko2.search.Searcher/SearchTable" => {
"/komp_ac.search.Searcher/SearchTable" => {
#[allow(non_camel_case_types)]
struct SearchTableSvc<T: Searcher>(pub Arc<T>);
impl<T: Searcher> tonic::server::UnaryService<super::SearchRequest>
@@ -310,7 +310,7 @@ pub mod searcher_server {
}
}
/// Generated gRPC service name
pub const SERVICE_NAME: &str = "multieko2.search.Searcher";
pub const SERVICE_NAME: &str = "komp_ac.search.Searcher";
impl<T> tonic::server::NamedService for SearcherServer<T> {
const NAME: &'static str = SERVICE_NAME;
}

View File

@@ -179,13 +179,13 @@ pub mod table_definition_client {
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/multieko2.table_definition.TableDefinition/PostTableDefinition",
"/komp_ac.table_definition.TableDefinition/PostTableDefinition",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(
GrpcMethod::new(
"multieko2.table_definition.TableDefinition",
"komp_ac.table_definition.TableDefinition",
"PostTableDefinition",
),
);
@@ -208,13 +208,13 @@ pub mod table_definition_client {
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/multieko2.table_definition.TableDefinition/GetProfileTree",
"/komp_ac.table_definition.TableDefinition/GetProfileTree",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(
GrpcMethod::new(
"multieko2.table_definition.TableDefinition",
"komp_ac.table_definition.TableDefinition",
"GetProfileTree",
),
);
@@ -237,13 +237,13 @@ pub mod table_definition_client {
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/multieko2.table_definition.TableDefinition/DeleteTable",
"/komp_ac.table_definition.TableDefinition/DeleteTable",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(
GrpcMethod::new(
"multieko2.table_definition.TableDefinition",
"komp_ac.table_definition.TableDefinition",
"DeleteTable",
),
);
@@ -362,7 +362,7 @@ pub mod table_definition_server {
}
fn call(&mut self, req: http::Request<B>) -> Self::Future {
match req.uri().path() {
"/multieko2.table_definition.TableDefinition/PostTableDefinition" => {
"/komp_ac.table_definition.TableDefinition/PostTableDefinition" => {
#[allow(non_camel_case_types)]
struct PostTableDefinitionSvc<T: TableDefinition>(pub Arc<T>);
impl<
@@ -411,7 +411,7 @@ pub mod table_definition_server {
};
Box::pin(fut)
}
"/multieko2.table_definition.TableDefinition/GetProfileTree" => {
"/komp_ac.table_definition.TableDefinition/GetProfileTree" => {
#[allow(non_camel_case_types)]
struct GetProfileTreeSvc<T: TableDefinition>(pub Arc<T>);
impl<
@@ -457,7 +457,7 @@ pub mod table_definition_server {
};
Box::pin(fut)
}
"/multieko2.table_definition.TableDefinition/DeleteTable" => {
"/komp_ac.table_definition.TableDefinition/DeleteTable" => {
#[allow(non_camel_case_types)]
struct DeleteTableSvc<T: TableDefinition>(pub Arc<T>);
impl<
@@ -537,7 +537,7 @@ pub mod table_definition_server {
}
}
/// Generated gRPC service name
pub const SERVICE_NAME: &str = "multieko2.table_definition.TableDefinition";
pub const SERVICE_NAME: &str = "komp_ac.table_definition.TableDefinition";
impl<T> tonic::server::NamedService for TableDefinitionServer<T> {
const NAME: &'static str = SERVICE_NAME;
}

View File

@@ -125,13 +125,13 @@ pub mod table_script_client {
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/multieko2.table_script.TableScript/PostTableScript",
"/komp_ac.table_script.TableScript/PostTableScript",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(
GrpcMethod::new(
"multieko2.table_script.TableScript",
"komp_ac.table_script.TableScript",
"PostTableScript",
),
);
@@ -236,7 +236,7 @@ pub mod table_script_server {
}
fn call(&mut self, req: http::Request<B>) -> Self::Future {
match req.uri().path() {
"/multieko2.table_script.TableScript/PostTableScript" => {
"/komp_ac.table_script.TableScript/PostTableScript" => {
#[allow(non_camel_case_types)]
struct PostTableScriptSvc<T: TableScript>(pub Arc<T>);
impl<
@@ -316,7 +316,7 @@ pub mod table_script_server {
}
}
/// Generated gRPC service name
pub const SERVICE_NAME: &str = "multieko2.table_script.TableScript";
pub const SERVICE_NAME: &str = "komp_ac.table_script.TableScript";
impl<T> tonic::server::NamedService for TableScriptServer<T> {
const NAME: &'static str = SERVICE_NAME;
}

View File

@@ -133,13 +133,13 @@ pub mod table_structure_service_client {
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/multieko2.table_structure.TableStructureService/GetTableStructure",
"/komp_ac.table_structure.TableStructureService/GetTableStructure",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(
GrpcMethod::new(
"multieko2.table_structure.TableStructureService",
"komp_ac.table_structure.TableStructureService",
"GetTableStructure",
),
);
@@ -245,7 +245,7 @@ pub mod table_structure_service_server {
}
fn call(&mut self, req: http::Request<B>) -> Self::Future {
match req.uri().path() {
"/multieko2.table_structure.TableStructureService/GetTableStructure" => {
"/komp_ac.table_structure.TableStructureService/GetTableStructure" => {
#[allow(non_camel_case_types)]
struct GetTableStructureSvc<T: TableStructureService>(pub Arc<T>);
impl<
@@ -329,7 +329,7 @@ pub mod table_structure_service_server {
}
}
/// Generated gRPC service name
pub const SERVICE_NAME: &str = "multieko2.table_structure.TableStructureService";
pub const SERVICE_NAME: &str = "komp_ac.table_structure.TableStructureService";
impl<T> tonic::server::NamedService for TableStructureServiceServer<T> {
const NAME: &'static str = SERVICE_NAME;
}

View File

@@ -5,10 +5,10 @@ pub struct PostTableDataRequest {
pub profile_name: ::prost::alloc::string::String,
#[prost(string, tag = "2")]
pub table_name: ::prost::alloc::string::String,
#[prost(map = "string, string", tag = "3")]
#[prost(map = "string, message", tag = "3")]
pub data: ::std::collections::HashMap<
::prost::alloc::string::String,
::prost::alloc::string::String,
::prost_types::Value,
>,
}
#[derive(Clone, PartialEq, ::prost::Message)]
@@ -28,10 +28,10 @@ pub struct PutTableDataRequest {
pub table_name: ::prost::alloc::string::String,
#[prost(int64, tag = "3")]
pub id: i64,
#[prost(map = "string, string", tag = "4")]
#[prost(map = "string, message", tag = "4")]
pub data: ::std::collections::HashMap<
::prost::alloc::string::String,
::prost::alloc::string::String,
::prost_types::Value,
>,
}
#[derive(Clone, PartialEq, ::prost::Message)]
@@ -198,12 +198,12 @@ pub mod tables_data_client {
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/multieko2.tables_data.TablesData/PostTableData",
"/komp_ac.tables_data.TablesData/PostTableData",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(
GrpcMethod::new("multieko2.tables_data.TablesData", "PostTableData"),
GrpcMethod::new("komp_ac.tables_data.TablesData", "PostTableData"),
);
self.inner.unary(req, path, codec).await
}
@@ -224,12 +224,12 @@ pub mod tables_data_client {
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/multieko2.tables_data.TablesData/PutTableData",
"/komp_ac.tables_data.TablesData/PutTableData",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(
GrpcMethod::new("multieko2.tables_data.TablesData", "PutTableData"),
GrpcMethod::new("komp_ac.tables_data.TablesData", "PutTableData"),
);
self.inner.unary(req, path, codec).await
}
@@ -250,13 +250,13 @@ pub mod tables_data_client {
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/multieko2.tables_data.TablesData/DeleteTableData",
"/komp_ac.tables_data.TablesData/DeleteTableData",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(
GrpcMethod::new(
"multieko2.tables_data.TablesData",
"komp_ac.tables_data.TablesData",
"DeleteTableData",
),
);
@@ -279,12 +279,12 @@ pub mod tables_data_client {
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/multieko2.tables_data.TablesData/GetTableData",
"/komp_ac.tables_data.TablesData/GetTableData",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(
GrpcMethod::new("multieko2.tables_data.TablesData", "GetTableData"),
GrpcMethod::new("komp_ac.tables_data.TablesData", "GetTableData"),
);
self.inner.unary(req, path, codec).await
}
@@ -305,13 +305,13 @@ pub mod tables_data_client {
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/multieko2.tables_data.TablesData/GetTableDataCount",
"/komp_ac.tables_data.TablesData/GetTableDataCount",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(
GrpcMethod::new(
"multieko2.tables_data.TablesData",
"komp_ac.tables_data.TablesData",
"GetTableDataCount",
),
);
@@ -334,13 +334,13 @@ pub mod tables_data_client {
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/multieko2.tables_data.TablesData/GetTableDataByPosition",
"/komp_ac.tables_data.TablesData/GetTableDataByPosition",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(
GrpcMethod::new(
"multieko2.tables_data.TablesData",
"komp_ac.tables_data.TablesData",
"GetTableDataByPosition",
),
);
@@ -480,7 +480,7 @@ pub mod tables_data_server {
}
fn call(&mut self, req: http::Request<B>) -> Self::Future {
match req.uri().path() {
"/multieko2.tables_data.TablesData/PostTableData" => {
"/komp_ac.tables_data.TablesData/PostTableData" => {
#[allow(non_camel_case_types)]
struct PostTableDataSvc<T: TablesData>(pub Arc<T>);
impl<
@@ -525,7 +525,7 @@ pub mod tables_data_server {
};
Box::pin(fut)
}
"/multieko2.tables_data.TablesData/PutTableData" => {
"/komp_ac.tables_data.TablesData/PutTableData" => {
#[allow(non_camel_case_types)]
struct PutTableDataSvc<T: TablesData>(pub Arc<T>);
impl<
@@ -570,7 +570,7 @@ pub mod tables_data_server {
};
Box::pin(fut)
}
"/multieko2.tables_data.TablesData/DeleteTableData" => {
"/komp_ac.tables_data.TablesData/DeleteTableData" => {
#[allow(non_camel_case_types)]
struct DeleteTableDataSvc<T: TablesData>(pub Arc<T>);
impl<
@@ -615,7 +615,7 @@ pub mod tables_data_server {
};
Box::pin(fut)
}
"/multieko2.tables_data.TablesData/GetTableData" => {
"/komp_ac.tables_data.TablesData/GetTableData" => {
#[allow(non_camel_case_types)]
struct GetTableDataSvc<T: TablesData>(pub Arc<T>);
impl<
@@ -660,7 +660,7 @@ pub mod tables_data_server {
};
Box::pin(fut)
}
"/multieko2.tables_data.TablesData/GetTableDataCount" => {
"/komp_ac.tables_data.TablesData/GetTableDataCount" => {
#[allow(non_camel_case_types)]
struct GetTableDataCountSvc<T: TablesData>(pub Arc<T>);
impl<
@@ -706,7 +706,7 @@ pub mod tables_data_server {
};
Box::pin(fut)
}
"/multieko2.tables_data.TablesData/GetTableDataByPosition" => {
"/komp_ac.tables_data.TablesData/GetTableDataByPosition" => {
#[allow(non_camel_case_types)]
struct GetTableDataByPositionSvc<T: TablesData>(pub Arc<T>);
impl<
@@ -790,7 +790,7 @@ pub mod tables_data_server {
}
}
/// Generated gRPC service name
pub const SERVICE_NAME: &str = "multieko2.tables_data.TablesData";
pub const SERVICE_NAME: &str = "komp_ac.tables_data.TablesData";
impl<T> tonic::server::NamedService for TablesDataServer<T> {
const NAME: &'static str = SERVICE_NAME;
}

View File
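The map value type in PostTableDataRequest and PutTableDataRequest changes above from string to google.protobuf.Value (prost_types::Value on the Rust side), so callers no longer stringify every cell. A hedged sketch of building such a request; "firma" appears elsewhere in this diff, while "suma" and the literal values are purely illustrative.

    use std::collections::HashMap;
    use prost_types::{value::Kind, Value};
    use common::proto::komp_ac::tables_data::PostTableDataRequest;

    fn typed(kind: Kind) -> Value {
        Value { kind: Some(kind) }
    }

    fn build_request() -> PostTableDataRequest {
        let mut data = HashMap::new();
        // Values keep their JSON-like type on the wire instead of being
        // flattened to strings.
        data.insert("firma".to_string(), typed(Kind::StringValue("New Firma".into())));
        data.insert("suma".to_string(), typed(Kind::NumberValue(123.45)));
        data.insert("deleted".to_string(), typed(Kind::BoolValue(false)));

        PostTableDataRequest {
            profile_name: "default".to_string(),
            table_name: "uctovnictvo".to_string(),
            data,
        }
    }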

@@ -192,13 +192,13 @@ pub mod uctovnictvo_client {
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/multieko2.uctovnictvo.Uctovnictvo/PostUctovnictvo",
"/komp_ac.uctovnictvo.Uctovnictvo/PostUctovnictvo",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(
GrpcMethod::new(
"multieko2.uctovnictvo.Uctovnictvo",
"komp_ac.uctovnictvo.Uctovnictvo",
"PostUctovnictvo",
),
);
@@ -221,13 +221,13 @@ pub mod uctovnictvo_client {
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/multieko2.uctovnictvo.Uctovnictvo/GetUctovnictvo",
"/komp_ac.uctovnictvo.Uctovnictvo/GetUctovnictvo",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(
GrpcMethod::new(
"multieko2.uctovnictvo.Uctovnictvo",
"komp_ac.uctovnictvo.Uctovnictvo",
"GetUctovnictvo",
),
);
@@ -250,13 +250,13 @@ pub mod uctovnictvo_client {
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/multieko2.uctovnictvo.Uctovnictvo/GetUctovnictvoCount",
"/komp_ac.uctovnictvo.Uctovnictvo/GetUctovnictvoCount",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(
GrpcMethod::new(
"multieko2.uctovnictvo.Uctovnictvo",
"komp_ac.uctovnictvo.Uctovnictvo",
"GetUctovnictvoCount",
),
);
@@ -279,13 +279,13 @@ pub mod uctovnictvo_client {
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/multieko2.uctovnictvo.Uctovnictvo/GetUctovnictvoByPosition",
"/komp_ac.uctovnictvo.Uctovnictvo/GetUctovnictvoByPosition",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(
GrpcMethod::new(
"multieko2.uctovnictvo.Uctovnictvo",
"komp_ac.uctovnictvo.Uctovnictvo",
"GetUctovnictvoByPosition",
),
);
@@ -308,13 +308,13 @@ pub mod uctovnictvo_client {
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/multieko2.uctovnictvo.Uctovnictvo/PutUctovnictvo",
"/komp_ac.uctovnictvo.Uctovnictvo/PutUctovnictvo",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(
GrpcMethod::new(
"multieko2.uctovnictvo.Uctovnictvo",
"komp_ac.uctovnictvo.Uctovnictvo",
"PutUctovnictvo",
),
);
@@ -447,7 +447,7 @@ pub mod uctovnictvo_server {
}
fn call(&mut self, req: http::Request<B>) -> Self::Future {
match req.uri().path() {
"/multieko2.uctovnictvo.Uctovnictvo/PostUctovnictvo" => {
"/komp_ac.uctovnictvo.Uctovnictvo/PostUctovnictvo" => {
#[allow(non_camel_case_types)]
struct PostUctovnictvoSvc<T: Uctovnictvo>(pub Arc<T>);
impl<
@@ -492,7 +492,7 @@ pub mod uctovnictvo_server {
};
Box::pin(fut)
}
"/multieko2.uctovnictvo.Uctovnictvo/GetUctovnictvo" => {
"/komp_ac.uctovnictvo.Uctovnictvo/GetUctovnictvo" => {
#[allow(non_camel_case_types)]
struct GetUctovnictvoSvc<T: Uctovnictvo>(pub Arc<T>);
impl<
@@ -537,7 +537,7 @@ pub mod uctovnictvo_server {
};
Box::pin(fut)
}
"/multieko2.uctovnictvo.Uctovnictvo/GetUctovnictvoCount" => {
"/komp_ac.uctovnictvo.Uctovnictvo/GetUctovnictvoCount" => {
#[allow(non_camel_case_types)]
struct GetUctovnictvoCountSvc<T: Uctovnictvo>(pub Arc<T>);
impl<
@@ -583,7 +583,7 @@ pub mod uctovnictvo_server {
};
Box::pin(fut)
}
"/multieko2.uctovnictvo.Uctovnictvo/GetUctovnictvoByPosition" => {
"/komp_ac.uctovnictvo.Uctovnictvo/GetUctovnictvoByPosition" => {
#[allow(non_camel_case_types)]
struct GetUctovnictvoByPositionSvc<T: Uctovnictvo>(pub Arc<T>);
impl<
@@ -634,7 +634,7 @@ pub mod uctovnictvo_server {
};
Box::pin(fut)
}
"/multieko2.uctovnictvo.Uctovnictvo/PutUctovnictvo" => {
"/komp_ac.uctovnictvo.Uctovnictvo/PutUctovnictvo" => {
#[allow(non_camel_case_types)]
struct PutUctovnictvoSvc<T: Uctovnictvo>(pub Arc<T>);
impl<
@@ -714,7 +714,7 @@ pub mod uctovnictvo_server {
}
}
/// Generated gRPC service name
pub const SERVICE_NAME: &str = "multieko2.uctovnictvo.Uctovnictvo";
pub const SERVICE_NAME: &str = "komp_ac.uctovnictvo.Uctovnictvo";
impl<T> tonic::server::NamedService for UctovnictvoServer<T> {
const NAME: &'static str = SERVICE_NAME;
}

View File

@@ -71,6 +71,8 @@ feature-depth = 1
# A list of advisory IDs to ignore. Note that ignored advisories will still
# output a note when they are encountered.
ignore = [
{ id = "RUSTSEC-2024-0014", reason = "generational-arena is archived but no safe upgrade path exists; accepted for now" },
{ id = "RUSTSEC-2024-0436", reason = "paste is archived; no immediate alternative in dependency tree; accepted for now" },
#"RUSTSEC-0000-0000",
#{ id = "RUSTSEC-0000-0000", reason = "you can specify a reason the advisory is ignored" },
#"a-crate-that-is-yanked@0.1.1", # you can also ignore yanked crate versions if you wish
@@ -100,6 +102,7 @@ allow = [
"HPND",
"ISC",
"LGPL-3.0",
"AGPL-3.0",
"MIT",
"MPL-2.0",
"NCSA",

View File

@@ -11,11 +11,11 @@ use tantivy::schema::{IndexRecordOption, Value};
use tantivy::{Index, TantivyDocument, Term};
use tonic::{Request, Response, Status};
use common::proto::multieko2::search::{
use common::proto::komp_ac::search::{
search_response::Hit, SearchRequest, SearchResponse,
};
pub use common::proto::multieko2::search::searcher_server::SearcherServer;
use common::proto::multieko2::search::searcher_server::Searcher;
pub use common::proto::komp_ac::search::searcher_server::SearcherServer;
use common::proto::komp_ac::search::searcher_server::Searcher;
use common::search::register_slovak_tokenizers;
use sqlx::{PgPool, Row};
use tracing::info;

View File

@@ -10,29 +10,36 @@ search = { path = "../search" }
anyhow = { workspace = true }
tantivy = { workspace = true }
prost-types = { workspace = true }
chrono = { version = "0.4.40", features = ["serde"] }
dotenvy = "0.15.7"
prost = "0.13.5"
serde = { version = "1.0.219", features = ["derive"] }
serde_json = "1.0.140"
sqlx = { version = "0.8.5", features = ["chrono", "postgres", "runtime-tokio", "runtime-tokio-native-tls", "time", "uuid"] }
sqlx = { version = "0.8.5", features = ["chrono", "postgres", "runtime-tokio", "runtime-tokio-native-tls", "rust_decimal", "time", "uuid"] }
tokio = { version = "1.44.2", features = ["full", "macros"] }
tonic = "0.13.0"
tonic-reflection = "0.13.0"
tracing = "0.1.41"
time = { version = "0.3.41", features = ["local-offset"] }
steel-derive = { git = "https://github.com/mattwparas/steel.git", branch = "master", package = "steel-derive" }
steel-core = { git = "https://github.com/mattwparas/steel.git", version = "0.6.0", features = ["anyhow", "dylibs", "sync"] }
thiserror = "2.0.12"
steel-derive = "0.6.0"
steel-core = { version = "0.7.0", features = ["anyhow", "dylibs", "sync"] }
dashmap = "6.1.0"
lazy_static = "1.5.0"
regex = "1.11.1"
bcrypt = "0.17.0"
validator = { version = "0.20.0", features = ["derive"] }
uuid = { version = "1.16.0", features = ["serde", "v4"] }
jsonwebtoken = "9.3.1"
rust-stemmers = "1.2.0"
rust_decimal = { workspace = true }
rust_decimal_macros = { workspace = true }
regex = { workspace = true }
thiserror = { workspace = true }
steel-decimal = "1.0.0"
[lib]
name = "server"
path = "src/lib.rs"
@@ -41,3 +48,6 @@ path = "src/lib.rs"
tokio = { version = "1.44", features = ["full", "test-util"] }
rstest = "0.25.0"
lazy_static = "1.5.0"
rand = "0.9.1"
futures = "0.3.31"
tokio-test = "0.4.4"
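The dependency changes above add rust_decimal (plus the sqlx "rust_decimal" feature) and replace the git checkout of steel with published crates. A minimal sketch, assuming only the rust_decimal and rust_decimal_macros crates themselves, of why exact base-10 arithmetic matters for accounting values that end up in NUMERIC columns.

    use rust_decimal::Decimal;
    use rust_decimal_macros::dec;

    fn main() {
        let price: Decimal = dec!(19.99);
        let qty: Decimal = dec!(3);
        let total = price * qty;            // exactly 59.97, no binary rounding error
        assert_eq!(total, dec!(59.97));
        println!("total = {total}");
    }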

server/Makefile (new file, 13 lines)
View File

@@ -0,0 +1,13 @@
# Makefile
test: reset_db run_tests
reset_db:
@echo "Resetting test database..."
@./scripts/reset_test_db.sh
run_tests:
@echo "Running tests..."
@cargo test --test mod -- --test-threads=1
.PHONY: test

View File
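The Makefile above runs the integration suite single-threaded (--test-threads=1) after resetting the database, so tests that touch shared tables do not race each other. A hedged sketch of the kind of test this serializes; the environment variable and fallback URL mirror the reset script shown later in this diff, everything else is illustrative.

    use sqlx::PgPool;

    #[tokio::test]
    async fn counts_table_definitions() -> Result<(), Box<dyn std::error::Error>> {
        let url = std::env::var("TEST_DATABASE_URL")
            .unwrap_or_else(|_| "postgres://multi_psql_dev:3@localhost:5432/multi_rust_test".into());
        let pool = PgPool::connect(&url).await?;
        // Two copies of this test running in parallel would see each other's
        // rows, which is exactly what --test-threads=1 prevents.
        let count: i64 = sqlx::query_scalar("SELECT COUNT(*) FROM table_definitions")
            .fetch_one(&pool)
            .await?;
        assert!(count >= 0);
        Ok(())
    }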

@@ -1,24 +0,0 @@
-- Add migration script here
CREATE TABLE adresar (
id BIGSERIAL PRIMARY KEY,
deleted BOOLEAN NOT NULL DEFAULT FALSE,
firma TEXT NOT NULL,
kz TEXT,
drc TEXT,
ulica TEXT,
psc TEXT,
mesto TEXT,
stat TEXT,
banka TEXT,
ucet TEXT,
skladm TEXT,
ico TEXT,
kontakt TEXT,
telefon TEXT,
skladu TEXT,
fax TEXT,
created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX idx_adresar_firma ON adresar (firma);
CREATE INDEX idx_adresar_mesto ON adresar (mesto);

View File

@@ -1,22 +0,0 @@
-- Add migration script here
CREATE TABLE uctovnictvo (
id BIGSERIAL PRIMARY KEY,
deleted BOOLEAN NOT NULL DEFAULT FALSE,
adresar_id BIGINT NOT NULL REFERENCES adresar(id), -- Link to adresar table
c_dokladu TEXT NOT NULL,
datum DATE NOT NULL,
c_faktury TEXT NOT NULL,
obsah TEXT,
stredisko TEXT,
c_uctu TEXT,
md TEXT,
identif TEXT,
poznanka TEXT,
firma TEXT NOT NULL,
created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX idx_uctovnictvo_adresar_id ON uctovnictvo (adresar_id);
CREATE INDEX idx_uctovnictvo_firma ON uctovnictvo (firma);
CREATE INDEX idx_uctovnictvo_c_dokladu ON uctovnictvo (c_dokladu);
CREATE INDEX idx_uctovnictvo_poznanka ON uctovnictvo (poznanka);

View File

@@ -1,9 +1,12 @@
-- Add migration script here
CREATE TABLE profiles (
CREATE TABLE schemas (
id BIGSERIAL PRIMARY KEY,
name TEXT NOT NULL UNIQUE,
created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP
created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP,
description TEXT,
is_active BOOLEAN DEFAULT TRUE
);
-- Create default profile for existing data
INSERT INTO profiles (name) VALUES ('default');
INSERT INTO schemas (name) VALUES ('default');
CREATE SCHEMA IF NOT EXISTS "default";

View File
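The migration above pairs a row in the schemas table with a real Postgres schema for 'default'. Presumably something similar has to happen whenever a new schema is registered at runtime; the sketch below is illustrative only (function name, quoting strategy and error handling are assumptions, not code from the repository).

    use sqlx::PgPool;

    async fn register_schema(pool: &PgPool, name: &str) -> Result<i64, sqlx::Error> {
        let mut tx = pool.begin().await?;
        let id: i64 = sqlx::query_scalar(
            "INSERT INTO schemas (name) VALUES ($1) RETURNING id",
        )
        .bind(name)
        .fetch_one(&mut *tx)
        .await?;
        // The identifier is quoted (and embedded quotes doubled) because the
        // schema name is user-supplied text, as with "default" in the migration.
        let quoted = name.replace('"', "\"\"");
        sqlx::query(&format!("CREATE SCHEMA IF NOT EXISTS \"{quoted}\""))
            .execute(&mut *tx)
            .await?;
        tx.commit().await?;
        Ok(id)
    }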

@@ -1,4 +1,5 @@
-- Main table definitions
CREATE TABLE table_definitions (
id BIGSERIAL PRIMARY KEY,
deleted BOOLEAN NOT NULL DEFAULT FALSE,
@@ -6,7 +7,7 @@ CREATE TABLE table_definitions (
columns JSONB NOT NULL,
indexes JSONB NOT NULL,
created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP,
profile_id BIGINT NOT NULL REFERENCES profiles(id) DEFAULT 1
schema_id BIGINT NOT NULL REFERENCES schemas(id)
);
-- Relationship table for multiple links
@@ -18,9 +19,10 @@ CREATE TABLE table_definition_links (
PRIMARY KEY (source_table_id, linked_table_id)
);
-- Create composite unique index for profile+table combination
CREATE UNIQUE INDEX idx_table_definitions_profile_table
ON table_definitions (profile_id, table_name);
-- Create composite unique index for schema+table combination
CREATE UNIQUE INDEX idx_table_definitions_schema_table
ON table_definitions (schema_id, table_name);
CREATE INDEX idx_links_source ON table_definition_links (source_table_id);
CREATE INDEX idx_links_target ON table_definition_links (linked_table_id);

View File

@@ -8,7 +8,7 @@ CREATE TABLE table_scripts (
script TEXT NOT NULL,
description TEXT,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
profile_id BIGINT NOT NULL REFERENCES profiles(id) DEFAULT 1,
schema_id BIGINT NOT NULL REFERENCES schemas(id),
UNIQUE(table_definitions_id, target_column)
);

View File

@@ -1,3 +0,0 @@
-- Add migration script here
CREATE SCHEMA IF NOT EXISTS gen;

View File

@@ -0,0 +1,44 @@
-- migrations/20250710201933_create_script_dependencies.sql
-- This table stores the dependency graph for table scripts
-- More efficient design with better indexing
CREATE TABLE script_dependencies (
id BIGSERIAL PRIMARY KEY,
-- The script that creates this dependency
script_id BIGINT NOT NULL REFERENCES table_scripts(id) ON DELETE CASCADE,
-- The table that depends on another (source of dependency)
source_table_id BIGINT NOT NULL REFERENCES table_definitions(id) ON DELETE CASCADE,
-- The table being depended upon (target of dependency)
target_table_id BIGINT NOT NULL REFERENCES table_definitions(id) ON DELETE CASCADE,
-- What type of dependency (for better debugging)
dependency_type TEXT NOT NULL CHECK (dependency_type IN ('column_access', 'sql_query', 'indexed_access')),
-- Additional context (column name, query snippet, etc.)
context_info JSONB,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP
);
-- Optimized indexes for fast cycle detection
CREATE INDEX idx_script_deps_source_target ON script_dependencies (source_table_id, target_table_id);
CREATE INDEX idx_script_deps_by_script ON script_dependencies (script_id);
CREATE INDEX idx_script_deps_by_target ON script_dependencies (target_table_id);
-- View for easy dependency analysis
CREATE VIEW dependency_graph AS
SELECT
sd.source_table_id,
sd.target_table_id,
st.table_name AS source_table,
tt.table_name AS target_table,
sd.dependency_type,
sd.context_info,
s.name AS schema_name
FROM script_dependencies sd
JOIN table_definitions st ON sd.source_table_id = st.id
JOIN table_definitions tt ON sd.target_table_id = tt.id
JOIN schemas s ON st.schema_id = s.id;

View File
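The script_dependencies table above stores one row per edge of the dependency graph, and its comments call out fast cycle detection as the goal of the indexes. A hedged sketch of what such a check could look like with sqlx: adding an edge source -> target is rejected if target can already reach source. The query text is illustrative, not taken from the codebase.

    use sqlx::PgPool;

    async fn would_create_cycle(
        pool: &PgPool,
        source_table_id: i64,
        target_table_id: i64,
    ) -> Result<bool, sqlx::Error> {
        let hit: Option<bool> = sqlx::query_scalar(
            r#"
            WITH RECURSIVE reachable AS (
                SELECT target_table_id
                FROM script_dependencies
                WHERE source_table_id = $1          -- start from the proposed target
                UNION
                SELECT sd.target_table_id
                FROM script_dependencies sd
                JOIN reachable r ON sd.source_table_id = r.target_table_id
            )
            SELECT TRUE FROM reachable WHERE target_table_id = $2 LIMIT 1
            "#,
        )
        .bind(target_table_id)
        .bind(source_table_id)   // reaching the source again means a cycle
        .fetch_optional(pool)
        .await?;
        Ok(hit.unwrap_or(false))
    }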

@@ -0,0 +1,9 @@
#!/bin/bash
# scripts/reset_test_db.sh
DATABASE_URL=${TEST_DATABASE_URL:-"postgres://multi_psql_dev:3@localhost:5432/multi_rust_test"}
echo "Reset db script"
yes | sqlx database drop --database-url "$DATABASE_URL"
sqlx database create --database-url "$DATABASE_URL"
echo "Test database reset complete."

View File

@@ -1,156 +0,0 @@
grpcurl -plaintext -d '{"id": 1}' localhost:50051 multieko2.adresar.Adresar/GetAdresar
{
"id": "1",
"firma": "Updated Firma",
"kz": "Updated KZ",
"drc": "Updated DRC",
"ulica": "Updated Ulica",
"psc": "Updated PSC",
"mesto": "Updated Mesto",
"stat": "Updated Stat",
"banka": "Updated Banka",
"ucet": "Updated Ucet",
"skladm": "Updated Skladm",
"ico": "Updated ICO",
"kontakt": "Updated Kontakt",
"telefon": "Updated Telefon",
"skladu": "Updated Skladu",
"fax": "Updated Fax"
}
grpcurl -plaintext -d '{"id": 2}' localhost:50051 multieko2.adresar.Adresar/GetAdresar
{
"id": "2",
"firma": "asdfasf",
"kz": " ",
"drc": " ",
"ulica": " ",
"psc": "sdfasdf",
"mesto": "asf",
"stat": "as",
"banka": "df",
"ucet": "asf",
"skladm": "f",
"ico": "f",
"kontakt": "f",
"telefon": "f",
"skladu": "f",
"fax": " "
}
grpcurl -plaintext -d '{"id": 1}' localhost:50051 multieko2.adresar.Adresar/DeleteAdresar
{
"success": true
}
grpcurl -plaintext -d '{"id": 1}' localhost:50051 multieko2.adresar.Adresar/GetAdresar
ERROR:
Code: NotFound
Message: no rows returned by a query that expected to return at least one row
grpcurl -plaintext -d '{"id": 2}' localhost:50051 multieko2.adresar.Adresar/GetAdresar
{
"id": "2",
"firma": "asdfasf",
"kz": " ",
"drc": " ",
"ulica": " ",
"psc": "sdfasdf",
"mesto": "asf",
"stat": "as",
"banka": "df",
"ucet": "asf",
"skladm": "f",
"ico": "f",
"kontakt": "f",
"telefon": "f",
"skladu": "f",
"fax": " "
}
grpcurl -plaintext -d '{
"firma": "New Firma",
"kz": "New KZ",
"drc": "New DRC",
"ulica": "New Ulica",
"psc": "New PSC",
"mesto": "New Mesto",
"stat": "New Stat",
"banka": "New Banka",
"ucet": "New Ucet",
"skladm": "New Skladm",
"ico": "New ICO",
"kontakt": "New Kontakt",
"telefon": "New Telefon",
"skladu": "New Skladu",
"fax": "New Fax"
}' localhost:50051 multieko2.adresar.Adresar/PostAdresar
{
"id": "43",
"firma": "New Firma",
"kz": "New KZ",
"drc": "New DRC",
"ulica": "New Ulica",
"psc": "New PSC",
"mesto": "New Mesto",
"stat": "New Stat",
"banka": "New Banka",
"ucet": "New Ucet",
"skladm": "New Skladm",
"ico": "New ICO",
"kontakt": "New Kontakt",
"telefon": "New Telefon",
"skladu": "New Skladu",
"fax": "New Fax"
}
grpcurl -plaintext -d '{
"id": 43,
"firma": "Updated Firma",
"kz": "Updated KZ",
"drc": "Updated DRC",
"ulica": "Updated Ulica",
"psc": "Updated PSC",
"mesto": "Updated Mesto",
"stat": "Updated Stat",
"banka": "Updated Banka",
"ucet": "Updated Ucet",
"skladm": "Updated Skladm",
"ico": "Updated ICO",
"kontakt": "Updated Kontakt",
"telefon": "Updated Telefon",
"skladu": "Updated Skladu",
"fax": "Updated Fax"
}' localhost:50051 multieko2.adresar.Adresar/PutAdresar
{
"id": "43",
"firma": "Updated Firma",
"kz": "Updated KZ",
"drc": "Updated DRC",
"ulica": "Updated Ulica",
"psc": "Updated PSC",
"mesto": "Updated Mesto",
"stat": "Updated Stat",
"banka": "Updated Banka",
"ucet": "Updated Ucet",
"skladm": "Updated Skladm",
"ico": "Updated ICO",
"kontakt": "Updated Kontakt",
"telefon": "Updated Telefon",
"skladu": "Updated Skladu",
"fax": "Updated Fax"
}
grpcurl -plaintext -d '{"id": 43}' localhost:50051 multieko2.adresar.Adresar/GetAdresar
{
"id": "43",
"firma": "Updated Firma",
"kz": "Updated KZ",
"drc": "Updated DRC",
"ulica": "Updated Ulica",
"psc": "Updated PSC",
"mesto": "Updated Mesto",
"stat": "Updated Stat",
"banka": "Updated Banka",
"ucet": "Updated Ucet",
"skladm": "Updated Skladm",
"ico": "Updated ICO",
"kontakt": "Updated Kontakt",
"telefon": "Updated Telefon",
"skladu": "Updated Skladu",
"fax": "Updated Fax"
}

View File

@@ -1,29 +0,0 @@
# TOTAL items in the adresar
grpcurl -plaintext localhost:50051 multieko2.adresar.Adresar/GetAdresarCount
{
"count": "5"
}
# Item at the given position. If there are 43 items, position 1 is the first item
grpcurl -plaintext -d '{"position": 1}' localhost:50051 multieko2.adresar.Adresar/GetAdresarByPosition
{
"id": "1",
"firma": "ks555",
"kz": "f",
"drc": "asdf",
"ulica": "as",
"psc": "f",
"mesto": "asf",
"stat": "as",
"banka": "fa",
"telefon": "a",
"skladu": "fd",
"fax": "asf"
}
# Item fetched by id. The first item was created and marked as deleted, therefore id number 1 shouldn't be fetched.
grpcurl -plaintext -d '{"id": 1}' localhost:50051 multieko2.adresar.Adresar/GetAdresar
ERROR:
Code: NotFound
Message: no rows returned by a query that expected to return at least one row
╭─    ~ ············································· 69 ✘
╰─

View File

@@ -1,15 +0,0 @@
// src/adresar/handlers.rs
pub mod post_adresar;
pub mod get_adresar;
pub mod put_adresar;
pub mod delete_adresar;
pub mod get_adresar_count;
pub mod get_adresar_by_position;
pub use post_adresar::post_adresar;
pub use get_adresar::get_adresar;
pub use put_adresar::put_adresar;
pub use delete_adresar::delete_adresar;
pub use get_adresar_count::get_adresar_count;
pub use get_adresar_by_position::get_adresar_by_position;

View File

@@ -1,27 +0,0 @@
// src/adresar/handlers/delete_adresar.rs
use tonic::Status;
use sqlx::PgPool;
use common::proto::multieko2::adresar::{DeleteAdresarRequest, DeleteAdresarResponse};
pub async fn delete_adresar(
db_pool: &PgPool,
request: DeleteAdresarRequest,
) -> Result<DeleteAdresarResponse, Status> {
let rows_affected = sqlx::query!(
r#"
UPDATE adresar
SET deleted = true
WHERE id = $1 AND deleted = false
"#,
request.id
)
.execute(db_pool)
.await
.map_err(|e| Status::internal(e.to_string()))?
.rows_affected();
Ok(DeleteAdresarResponse {
success: rows_affected > 0,
})
}

View File

@@ -1,63 +0,0 @@
// src/adresar/handlers/get_adresar.rs
use tonic::Status;
use sqlx::PgPool;
use crate::adresar::models::Adresar;
use common::proto::multieko2::adresar::{GetAdresarRequest, AdresarResponse};
pub async fn get_adresar(
db_pool: &PgPool,
request: GetAdresarRequest,
) -> Result<AdresarResponse, Status> {
let adresar = sqlx::query_as!(
Adresar,
r#"
SELECT
id,
deleted,
firma,
kz,
drc,
ulica,
psc,
mesto,
stat,
banka,
ucet,
skladm,
ico,
kontakt,
telefon,
skladu,
fax
FROM adresar
WHERE id = $1 AND deleted = false
"#,
request.id
)
.fetch_one(db_pool)
.await
.map_err(|e| match e {
sqlx::Error::RowNotFound => Status::not_found("Record not found"),
_ => Status::internal(format!("Database error: {}", e)),
})?;
Ok(AdresarResponse {
id: adresar.id,
firma: adresar.firma,
kz: adresar.kz.unwrap_or_default(),
drc: adresar.drc.unwrap_or_default(),
ulica: adresar.ulica.unwrap_or_default(),
psc: adresar.psc.unwrap_or_default(),
mesto: adresar.mesto.unwrap_or_default(),
stat: adresar.stat.unwrap_or_default(),
banka: adresar.banka.unwrap_or_default(),
ucet: adresar.ucet.unwrap_or_default(),
skladm: adresar.skladm.unwrap_or_default(),
ico: adresar.ico.unwrap_or_default(),
kontakt: adresar.kontakt.unwrap_or_default(),
telefon: adresar.telefon.unwrap_or_default(),
skladu: adresar.skladu.unwrap_or_default(),
fax: adresar.fax.unwrap_or_default(),
})
}

View File

@@ -1,35 +0,0 @@
// src/adresar/handlers/get_adresar_by_position.rs
use tonic::{Status};
use sqlx::PgPool;
use common::proto::multieko2::adresar::{AdresarResponse, GetAdresarRequest};
use common::proto::multieko2::common::PositionRequest;
use super::get_adresar;
pub async fn get_adresar_by_position(
db_pool: &PgPool,
request: PositionRequest,
) -> Result<AdresarResponse, Status> {
if request.position < 1 {
return Err(Status::invalid_argument("Position must be at least 1"));
}
// Find the ID of the Nth non-deleted record
let id: i64 = sqlx::query_scalar!(
r#"
SELECT id
FROM adresar
WHERE deleted = FALSE
ORDER BY id ASC
OFFSET $1
LIMIT 1
"#,
request.position - 1
)
.fetch_optional(db_pool)
.await
.map_err(|e| Status::internal(e.to_string()))?
.ok_or_else(|| Status::not_found("Position out of bounds"))?;
// Now fetch the complete record using the existing get_adresar function
get_adresar(db_pool, GetAdresarRequest { id }).await
}

View File

@@ -1,23 +0,0 @@
// src/adresar/handlers/get_adresar_count.rs
use tonic::Status;
use sqlx::PgPool;
use common::proto::multieko2::common::{CountResponse, Empty};
pub async fn get_adresar_count(
db_pool: &PgPool,
_request: Empty,
) -> Result<CountResponse, Status> {
let count: i64 = sqlx::query_scalar!(
r#"
SELECT COUNT(*) AS count
FROM adresar
WHERE deleted = FALSE
"#
)
.fetch_one(db_pool)
.await
.map_err(|e| Status::internal(e.to_string()))?
.unwrap_or(0);
Ok(CountResponse { count })
}

View File

@@ -1,99 +0,0 @@
// src/adresar/handlers/post_adresar.rs
use tonic::Status;
use sqlx::PgPool;
use crate::adresar::models::Adresar;
use common::proto::multieko2::adresar::{PostAdresarRequest, AdresarResponse};
// Helper function to sanitize inputs
fn sanitize_input(input: &str) -> Option<String> {
let trimmed = input.trim().to_string();
if trimmed.is_empty() {
None
} else {
Some(trimmed)
}
}
pub async fn post_adresar(
db_pool: &PgPool,
mut request: PostAdresarRequest,
) -> Result<AdresarResponse, Status> {
request.firma = request.firma.trim().to_string();
if request.firma.is_empty() {
return Err(Status::invalid_argument("Firma je povinne pole"));
}
// Sanitize optional fields
let kz = sanitize_input(&request.kz);
let drc = sanitize_input(&request.drc);
let ulica = sanitize_input(&request.ulica);
let psc = sanitize_input(&request.psc);
let mesto = sanitize_input(&request.mesto);
let stat = sanitize_input(&request.stat);
let banka = sanitize_input(&request.banka);
let ucet = sanitize_input(&request.ucet);
let skladm = sanitize_input(&request.skladm);
let ico = sanitize_input(&request.ico);
let kontakt = sanitize_input(&request.kontakt);
let telefon = sanitize_input(&request.telefon);
let skladu = sanitize_input(&request.skladu);
let fax = sanitize_input(&request.fax);
let adresar = sqlx::query_as!(
Adresar,
r#"
INSERT INTO adresar (
firma, kz, drc, ulica, psc, mesto, stat, banka, ucet,
skladm, ico, kontakt, telefon, skladu, fax, deleted
)
VALUES (
$1, $2, $3, $4, $5, $6, $7, $8, $9,
$10, $11, $12, $13, $14, $15, $16
)
RETURNING
id, deleted, firma, kz, drc, ulica, psc, mesto, stat,
banka, ucet, skladm, ico, kontakt, telefon, skladu, fax
"#,
request.firma,
kz,
drc,
ulica,
psc,
mesto,
stat,
banka,
ucet,
skladm,
ico,
kontakt,
telefon,
skladu,
fax,
false
)
.fetch_one(db_pool)
.await
.map_err(|e| Status::internal(e.to_string()))?;
Ok(AdresarResponse {
id: adresar.id,
// Do not include `deleted` in the response since it's not
// defined in the proto message.
firma: adresar.firma,
kz: adresar.kz.unwrap_or_default(),
drc: adresar.drc.unwrap_or_default(),
ulica: adresar.ulica.unwrap_or_default(),
psc: adresar.psc.unwrap_or_default(),
mesto: adresar.mesto.unwrap_or_default(),
stat: adresar.stat.unwrap_or_default(),
banka: adresar.banka.unwrap_or_default(),
ucet: adresar.ucet.unwrap_or_default(),
skladm: adresar.skladm.unwrap_or_default(),
ico: adresar.ico.unwrap_or_default(),
kontakt: adresar.kontakt.unwrap_or_default(),
telefon: adresar.telefon.unwrap_or_default(),
skladu: adresar.skladu.unwrap_or_default(),
fax: adresar.fax.unwrap_or_default(),
})
}

View File

@@ -1,122 +0,0 @@
// src/adresar/handlers/put_adresar.rs
use tonic::Status;
use sqlx::PgPool;
use crate::adresar::models::Adresar;
use common::proto::multieko2::adresar::{PutAdresarRequest, AdresarResponse};
// Add the same sanitize_input helper as in POST handler
fn sanitize_input(input: &str) -> Option<String> {
let trimmed = input.trim().to_string();
if trimmed.is_empty() {
None
} else {
Some(trimmed)
}
}
pub async fn put_adresar(
db_pool: &PgPool,
mut request: PutAdresarRequest,
) -> Result<AdresarResponse, Status> {
// Add validation for required fields like in POST
request.firma = request.firma.trim().to_string();
if request.firma.is_empty() {
return Err(Status::invalid_argument("Firma je povinne pole"));
}
// Sanitize optional fields like in POST
let kz = sanitize_input(&request.kz);
let drc = sanitize_input(&request.drc);
let ulica = sanitize_input(&request.ulica);
let psc = sanitize_input(&request.psc);
let mesto = sanitize_input(&request.mesto);
let stat = sanitize_input(&request.stat);
let banka = sanitize_input(&request.banka);
let ucet = sanitize_input(&request.ucet);
let skladm = sanitize_input(&request.skladm);
let ico = sanitize_input(&request.ico);
let kontakt = sanitize_input(&request.kontakt);
let telefon = sanitize_input(&request.telefon);
let skladu = sanitize_input(&request.skladu);
let fax = sanitize_input(&request.fax);
let adresar = sqlx::query_as!(
Adresar,
r#"
UPDATE adresar
SET
firma = $2,
kz = $3,
drc = $4,
ulica = $5,
psc = $6,
mesto = $7,
stat = $8,
banka = $9,
ucet = $10,
skladm = $11,
ico = $12,
kontakt = $13,
telefon = $14,
skladu = $15,
fax = $16
WHERE id = $1 AND deleted = FALSE
RETURNING
id,
deleted,
firma,
kz,
drc,
ulica,
psc,
mesto,
stat,
banka,
ucet,
skladm,
ico,
kontakt,
telefon,
skladu,
fax
"#,
request.id,
request.firma,
kz,
drc,
ulica,
psc,
mesto,
stat,
banka,
ucet,
skladm,
ico,
kontakt,
telefon,
skladu,
fax
)
.fetch_one(db_pool)
.await
.map_err(|e| Status::internal(e.to_string()))?;
Ok(AdresarResponse {
id: adresar.id,
firma: adresar.firma,
kz: adresar.kz.unwrap_or_default(),
drc: adresar.drc.unwrap_or_default(),
ulica: adresar.ulica.unwrap_or_default(),
psc: adresar.psc.unwrap_or_default(),
mesto: adresar.mesto.unwrap_or_default(),
stat: adresar.stat.unwrap_or_default(),
banka: adresar.banka.unwrap_or_default(),
ucet: adresar.ucet.unwrap_or_default(),
skladm: adresar.skladm.unwrap_or_default(),
ico: adresar.ico.unwrap_or_default(),
kontakt: adresar.kontakt.unwrap_or_default(),
telefon: adresar.telefon.unwrap_or_default(),
skladu: adresar.skladu.unwrap_or_default(),
fax: adresar.fax.unwrap_or_default(),
})
}

View File

@@ -1,7 +0,0 @@
// src/adresar/mod.rs
pub mod models;
pub mod handlers;
// #[cfg(test)]
// pub mod tests;

View File

@@ -1,23 +0,0 @@
// src/adresar/models.rs
use serde::{Deserialize, Serialize};
#[derive(Debug, Serialize, Deserialize)]
pub struct Adresar {
pub id: i64,
pub deleted: bool,
pub firma: String,
pub kz: Option<String>,
pub drc: Option<String>,
pub ulica: Option<String>,
pub psc: Option<String>,
pub mesto: Option<String>,
pub stat: Option<String>,
pub banka: Option<String>,
pub ucet: Option<String>,
pub skladm: Option<String>,
pub ico: Option<String>,
pub kontakt: Option<String>,
pub telefon: Option<String>,
pub skladu: Option<String>,
pub fax: Option<String>,
}

View File

@@ -1,7 +1,7 @@
grpcurl -plaintext -d '{
"username": "testuser2",
"email": "test2@example.com"
}' localhost:50051 multieko2.auth.AuthService/Register
}' localhost:50051 komp_ac.auth.AuthService/Register
{
"id": "5fa9bbce-85e0-4b06-8364-b561770c2fdd",
"username": "testuser2",

View File

@@ -3,7 +3,7 @@
"email": "test3@example.com",
"password": "your_password",
"password_confirmation": "your_password"
}' localhost:50051 multieko2.auth.AuthService/Register
}' localhost:50051 komp_ac.auth.AuthService/Register
{
"id": "96d2fd35-b39d-4c05-916a-66134453d34c",
"username": "testuser3",
@@ -12,14 +12,14 @@
}
grpcurl -plaintext -d '{
"identifier": "testuser3"
}' localhost:50051 multieko2.auth.AuthService/Login
}' localhost:50051 komp_ac.auth.AuthService/Login
ERROR:
Code: Unauthenticated
Message: Invalid credentials
grpcurl -plaintext -d '{
"identifier": "testuser3",
"password": "your_password"
}' localhost:50051 multieko2.auth.AuthService/Login
}' localhost:50051 komp_ac.auth.AuthService/Login
{
"accessToken": "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJzdWIiOiI5NmQyZmQzNS1iMzlkLTRjMDUtOTE2YS02NjEzNDQ1M2QzNGMiLCJleHAiOjE3NDI5ODE2MTAsInJvbGUiOiJhY2NvdW50YW50In0.78VIR3X4QZohzeI5x3xmkmqcICTusOC6PELPohMV-k8",
"tokenType": "Bearer",
@@ -30,7 +30,7 @@ ERROR:
grpcurl -plaintext -d '{
"username": "testuser4",
"email": "test4@example.com"
}' localhost:50051 multieko2.auth.AuthService/Register
}' localhost:50051 komp_ac.auth.AuthService/Register
{
"id": "413d7ecc-f231-48af-8c5a-566b1dc2bf0b",
"username": "testuser4",
@@ -39,7 +39,7 @@ ERROR:
}
grpcurl -plaintext -d '{
"identifier": "test4@example.com"
}' localhost:50051 multieko2.auth.AuthService/Login
}' localhost:50051 komp_ac.auth.AuthService/Login
{
"accessToken": "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJzdWIiOiI0MTNkN2VjYy1mMjMxLTQ4YWYtOGM1YS01NjZiMWRjMmJmMGIiLCJleHAiOjE3NDI5ODE3MDEsInJvbGUiOiJhY2NvdW50YW50In0.4Hzu3tTZRNGHnBSgeCbGy2tFTl8EzpPdXBhcW8kuIc8",
"tokenType": "Bearer",
@@ -47,5 +47,5 @@ ERROR:
"userId": "413d7ecc-f231-48af-8c5a-566b1dc2bf0b",
"role": "accountant"
}
╭─    ~/Doc/pr/multieko2/server    auth ······ ✔
╭─    ~/Doc/pr/komp_ac/server    auth ······ ✔
╰─

View File

@@ -3,7 +3,7 @@
"email": "default@example.com",
"password": "password",
"password_confirmation": "password"
}' localhost:50051 multieko2.auth.AuthService/Register
}' localhost:50051 komp_ac.auth.AuthService/Register
{
"id": "73017288-af41-49d8-b4cb-2ac2109b38a1",
"username": "testuser_default_role",
@@ -16,7 +16,7 @@
"password": "password",
"password_confirmation": "password",
"role": "admin"
}' localhost:50051 multieko2.auth.AuthService/Register
}' localhost:50051 komp_ac.auth.AuthService/Register
{
"id": "9437e318-818b-4c34-822d-c2d6601b835e",
"username": "testuser_admin_role",
@@ -29,9 +29,9 @@
"password": "password",
"password_confirmation": "password",
"role": "invalid_role_name"
}' localhost:50051 multieko2.auth.AuthService/Register
}' localhost:50051 komp_ac.auth.AuthService/Register
ERROR:
Code: InvalidArgument
Message: Invalid role specified: 'invalid_role_name'
╭─    ~/Doc/pr/multieko2/server    main +3 !1 
╭─    ~/Doc/pr/komp_ac/server    main +3 !1 
╰─

View File

@@ -3,7 +3,7 @@ use bcrypt::verify;
use tonic::{Response, Status};
use crate::db::PgPool;
use crate::auth::logic::jwt; // Fixed import path
use common::proto::multieko2::auth::{LoginRequest, LoginResponse};
use common::proto::komp_ac::auth::{LoginRequest, LoginResponse};
pub async fn login(
pool: &PgPool,

View File

@@ -1,7 +1,7 @@
// src/auth/handlers/register.rs
use bcrypt::{hash, DEFAULT_COST};
use tonic::{Response, Status};
use common::proto::multieko2::auth::{RegisterRequest, AuthResponse};
use common::proto::komp_ac::auth::{RegisterRequest, AuthResponse};
use crate::db::PgPool;
use crate::auth::models::AuthError;

View File

@@ -3,6 +3,8 @@
use tower::ServiceBuilder;
use crate::auth::logic::rbac;
// TODO: redesign this. adresar and uctovnictvo no longer exist, but we are keeping this code
// for reference. Please adjust the RBAC rules accordingly in the future.
pub async fn run_server(db_pool: sqlx::PgPool) -> Result<(), Box<dyn std::error::Error>> {
// ... existing setup code ...

View File

@@ -8,8 +8,6 @@ use tracing::{error, info, warn};
use tantivy::schema::Schema;
use crate::search_schema;
const INDEX_DIR: &str = "./tantivy_indexes";
/// Defines the commands that can be sent to the indexer task.
#[derive(Debug)]
pub enum IndexCommand {

Some files were not shown because too many files have changed in this diff.