Compare commits
80 Commits
| Author | SHA1 | Date |
|---|---|---|
| | d1ebe4732f | |
| | 7b7f3ca05a | |
| | 234613f831 | |
| | f6d84e70cc | |
| | 5cd324b6ae | |
| | a7457f5749 | |
| | a5afc75099 | |
| | 625c9b3e09 | |
| | e20623ed53 | |
| | aa9adf7348 | |
| | 2e82aba0d1 | |
| | b7a3f0f8d9 | |
| | 38c82389f7 | |
| | cb0a2bee17 | |
| | dc99131794 | |
| | 5c23f61a10 | |
| | f87e3c03cb | |
| | d346670839 | |
| | 560d8b7234 | |
| | b297c2b311 | |
| | d390c567d5 | |
| | 029e614b9c | |
| | f9a78e4eec | |
| | d8758f7531 | |
| | 4e86ecff84 | |
| | 070d091e07 | |
| | 7403b3c3f8 | |
| | 1b1e7b7205 | |
| | 1b8f19f1ce | |
| | 2a14eadf34 | |
| | fd36cd5795 | |
| | f4286ac3c9 | |
| | 92d5eb4844 | |
| | 87b9f6ab87 | |
| | 06d98aab5c | |
| | 298f56a53c | |
| | 714a5f2f1c | |
| | 4e29d0084f | |
| | 63f1b4da2e | |
| | 9477f53432 | |
| | ed786f087c | |
| | 8e22ea05ff | |
| | 8414657224 | |
| | e25213ed1b | |
| | 4843b0778c | |
| | f5fae98c69 | |
| | 6faf0a4a31 | |
| | 011fafc0ff | |
| | 8ebe74484c | |
| | 3eb9523103 | |
| | 3dfa922b9e | |
| | 248d54a30f | |
| | b30fef4ccd | |
| | a9c4527318 | |
| | c31f08d5b8 | |
| | 9e0fa9ddb1 | |
| | 8fcd28832d | |
| | cccf029464 | |
| | 512e7fb9e7 | |
| | 0e69df8282 | |
| | eb5532c200 | |
| | 49ed1dfe33 | |
| | 62d1c3f7f5 | |
| | b49dce3334 | |
| | 8ace9bc4d1 | |
| | ce490007ed | |
| | eb96c64e26 | |
| | 2ac96a8486 | |
| | b8e6cc22af | |
| | 634a01f618 | |
| | 6abea062ba | |
| | f50887a326 | |
| | 3c0af05a3c | |
| | c9131d4457 | |
| | 2af79a3ef2 | |
| | afd9228efa | |
| | 495d77fda5 | |
| | 679bb3b6ab | |
| | 350c522d19 | |
| | 4760f42589 | |
1 .gitignore (vendored)
@@ -1,3 +1,4 @@
 /target
 .env
 /tantivy_indexes
+server/tantivy_indexes
315 Cargo.lock (generated)
@@ -65,6 +65,17 @@ version = "2.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627"
+
+[[package]]
+name = "ahash"
+version = "0.7.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "891477e0c6a8957309ee5c45a6368af3ae14bb510732d2684ffa19af310920f9"
+dependencies = [
+ "getrandom 0.2.15",
+ "once_cell",
+ "version_check",
+]
 
 [[package]]
 name = "ahash"
 version = "0.8.11"
@@ -125,6 +136,12 @@ version = "0.5.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b"
+
+[[package]]
+name = "arrayvec"
+version = "0.7.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50"
 
 [[package]]
 name = "as_derive_utils"
 version = "0.11.0"
@@ -146,6 +163,28 @@ dependencies = [
  "abi_stable",
 ]
 
+[[package]]
+name = "async-stream"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476"
+dependencies = [
+ "async-stream-impl",
+ "futures-core",
+ "pin-project-lite",
+]
+
+[[package]]
+name = "async-stream-impl"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.100",
+]
+
 [[package]]
 name = "async-trait"
 version = "0.1.88"
@@ -312,6 +351,18 @@ dependencies = [
  "crunchy",
 ]
 
+[[package]]
+name = "bitvec"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c"
+dependencies = [
+ "funty",
+ "radium",
+ "tap",
+ "wyz",
+]
+
 [[package]]
 name = "block-buffer"
 version = "0.10.4"
@@ -356,12 +407,57 @@ dependencies = [
  "syn 2.0.100",
 ]
 
+[[package]]
+name = "borsh"
+version = "1.5.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ad8646f98db542e39fc66e68a20b2144f6a732636df7c2354e74645faaa433ce"
+dependencies = [
+ "borsh-derive",
+ "cfg_aliases",
+]
+
+[[package]]
+name = "borsh-derive"
+version = "1.5.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fdd1d3c0c2f5833f22386f252fe8ed005c7f59fdcddeef025c01b4c3b9fd9ac3"
+dependencies = [
+ "once_cell",
+ "proc-macro-crate",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.100",
+]
+
 [[package]]
 name = "bumpalo"
 version = "3.17.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf"
+
+[[package]]
+name = "bytecheck"
+version = "0.6.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "23cdc57ce23ac53c931e88a43d06d070a6fd142f2617be5855eb75efc9beb1c2"
+dependencies = [
+ "bytecheck_derive",
+ "ptr_meta",
+ "simdutf8",
+]
+
+[[package]]
+name = "bytecheck_derive"
+version = "0.6.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3db406d29fbcd95542e92559bed4d8ad92636d1ca8b3b72ede10b4bcc010e659"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 1.0.109",
+]
 
 [[package]]
 name = "byteorder"
 version = "1.5.0"
@@ -412,6 +508,12 @@ version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+
+[[package]]
+name = "cfg_aliases"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"
 
 [[package]]
 name = "chrono"
 version = "0.4.40"
@@ -447,13 +549,17 @@ dependencies = [
  "crossterm",
  "dirs 6.0.0",
  "dotenvy",
+ "futures",
  "lazy_static",
  "prost",
+ "prost-types",
  "ratatui",
+ "rstest",
  "serde",
  "serde_json",
  "time",
  "tokio",
+ "tokio-test",
  "toml",
  "tonic",
  "tracing",
@@ -461,6 +567,7 @@ dependencies = [
  "tui-textarea",
  "unicode-segmentation",
  "unicode-width 0.2.0",
+ "uuid",
 ]
 
 [[package]]
@@ -487,7 +594,9 @@ name = "common"
 version = "0.3.13"
 dependencies = [
  "prost",
+ "prost-types",
  "serde",
+ "tantivy",
  "tonic",
  "tonic-build",
 ]
@@ -964,6 +1073,27 @@ dependencies = [
  "windows-sys 0.52.0",
 ]
 
+[[package]]
+name = "funty"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c"
+
+[[package]]
+name = "futures"
+version = "0.3.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876"
+dependencies = [
+ "futures-channel",
+ "futures-core",
+ "futures-executor",
+ "futures-io",
+ "futures-sink",
+ "futures-task",
+ "futures-util",
+]
+
 [[package]]
 name = "futures-channel"
 version = "0.3.31"
@@ -1043,6 +1173,7 @@ version = "0.3.31"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81"
 dependencies = [
+ "futures-channel",
  "futures-core",
  "futures-io",
  "futures-macro",
@@ -1145,6 +1276,9 @@ name = "hashbrown"
 version = "0.12.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
+dependencies = [
+ "ahash 0.7.8",
+]
 
 [[package]]
 name = "hashbrown"
@@ -1152,7 +1286,7 @@ version = "0.14.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1"
 dependencies = [
- "ahash",
+ "ahash 0.8.11",
  "allocator-api2",
  "serde",
 ]
@@ -1656,7 +1790,7 @@ version = "0.7.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6e14eda50a3494b3bf7b9ce51c52434a761e383d7238ce1dd5dcec2fbc13e9fb"
 dependencies = [
- "ahash",
+ "ahash 0.8.11",
  "dashmap",
  "hashbrown 0.14.5",
  "serde",
@@ -2250,7 +2384,7 @@ version = "0.12.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ac98773b7109bc75f475ab5a134c9b64b87e59d776d31098d8f346922396a477"
 dependencies = [
- "arrayvec",
+ "arrayvec 0.5.2",
  "typed-arena",
  "unicode-width 0.1.14",
 ]
@@ -2357,6 +2491,26 @@ dependencies = [
  "prost",
 ]
 
+[[package]]
+name = "ptr_meta"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0738ccf7ea06b608c10564b31debd4f5bc5e197fc8bfe088f68ae5ce81e7a4f1"
+dependencies = [
+ "ptr_meta_derive",
+]
+
+[[package]]
+name = "ptr_meta_derive"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "16b845dbfca988fa33db069c0e230574d15a3088f147a87b64c7589eb662c9ac"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 1.0.109",
+]
+
 [[package]]
 name = "quickscope"
 version = "0.2.0"
@@ -2382,6 +2536,12 @@ version = "5.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5"
+
+[[package]]
+name = "radium"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09"
 
 [[package]]
 name = "radix_fmt"
 version = "1.0.0"
@@ -2573,6 +2733,15 @@ version = "1.9.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ba39f3699c378cd8970968dcbff9c43159ea4cfbd88d43c00b22f2ef10a435d2"
+
+[[package]]
+name = "rend"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "71fe3824f5629716b1589be05dacd749f6aa084c87e00e016714a8cdfccc997c"
+dependencies = [
+ "bytecheck",
+]
 
 [[package]]
 name = "repr_offset"
 version = "0.2.2"
@@ -2596,6 +2765,35 @@ dependencies = [
  "windows-sys 0.52.0",
 ]
 
+[[package]]
+name = "rkyv"
+version = "0.7.45"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9008cd6385b9e161d8229e1f6549dd23c3d022f132a2ea37ac3a10ac4935779b"
+dependencies = [
+ "bitvec",
+ "bytecheck",
+ "bytes",
+ "hashbrown 0.12.3",
+ "ptr_meta",
+ "rend",
+ "rkyv_derive",
+ "seahash",
+ "tinyvec",
+ "uuid",
+]
+
+[[package]]
+name = "rkyv_derive"
+version = "0.7.45"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "503d1d27590a2b0a3a4ca4c94755aa2875657196ecbf401a42eff41d7de532c0"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 1.0.109",
+]
+
 [[package]]
 name = "rsa"
 version = "0.9.8"
@@ -2656,6 +2854,32 @@ dependencies = [
  "serde_derive",
 ]
 
+[[package]]
+name = "rust_decimal"
+version = "1.37.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b203a6425500a03e0919c42d3c47caca51e79f1132046626d2c8871c5092035d"
+dependencies = [
+ "arrayvec 0.7.6",
+ "borsh",
+ "bytes",
+ "num-traits",
+ "rand 0.8.5",
+ "rkyv",
+ "serde",
+ "serde_json",
+]
+
+[[package]]
+name = "rust_decimal_macros"
+version = "1.37.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f6268b74858287e1a062271b988a0c534bf85bbeb567fe09331bf40ed78113d5"
+dependencies = [
+ "quote",
+ "syn 2.0.100",
+]
+
 [[package]]
 name = "rustc-demangle"
 version = "0.1.24"
@@ -2730,6 +2954,12 @@ version = "1.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
+
+[[package]]
+name = "seahash"
+version = "4.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b"
 
 [[package]]
 name = "search"
 version = "0.3.13"
@@ -2739,6 +2969,7 @@ dependencies = [
  "prost",
  "serde",
  "serde_json",
+ "sqlx",
  "tantivy",
  "tokio",
  "tonic",
@@ -2838,11 +3069,17 @@ dependencies = [
  "common",
  "dashmap",
  "dotenvy",
+ "futures",
  "jsonwebtoken",
  "lazy_static",
  "prost",
+ "prost-types",
+ "rand 0.9.1",
  "regex",
  "rstest",
+ "rust-stemmers",
+ "rust_decimal",
+ "rust_decimal_macros",
  "search",
  "serde",
  "serde_json",
@@ -2937,6 +3174,12 @@ dependencies = [
  "rand_core 0.6.4",
 ]
 
+[[package]]
+name = "simdutf8"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e3a9fe34e3e7a50316060351f37187a3f546bce95496156754b601a5fa71b76e"
+
 [[package]]
 name = "simple_asn1"
 version = "0.6.3"
@@ -3017,9 +3260,9 @@ dependencies = [
 
 [[package]]
 name = "sqlx"
-version = "0.8.5"
+version = "0.8.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f3c3a85280daca669cfd3bcb68a337882a8bc57ec882f72c5d13a430613a738e"
+checksum = "1fefb893899429669dcdd979aff487bd78f4064e5e7907e4269081e0ef7d97dc"
 dependencies = [
  "sqlx-core",
  "sqlx-macros",
@@ -3030,9 +3273,9 @@ dependencies = [
 
 [[package]]
 name = "sqlx-core"
-version = "0.8.5"
+version = "0.8.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f743f2a3cea30a58cd479013f75550e879009e3a02f616f18ca699335aa248c3"
+checksum = "ee6798b1838b6a0f69c007c133b8df5866302197e404e8b6ee8ed3e3a5e68dc6"
 dependencies = [
  "base64",
  "bytes",
@@ -3053,6 +3296,7 @@ dependencies = [
  "native-tls",
  "once_cell",
  "percent-encoding",
+ "rust_decimal",
  "serde",
  "serde_json",
  "sha2",
@@ -3068,9 +3312,9 @@ dependencies = [
 
 [[package]]
 name = "sqlx-macros"
-version = "0.8.5"
+version = "0.8.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7f4200e0fde19834956d4252347c12a083bdcb237d7a1a1446bffd8768417dce"
+checksum = "a2d452988ccaacfbf5e0bdbc348fb91d7c8af5bee192173ac3636b5fb6e6715d"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -3081,9 +3325,9 @@ dependencies = [
 
 [[package]]
 name = "sqlx-macros-core"
-version = "0.8.5"
+version = "0.8.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "882ceaa29cade31beca7129b6beeb05737f44f82dbe2a9806ecea5a7093d00b7"
+checksum = "19a9c1841124ac5a61741f96e1d9e2ec77424bf323962dd894bdb93f37d5219b"
 dependencies = [
  "dotenvy",
  "either",
@@ -3100,16 +3344,15 @@ dependencies = [
  "sqlx-postgres",
  "sqlx-sqlite",
  "syn 2.0.100",
- "tempfile",
  "tokio",
  "url",
 ]
 
 [[package]]
 name = "sqlx-mysql"
-version = "0.8.5"
+version = "0.8.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0afdd3aa7a629683c2d750c2df343025545087081ab5942593a5288855b1b7a7"
+checksum = "aa003f0038df784eb8fecbbac13affe3da23b45194bd57dba231c8f48199c526"
 dependencies = [
  "atoi",
  "base64",
@@ -3137,6 +3380,7 @@ dependencies = [
  "percent-encoding",
  "rand 0.8.5",
  "rsa",
+ "rust_decimal",
  "serde",
  "sha1",
  "sha2",
@@ -3152,9 +3396,9 @@ dependencies = [
 
 [[package]]
 name = "sqlx-postgres"
-version = "0.8.5"
+version = "0.8.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a0bedbe1bbb5e2615ef347a5e9d8cd7680fb63e77d9dafc0f29be15e53f1ebe6"
+checksum = "db58fcd5a53cf07c184b154801ff91347e4c30d17a3562a635ff028ad5deda46"
 dependencies = [
  "atoi",
  "base64",
@@ -3177,6 +3421,7 @@ dependencies = [
  "memchr",
  "once_cell",
  "rand 0.8.5",
+ "rust_decimal",
  "serde",
  "serde_json",
  "sha2",
@@ -3192,9 +3437,9 @@ dependencies = [
 
 [[package]]
 name = "sqlx-sqlite"
-version = "0.8.5"
+version = "0.8.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c26083e9a520e8eb87a06b12347679b142dc2ea29e6e409f805644a7a979a5bc"
+checksum = "c2d12fe70b2c1b4401038055f90f151b78208de1f9f89a7dbfd41587a10c3eea"
 dependencies = [
  "atoi",
  "chrono",
@@ -3554,6 +3799,12 @@ dependencies = [
  "serde",
 ]
 
+[[package]]
+name = "tap"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369"
+
 [[package]]
 name = "tempfile"
 version = "3.19.1"
@@ -3724,6 +3975,19 @@ dependencies = [
  "tokio",
 ]
 
+[[package]]
+name = "tokio-test"
+version = "0.4.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2468baabc3311435b55dd935f702f42cd1b8abb7e754fb7dfb16bd36aa88f9f7"
+dependencies = [
+ "async-stream",
+ "bytes",
+ "futures-core",
+ "tokio",
+ "tokio-stream",
+]
+
 [[package]]
 name = "tokio-util"
 version = "0.7.14"
@@ -4054,12 +4318,14 @@ checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be"
 
 [[package]]
 name = "uuid"
-version = "1.16.0"
+version = "1.17.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "458f7a779bf54acc9f347480ac654f68407d3aab21269a6e3c9f922acd9e2da9"
+checksum = "3cf4199d1e5d15ddd86a694e4d0dffa9c323ce759fea589f00fef9d81cc1931d"
 dependencies = [
  "getrandom 0.3.2",
+ "js-sys",
  "serde",
+ "wasm-bindgen",
 ]
 
 [[package]]
@@ -4500,6 +4766,15 @@ version = "0.5.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51"
+
+[[package]]
+name = "wyz"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed"
+dependencies = [
+ "tap",
+]
 
 [[package]]
 name = "yoke"
 version = "0.7.5"
@@ -24,6 +24,7 @@ tokio = { version = "1.44.2", features = ["full"] }
 tonic = "0.13.0"
 prost = "0.13.5"
 async-trait = "0.1.88"
+prost-types = "0.13.0"
 
 # Data Handling & Serialization
 serde = { version = "1.0.219", features = ["derive"] }
@@ -9,6 +9,7 @@ anyhow = "1.0.98"
 async-trait = "0.1.88"
 common = { path = "../common" }
 
+prost-types = { workspace = true }
 crossterm = "0.28.1"
 dirs = "6.0.0"
 dotenvy = "0.15.7"
@@ -30,3 +31,9 @@ unicode-width = "0.2.0"
 [features]
 default = []
 ui-debug = []
+
+[dev-dependencies]
+rstest = "0.25.0"
+tokio-test = "0.4.4"
+uuid = { version = "1.17.0", features = ["v4"] }
+futures = "0.3.31"
@@ -17,6 +17,7 @@ toggle_buffer_list = ["ctrl+b"]
 next_field = ["Tab"]
 prev_field = ["Shift+Tab"]
 exit_table_scroll = ["esc"]
+open_search = ["ctrl+f"]
 
 [keybindings.common]
 save = ["ctrl+s"]
@@ -69,10 +70,11 @@ prev_field = ["shift+enter"]
 exit = ["esc", "ctrl+e"]
 delete_char_forward = ["delete"]
 delete_char_backward = ["backspace"]
-move_left = ["left"]
+move_left = [""]
 move_right = ["right"]
 suggestion_down = ["ctrl+n", "tab"]
 suggestion_up = ["ctrl+p", "shift+tab"]
+trigger_autocomplete = ["left"]
 
 [keybindings.command]
 exit_command_mode = ["ctrl+g", "esc"]
@@ -5,6 +5,7 @@ pub mod text_editor;
 pub mod background;
 pub mod dialog;
 pub mod autocomplete;
+pub mod search_palette;
 pub mod find_file_palette;
 
 pub use command_line::*;
@@ -13,4 +14,5 @@ pub use text_editor::*;
 pub use background::*;
 pub use dialog::*;
 pub use autocomplete::*;
+pub use search_palette::*;
 pub use find_file_palette::*;
@@ -1,6 +1,8 @@
 // src/components/common/autocomplete.rs
 
 use crate::config::colors::themes::Theme;
+use crate::state::pages::form::FormState;
+use common::proto::multieko2::search::search_response::Hit;
 use ratatui::{
     layout::Rect,
     style::{Color, Modifier, Style},
@@ -9,7 +11,8 @@ use ratatui::{
 };
 use unicode_width::UnicodeWidthStr;
 
-/// Renders an opaque dropdown list for autocomplete suggestions.
+/// Renders an opaque dropdown list for simple string-based suggestions.
+/// THIS IS THE RESTORED FUNCTION.
 pub fn render_autocomplete_dropdown(
     f: &mut Frame,
     input_rect: Rect,
@@ -21,39 +24,32 @@ pub fn render_autocomplete_dropdown(
     if suggestions.is_empty() {
         return;
     }
-    // --- Calculate Dropdown Size & Position ---
-    let max_suggestion_width = suggestions.iter().map(|s| s.width()).max().unwrap_or(0) as u16;
+    let max_suggestion_width =
+        suggestions.iter().map(|s| s.width()).max().unwrap_or(0) as u16;
     let horizontal_padding: u16 = 2;
     let dropdown_width = (max_suggestion_width + horizontal_padding).max(10);
     let dropdown_height = (suggestions.len() as u16).min(5);
 
     let mut dropdown_area = Rect {
-        x: input_rect.x, // Align horizontally with input
-        y: input_rect.y + 1, // Position directly below input
+        x: input_rect.x,
+        y: input_rect.y + 1,
         width: dropdown_width,
         height: dropdown_height,
     };
 
-    // --- Clamping Logic (prevent rendering off-screen) ---
-    // Clamp vertically (if it goes below the frame)
     if dropdown_area.bottom() > frame_area.height {
-        dropdown_area.y = input_rect.y.saturating_sub(dropdown_height); // Try rendering above
+        dropdown_area.y = input_rect.y.saturating_sub(dropdown_height);
     }
-    // Clamp horizontally (if it goes past the right edge)
     if dropdown_area.right() > frame_area.width {
         dropdown_area.x = frame_area.width.saturating_sub(dropdown_width);
     }
-    // Ensure x is not negative (if clamping pushes it left)
     dropdown_area.x = dropdown_area.x.max(0);
-    // Ensure y is not negative (if clamping pushes it up)
     dropdown_area.y = dropdown_area.y.max(0);
-    // --- End Clamping ---
 
-    // Render a solid background block first to ensure opacity
-    let background_block = Block::default().style(Style::default().bg(Color::DarkGray));
+    let background_block =
+        Block::default().style(Style::default().bg(Color::DarkGray));
     f.render_widget(background_block, dropdown_area);
 
-    // Create list items, ensuring each has a defined background
     let items: Vec<ListItem> = suggestions
         .iter()
         .enumerate()
@@ -61,30 +57,97 @@ pub fn render_autocomplete_dropdown(
             let is_selected = selected_index == Some(i);
             let s_width = s.width() as u16;
             let padding_needed = dropdown_width.saturating_sub(s_width);
-            let padded_s = format!("{}{}", s, " ".repeat(padding_needed as usize));
+            let padded_s =
+                format!("{}{}", s, " ".repeat(padding_needed as usize));
+
             ListItem::new(padded_s).style(if is_selected {
                 Style::default()
-                    .fg(theme.bg) // Text color on highlight
-                    .bg(theme.highlight) // Highlight background
+                    .fg(theme.bg)
+                    .bg(theme.highlight)
                     .add_modifier(Modifier::BOLD)
             } else {
-                // Style for non-selected items (matching background block)
-                Style::default()
-                    .fg(theme.fg) // Text color on gray
-                    .bg(Color::DarkGray) // Explicit gray background
+                Style::default().fg(theme.fg).bg(Color::DarkGray)
             })
         })
         .collect();
 
-    // Create the list widget (without its own block)
     let list = List::new(items);
+    let mut list_state = ListState::default();
+    list_state.select(selected_index);
 
-    // State for managing selection highlight (still needed for logic)
-    let mut profile_list_state = ListState::default();
-    profile_list_state.select(selected_index);
-
-    // Render the list statefully *over* the background block
-    f.render_stateful_widget(list, dropdown_area, &mut profile_list_state);
+    f.render_stateful_widget(list, dropdown_area, &mut list_state);
 }
+
+/// Renders an opaque dropdown list for rich `Hit`-based suggestions.
+/// RENAMED from render_rich_autocomplete_dropdown
+pub fn render_hit_autocomplete_dropdown(
+    f: &mut Frame,
+    input_rect: Rect,
+    frame_area: Rect,
+    theme: &Theme,
+    suggestions: &[Hit],
+    selected_index: Option<usize>,
+    form_state: &FormState,
+) {
+    if suggestions.is_empty() {
+        return;
+    }
+
+    let display_names: Vec<String> = suggestions
+        .iter()
+        .map(|hit| form_state.get_display_name_for_hit(hit))
+        .collect();
+
+    let max_suggestion_width =
+        display_names.iter().map(|s| s.width()).max().unwrap_or(0) as u16;
+    let horizontal_padding: u16 = 2;
+    let dropdown_width = (max_suggestion_width + horizontal_padding).max(10);
+    let dropdown_height = (suggestions.len() as u16).min(5);
+
+    let mut dropdown_area = Rect {
+        x: input_rect.x,
+        y: input_rect.y + 1,
+        width: dropdown_width,
+        height: dropdown_height,
+    };
+
+    if dropdown_area.bottom() > frame_area.height {
+        dropdown_area.y = input_rect.y.saturating_sub(dropdown_height);
+    }
+    if dropdown_area.right() > frame_area.width {
+        dropdown_area.x = frame_area.width.saturating_sub(dropdown_width);
+    }
+    dropdown_area.x = dropdown_area.x.max(0);
+    dropdown_area.y = dropdown_area.y.max(0);
+
+    let background_block =
+        Block::default().style(Style::default().bg(Color::DarkGray));
+    f.render_widget(background_block, dropdown_area);
+
+    let items: Vec<ListItem> = display_names
+        .iter()
+        .enumerate()
+        .map(|(i, s)| {
+            let is_selected = selected_index == Some(i);
+            let s_width = s.width() as u16;
+            let padding_needed = dropdown_width.saturating_sub(s_width);
+            let padded_s =
+                format!("{}{}", s, " ".repeat(padding_needed as usize));
+
+            ListItem::new(padded_s).style(if is_selected {
+                Style::default()
+                    .fg(theme.bg)
+                    .bg(theme.highlight)
+                    .add_modifier(Modifier::BOLD)
+            } else {
+                Style::default().fg(theme.fg).bg(Color::DarkGray)
+            })
+        })
+        .collect();
+
+    let list = List::new(items);
+    let mut list_state = ListState::default();
+    list_state.select(selected_index);
+
+    f.render_stateful_widget(list, dropdown_area, &mut list_state);
+}
121 client/src/components/common/search_palette.rs (new file)
@@ -0,0 +1,121 @@
+// src/components/common/search_palette.rs
+
+use crate::config::colors::themes::Theme;
+use crate::state::app::search::SearchState;
+use ratatui::{
+    layout::{Constraint, Direction, Layout, Rect},
+    style::{Modifier, Style},
+    text::{Line, Span},
+    widgets::{Block, Borders, Clear, List, ListItem, Paragraph},
+    Frame,
+};
+
+/// Renders the search palette dialog over the main UI.
+pub fn render_search_palette(
+    f: &mut Frame,
+    area: Rect,
+    theme: &Theme,
+    state: &SearchState,
+) {
+    // --- Dialog Area Calculation ---
+    let height = (area.height as f32 * 0.7).min(30.0) as u16;
+    let width = (area.width as f32 * 0.6).min(100.0) as u16;
+    let dialog_area = Rect {
+        x: area.x + (area.width - width) / 2,
+        y: area.y + (area.height - height) / 4,
+        width,
+        height,
+    };
+
+    f.render_widget(Clear, dialog_area); // Clear background
+
+    let block = Block::default()
+        .title(format!(" Search in '{}' ", state.table_name))
+        .borders(Borders::ALL)
+        .border_style(Style::default().fg(theme.accent));
+    f.render_widget(block.clone(), dialog_area);
+
+    // --- Inner Layout (Input + Results) ---
+    let inner_chunks = Layout::default()
+        .direction(Direction::Vertical)
+        .margin(1)
+        .constraints([
+            Constraint::Length(3), // For input box
+            Constraint::Min(0),    // For results list
+        ])
+        .split(dialog_area);
+
+    // --- Render Input Box ---
+    let input_block = Block::default()
+        .title("Query")
+        .borders(Borders::ALL)
+        .border_style(Style::default().fg(theme.border));
+    let input_text = Paragraph::new(state.input.as_str())
+        .block(input_block)
+        .style(Style::default().fg(theme.fg));
+    f.render_widget(input_text, inner_chunks[0]);
+    // Set cursor position
+    f.set_cursor(
+        inner_chunks[0].x + state.cursor_position as u16 + 1,
+        inner_chunks[0].y + 1,
+    );
+
+    // --- Render Results List ---
+    if state.is_loading {
+        let loading_p = Paragraph::new("Searching...")
+            .style(Style::default().fg(theme.fg).add_modifier(Modifier::ITALIC));
+        f.render_widget(loading_p, inner_chunks[1]);
+    } else {
+        let list_items: Vec<ListItem> = state
+            .results
+            .iter()
+            .map(|hit| {
+                // Parse the JSON string to make it readable
+                let content_summary = match serde_json::from_str::<
+                    serde_json::Value,
+                >(&hit.content_json)
+                {
+                    Ok(json) => {
+                        if let Some(obj) = json.as_object() {
+                            // Create a summary from the first few non-null string values
+                            obj.values()
+                                .filter_map(|v| v.as_str())
+                                .filter(|s| !s.is_empty())
+                                .take(3)
+                                .collect::<Vec<_>>()
+                                .join(" | ")
+                        } else {
+                            "Non-object JSON".to_string()
+                        }
+                    }
+                    Err(_) => "Invalid JSON content".to_string(),
+                };
+
+                let line = Line::from(vec![
+                    Span::styled(
+                        format!("{:<4.2} ", hit.score),
+                        Style::default().fg(theme.accent),
+                    ),
+                    Span::raw(content_summary),
+                ]);
+                ListItem::new(line)
+            })
+            .collect();
+
+        let results_list = List::new(list_items)
+            .block(Block::default().title("Results"))
+            .highlight_style(
+                Style::default()
+                    .bg(theme.highlight)
+                    .fg(theme.bg)
+                    .add_modifier(Modifier::BOLD),
+            )
+            .highlight_symbol(">> ");
+
+        // We need a mutable ListState to render the selection
+        let mut list_state =
+            ratatui::widgets::ListState::default().with_selected(Some(state.selected_index));
+
+        f.render_stateful_widget(results_list, inner_chunks[1], &mut list_state);
+    }
+}
@@ -1,11 +1,11 @@
-// src/components/common/status_line.rs
+// client/src/components/common/status_line.rs
 use crate::config::colors::themes::Theme;
 use crate::state::app::state::AppState;
 use ratatui::{
     layout::Rect,
     style::Style,
-    text::{Line, Span},
-    widgets::Paragraph,
+    text::{Line, Span, Text},
+    widgets::{Paragraph, Wrap}, // Make sure Wrap is imported
     Frame,
 };
 use std::path::Path;
@@ -20,22 +20,39 @@ pub fn render_status_line(
     current_fps: f64,
     app_state: &AppState,
 ) {
-    // --- START FIX ---
-    // Ensure debug_text is always a &str, which implements UnicodeWidthStr.
     #[cfg(feature = "ui-debug")]
-    let debug_text = app_state.debug_info.as_str();
-    #[cfg(not(feature = "ui-debug"))]
-    let debug_text = "";
-    // --- END FIX ---
-
-    let debug_width = UnicodeWidthStr::width(debug_text);
-    let debug_separator_width = if !debug_text.is_empty() { UnicodeWidthStr::width(" | ") } else { 0 };
+    {
+        if let Some(debug_state) = &app_state.debug_state {
+            let paragraph = if debug_state.is_error {
+                // --- THIS IS THE CRITICAL LOGIC FOR ERRORS ---
+                // 1. Create a `Text` object, which can contain multiple lines.
+                let error_text = Text::from(debug_state.displayed_message.clone());
+
+                // 2. Create a Paragraph from the Text and TELL IT TO WRAP.
+                Paragraph::new(error_text)
+                    .wrap(Wrap { trim: true }) // This line makes the text break into new rows.
+                    .style(Style::default().bg(theme.highlight).fg(theme.bg))
+            } else {
+                // --- This is for normal, single-line info messages ---
+                Paragraph::new(debug_state.displayed_message.as_str())
+                    .style(Style::default().fg(theme.accent).bg(theme.bg))
+            };
+            f.render_widget(paragraph, area);
+        } else {
+            // Fallback for when debug state is None
+            let paragraph = Paragraph::new("").style(Style::default().bg(theme.bg));
+            f.render_widget(paragraph, area);
+        }
+        return; // Stop here and don't render the normal status line.
+    }
 
+    // --- The normal status line rendering logic (unchanged) ---
     let program_info = format!("multieko2 v{}", env!("CARGO_PKG_VERSION"));
     let mode_text = if is_edit_mode { "[EDIT]" } else { "[READ-ONLY]" };
 
-    let home_dir =
-        dirs::home_dir().map(|p| p.to_string_lossy().into_owned()).unwrap_or_default();
+    let home_dir = dirs::home_dir()
+        .map(|p| p.to_string_lossy().into_owned())
+        .unwrap_or_default();
     let display_dir = if current_dir.starts_with(&home_dir) {
         current_dir.replacen(&home_dir, "~", 1)
     } else {
@@ -50,19 +67,30 @@ pub fn render_status_line(
     let separator = " | ";
     let separator_width = UnicodeWidthStr::width(separator);
 
-    let fixed_width_with_fps = mode_width + separator_width + separator_width +
-        program_info_width + separator_width + fps_width +
-        debug_separator_width + debug_width;
+    let fixed_width_with_fps = mode_width
+        + separator_width
+        + separator_width
+        + program_info_width
+        + separator_width
+        + fps_width;
+
     let show_fps = fixed_width_with_fps <= available_width;
 
     let remaining_width_for_dir = available_width.saturating_sub(
-        mode_width + separator_width +
-        separator_width + program_info_width +
-        (if show_fps { separator_width + fps_width } else { 0 }) +
-        debug_separator_width + debug_width,
+        mode_width
+            + separator_width
+            + separator_width
+            + program_info_width
+            + (if show_fps {
+                separator_width + fps_width
+            } else {
+                0
+            }),
     );
 
-    let dir_display_text_str = if UnicodeWidthStr::width(display_dir.as_str()) <= remaining_width_for_dir {
+    let dir_display_text_str = if UnicodeWidthStr::width(display_dir.as_str())
+        <= remaining_width_for_dir
+    {
         display_dir
     } else {
         let dir_name = Path::new(current_dir)
@@ -72,14 +100,18 @@ pub fn render_status_line(
         if UnicodeWidthStr::width(dir_name) <= remaining_width_for_dir {
             dir_name.to_string()
         } else {
-            dir_name.chars().take(remaining_width_for_dir).collect::<String>()
+            dir_name
+                .chars()
+                .take(remaining_width_for_dir)
+                .collect::<String>()
         }
     };
 
-    let mut current_content_width = mode_width + separator_width +
-        UnicodeWidthStr::width(dir_display_text_str.as_str()) +
-        separator_width + program_info_width +
-        debug_separator_width + debug_width;
+    let mut current_content_width = mode_width
+        + separator_width
+        + UnicodeWidthStr::width(dir_display_text_str.as_str())
+        + separator_width
+        + program_info_width;
     if show_fps {
         current_content_width += separator_width + fps_width;
     }
@@ -87,20 +119,24 @@ pub fn render_status_line(
     let mut line_spans = vec![
         Span::styled(mode_text, Style::default().fg(theme.accent)),
         Span::styled(separator, Style::default().fg(theme.border)),
-        Span::styled(dir_display_text_str.as_str(), Style::default().fg(theme.fg)),
+        Span::styled(
+            dir_display_text_str.as_str(),
+            Style::default().fg(theme.fg),
+        ),
        Span::styled(separator, Style::default().fg(theme.border)),
-        Span::styled(program_info.as_str(), Style::default().fg(theme.secondary)),
+        Span::styled(
+            program_info.as_str(),
+            Style::default().fg(theme.secondary),
+        ),
     ];
 
     if show_fps {
-        line_spans.push(Span::styled(separator, Style::default().fg(theme.border)));
-        line_spans.push(Span::styled(fps_text.as_str(), Style::default().fg(theme.secondary)));
-    }
-
-    #[cfg(feature = "ui-debug")]
-    {
-        line_spans.push(Span::styled(separator, Style::default().fg(theme.border)));
-        line_spans.push(Span::styled(debug_text, Style::default().fg(theme.accent)));
+        line_spans
+            .push(Span::styled(separator, Style::default().fg(theme.border)));
+        line_spans.push(Span::styled(
+            fps_text.as_str(),
+            Style::default().fg(theme.secondary),
+        ));
     }
 
     let padding_needed = available_width.saturating_sub(current_content_width);
@@ -111,8 +147,8 @@ pub fn render_status_line(
         ));
     }
 
-    let paragraph = Paragraph::new(Line::from(line_spans))
-        .style(Style::default().bg(theme.bg));
+    let paragraph =
+        Paragraph::new(Line::from(line_spans)).style(Style::default().bg(theme.bg));
 
     f.render_widget(paragraph, area);
 }
@@ -1,36 +1,37 @@
|
|||||||
// src/components/form/form.rs
|
// src/components/form/form.rs
|
||||||
|
use crate::components::common::autocomplete;
|
||||||
|
use crate::components::handlers::canvas::render_canvas;
|
||||||
|
use crate::config::colors::themes::Theme;
|
||||||
|
use crate::state::app::highlight::HighlightState;
|
||||||
|
use crate::state::pages::canvas_state::CanvasState;
|
||||||
|
use crate::state::pages::form::FormState;
|
||||||
use ratatui::{
|
use ratatui::{
|
||||||
widgets::{Paragraph, Block, Borders},
|
-    layout::{Layout, Constraint, Direction, Rect, Margin, Alignment},
+    layout::{Alignment, Constraint, Direction, Layout, Margin, Rect},
     style::Style,
+    widgets::{Block, Borders, Paragraph},
     Frame,
 };
-use crate::config::colors::themes::Theme;
-use crate::state::pages::canvas_state::CanvasState;
-use crate::state::app::highlight::HighlightState;
-use crate::components::handlers::canvas::render_canvas;

 pub fn render_form(
     f: &mut Frame,
     area: Rect,
-    form_state_param: &impl CanvasState,
+    form_state: &FormState, // <--- CHANGE THIS to the concrete type
     fields: &[&str],
     current_field_idx: &usize,
     inputs: &[&String],
-    table_name: &str, // This parameter receives the correct table name
+    table_name: &str,
     theme: &Theme,
     is_edit_mode: bool,
     highlight_state: &HighlightState,
     total_count: u64,
     current_position: u64,
 ) {
-    // Use the dynamic `table_name` parameter for the title instead of a hardcoded string.
     let card_title = format!(" {} ", table_name);

     let adresar_card = Block::default()
         .borders(Borders::ALL)
         .border_style(Style::default().fg(theme.border))
-        .title(card_title) // Use the dynamic title
+        .title(card_title)
         .style(Style::default().bg(theme.bg).fg(theme.fg));

     f.render_widget(adresar_card, area);
@@ -42,10 +43,7 @@ pub fn render_form(

     let main_layout = Layout::default()
         .direction(Direction::Vertical)
-        .constraints([
-            Constraint::Length(1),
-            Constraint::Min(1),
-        ])
+        .constraints([Constraint::Length(1), Constraint::Min(1)])
         .split(inner_area);

     let count_position_text = if total_count == 0 && current_position == 1 {
@@ -54,19 +52,22 @@ pub fn render_form(
         format!("Total: {} | New Entry ({})", total_count, current_position)
     } else if total_count == 0 && current_position > 1 {
         format!("Total: 0 | New Entry ({})", current_position)
-    }
-    else {
-        format!("Total: {} | Position: {}/{}", total_count, current_position, total_count)
+    } else {
+        format!(
+            "Total: {} | Position: {}/{}",
+            total_count, current_position, total_count
+        )
     };
     let count_para = Paragraph::new(count_position_text)
         .style(Style::default().fg(theme.fg))
         .alignment(Alignment::Left);
     f.render_widget(count_para, main_layout[0]);

-    render_canvas(
+    // Get the active field's rect from render_canvas
+    let active_field_rect = render_canvas(
         f,
         main_layout[1],
-        form_state_param,
+        form_state,
         fields,
         current_field_idx,
         inputs,
@@ -74,4 +75,41 @@ pub fn render_form(
         is_edit_mode,
         highlight_state,
     );

+    // --- NEW: RENDER AUTOCOMPLETE ---
+    if form_state.autocomplete_active {
+        if let Some(active_rect) = active_field_rect {
+            let selected_index = form_state.get_selected_suggestion_index();
+
+            if let Some(rich_suggestions) = form_state.get_rich_suggestions() {
+                if !rich_suggestions.is_empty() {
+                    // CHANGE THIS to call the renamed function
+                    autocomplete::render_hit_autocomplete_dropdown(
+                        f,
+                        active_rect,
+                        f.area(),
+                        theme,
+                        rich_suggestions,
+                        selected_index,
+                        form_state,
+                    );
+                }
+            }
+            // The fallback to simple suggestions is now correctly handled
+            // because the original render_autocomplete_dropdown exists again.
+            else if let Some(simple_suggestions) = form_state.get_suggestions() {
+                if !simple_suggestions.is_empty() {
+                    autocomplete::render_autocomplete_dropdown(
+                        f,
+                        active_rect,
+                        f.area(),
+                        theme,
+                        simple_suggestions,
+                        selected_index,
+                    );
+                }
+            }
+        }
+    }
 }
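
The hunk above passes the active field's `Rect` together with `f.area()` into the dropdown renderers, which are defined elsewhere. As a rough illustration of what that pair of rectangles is for, the sketch below clamps a dropdown area to the frame; the helper name and the `rows` parameter are assumptions, not the project's API.

use ratatui::layout::Rect;

// Hypothetical helper: place a suggestion dropdown directly under the active
// input rect, clamped so it never leaves the frame area.
fn dropdown_rect(active: Rect, frame: Rect, rows: u16) -> Rect {
    let width = active.width.min(frame.width);
    // Prefer opening below the field; fall back to above if there is no room.
    let fits_below = active.y.saturating_add(1).saturating_add(rows) <= frame.bottom();
    let y = if fits_below {
        active.y.saturating_add(1)
    } else {
        active.y.saturating_sub(rows)
    };
    Rect {
        x: active.x.min(frame.right().saturating_sub(width)),
        y,
        width,
        height: rows.min(frame.height),
    }
}
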

@@ -1,16 +1,16 @@
 // src/components/handlers/canvas.rs

 use ratatui::{
-    widgets::{Paragraph, Block, Borders},
-    layout::{Layout, Constraint, Direction, Rect},
-    style::{Style, Modifier},
+    layout::{Alignment, Constraint, Direction, Layout, Rect},
+    style::{Modifier, Style},
     text::{Line, Span},
+    widgets::{Block, Borders, Paragraph},
     Frame,
-    prelude::Alignment,
 };
 use crate::config::colors::themes::Theme;
+use crate::state::app::highlight::HighlightState;
 use crate::state::pages::canvas_state::CanvasState;
-use crate::state::app::highlight::HighlightState; // Ensure correct import path
-use std::cmp::{min, max};
+use std::cmp::{max, min};

 pub fn render_canvas(
     f: &mut Frame,
@@ -21,9 +21,8 @@ pub fn render_canvas(
     inputs: &[&String],
     theme: &Theme,
     is_edit_mode: bool,
-    highlight_state: &HighlightState, // Using the enum state
+    highlight_state: &HighlightState,
 ) -> Option<Rect> {
-    // ... (setup code remains the same) ...
     let columns = Layout::default()
         .direction(Direction::Horizontal)
         .constraints([Constraint::Percentage(30), Constraint::Percentage(70)])
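
For context on the `Percentage(30)` / `Percentage(70)` split above, here is a minimal standalone sketch of the same label/input column layout; the per-row `Rect` math mirrors the label loop further down, and the names are illustrative rather than the project's own.

use ratatui::layout::{Constraint, Direction, Layout, Rect};

// Split an area into a 30% label column and a 70% input column, then carve
// one single-height row per field out of the input column.
fn field_rows(area: Rect, field_count: u16) -> (Rect, Vec<Rect>) {
    let columns = Layout::default()
        .direction(Direction::Horizontal)
        .constraints([Constraint::Percentage(30), Constraint::Percentage(70)])
        .split(area);
    let rows = (0..field_count)
        .map(|i| Rect {
            x: columns[1].x,
            y: columns[1].y + i,
            width: columns[1].width,
            height: 1,
        })
        .collect();
    (columns[0], rows)
}
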
@@ -58,46 +57,47 @@ pub fn render_canvas(

     let mut active_field_input_rect = None;

-    // Render labels
     for (i, field) in fields.iter().enumerate() {
         let label = Paragraph::new(Line::from(Span::styled(
             format!("{}:", field),
-            Style::default().fg(theme.fg)),
-        ));
-        f.render_widget(label, Rect {
-            x: columns[0].x,
-            y: input_block.y + 1 + i as u16,
-            width: columns[0].width,
-            height: 1,
-        });
+            Style::default().fg(theme.fg),
+        )));
+        f.render_widget(
+            label,
+            Rect {
+                x: columns[0].x,
+                y: input_block.y + 1 + i as u16,
+                width: columns[0].width,
+                height: 1,
+            },
+        );
     }

-    // Render inputs and cursor
-    for (i, input) in inputs.iter().enumerate() {
+    for (i, _input) in inputs.iter().enumerate() {
         let is_active = i == *current_field_idx;
         let current_cursor_pos = form_state.current_cursor_pos();
-        let text = input.as_str();
-        let text_len = text.chars().count();

+        // Use the trait method to get display value
+        let text = form_state.get_display_value_for_field(i);
+        let text_len = text.chars().count();
         let line: Line;

-        // --- Use match on the highlight_state enum ---
         match highlight_state {
             HighlightState::Off => {
-                // Not in highlight mode, render normally
                 line = Line::from(Span::styled(
                     text,
-                    if is_active { Style::default().fg(theme.highlight) } else { Style::default().fg(theme.fg) }
+                    if is_active {
+                        Style::default().fg(theme.highlight)
+                    } else {
+                        Style::default().fg(theme.fg)
+                    },
                 ));
             }
             HighlightState::Characterwise { anchor } => {
-                // --- Character-wise Highlight Logic ---
                 let (anchor_field, anchor_char) = *anchor;
                 let start_field = min(anchor_field, *current_field_idx);
                 let end_field = max(anchor_field, *current_field_idx);

-                // Use start_char and end_char consistently
                 let (start_char, end_char) = if anchor_field == *current_field_idx {
                     (min(anchor_char, current_cursor_pos), max(anchor_char, current_cursor_pos))
                 } else if anchor_field < *current_field_idx {
@@ -111,24 +111,20 @@ pub fn render_canvas(
                 let normal_style_outside = Style::default().fg(theme.fg);

                 if i >= start_field && i <= end_field {
-                    // This line is within the character-wise highlight range
-                    if start_field == end_field { // Case 1: Single Line Highlight
-                        // Use start_char and end_char here
+                    if start_field == end_field {
                         let clamped_start = start_char.min(text_len);
-                        let clamped_end = end_char.min(text_len); // Use text_len for slicing logic
+                        let clamped_end = end_char.min(text_len);

                         let before: String = text.chars().take(clamped_start).collect();
                         let highlighted: String = text.chars().skip(clamped_start).take(clamped_end.saturating_sub(clamped_start) + 1).collect();
-                        // Define 'after' here
                         let after: String = text.chars().skip(clamped_end + 1).collect();

                         line = Line::from(vec![
                             Span::styled(before, normal_style_in_highlight),
                             Span::styled(highlighted, highlight_style),
-                            Span::styled(after, normal_style_in_highlight), // Use defined 'after'
+                            Span::styled(after, normal_style_in_highlight),
                         ]);
-                    } else if i == start_field { // Case 2: Multi-Line Highlight - Start Line
-                        // Use start_char here
+                    } else if i == start_field {
                         let safe_start = start_char.min(text_len);
                         let before: String = text.chars().take(safe_start).collect();
                         let highlighted: String = text.chars().skip(safe_start).collect();
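
The single-line case above clamps the inclusive start/end indices to the character count before slicing with `skip`/`take`. The same logic, pulled out into a standalone sketch:

/// Split `text` into (before, highlighted, after) using character counts,
/// with an inclusive end index, exactly as in the hunk above. Standalone
/// sketch, not the project's helper.
fn split_highlight(text: &str, start: usize, end_inclusive: usize) -> (String, String, String) {
    let len = text.chars().count();
    let start = start.min(len);
    let end = end_inclusive.min(len);
    let before: String = text.chars().take(start).collect();
    let highlighted: String = text
        .chars()
        .skip(start)
        .take(end.saturating_sub(start) + 1)
        .collect();
    let after: String = text.chars().skip(end + 1).collect();
    (before, highlighted, after)
}

// Example: split_highlight("héllo", 1, 3) == ("h", "éll", "o")
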
@@ -136,8 +132,7 @@ pub fn render_canvas(
                             Span::styled(before, normal_style_in_highlight),
                             Span::styled(highlighted, highlight_style),
                         ]);
-                    } else if i == end_field { // Case 3: Multi-Line Highlight - End Line (Corrected index)
-                        // Use end_char here
+                    } else if i == end_field {
                         let safe_end_inclusive = if text_len > 0 { end_char.min(text_len - 1) } else { 0 };
                         let highlighted: String = text.chars().take(safe_end_inclusive + 1).collect();
                         let after: String = text.chars().skip(safe_end_inclusive + 1).collect();
@@ -145,19 +140,17 @@ pub fn render_canvas(
                             Span::styled(highlighted, highlight_style),
                             Span::styled(after, normal_style_in_highlight),
                         ]);
-                    } else { // Case 4: Multi-Line Highlight - Middle Line (Corrected index)
-                        line = Line::from(Span::styled(text, highlight_style)); // Highlight whole line
+                    } else {
+                        line = Line::from(Span::styled(text, highlight_style));
                     }
-                } else { // Case 5: Line Outside Character-wise Highlight Range
+                } else {
                     line = Line::from(Span::styled(
                         text,
-                        // Use normal styling (active or inactive)
                         if is_active { normal_style_in_highlight } else { normal_style_outside }
                     ));
                 }
             }
             HighlightState::Linewise { anchor_line } => {
-                // --- Linewise Highlight Logic ---
                 let start_field = min(*anchor_line, *current_field_idx);
                 let end_field = max(*anchor_line, *current_field_idx);
                 let highlight_style = Style::default().fg(theme.highlight).bg(theme.highlight_bg).add_modifier(Modifier::BOLD);
@@ -165,25 +158,31 @@ pub fn render_canvas(
                 let normal_style_outside = Style::default().fg(theme.fg);

                 if i >= start_field && i <= end_field {
-                    // Highlight the entire line
                     line = Line::from(Span::styled(text, highlight_style));
                 } else {
-                    // Line outside linewise highlight range
                     line = Line::from(Span::styled(
                         text,
-                        // Use normal styling (active or inactive)
                         if is_active { normal_style_in_highlight } else { normal_style_outside }
                     ));
                 }
             }
-        } // End match highlight_state
+        }

         let input_display = Paragraph::new(line).alignment(Alignment::Left);
         f.render_widget(input_display, input_rows[i]);

         if is_active {
             active_field_input_rect = Some(input_rows[i]);
-            let cursor_x = input_rows[i].x + form_state.current_cursor_pos() as u16;
+            // --- CORRECTED CURSOR POSITIONING LOGIC ---
+            // Use the new generic trait method to check for an override.
+            let cursor_x = if form_state.has_display_override(i) {
+                // If an override exists, place the cursor at the end.
+                input_rows[i].x + text.chars().count() as u16
+            } else {
+                // Otherwise, use the real cursor position.
+                input_rows[i].x + form_state.current_cursor_pos() as u16
+            };
             let cursor_y = input_rows[i].y;
             f.set_cursor_position((cursor_x, cursor_y));
         }
@@ -191,4 +190,3 @@ pub fn render_canvas(

     active_field_input_rect
 }
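
The cursor rule introduced in this hunk can be summarised as: when a field shows a display override (a linked record's friendly name instead of its raw ID), park the cursor after the displayed text; otherwise follow the stored cursor position. A reduced sketch, where the boolean and strings stand in for the trait calls:

use ratatui::layout::Rect;

// Sketch of the cursor placement rule from the hunk above.
fn cursor_x(row: Rect, has_override: bool, display_text: &str, cursor_pos: usize) -> u16 {
    if has_override {
        // Override active: the user is looking at substituted display text,
        // so the cursor sits after it.
        row.x + display_text.chars().count() as u16
    } else {
        // No override: track the real edit cursor.
        row.x + cursor_pos as u16
    }
}
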

@@ -4,6 +4,7 @@ use crate::services::grpc_client::GrpcClient;
 use crate::state::pages::canvas_state::CanvasState;
 use crate::state::pages::form::FormState;
 use crate::state::pages::auth::RegisterState;
+use crate::state::app::state::AppState;
 use crate::tui::functions::common::form::{revert, save};
 use crossterm::event::{KeyCode, KeyEvent};
 use std::any::Any;
@@ -13,6 +14,7 @@ pub async fn execute_common_action<S: CanvasState + Any>(
     action: &str,
     state: &mut S,
     grpc_client: &mut GrpcClient,
+    app_state: &AppState,
     current_position: &mut u64,
     total_count: u64,
 ) -> Result<String> {
@@ -27,6 +29,7 @@ pub async fn execute_common_action<S: CanvasState + Any>(
     match action {
         "save" => {
            let outcome = save(
+                app_state,
                form_state,
                grpc_client,
            )
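
This handler is generic over `S: CanvasState + Any`, yet its `save` arm works with a concrete `FormState`, so somewhere outside the hunks shown it presumably downcasts through `Any`. A sketch of that assumed conversion:

use std::any::Any;

use crate::state::pages::form::FormState;

// Assumed shape of the downcast that lets the generic handler reach the
// concrete FormState; the actual conversion code is not part of this diff.
fn as_form_state<S: Any>(state: &mut S) -> Option<&mut FormState> {
    (state as &mut dyn Any).downcast_mut::<FormState>()
}
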
@@ -3,6 +3,7 @@
 use crate::services::grpc_client::GrpcClient;
 use crate::state::pages::canvas_state::CanvasState;
 use crate::state::pages::form::FormState;
+use crate::state::app::state::AppState;
 use crate::tui::functions::common::form::{revert, save};
 use crate::tui::functions::common::form::SaveOutcome;
 use crate::modes::handlers::event::EventOutcome;
@@ -14,6 +15,7 @@ pub async fn execute_common_action<S: CanvasState + Any>(
     action: &str,
     state: &mut S,
     grpc_client: &mut GrpcClient,
+    app_state: &AppState,
 ) -> Result<EventOutcome> {
     match action {
         "save" | "revert" => {
@@ -26,10 +28,11 @@ pub async fn execute_common_action<S: CanvasState + Any>(
             match action {
                 "save" => {
                     let save_result = save(
+                        app_state,
                         form_state,
                         grpc_client,
                     ).await;

                     match save_result {
                         Ok(save_outcome) => {
                             let message = match save_outcome {
@@ -47,7 +50,7 @@ pub async fn execute_common_action<S: CanvasState + Any>(
                         form_state,
                         grpc_client,
                     ).await;

                     match revert_result {
                         Ok(message) => Ok(EventOutcome::Ok(message)),
                         Err(e) => Err(e),
@@ -1,5 +1,7 @@
 // client/src/main.rs
 use client::run_ui;
+#[cfg(feature = "ui-debug")]
+use client::utils::debug_logger::UiDebugWriter;
 use dotenvy::dotenv;
 use anyhow::Result;
 use tracing_subscriber;
@@ -7,8 +9,22 @@ use std::env;

 #[tokio::main]
 async fn main() -> Result<()> {
-    if env::var("ENABLE_TRACING").is_ok() {
-        tracing_subscriber::fmt::init();
+    #[cfg(feature = "ui-debug")]
+    {
+        // If ui-debug is on, set up our custom writer.
+        let writer = UiDebugWriter::new();
+        tracing_subscriber::fmt()
+            .with_level(false) // Don't show INFO, ERROR, etc.
+            .with_target(false) // Don't show the module path.
+            .without_time() // This is the correct and simpler method.
+            .with_writer(move || writer.clone())
+            .init();
+    }
+    #[cfg(not(feature = "ui-debug"))]
+    {
+        if env::var("ENABLE_TRACING").is_ok() {
+            tracing_subscriber::fmt::init();
+        }
     }

     dotenv().ok();
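
`UiDebugWriter` itself is not part of this diff. For `.with_writer(move || writer.clone())` to type-check, the closure must hand back an `io::Write` value on every call, which is why the writer has to be cheap to clone. A hypothetical stand-in that collects log lines in shared state the TUI could render later:

use std::io::{self, Write};
use std::sync::{Arc, Mutex};

// Hypothetical stand-in for UiDebugWriter: every clone appends to the same
// shared line buffer.
#[derive(Clone, Default)]
struct SharedLogWriter {
    lines: Arc<Mutex<Vec<String>>>,
}

impl Write for SharedLogWriter {
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        let text = String::from_utf8_lossy(buf);
        let mut lines = self.lines.lock().unwrap();
        lines.extend(text.lines().map(str::to_owned));
        Ok(buf.len())
    }
    fn flush(&mut self) -> io::Result<()> {
        Ok(())
    }
}

A closure returning such a writer satisfies `tracing_subscriber`'s `MakeWriter`, matching the `with_writer` call in the diff above.
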
@@ -32,6 +32,7 @@ pub async fn handle_core_action(
         Ok(EventOutcome::Ok(message))
     } else {
         let save_outcome = form_save(
+            app_state,
             form_state,
             grpc_client,
         ).await.context("Register save action failed")?;
@@ -52,6 +53,7 @@ pub async fn handle_core_action(
             login_save(auth_state, login_state, auth_client, app_state).await.context("Login save n quit action failed")?
         } else {
             let save_outcome = form_save(
+                app_state,
                 form_state,
                 grpc_client,
             ).await?;
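
The `.context("Register save action failed")?` calls above come from anyhow's `Context` trait, which wraps the underlying error with a human-readable message. A minimal, unrelated illustration of the same pattern:

use anyhow::{Context, Result};

// Attach context to any error bubbling out of the fallible call; the path is
// purely illustrative.
fn load_config(path: &str) -> Result<String> {
    std::fs::read_to_string(path)
        .with_context(|| format!("failed to read config at {path}"))
}
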
@@ -1,20 +1,22 @@
 // src/modes/canvas/edit.rs
 use crate::config::binds::config::Config;
+use crate::functions::modes::edit::{
+    add_logic_e, add_table_e, auth_e, form_e,
+};
+use crate::modes::handlers::event::EventHandler;
 use crate::services::grpc_client::GrpcClient;
+use crate::state::app::state::AppState;
+use crate::state::pages::admin::AdminState;
 use crate::state::pages::{
     auth::{LoginState, RegisterState},
     canvas_state::CanvasState,
+    form::FormState,
 };
-use crate::state::pages::form::FormState; // <<< ADD THIS LINE
-// AddLogicState is already imported
-// AddTableState is already imported
-use crate::state::pages::admin::AdminState;
-use crate::modes::handlers::event::EventOutcome;
-use crate::functions::modes::edit::{add_logic_e, auth_e, form_e, add_table_e};
-use crate::state::app::state::AppState;
 use anyhow::Result;
-use crossterm::event::KeyEvent; // Removed KeyCode, KeyModifiers as they were unused
-use tracing::debug;
+use common::proto::multieko2::search::search_response::Hit;
+use crossterm::event::{KeyCode, KeyEvent};
+use tokio::sync::mpsc;
+use tracing::{debug, info};

 #[derive(Debug, Clone, PartialEq, Eq)]
 pub enum EditEventOutcome {
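
`EditEventOutcome` carries either a status `Message(String)` or `ExitEditMode`. A caller would fold it back into UI state roughly as below; the tuple return is illustrative, not the project's real signature:

// Sketch: collapse an edit-mode outcome into (still_in_edit_mode, status_text).
fn apply_edit_outcome(outcome: EditEventOutcome, edit_mode: bool) -> (bool, String) {
    match outcome {
        EditEventOutcome::Message(msg) => (edit_mode, msg),
        EditEventOutcome::ExitEditMode => (false, "Left edit mode".to_string()),
    }
}
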
@@ -22,231 +24,313 @@ pub enum EditEventOutcome {
|
|||||||
ExitEditMode,
|
ExitEditMode,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Helper function to spawn a non-blocking search task for autocomplete.
|
||||||
|
async fn trigger_form_autocomplete_search(
|
||||||
|
form_state: &mut FormState,
|
||||||
|
grpc_client: &mut GrpcClient,
|
||||||
|
sender: mpsc::UnboundedSender<Vec<Hit>>,
|
||||||
|
) {
|
||||||
|
if let Some(field_def) = form_state.fields.get(form_state.current_field) {
|
||||||
|
if field_def.is_link {
|
||||||
|
if let Some(target_table) = &field_def.link_target_table {
|
||||||
|
// 1. Update state for immediate UI feedback
|
||||||
|
form_state.autocomplete_loading = true;
|
||||||
|
form_state.autocomplete_active = true;
|
||||||
|
form_state.autocomplete_suggestions.clear();
|
||||||
|
form_state.selected_suggestion_index = None;
|
||||||
|
|
||||||
|
// 2. Clone everything needed for the background task
|
||||||
|
let query = form_state.get_current_input().to_string();
|
||||||
|
let table_to_search = target_table.clone();
|
||||||
|
let mut grpc_client_clone = grpc_client.clone();
|
||||||
|
|
||||||
|
info!(
|
||||||
|
"[Autocomplete] Spawning search in '{}' for query: '{}'",
|
||||||
|
table_to_search, query
|
||||||
|
);
|
||||||
|
|
||||||
|
// 3. Spawn the non-blocking task
|
||||||
|
tokio::spawn(async move {
|
||||||
|
match grpc_client_clone
|
||||||
|
.search_table(table_to_search, query)
|
||||||
|
.await
|
||||||
|
{
|
||||||
|
Ok(response) => {
|
||||||
|
// Send results back through the channel
|
||||||
|
let _ = sender.send(response.hits);
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
tracing::error!(
|
||||||
|
"[Autocomplete] Search failed: {:?}",
|
||||||
|
e
|
||||||
|
);
|
||||||
|
// Send an empty vec on error so the UI can stop loading
|
||||||
|
let _ = sender.send(vec![]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
#[allow(clippy::too_many_arguments)]
|
||||||
pub async fn handle_edit_event(
|
pub async fn handle_edit_event(
|
||||||
key: KeyEvent,
|
key: KeyEvent,
|
||||||
config: &Config,
|
config: &Config,
|
||||||
form_state: &mut FormState, // Now FormState is in scope
|
form_state: &mut FormState,
|
||||||
login_state: &mut LoginState,
|
login_state: &mut LoginState,
|
||||||
register_state: &mut RegisterState,
|
register_state: &mut RegisterState,
|
||||||
admin_state: &mut AdminState,
|
admin_state: &mut AdminState,
|
||||||
ideal_cursor_column: &mut usize,
|
|
||||||
current_position: &mut u64,
|
current_position: &mut u64,
|
||||||
total_count: u64,
|
total_count: u64,
|
||||||
grpc_client: &mut GrpcClient,
|
event_handler: &mut EventHandler,
|
||||||
app_state: &AppState,
|
app_state: &AppState,
|
||||||
) -> Result<EditEventOutcome> {
|
) -> Result<EditEventOutcome> {
|
||||||
// --- Global command mode check ---
|
// --- AUTOCOMPLETE-SPECIFIC KEY HANDLING ---
|
||||||
if let Some("enter_command_mode") = config.get_action_for_key_in_mode(
|
if app_state.ui.show_form && form_state.autocomplete_active {
|
||||||
&config.keybindings.global, // Assuming command mode can be entered globally
|
if let Some(action) =
|
||||||
key.code,
|
config.get_edit_action_for_key(key.code, key.modifiers)
|
||||||
key.modifiers,
|
{
|
||||||
) {
|
match action {
|
||||||
// This check might be redundant if EventHandler already prevents entering Edit mode
|
"suggestion_down" => {
|
||||||
// when command_mode is true. However, it's a safeguard.
|
if !form_state.autocomplete_suggestions.is_empty() {
|
||||||
return Ok(EditEventOutcome::Message(
|
let current =
|
||||||
"Cannot enter command mode from edit mode here.".to_string(),
|
form_state.selected_suggestion_index.unwrap_or(0);
|
||||||
));
|
let next = (current + 1)
|
||||||
}
|
% form_state.autocomplete_suggestions.len();
|
||||||
|
form_state.selected_suggestion_index = Some(next);
|
||||||
// --- Common actions (save, revert) ---
|
}
|
||||||
if let Some(action) = config.get_action_for_key_in_mode(
|
return Ok(EditEventOutcome::Message(String::new()));
|
||||||
&config.keybindings.common,
|
|
||||||
key.code,
|
|
||||||
key.modifiers,
|
|
||||||
).as_deref() {
|
|
||||||
if matches!(action, "save" | "revert") {
|
|
||||||
let message_string: String = if app_state.ui.show_login {
|
|
||||||
auth_e::execute_common_action(action, login_state, grpc_client, current_position, total_count).await?
|
|
||||||
} else if app_state.ui.show_register {
|
|
||||||
auth_e::execute_common_action(action, register_state, grpc_client, current_position, total_count).await?
|
|
||||||
} else if app_state.ui.show_add_table {
|
|
||||||
// TODO: Implement common actions for AddTable if needed
|
|
||||||
format!("Action '{}' not implemented for Add Table in edit mode.", action)
|
|
||||||
} else if app_state.ui.show_add_logic {
|
|
||||||
// TODO: Implement common actions for AddLogic if needed
|
|
||||||
format!("Action '{}' not implemented for Add Logic in edit mode.", action)
|
|
||||||
} else { // Assuming Form view
|
|
||||||
let outcome = form_e::execute_common_action(action, form_state, grpc_client).await?;
|
|
||||||
match outcome {
|
|
||||||
EventOutcome::Ok(msg) | EventOutcome::DataSaved(_, msg) => msg,
|
|
||||||
_ => format!("Unexpected outcome from common action: {:?}", outcome),
|
|
||||||
}
|
}
|
||||||
};
|
"suggestion_up" => {
|
||||||
return Ok(EditEventOutcome::Message(message_string));
|
if !form_state.autocomplete_suggestions.is_empty() {
|
||||||
|
let current =
|
||||||
|
form_state.selected_suggestion_index.unwrap_or(0);
|
||||||
|
let prev = if current == 0 {
|
||||||
|
form_state.autocomplete_suggestions.len() - 1
|
||||||
|
} else {
|
||||||
|
current - 1
|
||||||
|
};
|
||||||
|
form_state.selected_suggestion_index = Some(prev);
|
||||||
|
}
|
||||||
|
return Ok(EditEventOutcome::Message(String::new()));
|
||||||
|
}
|
||||||
|
"exit" => {
|
||||||
|
form_state.deactivate_autocomplete();
|
||||||
|
return Ok(EditEventOutcome::Message(
|
||||||
|
"Autocomplete cancelled".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
"enter_decider" => {
|
||||||
|
if let Some(selected_idx) =
|
||||||
|
form_state.selected_suggestion_index
|
||||||
|
{
|
||||||
|
if let Some(selection) = form_state
|
||||||
|
.autocomplete_suggestions
|
||||||
|
.get(selected_idx)
|
||||||
|
.cloned()
|
||||||
|
{
|
||||||
|
// --- THIS IS THE CORE LOGIC CHANGE ---
|
||||||
|
|
||||||
|
// 1. Get the friendly display name for the UI
|
||||||
|
let display_name =
|
||||||
|
form_state.get_display_name_for_hit(&selection);
|
||||||
|
|
||||||
|
// 2. Store the REAL ID in the form's values
|
||||||
|
let current_input =
|
||||||
|
form_state.get_current_input_mut();
|
||||||
|
*current_input = selection.id.to_string();
|
||||||
|
|
||||||
|
// 3. Set the persistent display override in the map
|
||||||
|
form_state.link_display_map.insert(
|
||||||
|
form_state.current_field,
|
||||||
|
display_name,
|
||||||
|
);
|
||||||
|
|
||||||
|
// 4. Finalize state
|
||||||
|
form_state.deactivate_autocomplete();
|
||||||
|
form_state.set_has_unsaved_changes(true);
|
||||||
|
return Ok(EditEventOutcome::Message(
|
||||||
|
"Selection made".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
form_state.deactivate_autocomplete();
|
||||||
|
// Fall through to default 'enter' behavior
|
||||||
|
}
|
||||||
|
_ => {} // Let other keys fall through to the live search logic
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// --- Edit-specific actions ---
|
// --- LIVE AUTOCOMPLETE TRIGGER LOGIC ---
|
||||||
if let Some(action_str) = config.get_edit_action_for_key(key.code, key.modifiers).as_deref() {
|
let mut trigger_search = false;
|
||||||
// --- Handle "enter_decider" (Enter key) ---
|
|
||||||
if action_str == "enter_decider" {
|
|
||||||
let effective_action = if app_state.ui.show_register
|
|
||||||
&& register_state.in_suggestion_mode
|
|
||||||
&& register_state.current_field() == 4 { // Role field
|
|
||||||
"select_suggestion"
|
|
||||||
} else if app_state.ui.show_add_logic
|
|
||||||
&& admin_state.add_logic_state.in_target_column_suggestion_mode
|
|
||||||
&& admin_state.add_logic_state.current_field() == 1 { // Target Column field
|
|
||||||
"select_suggestion"
|
|
||||||
} else {
|
|
||||||
"next_field" // Default action for Enter
|
|
||||||
};
|
|
||||||
|
|
||||||
let msg = if app_state.ui.show_login {
|
if app_state.ui.show_form {
|
||||||
auth_e::execute_edit_action(effective_action, key, login_state, ideal_cursor_column).await?
|
// Manual trigger
|
||||||
} else if app_state.ui.show_add_table {
|
if let Some("trigger_autocomplete") =
|
||||||
add_table_e::execute_edit_action(effective_action, key, &mut admin_state.add_table_state, ideal_cursor_column).await?
|
config.get_edit_action_for_key(key.code, key.modifiers)
|
||||||
} else if app_state.ui.show_add_logic {
|
{
|
||||||
add_logic_e::execute_edit_action(effective_action, key, &mut admin_state.add_logic_state, ideal_cursor_column).await?
|
if !form_state.autocomplete_active {
|
||||||
} else if app_state.ui.show_register {
|
trigger_search = true;
|
||||||
auth_e::execute_edit_action(effective_action, key, register_state, ideal_cursor_column).await?
|
}
|
||||||
} else { // Form view
|
}
|
||||||
form_e::execute_edit_action(effective_action, key, form_state, ideal_cursor_column).await?
|
// Live search trigger while typing
|
||||||
};
|
else if form_state.autocomplete_active {
|
||||||
|
if let KeyCode::Char(_) | KeyCode::Backspace = key.code {
|
||||||
|
let action = if let KeyCode::Backspace = key.code {
|
||||||
|
"delete_char_backward"
|
||||||
|
} else {
|
||||||
|
"insert_char"
|
||||||
|
};
|
||||||
|
// FIX: Pass &mut event_handler.ideal_cursor_column
|
||||||
|
form_e::execute_edit_action(
|
||||||
|
action,
|
||||||
|
key,
|
||||||
|
form_state,
|
||||||
|
&mut event_handler.ideal_cursor_column,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
trigger_search = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if trigger_search {
|
||||||
|
trigger_form_autocomplete_search(
|
||||||
|
form_state,
|
||||||
|
&mut event_handler.grpc_client,
|
||||||
|
event_handler.autocomplete_result_sender.clone(),
|
||||||
|
)
|
||||||
|
.await;
|
||||||
|
return Ok(EditEventOutcome::Message("Searching...".to_string()));
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- GENERAL EDIT MODE EVENT HANDLING (IF NOT AUTOCOMPLETE) ---
|
||||||
|
|
||||||
|
if let Some(action_str) =
|
||||||
|
config.get_edit_action_for_key(key.code, key.modifiers)
|
||||||
|
{
|
||||||
|
// Handle Enter key (next field)
|
||||||
|
if action_str == "enter_decider" {
|
||||||
|
// FIX: Pass &mut event_handler.ideal_cursor_column
|
||||||
|
let msg = form_e::execute_edit_action(
|
||||||
|
"next_field",
|
||||||
|
key,
|
||||||
|
form_state,
|
||||||
|
&mut event_handler.ideal_cursor_column,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
return Ok(EditEventOutcome::Message(msg));
|
return Ok(EditEventOutcome::Message(msg));
|
||||||
}
|
}
|
||||||
|
|
||||||
// --- Handle "exit" (Escape key) ---
|
// Handle exiting edit mode
|
||||||
if action_str == "exit" {
|
if action_str == "exit" {
|
||||||
if app_state.ui.show_register && register_state.in_suggestion_mode {
|
return Ok(EditEventOutcome::ExitEditMode);
|
||||||
let msg = auth_e::execute_edit_action("exit_suggestion_mode", key, register_state, ideal_cursor_column).await?;
|
|
||||||
return Ok(EditEventOutcome::Message(msg));
|
|
||||||
} else if app_state.ui.show_add_logic && admin_state.add_logic_state.in_target_column_suggestion_mode {
|
|
||||||
admin_state.add_logic_state.in_target_column_suggestion_mode = false;
|
|
||||||
admin_state.add_logic_state.show_target_column_suggestions = false;
|
|
||||||
admin_state.add_logic_state.selected_target_column_suggestion_index = None;
|
|
||||||
return Ok(EditEventOutcome::Message("Exited column suggestions".to_string()));
|
|
||||||
} else {
|
|
||||||
return Ok(EditEventOutcome::ExitEditMode);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// --- Autocomplete for AddLogicState Target Column ---
|
// Handle all other edit actions
|
||||||
if app_state.ui.show_add_logic && admin_state.add_logic_state.current_field() == 1 { // Target Column field
|
|
||||||
if action_str == "suggestion_down" { // "Tab" is mapped to suggestion_down
|
|
||||||
if !admin_state.add_logic_state.in_target_column_suggestion_mode {
|
|
||||||
// Attempt to open suggestions
|
|
||||||
if let Some(profile_name) = admin_state.add_logic_state.profile_name.clone().into() {
|
|
||||||
if let Some(table_name) = admin_state.add_logic_state.selected_table_name.clone() {
|
|
||||||
debug!("Fetching table structure for autocomplete: Profile='{}', Table='{}'", profile_name, table_name);
|
|
||||||
match grpc_client.get_table_structure(profile_name, table_name).await {
|
|
||||||
Ok(ts_response) => {
|
|
||||||
admin_state.add_logic_state.table_columns_for_suggestions =
|
|
||||||
ts_response.columns.into_iter().map(|c| c.name).collect();
|
|
||||||
admin_state.add_logic_state.update_target_column_suggestions();
|
|
||||||
if !admin_state.add_logic_state.target_column_suggestions.is_empty() {
|
|
||||||
admin_state.add_logic_state.in_target_column_suggestion_mode = true;
|
|
||||||
// update_target_column_suggestions handles initial selection
|
|
||||||
return Ok(EditEventOutcome::Message("Column suggestions shown".to_string()));
|
|
||||||
} else {
|
|
||||||
return Ok(EditEventOutcome::Message("No column suggestions for current input".to_string()));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Err(e) => {
|
|
||||||
debug!("Error fetching table structure: {}", e);
|
|
||||||
admin_state.add_logic_state.table_columns_for_suggestions.clear(); // Clear old data on error
|
|
||||||
admin_state.add_logic_state.update_target_column_suggestions();
|
|
||||||
return Ok(EditEventOutcome::Message(format!("Error fetching columns: {}", e)));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
return Ok(EditEventOutcome::Message("No table selected for column suggestions".to_string()));
|
|
||||||
}
|
|
||||||
} else { // Should not happen if AddLogic is properly initialized
|
|
||||||
return Ok(EditEventOutcome::Message("Profile name missing for column suggestions".to_string()));
|
|
||||||
}
|
|
||||||
} else { // Already in suggestion mode, navigate down
|
|
||||||
let msg = add_logic_e::execute_edit_action(action_str, key, &mut admin_state.add_logic_state, ideal_cursor_column).await?;
|
|
||||||
return Ok(EditEventOutcome::Message(msg));
|
|
||||||
}
|
|
||||||
} else if admin_state.add_logic_state.in_target_column_suggestion_mode && action_str == "suggestion_up" {
|
|
||||||
let msg = add_logic_e::execute_edit_action(action_str, key, &mut admin_state.add_logic_state, ideal_cursor_column).await?;
|
|
||||||
return Ok(EditEventOutcome::Message(msg));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// --- Autocomplete for RegisterState Role Field ---
|
|
||||||
if app_state.ui.show_register && register_state.current_field() == 4 { // Role field
|
|
||||||
if !register_state.in_suggestion_mode && action_str == "suggestion_down" { // Tab
|
|
||||||
register_state.update_role_suggestions();
|
|
||||||
if !register_state.role_suggestions.is_empty() {
|
|
||||||
register_state.in_suggestion_mode = true;
|
|
||||||
// update_role_suggestions should handle initial selection
|
|
||||||
return Ok(EditEventOutcome::Message("Role suggestions shown".to_string()));
|
|
||||||
} else {
|
|
||||||
// If Tab doesn't open suggestions, it might fall through to "next_field"
|
|
||||||
// or you might want specific behavior. For now, let it fall through.
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if register_state.in_suggestion_mode && matches!(action_str, "suggestion_down" | "suggestion_up") {
|
|
||||||
let msg = auth_e::execute_edit_action(action_str, key, register_state, ideal_cursor_column).await?;
|
|
||||||
return Ok(EditEventOutcome::Message(msg));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// --- Dispatch other edit actions ---
|
|
||||||
let msg = if app_state.ui.show_login {
|
let msg = if app_state.ui.show_login {
|
||||||
auth_e::execute_edit_action(action_str, key, login_state, ideal_cursor_column).await?
|
// FIX: Pass &mut event_handler.ideal_cursor_column
|
||||||
|
auth_e::execute_edit_action(
|
||||||
|
action_str,
|
||||||
|
key,
|
||||||
|
login_state,
|
||||||
|
&mut event_handler.ideal_cursor_column,
|
||||||
|
)
|
||||||
|
.await?
|
||||||
} else if app_state.ui.show_add_table {
|
} else if app_state.ui.show_add_table {
|
||||||
add_table_e::execute_edit_action(action_str, key, &mut admin_state.add_table_state, ideal_cursor_column).await?
|
// FIX: Pass &mut event_handler.ideal_cursor_column
|
||||||
|
add_table_e::execute_edit_action(
|
||||||
|
action_str,
|
||||||
|
key,
|
||||||
|
&mut admin_state.add_table_state,
|
||||||
|
&mut event_handler.ideal_cursor_column,
|
||||||
|
)
|
||||||
|
.await?
|
||||||
} else if app_state.ui.show_add_logic {
|
} else if app_state.ui.show_add_logic {
|
||||||
// If not a suggestion action handled above for AddLogic
|
// FIX: Pass &mut event_handler.ideal_cursor_column
|
||||||
if !(admin_state.add_logic_state.in_target_column_suggestion_mode && matches!(action_str, "suggestion_down" | "suggestion_up")) {
|
add_logic_e::execute_edit_action(
|
||||||
add_logic_e::execute_edit_action(action_str, key, &mut admin_state.add_logic_state, ideal_cursor_column).await?
|
action_str,
|
||||||
} else { String::new() /* Already handled */ }
|
key,
|
||||||
|
&mut admin_state.add_logic_state,
|
||||||
|
&mut event_handler.ideal_cursor_column,
|
||||||
|
)
|
||||||
|
.await?
|
||||||
} else if app_state.ui.show_register {
|
} else if app_state.ui.show_register {
|
||||||
if !(register_state.in_suggestion_mode && matches!(action_str, "suggestion_down" | "suggestion_up")) {
|
// FIX: Pass &mut event_handler.ideal_cursor_column
|
||||||
auth_e::execute_edit_action(action_str, key, register_state, ideal_cursor_column).await?
|
auth_e::execute_edit_action(
|
||||||
} else { String::new() /* Already handled */ }
|
action_str,
|
||||||
} else { // Form view
|
key,
|
||||||
form_e::execute_edit_action(action_str, key, form_state, ideal_cursor_column).await?
|
register_state,
|
||||||
|
&mut event_handler.ideal_cursor_column,
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
} else {
|
||||||
|
// FIX: Pass &mut event_handler.ideal_cursor_column
|
||||||
|
form_e::execute_edit_action(
|
||||||
|
action_str,
|
||||||
|
key,
|
||||||
|
form_state,
|
||||||
|
&mut event_handler.ideal_cursor_column,
|
||||||
|
)
|
||||||
|
.await?
|
||||||
};
|
};
|
||||||
return Ok(EditEventOutcome::Message(msg));
|
return Ok(EditEventOutcome::Message(msg));
|
||||||
}
|
}
|
||||||
|
|
||||||
// --- Character insertion ---
|
// --- FALLBACK FOR CHARACTER INSERTION (IF NO OTHER BINDING MATCHED) ---
|
||||||
// If character insertion happens while in suggestion mode, exit suggestion mode first.
|
if let KeyCode::Char(_) = key.code {
|
||||||
let mut exited_suggestion_mode_for_typing = false;
|
let msg = if app_state.ui.show_login {
|
||||||
if app_state.ui.show_register && register_state.in_suggestion_mode {
|
// FIX: Pass &mut event_handler.ideal_cursor_column
|
||||||
register_state.in_suggestion_mode = false;
|
auth_e::execute_edit_action(
|
||||||
register_state.show_role_suggestions = false;
|
"insert_char",
|
||||||
register_state.selected_suggestion_index = None;
|
key,
|
||||||
exited_suggestion_mode_for_typing = true;
|
login_state,
|
||||||
}
|
&mut event_handler.ideal_cursor_column,
|
||||||
if app_state.ui.show_add_logic && admin_state.add_logic_state.in_target_column_suggestion_mode {
|
)
|
||||||
admin_state.add_logic_state.in_target_column_suggestion_mode = false;
|
.await?
|
||||||
admin_state.add_logic_state.show_target_column_suggestions = false;
|
} else if app_state.ui.show_add_table {
|
||||||
admin_state.add_logic_state.selected_target_column_suggestion_index = None;
|
// FIX: Pass &mut event_handler.ideal_cursor_column
|
||||||
exited_suggestion_mode_for_typing = true;
|
add_table_e::execute_edit_action(
|
||||||
|
"insert_char",
|
||||||
|
key,
|
||||||
|
&mut admin_state.add_table_state,
|
||||||
|
&mut event_handler.ideal_cursor_column,
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
} else if app_state.ui.show_add_logic {
|
||||||
|
// FIX: Pass &mut event_handler.ideal_cursor_column
|
||||||
|
add_logic_e::execute_edit_action(
|
||||||
|
"insert_char",
|
||||||
|
key,
|
||||||
|
&mut admin_state.add_logic_state,
|
||||||
|
&mut event_handler.ideal_cursor_column,
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
} else if app_state.ui.show_register {
|
||||||
|
// FIX: Pass &mut event_handler.ideal_cursor_column
|
||||||
|
auth_e::execute_edit_action(
|
||||||
|
"insert_char",
|
||||||
|
key,
|
||||||
|
register_state,
|
||||||
|
&mut event_handler.ideal_cursor_column,
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
} else {
|
||||||
|
// FIX: Pass &mut event_handler.ideal_cursor_column
|
||||||
|
form_e::execute_edit_action(
|
||||||
|
"insert_char",
|
||||||
|
key,
|
||||||
|
form_state,
|
||||||
|
&mut event_handler.ideal_cursor_column,
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
};
|
||||||
|
return Ok(EditEventOutcome::Message(msg));
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut char_insert_msg = if app_state.ui.show_login {
|
Ok(EditEventOutcome::Message(String::new())) // No action taken
|
||||||
auth_e::execute_edit_action("insert_char", key, login_state, ideal_cursor_column).await?
|
|
||||||
} else if app_state.ui.show_add_table {
|
|
||||||
add_table_e::execute_edit_action("insert_char", key, &mut admin_state.add_table_state, ideal_cursor_column).await?
|
|
||||||
} else if app_state.ui.show_add_logic {
|
|
||||||
add_logic_e::execute_edit_action("insert_char", key, &mut admin_state.add_logic_state, ideal_cursor_column).await?
|
|
||||||
} else if app_state.ui.show_register {
|
|
||||||
auth_e::execute_edit_action("insert_char", key, register_state, ideal_cursor_column).await?
|
|
||||||
} else { // Form view
|
|
||||||
form_e::execute_edit_action("insert_char", key, form_state, ideal_cursor_column).await?
|
|
||||||
};
|
|
||||||
|
|
||||||
// After character insertion, update suggestions if applicable
|
|
||||||
if app_state.ui.show_register && register_state.current_field() == 4 {
|
|
||||||
register_state.update_role_suggestions();
|
|
||||||
// If we just exited suggestion mode by typing, don't immediately show them again unless Tab is pressed.
|
|
||||||
// However, update_role_suggestions will set show_role_suggestions if matches are found.
|
|
||||||
// This is fine, as the render logic checks in_suggestion_mode.
|
|
||||||
}
|
|
||||||
if app_state.ui.show_add_logic && admin_state.add_logic_state.current_field() == 1 {
|
|
||||||
admin_state.add_logic_state.update_target_column_suggestions();
|
|
||||||
}
|
|
||||||
|
|
||||||
if exited_suggestion_mode_for_typing && char_insert_msg.is_empty() {
|
|
||||||
char_insert_msg = "Suggestions hidden".to_string();
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
Ok(EditEventOutcome::Message(char_insert_msg))
|
|
||||||
}
|
}
|
||||||
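
Further up in this file, `trigger_form_autocomplete_search` clones what it needs, spawns a background task, and pushes the hits (or an empty vec on error) through an unbounded channel the UI already owns. The pattern in isolation, with a stand-in `fetch_hits` instead of `grpc_client.search_table`:

use tokio::sync::mpsc;

// Stand-in for the real gRPC search; a real implementation would await the
// search_table call here.
async fn fetch_hits(table: String, query: String) -> Result<Vec<String>, String> {
    Ok(vec![format!("{table}:{query}")])
}

// Fire-and-forget search: never block the key-event path, always send
// something back so the UI can clear its loading state.
fn spawn_search(table: String, query: String, sender: mpsc::UnboundedSender<Vec<String>>) {
    tokio::spawn(async move {
        match fetch_hits(table, query).await {
            Ok(hits) => {
                let _ = sender.send(hits);
            }
            Err(e) => {
                tracing::error!("autocomplete search failed: {e}");
                let _ = sender.send(Vec::new());
            }
        }
    });
}
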

@@ -15,7 +15,7 @@ use anyhow::Result;
 pub async fn handle_command_event(
     key: KeyEvent,
     config: &Config,
-    app_state: &AppState,
+    app_state: &mut AppState,
     login_state: &LoginState,
     register_state: &RegisterState,
     form_state: &mut FormState,
@@ -74,7 +74,7 @@ pub async fn handle_command_event(
 async fn process_command(
     config: &Config,
     form_state: &mut FormState,
-    app_state: &AppState,
+    app_state: &mut AppState,
     login_state: &LoginState,
     register_state: &RegisterState,
     command_input: &mut String,
@@ -117,6 +117,7 @@ async fn process_command(
         },
         "save" => {
             let outcome = save(
+                app_state,
                 form_state,
                 grpc_client,
             ).await?;
@@ -21,6 +21,7 @@ use crate::state::{
     app::{
         buffer::{AppView, BufferState},
         highlight::HighlightState,
+        search::SearchState, // Correctly imported
         state::AppState,
     },
     pages::{
@@ -41,10 +42,12 @@ use crate::tui::{
 use crate::ui::handlers::context::UiContext;
 use crate::ui::handlers::rat_state::UiStateHandler;
 use anyhow::Result;
+use common::proto::multieko2::search::search_response::Hit;
 use crossterm::cursor::SetCursorStyle;
-use crossterm::event::KeyCode;
-use crossterm::event::{Event, KeyEvent};
+use crossterm::event::{Event, KeyCode, KeyEvent};
 use tokio::sync::mpsc;
+use tokio::sync::mpsc::unbounded_channel;
+use tracing::{error, info};

 #[derive(Debug, Clone, PartialEq, Eq)]
 pub enum EventOutcome {
@@ -74,11 +77,17 @@ pub struct EventHandler {
     pub ideal_cursor_column: usize,
     pub key_sequence_tracker: KeySequenceTracker,
     pub auth_client: AuthClient,
+    pub grpc_client: GrpcClient,
     pub login_result_sender: mpsc::Sender<LoginResult>,
     pub register_result_sender: mpsc::Sender<RegisterResult>,
     pub save_table_result_sender: SaveTableResultSender,
     pub save_logic_result_sender: SaveLogicResultSender,
     pub navigation_state: NavigationState,
+    pub search_result_sender: mpsc::UnboundedSender<Vec<Hit>>,
+    pub search_result_receiver: mpsc::UnboundedReceiver<Vec<Hit>>,
+    // --- ADDED FOR LIVE AUTOCOMPLETE ---
+    pub autocomplete_result_sender: mpsc::UnboundedSender<Vec<Hit>>,
+    pub autocomplete_result_receiver: mpsc::UnboundedReceiver<Vec<Hit>>,
 }

 impl EventHandler {
@@ -87,7 +96,10 @@ impl EventHandler {
         register_result_sender: mpsc::Sender<RegisterResult>,
         save_table_result_sender: SaveTableResultSender,
         save_logic_result_sender: SaveLogicResultSender,
+        grpc_client: GrpcClient,
     ) -> Result<Self> {
+        let (search_tx, search_rx) = unbounded_channel();
+        let (autocomplete_tx, autocomplete_rx) = unbounded_channel(); // ADDED
         Ok(EventHandler {
             command_mode: false,
             command_input: String::new(),
@@ -98,11 +110,17 @@ impl EventHandler {
             ideal_cursor_column: 0,
             key_sequence_tracker: KeySequenceTracker::new(400),
             auth_client: AuthClient::new().await?,
+            grpc_client,
             login_result_sender,
             register_result_sender,
             save_table_result_sender,
             save_logic_result_sender,
             navigation_state: NavigationState::new(),
+            search_result_sender: search_tx,
+            search_result_receiver: search_rx,
+            // --- ADDED ---
+            autocomplete_result_sender: autocomplete_tx,
+            autocomplete_result_receiver: autocomplete_rx,
         })
     }

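
These hunks add the sender/receiver pairs to `EventHandler`, but the draining side is not shown here; presumably the tick/render loop polls the receivers with `try_recv` so the UI never blocks, along these lines (assumption, not the project's code):

use tokio::sync::mpsc;

// Pull everything already queued on a receiver without awaiting, keeping only
// the most recent result set.
fn drain_latest<T>(rx: &mut mpsc::UnboundedReceiver<T>) -> Option<T> {
    let mut latest = None;
    while let Ok(batch) = rx.try_recv() {
        latest = Some(batch);
    }
    latest
}
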
@@ -114,13 +132,122 @@ impl EventHandler {
|
|||||||
self.navigation_state.activate_find_file(options);
|
self.navigation_state.activate_find_file(options);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// This function handles state changes.
|
||||||
|
async fn handle_search_palette_event(
|
||||||
|
&mut self,
|
||||||
|
key_event: KeyEvent,
|
||||||
|
form_state: &mut FormState,
|
||||||
|
app_state: &mut AppState,
|
||||||
|
) -> Result<EventOutcome> {
|
||||||
|
let mut should_close = false;
|
||||||
|
let mut outcome_message = String::new();
|
||||||
|
let mut trigger_search = false;
|
||||||
|
|
||||||
|
if let Some(search_state) = app_state.search_state.as_mut() {
|
||||||
|
match key_event.code {
|
||||||
|
KeyCode::Esc => {
|
||||||
|
should_close = true;
|
||||||
|
outcome_message = "Search cancelled".to_string();
|
||||||
|
}
|
||||||
|
KeyCode::Enter => {
|
||||||
|
if let Some(selected_hit) =
|
||||||
|
search_state.results.get(search_state.selected_index)
|
||||||
|
{
|
||||||
|
if let Ok(data) = serde_json::from_str::<
|
||||||
|
std::collections::HashMap<String, String>,
|
||||||
|
>(&selected_hit.content_json)
|
||||||
|
{
|
||||||
|
let detached_pos = form_state.total_count + 2;
|
||||||
|
form_state
|
||||||
|
.update_from_response(&data, detached_pos);
|
||||||
|
}
|
||||||
|
should_close = true;
|
||||||
|
outcome_message =
|
||||||
|
format!("Loaded record ID {}", selected_hit.id);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
KeyCode::Up => search_state.previous_result(),
|
||||||
|
KeyCode::Down => search_state.next_result(),
|
||||||
|
KeyCode::Char(c) => {
|
||||||
|
search_state
|
||||||
|
.input
|
||||||
|
.insert(search_state.cursor_position, c);
|
||||||
|
search_state.cursor_position += 1;
|
||||||
|
trigger_search = true;
|
||||||
|
}
|
||||||
|
KeyCode::Backspace => {
|
||||||
|
if search_state.cursor_position > 0 {
|
||||||
|
search_state.cursor_position -= 1;
|
||||||
|
search_state.input.remove(search_state.cursor_position);
|
||||||
|
trigger_search = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
KeyCode::Left => {
|
||||||
|
search_state.cursor_position =
|
||||||
|
search_state.cursor_position.saturating_sub(1);
|
||||||
|
}
|
||||||
|
KeyCode::Right => {
|
||||||
|
if search_state.cursor_position < search_state.input.len()
|
||||||
|
{
|
||||||
|
search_state.cursor_position += 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- START CORRECTED LOGIC ---
|
||||||
|
if trigger_search {
|
||||||
|
search_state.is_loading = true;
|
||||||
|
search_state.results.clear();
|
||||||
|
search_state.selected_index = 0;
|
||||||
|
|
||||||
|
let query = search_state.input.clone();
|
||||||
|
let table_name = search_state.table_name.clone();
|
||||||
|
let sender = self.search_result_sender.clone();
|
||||||
|
let mut grpc_client = self.grpc_client.clone();
|
||||||
|
|
||||||
|
info!(
|
||||||
|
"--- 1. Spawning search task for query: '{}' ---",
|
||||||
|
query
|
||||||
|
);
|
||||||
|
// We now move the grpc_client into the task, just like with login.
|
||||||
|
tokio::spawn(async move {
|
||||||
|
info!("--- 2. Background task started. ---");
|
||||||
|
match grpc_client.search_table(table_name, query).await {
|
||||||
|
Ok(response) => {
|
||||||
|
info!(
|
||||||
|
"--- 3a. gRPC call successful. Found {} hits. ---",
|
||||||
|
response.hits.len()
|
||||||
|
);
|
||||||
|
let _ = sender.send(response.hits);
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
// THE FIX: Use the debug formatter `{:?}` to print the full error chain.
|
||||||
|
error!("--- 3b. gRPC call failed: {:?} ---", e);
|
||||||
|
let _ = sender.send(vec![]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// The borrow on `app_state.search_state` ends here.
|
||||||
|
// Now we can safely modify the Option itself.
|
||||||
|
if should_close {
|
||||||
|
app_state.search_state = None;
|
||||||
|
app_state.ui.show_search_palette = false;
|
||||||
|
app_state.ui.focus_outside_canvas = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(EventOutcome::Ok(outcome_message))
|
||||||
|
}
|
||||||
|
|
||||||
     #[allow(clippy::too_many_arguments)]
     pub async fn handle_event(
         &mut self,
         event: Event,
         config: &Config,
         terminal: &mut TerminalCore,
-        grpc_client: &mut GrpcClient,
         command_handler: &mut CommandHandler,
         form_state: &mut FormState,
         auth_state: &mut AuthState,
@@ -131,17 +258,36 @@ impl EventHandler {
         buffer_state: &mut BufferState,
         app_state: &mut AppState,
     ) -> Result<EventOutcome> {
-        let mut current_mode = ModeManager::derive_mode(app_state, self, admin_state);
+        if app_state.ui.show_search_palette {
+            if let Event::Key(key_event) = event {
+                // The call no longer passes grpc_client
+                return self
+                    .handle_search_palette_event(
+                        key_event,
+                        form_state,
+                        app_state,
+                    )
+                    .await;
+            }
+            return Ok(EventOutcome::Ok(String::new()));
+        }
+
+        let mut current_mode =
+            ModeManager::derive_mode(app_state, self, admin_state);

         if current_mode == AppMode::General && self.navigation_state.active {
             if let Event::Key(key_event) = event {
-                let outcome =
-                    handle_command_navigation_event(&mut self.navigation_state, key_event, config)
-                        .await?;
+                let outcome = handle_command_navigation_event(
+                    &mut self.navigation_state,
+                    key_event,
+                    config,
+                )
+                .await?;

                 if !self.navigation_state.active {
                     self.command_message = outcome.get_message_if_ok();
-                    current_mode = ModeManager::derive_mode(app_state, self, admin_state);
+                    current_mode =
+                        ModeManager::derive_mode(app_state, self, admin_state);
                 }
                 app_state.update_mode(current_mode);
                 return Ok(outcome);
@@ -154,23 +300,39 @@ impl EventHandler {

         let current_view = {
             let ui = &app_state.ui;
-            if ui.show_intro { AppView::Intro }
-            else if ui.show_login { AppView::Login }
-            else if ui.show_register { AppView::Register }
-            else if ui.show_admin { AppView::Admin }
-            else if ui.show_add_logic { AppView::AddLogic }
-            else if ui.show_add_table { AppView::AddTable }
-            else if ui.show_form { AppView::Form }
-            else { AppView::Scratch }
+            if ui.show_intro {
+                AppView::Intro
+            } else if ui.show_login {
+                AppView::Login
+            } else if ui.show_register {
+                AppView::Register
+            } else if ui.show_admin {
+                AppView::Admin
+            } else if ui.show_add_logic {
+                AppView::AddLogic
+            } else if ui.show_add_table {
+                AppView::AddTable
+            } else if ui.show_form {
+                AppView::Form
+            } else {
+                AppView::Scratch
+            }
         };
         buffer_state.update_history(current_view);

         if app_state.ui.dialog.dialog_show {
             if let Event::Key(key_event) = event {
                 if let Some(dialog_result) = dialog::handle_dialog_event(
-                    &Event::Key(key_event), config, app_state, login_state,
-                    register_state, buffer_state, admin_state,
-                ).await {
+                    &Event::Key(key_event),
+                    config,
+                    app_state,
+                    login_state,
+                    register_state,
+                    buffer_state,
+                    admin_state,
+                )
+                .await
+                {
                     return dialog_result;
                 }
             } else if let Event::Resize(_, _) = event {
@@ -182,99 +344,227 @@ impl EventHandler {
             let key_code = key_event.code;
             let modifiers = key_event.modifiers;

-            if UiStateHandler::toggle_sidebar(&mut app_state.ui, config, key_code, modifiers) {
-                let message = format!("Sidebar {}", if app_state.ui.show_sidebar { "shown" } else { "hidden" });
+            if UiStateHandler::toggle_sidebar(
+                &mut app_state.ui,
+                config,
+                key_code,
+                modifiers,
+            ) {
+                let message = format!(
+                    "Sidebar {}",
+                    if app_state.ui.show_sidebar {
+                        "shown"
+                    } else {
+                        "hidden"
+                    }
+                );
                 return Ok(EventOutcome::Ok(message));
             }
-            if UiStateHandler::toggle_buffer_list(&mut app_state.ui, config, key_code, modifiers) {
-                let message = format!("Buffer {}", if app_state.ui.show_buffer_list { "shown" } else { "hidden" });
+            if UiStateHandler::toggle_buffer_list(
+                &mut app_state.ui,
+                config,
+                key_code,
+                modifiers,
+            ) {
+                let message = format!(
+                    "Buffer {}",
+                    if app_state.ui.show_buffer_list {
+                        "shown"
+                    } else {
+                        "hidden"
+                    }
+                );
                 return Ok(EventOutcome::Ok(message));
             }

             if !matches!(current_mode, AppMode::Edit | AppMode::Command) {
-                if let Some(action) = config.get_action_for_key_in_mode(&config.keybindings.global, key_code, modifiers) {
+                if let Some(action) = config.get_action_for_key_in_mode(
+                    &config.keybindings.global,
+                    key_code,
+                    modifiers,
+                ) {
                     match action {
                         "next_buffer" => {
                             if buffer::switch_buffer(buffer_state, true) {
-                                return Ok(EventOutcome::Ok("Switched to next buffer".to_string()));
+                                return Ok(EventOutcome::Ok(
+                                    "Switched to next buffer".to_string(),
+                                ));
                             }
                         }
                         "previous_buffer" => {
                             if buffer::switch_buffer(buffer_state, false) {
-                                return Ok(EventOutcome::Ok("Switched to previous buffer".to_string()));
+                                return Ok(EventOutcome::Ok(
+                                    "Switched to previous buffer".to_string(),
+                                ));
                             }
                         }
                         "close_buffer" => {
-                            let current_table_name = app_state.current_view_table_name.as_deref();
-                            let message = buffer_state.close_buffer_with_intro_fallback(current_table_name);
+                            let current_table_name =
+                                app_state.current_view_table_name.as_deref();
+                            let message = buffer_state
+                                .close_buffer_with_intro_fallback(
+                                    current_table_name,
+                                );
                             return Ok(EventOutcome::Ok(message));
                         }
                         _ => {}
                     }
                 }
+
+                if let Some(action) =
+                    config.get_general_action(key_code, modifiers)
+                {
+                    if action == "open_search" {
+                        if app_state.ui.show_form {
+                            if let Some(table_name) =
+                                app_state.current_view_table_name.clone()
+                            {
+                                app_state.ui.show_search_palette = true;
+                                app_state.search_state =
+                                    Some(SearchState::new(table_name));
+                                app_state.ui.focus_outside_canvas = true;
+                                return Ok(EventOutcome::Ok(
+                                    "Search palette opened".to_string(),
+                                ));
+                            }
+                        }
+                    }
+                }
             }

             match current_mode {
                 AppMode::General => {
-                    if app_state.ui.show_admin && auth_state.role.as_deref() == Some("admin") {
-                        if admin_nav::handle_admin_navigation(key_event, config, app_state, admin_state, buffer_state, &mut self.command_message) {
-                            return Ok(EventOutcome::Ok(self.command_message.clone()));
+                    if app_state.ui.show_admin
+                        && auth_state.role.as_deref() == Some("admin")
+                    {
+                        if admin_nav::handle_admin_navigation(
+                            key_event,
+                            config,
+                            app_state,
+                            admin_state,
+                            buffer_state,
+                            &mut self.command_message,
+                        ) {
+                            return Ok(EventOutcome::Ok(
+                                self.command_message.clone(),
+                            ));
                         }
                     }

                     if app_state.ui.show_add_logic {
-                        let client_clone = grpc_client.clone();
+                        let client_clone = self.grpc_client.clone();
                         let sender_clone = self.save_logic_result_sender.clone();
                         if add_logic_nav::handle_add_logic_navigation(
-                            key_event, config, app_state, &mut admin_state.add_logic_state,
-                            &mut self.is_edit_mode, buffer_state, client_clone, sender_clone, &mut self.command_message,
+                            key_event,
+                            config,
+                            app_state,
+                            &mut admin_state.add_logic_state,
+                            &mut self.is_edit_mode,
+                            buffer_state,
+                            client_clone,
+                            sender_clone,
+                            &mut self.command_message,
                         ) {
-                            return Ok(EventOutcome::Ok(self.command_message.clone()));
+                            return Ok(EventOutcome::Ok(
+                                self.command_message.clone(),
+                            ));
                         }
                     }

                     if app_state.ui.show_add_table {
-                        let client_clone = grpc_client.clone();
+                        let client_clone = self.grpc_client.clone();
                         let sender_clone = self.save_table_result_sender.clone();
                         if add_table_nav::handle_add_table_navigation(
-                            key_event, config, app_state, &mut admin_state.add_table_state,
-                            client_clone, sender_clone, &mut self.command_message,
+                            key_event,
+                            config,
+                            app_state,
+                            &mut admin_state.add_table_state,
+                            client_clone,
+                            sender_clone,
+                            &mut self.command_message,
                         ) {
-                            return Ok(EventOutcome::Ok(self.command_message.clone()));
+                            return Ok(EventOutcome::Ok(
+                                self.command_message.clone(),
+                            ));
                         }
                     }

                     let nav_outcome = navigation::handle_navigation_event(
-                        key_event, config, form_state, app_state, login_state, register_state,
-                        intro_state, admin_state, &mut self.command_mode, &mut self.command_input,
-                        &mut self.command_message, &mut self.navigation_state,
-                    ).await;
+                        key_event,
+                        config,
+                        form_state,
+                        app_state,
+                        login_state,
+                        register_state,
+                        intro_state,
+                        admin_state,
+                        &mut self.command_mode,
+                        &mut self.command_input,
+                        &mut self.command_message,
+                        &mut self.navigation_state,
+                    )
+                    .await;

                     match nav_outcome {
                         Ok(EventOutcome::ButtonSelected { context, index }) => {
                             let message = match context {
                                 UiContext::Intro => {
-                                    intro::handle_intro_selection(app_state, buffer_state, index);
-                                    if app_state.ui.show_admin && !app_state.profile_tree.profiles.is_empty() {
-                                        admin_state.profile_list_state.select(Some(0));
+                                    intro::handle_intro_selection(
+                                        app_state,
+                                        buffer_state,
+                                        index,
+                                    );
+                                    if app_state.ui.show_admin
+                                        && !app_state
+                                            .profile_tree
+                                            .profiles
+                                            .is_empty()
+                                    {
+                                        admin_state
+                                            .profile_list_state
+                                            .select(Some(0));
                                     }
                                     format!("Intro Option {} selected", index)
                                 }
                                 UiContext::Login => match index {
-                                    0 => login::initiate_login(login_state, app_state, self.auth_client.clone(), self.login_result_sender.clone()),
-                                    1 => login::back_to_main(login_state, app_state, buffer_state).await,
+                                    0 => login::initiate_login(
+                                        login_state,
+                                        app_state,
+                                        self.auth_client.clone(),
+                                        self.login_result_sender.clone(),
+                                    ),
+                                    1 => login::back_to_main(
+                                        login_state,
+                                        app_state,
+                                        buffer_state,
+                                    )
+                                    .await,
                                     _ => "Invalid Login Option".to_string(),
                                 },
                                 UiContext::Register => match index {
-                                    0 => register::initiate_registration(register_state, app_state, self.auth_client.clone(), self.register_result_sender.clone()),
-                                    1 => register::back_to_login(register_state, app_state, buffer_state).await,
+                                    0 => register::initiate_registration(
+                                        register_state,
+                                        app_state,
+                                        self.auth_client.clone(),
+                                        self.register_result_sender.clone(),
+                                    ),
+                                    1 => register::back_to_login(
+                                        register_state,
+                                        app_state,
+                                        buffer_state,
+                                    )
+                                    .await,
                                     _ => "Invalid Login Option".to_string(),
                                 },
                                 UiContext::Admin => {
-                                    admin::handle_admin_selection(app_state, admin_state);
+                                    admin::handle_admin_selection(
+                                        app_state,
+                                        admin_state,
+                                    );
                                     format!("Admin Option {} selected", index)
                                 }
-                                UiContext::Dialog => "Internal error: Unexpected dialog state".to_string(),
+                                UiContext::Dialog => "Internal error: Unexpected dialog state"
+                                    .to_string(),
                             };
                             return Ok(EventOutcome::Ok(message));
                         }
@@ -326,35 +616,46 @@ impl EventHandler {
                         return Ok(EventOutcome::Ok(String::new()));
                     }

-                    if let Some(action) = config.get_common_action(key_code, modifiers) {
+                    if let Some(action) =
+                        config.get_common_action(key_code, modifiers)
+                    {
                         match action {
-                            "save" | "force_quit" | "save_and_quit" | "revert" => {
+                            "save" | "force_quit" | "save_and_quit"
+                            | "revert" => {
                                 return common_mode::handle_core_action(
-                                    action, form_state, auth_state, login_state, register_state,
-                                    grpc_client, &mut self.auth_client, terminal, app_state,
-                                ).await;
+                                    action,
+                                    form_state,
+                                    auth_state,
+                                    login_state,
+                                    register_state,
+                                    &mut self.grpc_client,
+                                    &mut self.auth_client,
+                                    terminal,
+                                    app_state,
+                                )
+                                .await;
                             }
                             _ => {}
                         }
                     }

-                    let (_should_exit, message) = read_only::handle_read_only_event(
-                        app_state,
-                        key_event,
-                        config,
-                        form_state,
-                        login_state,
-                        register_state,
-                        &mut admin_state.add_table_state,
-                        &mut admin_state.add_logic_state,
-                        &mut self.key_sequence_tracker,
-                        // No more current_position or total_count arguments
-                        grpc_client,
-                        &mut self.command_message,
-                        &mut self.edit_mode_cooldown,
-                        &mut self.ideal_cursor_column,
-                    )
-                    .await?;
+                    let (_should_exit, message) =
+                        read_only::handle_read_only_event(
+                            app_state,
+                            key_event,
+                            config,
+                            form_state,
+                            login_state,
+                            register_state,
+                            &mut admin_state.add_table_state,
+                            &mut admin_state.add_logic_state,
+                            &mut self.key_sequence_tracker,
+                            &mut self.grpc_client, // <-- FIX 1
+                            &mut self.command_message,
+                            &mut self.edit_mode_cooldown,
+                            &mut self.ideal_cursor_column,
+                        )
+                        .await?;
                     return Ok(EventOutcome::Ok(message));
                 }

@@ -373,33 +674,45 @@ impl EventHandler {
                        return Ok(EventOutcome::Ok("".to_string()));
                    }

-                    let (_should_exit, message) = read_only::handle_read_only_event(
-                        app_state,
-                        key_event,
-                        config,
-                        form_state,
-                        login_state,
-                        register_state,
-                        &mut admin_state.add_table_state,
-                        &mut admin_state.add_logic_state,
-                        &mut self.key_sequence_tracker,
-                        grpc_client,
-                        &mut self.command_message,
-                        &mut self.edit_mode_cooldown,
-                        &mut self.ideal_cursor_column,
-                    )
-                    .await?;
+                    let (_should_exit, message) =
+                        read_only::handle_read_only_event(
+                            app_state,
+                            key_event,
+                            config,
+                            form_state,
+                            login_state,
+                            register_state,
+                            &mut admin_state.add_table_state,
+                            &mut admin_state.add_logic_state,
+                            &mut self.key_sequence_tracker,
+                            &mut self.grpc_client, // <-- FIX 2
+                            &mut self.command_message,
+                            &mut self.edit_mode_cooldown,
+                            &mut self.ideal_cursor_column,
+                        )
+                        .await?;
                    return Ok(EventOutcome::Ok(message));
                }

                AppMode::Edit => {
-                    if let Some(action) = config.get_common_action(key_code, modifiers) {
+                    if let Some(action) =
+                        config.get_common_action(key_code, modifiers)
+                    {
                        match action {
-                            "save" | "force_quit" | "save_and_quit" | "revert" => {
+                            "save" | "force_quit" | "save_and_quit"
+                            | "revert" => {
                                return common_mode::handle_core_action(
-                                    action, form_state, auth_state, login_state, register_state,
-                                    grpc_client, &mut self.auth_client, terminal, app_state,
-                                ).await;
+                                    action,
+                                    form_state,
+                                    auth_state,
+                                    login_state,
+                                    register_state,
+                                    &mut self.grpc_client,
+                                    &mut self.auth_client,
+                                    terminal,
+                                    app_state,
+                                )
+                                .await;
                            }
                            _ => {}
                        }
@@ -407,11 +720,20 @@ impl EventHandler {

                    let mut current_position = form_state.current_position;
                    let total_count = form_state.total_count;
+                    // --- MODIFIED: Pass `self` instead of `grpc_client` ---
                    let edit_result = edit::handle_edit_event(
-                        key_event, config, form_state, login_state, register_state, admin_state,
-                        &mut self.ideal_cursor_column, &mut current_position, total_count,
-                        grpc_client, app_state,
-                    ).await;
+                        key_event,
+                        config,
+                        form_state,
+                        login_state,
+                        register_state,
+                        admin_state,
+                        &mut current_position,
+                        total_count,
+                        self,
+                        app_state,
+                    )
+                    .await;

                    match edit_result {
                        Ok(edit::EditEventOutcome::ExitEditMode) => {
@@ -428,14 +750,22 @@ impl EventHandler {
                            target_state.set_current_cursor_pos(new_pos);
                            self.ideal_cursor_column = new_pos;
                        }
-                            return Ok(EventOutcome::Ok(self.command_message.clone()));
+                            return Ok(EventOutcome::Ok(
+                                self.command_message.clone(),
+                            ));
                        }
                        Ok(edit::EditEventOutcome::Message(msg)) => {
-                            if !msg.is_empty() { self.command_message = msg; }
+                            if !msg.is_empty() {
+                                self.command_message = msg;
+                            }
                            self.key_sequence_tracker.reset();
-                            return Ok(EventOutcome::Ok(self.command_message.clone()));
+                            return Ok(EventOutcome::Ok(
+                                self.command_message.clone(),
+                            ));
+                        }
+                        Err(e) => {
+                            return Err(e.into());
                        }
-                        Err(e) => { return Err(e.into()); }
                    }
                }

@@ -445,21 +775,38 @@ impl EventHandler {
                    self.command_message.clear();
                    self.command_mode = false;
                    self.key_sequence_tracker.reset();
-                    return Ok(EventOutcome::Ok("Exited command mode".to_string()));
+                    return Ok(EventOutcome::Ok(
+                        "Exited command mode".to_string(),
+                    ));
                }

                if config.is_command_execute(key_code, modifiers) {
                    let mut current_position = form_state.current_position;
                    let total_count = form_state.total_count;
                    let outcome = command_mode::handle_command_event(
-                        key_event, config, app_state, login_state, register_state, form_state,
-                        &mut self.command_input, &mut self.command_message, grpc_client,
-                        command_handler, terminal, &mut current_position, total_count,
-                    ).await?;
+                        key_event,
+                        config,
+                        app_state,
+                        login_state,
+                        register_state,
+                        form_state,
+                        &mut self.command_input,
+                        &mut self.command_message,
+                        &mut self.grpc_client, // <-- FIX 5
+                        command_handler,
+                        terminal,
+                        &mut current_position,
+                        total_count,
+                    )
+                    .await?;
                    form_state.current_position = current_position;
                    self.command_mode = false;
                    self.key_sequence_tracker.reset();
-                    let new_mode = ModeManager::derive_mode(app_state, self, admin_state);
+                    let new_mode = ModeManager::derive_mode(
+                        app_state,
+                        self,
+                        admin_state,
+                    );
                    app_state.update_mode(new_mode);
                    return Ok(outcome);
                }
@@ -473,39 +820,59 @@ impl EventHandler {
                if let KeyCode::Char(c) = key_code {
                    if c == 'f' {
                        self.key_sequence_tracker.add_key(key_code);
-                        let sequence = self.key_sequence_tracker.get_sequence();
+                        let sequence =
+                            self.key_sequence_tracker.get_sequence();

-                        if config.matches_key_sequence_generalized(&sequence) == Some("find_file_palette_toggle") {
-                            if app_state.ui.show_form || app_state.ui.show_intro {
-                                // --- START FIX ---
-                                let mut all_table_paths: Vec<String> = app_state
-                                    .profile_tree
-                                    .profiles
-                                    .iter()
-                                    .flat_map(|profile| {
-                                        profile.tables.iter().map(move |table| {
-                                            format!("{}/{}", profile.name, table.name)
-                                        })
-                                    })
-                                    .collect();
+                        if config.matches_key_sequence_generalized(
+                            &sequence,
+                        ) == Some("find_file_palette_toggle")
+                        {
+                            if app_state.ui.show_form
+                                || app_state.ui.show_intro
+                            {
+                                let mut all_table_paths: Vec<String> =
+                                    app_state
+                                        .profile_tree
+                                        .profiles
+                                        .iter()
+                                        .flat_map(|profile| {
+                                            profile.tables.iter().map(
+                                                move |table| {
+                                                    format!(
+                                                        "{}/{}",
+                                                        profile.name,
+                                                        table.name
+                                                    )
+                                                },
+                                            )
+                                        })
+                                        .collect();
                                all_table_paths.sort();

-                                self.navigation_state.activate_find_file(all_table_paths);
-                                // --- END FIX ---
+                                self.navigation_state
+                                    .activate_find_file(all_table_paths);

                                self.command_mode = false;
                                self.command_input.clear();
                                self.command_message.clear();
                                self.key_sequence_tracker.reset();
-                                return Ok(EventOutcome::Ok("Table selection palette activated".to_string()));
+                                return Ok(EventOutcome::Ok(
+                                    "Table selection palette activated"
+                                        .to_string(),
+                                ));
                            } else {
                                self.key_sequence_tracker.reset();
                                self.command_input.push('f');
-                                if sequence.len() > 1 && sequence[0] == KeyCode::Char('f') {
+                                if sequence.len() > 1
+                                    && sequence[0] == KeyCode::Char('f')
+                                {
                                    self.command_input.push('f');
                                }
-                                self.command_message = "Find File not available in this view.".to_string();
-                                return Ok(EventOutcome::Ok(self.command_message.clone()));
+                                self.command_message = "Find File not available in this view."
+                                    .to_string();
+                                return Ok(EventOutcome::Ok(
+                                    self.command_message.clone(),
+                                ));
                            }
                        }

@@ -514,7 +881,9 @@ impl EventHandler {
                    }
                }

-                if c != 'f' && !self.key_sequence_tracker.current_sequence.is_empty() {
+                if c != 'f'
+                    && !self.key_sequence_tracker.current_sequence.is_empty()
+                {
                    self.key_sequence_tracker.reset();
                }

@@ -1,7 +1,6 @@
 // src/services/grpc_client.rs

-use tonic::transport::Channel;
-use common::proto::multieko2::common::{CountResponse, Empty};
+use common::proto::multieko2::common::Empty;
 use common::proto::multieko2::table_structure::table_structure_service_client::TableStructureServiceClient;
 use common::proto::multieko2::table_structure::{GetTableStructureRequest, TableStructureResponse};
 use common::proto::multieko2::table_definition::{
@@ -15,49 +14,52 @@ use common::proto::multieko2::table_script::{
 use common::proto::multieko2::tables_data::{
     tables_data_client::TablesDataClient,
     GetTableDataByPositionRequest,
+    GetTableDataRequest, // ADD THIS
     GetTableDataResponse,
+    DeleteTableDataRequest, // ADD THIS
+    DeleteTableDataResponse, // ADD THIS
     GetTableDataCountRequest,
     PostTableDataRequest, PostTableDataResponse, PutTableDataRequest,
     PutTableDataResponse,
 };
-use anyhow::{Context, Result}; // Added Context
-use std::collections::HashMap; // NEW
+use common::proto::multieko2::search::{
+    searcher_client::SearcherClient, SearchRequest, SearchResponse,
+};
+use anyhow::{Context, Result};
+use std::collections::HashMap;
+use tonic::transport::Channel;
+use prost_types::Value;

 #[derive(Clone)]
 pub struct GrpcClient {
     table_structure_client: TableStructureServiceClient<Channel>,
     table_definition_client: TableDefinitionClient<Channel>,
     table_script_client: TableScriptClient<Channel>,
-    tables_data_client: TablesDataClient<Channel>, // NEW
+    tables_data_client: TablesDataClient<Channel>,
+    search_client: SearcherClient<Channel>,
 }

 impl GrpcClient {
     pub async fn new() -> Result<Self> {
-        let table_structure_client = TableStructureServiceClient::connect(
-            "http://[::1]:50051",
-        )
-        .await
-        .context("Failed to connect to TableStructureService")?;
-        let table_definition_client = TableDefinitionClient::connect(
-            "http://[::1]:50051",
-        )
-        .await
-        .context("Failed to connect to TableDefinitionService")?;
-        let table_script_client =
-            TableScriptClient::connect("http://[::1]:50051")
-                .await
-                .context("Failed to connect to TableScriptService")?;
-        let tables_data_client =
-            TablesDataClient::connect("http://[::1]:50051")
-                .await
-                .context("Failed to connect to TablesDataService")?; // NEW
+        let channel = Channel::from_static("http://[::1]:50051")
+            .connect()
+            .await
+            .context("Failed to create gRPC channel")?;
+
+        let table_structure_client =
+            TableStructureServiceClient::new(channel.clone());
+        let table_definition_client =
+            TableDefinitionClient::new(channel.clone());
+        let table_script_client = TableScriptClient::new(channel.clone());
+        let tables_data_client = TablesDataClient::new(channel.clone());
+        let search_client = SearcherClient::new(channel.clone());

         Ok(Self {
-            // adresar_client, // REMOVE
             table_structure_client,
             table_definition_client,
             table_script_client,
-            tables_data_client, // NEW
+            tables_data_client,
+            search_client,
         })
     }

@@ -117,7 +119,7 @@ impl GrpcClient {
         Ok(response.into_inner())
     }

-    // NEW Methods for TablesData service
+    // Existing TablesData methods
     pub async fn get_table_data_count(
         &mut self,
         profile_name: String,
@@ -156,11 +158,53 @@ impl GrpcClient {
         Ok(response.into_inner())
     }

+    // ADD THIS: Missing get_table_data method
+    pub async fn get_table_data(
+        &mut self,
+        profile_name: String,
+        table_name: String,
+        id: i64,
+    ) -> Result<GetTableDataResponse> {
+        let grpc_request = GetTableDataRequest {
+            profile_name,
+            table_name,
+            id,
+        };
+        let request = tonic::Request::new(grpc_request);
+        let response = self
+            .tables_data_client
+            .get_table_data(request)
+            .await
+            .context("gRPC GetTableData call failed")?;
+        Ok(response.into_inner())
+    }
+
+    // ADD THIS: Missing delete_table_data method
+    pub async fn delete_table_data(
+        &mut self,
+        profile_name: String,
+        table_name: String,
+        record_id: i64,
+    ) -> Result<DeleteTableDataResponse> {
+        let grpc_request = DeleteTableDataRequest {
+            profile_name,
+            table_name,
+            record_id,
+        };
+        let request = tonic::Request::new(grpc_request);
+        let response = self
+            .tables_data_client
+            .delete_table_data(request)
+            .await
+            .context("gRPC DeleteTableData call failed")?;
+        Ok(response.into_inner())
+    }
+
     pub async fn post_table_data(
         &mut self,
         profile_name: String,
         table_name: String,
-        data: HashMap<String, String>,
+        data: HashMap<String, Value>,
     ) -> Result<PostTableDataResponse> {
         let grpc_request = PostTableDataRequest {
             profile_name,
@@ -181,7 +225,7 @@ impl GrpcClient {
         profile_name: String,
         table_name: String,
         id: i64,
-        data: HashMap<String, String>,
+        data: HashMap<String, Value>,
     ) -> Result<PutTableDataResponse> {
         let grpc_request = PutTableDataRequest {
             profile_name,
@@ -197,4 +241,17 @@ impl GrpcClient {
             .context("gRPC PutTableData call failed")?;
         Ok(response.into_inner())
     }
+
+    pub async fn search_table(
+        &mut self,
+        table_name: String,
+        query: String,
+    ) -> Result<SearchResponse> {
+        let request = tonic::Request::new(SearchRequest { table_name, query });
+        let response = self
+            .search_client
+            .search_table(request)
+            .await?;
+        Ok(response.into_inner())
+    }
 }
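A hedged usage sketch of the new search path added above: build the shared channel once via `GrpcClient::new`, then call `search_table`. It assumes the server is listening on the `http://[::1]:50051` address used in this file, and that `Hit` exposes the `id` and `content_json` fields referenced elsewhere in this changeset; the table name and query are placeholders. The helper is written as if it lived inside the client crate.

use anyhow::Result;

use crate::services::grpc_client::GrpcClient;

pub async fn demo_search() -> Result<()> {
    // Builds one shared channel and all service clients (see GrpcClient::new above).
    let mut client = GrpcClient::new().await?;

    // Any indexed table name works here; these values are illustrative.
    let response = client
        .search_table("2025_company_data1".to_string(), "smith".to_string())
        .await?;

    for hit in &response.hits {
        // `content_json` holds the row as a JSON object, `id` its primary key.
        println!("id={} content={}", hit.id, hit.content_json);
    }
    Ok(())
}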
@@ -1,16 +1,100 @@
 // src/services/ui_service.rs

 use crate::services::grpc_client::GrpcClient;
-use crate::state::pages::form::FormState;
-use crate::tui::functions::common::form::SaveOutcome;
-use crate::state::pages::add_logic::AddLogicState;
 use crate::state::app::state::AppState;
+use crate::state::pages::add_logic::AddLogicState;
+use crate::state::pages::form::{FieldDefinition, FormState};
+use crate::tui::functions::common::form::SaveOutcome;
 use crate::utils::columns::filter_user_columns;
-use anyhow::{Context, Result};
+use anyhow::{anyhow, Context, Result};
+use std::sync::Arc;

 pub struct UiService;

 impl UiService {
+    pub async fn load_table_view(
+        grpc_client: &mut GrpcClient,
+        app_state: &mut AppState,
+        profile_name: &str,
+        table_name: &str,
+    ) -> Result<FormState> {
+        // 1. & 2. Fetch and Cache Schema - UNCHANGED
+        let table_structure = grpc_client
+            .get_table_structure(profile_name.to_string(), table_name.to_string())
+            .await
+            .context(format!(
+                "Failed to get table structure for {}.{}",
+                profile_name, table_name
+            ))?;
+        let cache_key = format!("{}.{}", profile_name, table_name);
+        app_state
+            .schema_cache
+            .insert(cache_key, Arc::new(table_structure.clone()));
+        tracing::info!("Schema for '{}.{}' cached.", profile_name, table_name);
+
+        // --- START: FINAL, SIMPLIFIED, CORRECT LOGIC ---
+
+        // 3a. Create definitions for REGULAR fields first.
+        let mut fields: Vec<FieldDefinition> = table_structure
+            .columns
+            .iter()
+            .filter(|col| {
+                !col.is_primary_key
+                    && col.name != "deleted"
+                    && col.name != "created_at"
+                    && !col.name.ends_with("_id") // Filter out ALL potential links
+            })
+            .map(|col| FieldDefinition {
+                display_name: col.name.clone(),
+                data_key: col.name.clone(),
+                is_link: false,
+                link_target_table: None,
+            })
+            .collect();
+
+        // 3b. Now, find and APPEND definitions for LINK fields based on the `_id` convention.
+        let link_fields: Vec<FieldDefinition> = table_structure
+            .columns
+            .iter()
+            .filter(|col| col.name.ends_with("_id")) // Find all foreign key columns
+            .map(|col| {
+                // The table we link to is derived from the column name.
+                // e.g., "test_diacritics_id" -> "test_diacritics"
+                let target_table_base = col
+                    .name
+                    .strip_suffix("_id")
+                    .unwrap_or(&col.name);
+
+                // Find the full table name from the profile tree for display.
+                // e.g., "test_diacritics" -> "2025_test_diacritics"
+                let full_target_table_name = app_state
+                    .profile_tree
+                    .profiles
+                    .iter()
+                    .find(|p| p.name == profile_name)
+                    .and_then(|p| p.tables.iter().find(|t| t.name.ends_with(target_table_base)))
+                    .map_or(target_table_base.to_string(), |t| t.name.clone());
+
+                FieldDefinition {
+                    display_name: full_target_table_name.clone(),
+                    data_key: col.name.clone(), // The actual FK column name
+                    is_link: true,
+                    link_target_table: Some(full_target_table_name),
+                }
+            })
+            .collect();
+
+        fields.extend(link_fields); // Append the link fields to the end
+
+        // --- END: FINAL, SIMPLIFIED, CORRECT LOGIC ---
+
+        Ok(FormState::new(
+            profile_name.to_string(),
+            table_name.to_string(),
+            fields,
+        ))
+    }
+
     pub async fn initialize_add_logic_table_data(
         grpc_client: &mut GrpcClient,
         add_logic_state: &mut AddLogicState,
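A standalone illustration of the `_id` naming convention that `load_table_view` relies on: a column ending in `_id` becomes a link field, and its base name is matched against the table names in the profile tree. The helper name and the sample column and table names below are invented for the example.

/// Derives the link target for a foreign-key column, mirroring the logic above.
fn link_target(column: &str, tables: &[&str]) -> Option<String> {
    let base = column.strip_suffix("_id")?;
    tables
        .iter()
        .find(|t| t.ends_with(base))
        .map(|t| t.to_string())
}

fn main() {
    let tables = ["2025_test_diacritics", "2025_company_data1"];
    assert_eq!(
        link_target("test_diacritics_id", &tables),
        Some("2025_test_diacritics".to_string())
    );
    // A regular column has no `_id` suffix, so it is not a link field.
    assert_eq!(link_target("firstname", &tables), None);
    println!("link-field convention holds");
}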
@@ -92,6 +176,7 @@ impl UiService {
         }
     }

+    // REFACTOR THIS FUNCTION
     pub async fn initialize_app_state_and_form(
         grpc_client: &mut GrpcClient,
         app_state: &mut AppState,
@@ -102,7 +187,6 @@ impl UiService {
             .context("Failed to get profile tree")?;
         app_state.profile_tree = profile_tree;

-        // Determine initial table to load (e.g., first table of first profile, or a default)
         let initial_profile_name = app_state
             .profile_tree
             .profiles
@@ -115,33 +199,26 @@ impl UiService {
             .profiles
             .first()
             .and_then(|p| p.tables.first().map(|t| t.name.clone()))
-            .unwrap_or_else(|| "2025_company_data1".to_string()); // Fallback if no tables
+            .unwrap_or_else(|| "2025_company_data1".to_string());

         app_state.set_current_view_table(
             initial_profile_name.clone(),
             initial_table_name.clone(),
         );

-        let table_structure = grpc_client
-            .get_table_structure(
-                initial_profile_name.clone(),
-                initial_table_name.clone(),
-            )
-            .await
-            .context(format!(
-                "Failed to get initial table structure for {}.{}",
-                initial_profile_name, initial_table_name
-            ))?;
-
-        let column_names: Vec<String> = table_structure
-            .columns
-            .iter()
-            .map(|col| col.name.clone())
-            .collect();
-
-        let filtered_columns = filter_user_columns(column_names);
-
-        Ok((initial_profile_name, initial_table_name, filtered_columns))
+        // NOW, just call our new central function. This avoids code duplication.
+        let form_state = Self::load_table_view(
+            grpc_client,
+            app_state,
+            &initial_profile_name,
+            &initial_table_name,
+        )
+        .await?;
+
+        // The field names for the UI are derived from the new form_state
+        let field_names = form_state.fields.iter().map(|f| f.display_name.clone()).collect();
+
+        Ok((initial_profile_name, initial_table_name, field_names))
     }

     pub async fn fetch_and_set_table_count(
@@ -2,4 +2,5 @@

 pub mod state;
 pub mod buffer;
+pub mod search;
 pub mod highlight;
56
client/src/state/app/search.rs
Normal file
@@ -0,0 +1,56 @@
+// src/state/app/search.rs
+
+use common::proto::multieko2::search::search_response::Hit;
+
+/// Holds the complete state for the search palette.
+pub struct SearchState {
+    /// The name of the table being searched.
+    pub table_name: String,
+    /// The current text entered by the user.
+    pub input: String,
+    /// The position of the cursor within the input text.
+    pub cursor_position: usize,
+    /// The search results returned from the server.
+    pub results: Vec<Hit>,
+    /// The index of the currently selected search result.
+    pub selected_index: usize,
+    /// A flag to indicate if a search is currently in progress.
+    pub is_loading: bool,
+}
+
+impl SearchState {
+    /// Creates a new SearchState for a given table.
+    pub fn new(table_name: String) -> Self {
+        Self {
+            table_name,
+            input: String::new(),
+            cursor_position: 0,
+            results: Vec::new(),
+            selected_index: 0,
+            is_loading: false,
+        }
+    }
+
+    /// Moves the selection to the next item, wrapping around if at the end.
+    pub fn next_result(&mut self) {
+        if !self.results.is_empty() {
+            let next = self.selected_index + 1;
+            self.selected_index = if next >= self.results.len() {
+                0 // Wrap to the start
+            } else {
+                next
+            };
+        }
+    }
+
+    /// Moves the selection to the previous item, wrapping around if at the beginning.
+    pub fn previous_result(&mut self) {
+        if !self.results.is_empty() {
+            self.selected_index = if self.selected_index == 0 {
+                self.results.len() - 1 // Wrap to the end
+            } else {
+                self.selected_index - 1
+            };
+        }
+    }
+}
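A small self-contained sketch of the wrap-around selection behaviour that `next_result` and `previous_result` implement. The prost-generated `Hit` type is replaced by a plain placeholder so the example compiles on its own, and the modulo arithmetic is equivalent to the if/else form above.

// Placeholder for the prost-generated `Hit`; only the count matters here.
struct Hit;

struct Selection {
    results: Vec<Hit>,
    selected_index: usize,
}

impl Selection {
    // Equivalent to SearchState::next_result.
    fn next(&mut self) {
        if !self.results.is_empty() {
            self.selected_index = (self.selected_index + 1) % self.results.len();
        }
    }
    // Equivalent to SearchState::previous_result.
    fn previous(&mut self) {
        if !self.results.is_empty() {
            let len = self.results.len();
            self.selected_index = (self.selected_index + len - 1) % len;
        }
    }
}

fn main() {
    let mut sel = Selection { results: vec![Hit, Hit, Hit], selected_index: 2 };
    sel.next();
    assert_eq!(sel.selected_index, 0); // wraps to the start
    sel.previous();
    assert_eq!(sel.selected_index, 2); // wraps back to the end
    println!("wrap-around selection ok");
}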
@@ -1,11 +1,19 @@
-// src/state/state.rs
+// src/state/app/state.rs

-use std::env;
-use common::proto::multieko2::table_definition::ProfileTreeResponse;
-use crate::modes::handlers::mode_manager::AppMode;
-use crate::ui::handlers::context::DialogPurpose;
 use anyhow::Result;
+use common::proto::multieko2::table_definition::ProfileTreeResponse;
+// NEW: Import the types we need for the cache
+use common::proto::multieko2::table_structure::TableStructureResponse;
+use crate::modes::handlers::mode_manager::AppMode;
+use crate::state::app::search::SearchState;
+use crate::ui::handlers::context::DialogPurpose;
+use std::collections::HashMap;
+use std::env;
+use std::sync::Arc;
+#[cfg(feature = "ui-debug")]
+use std::time::Instant;

+// --- DialogState and UiState are unchanged ---
 pub struct DialogState {
     pub dialog_show: bool,
     pub dialog_title: String,
@@ -26,10 +34,19 @@ pub struct UiState {
     pub show_form: bool,
     pub show_login: bool,
     pub show_register: bool,
+    pub show_search_palette: bool,
     pub focus_outside_canvas: bool,
     pub dialog: DialogState,
 }

+#[cfg(feature = "ui-debug")]
+#[derive(Debug, Clone)]
+pub struct DebugState {
+    pub displayed_message: String,
+    pub is_error: bool,
+    pub display_start_time: Instant,
+}
+
 pub struct AppState {
     // Core editor state
     pub current_dir: String,
@@ -39,21 +56,24 @@ pub struct AppState {
     pub current_view_profile_name: Option<String>,
     pub current_view_table_name: Option<String>,

+    // NEW: The "Rulebook" cache. We use Arc for efficient sharing.
+    pub schema_cache: HashMap<String, Arc<TableStructureResponse>>,
+
     pub focused_button_index: usize,
     pub pending_table_structure_fetch: Option<(String, String)>,

+    pub search_state: Option<SearchState>,
+
     // UI preferences
     pub ui: UiState,

     #[cfg(feature = "ui-debug")]
-    pub debug_info: String,
+    pub debug_state: Option<DebugState>,
 }

 impl AppState {
     pub fn new() -> Result<Self> {
-        let current_dir = env::current_dir()?
-            .to_string_lossy()
-            .to_string();
+        let current_dir = env::current_dir()?.to_string_lossy().to_string();
         Ok(AppState {
             current_dir,
             profile_tree: ProfileTreeResponse::default(),
@@ -61,27 +81,28 @@ impl AppState {
             current_view_profile_name: None,
             current_view_table_name: None,
             current_mode: AppMode::General,
+            schema_cache: HashMap::new(), // NEW: Initialize the cache
             focused_button_index: 0,
             pending_table_structure_fetch: None,
+            search_state: None,
             ui: UiState::default(),

             #[cfg(feature = "ui-debug")]
-            debug_info: String::new(),
+            debug_state: None,
         })
     }

+    // --- ALL YOUR EXISTING METHODS ARE UNTOUCHED ---
+
     pub fn update_mode(&mut self, mode: AppMode) {
         self.current_mode = mode;
     }

     pub fn set_current_view_table(&mut self, profile_name: String, table_name: String) {
         self.current_view_profile_name = Some(profile_name);
         self.current_view_table_name = Some(table_name);
     }

-    // Add dialog helper methods
-    /// Shows a dialog with the given title, message, and buttons.
-    /// The first button (index 0) is active by default.
     pub fn show_dialog(
         &mut self,
         title: &str,
|
|||||||
self.ui.focus_outside_canvas = true;
|
self.ui.focus_outside_canvas = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Shows a dialog specifically for loading states.
|
|
||||||
pub fn show_loading_dialog(&mut self, title: &str, message: &str) {
|
pub fn show_loading_dialog(&mut self, title: &str, message: &str) {
|
||||||
self.ui.dialog.dialog_title = title.to_string();
|
self.ui.dialog.dialog_title = title.to_string();
|
||||||
self.ui.dialog.dialog_message = message.to_string();
|
self.ui.dialog.dialog_message = message.to_string();
|
||||||
self.ui.dialog.dialog_buttons.clear(); // No buttons during loading
|
self.ui.dialog.dialog_buttons.clear();
|
||||||
self.ui.dialog.dialog_active_button_index = 0;
|
self.ui.dialog.dialog_active_button_index = 0;
|
||||||
self.ui.dialog.purpose = None; // Purpose is set when loading finishes
|
self.ui.dialog.purpose = None;
|
||||||
self.ui.dialog.is_loading = true;
|
self.ui.dialog.is_loading = true;
|
||||||
self.ui.dialog.dialog_show = true;
|
self.ui.dialog.dialog_show = true;
|
||||||
self.ui.focus_outside_canvas = true; // Keep focus management consistent
|
self.ui.focus_outside_canvas = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Updates the content of an existing dialog, typically after loading.
|
|
||||||
pub fn update_dialog_content(
|
pub fn update_dialog_content(
|
||||||
&mut self,
|
&mut self,
|
||||||
message: &str,
|
message: &str,
|
||||||
@@ -121,16 +140,12 @@ impl AppState {
|
|||||||
if self.ui.dialog.dialog_show {
|
if self.ui.dialog.dialog_show {
|
||||||
self.ui.dialog.dialog_message = message.to_string();
|
self.ui.dialog.dialog_message = message.to_string();
|
||||||
self.ui.dialog.dialog_buttons = buttons;
|
self.ui.dialog.dialog_buttons = buttons;
|
||||||
self.ui.dialog.dialog_active_button_index = 0; // Reset focus
|
self.ui.dialog.dialog_active_button_index = 0;
|
||||||
self.ui.dialog.purpose = Some(purpose);
|
self.ui.dialog.purpose = Some(purpose);
|
||||||
self.ui.dialog.is_loading = false; // Loading finished
|
self.ui.dialog.is_loading = false;
|
||||||
// Keep dialog_show = true
|
|
||||||
// Keep focus_outside_canvas = true
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
/// Hides the dialog and clears its content.
|
|
||||||
pub fn hide_dialog(&mut self) {
|
pub fn hide_dialog(&mut self) {
|
||||||
self.ui.dialog.dialog_show = false;
|
self.ui.dialog.dialog_show = false;
|
||||||
self.ui.dialog.dialog_title.clear();
|
self.ui.dialog.dialog_title.clear();
|
||||||
@@ -142,30 +157,27 @@ impl AppState {
|
|||||||
self.ui.dialog.is_loading = false;
|
self.ui.dialog.is_loading = false;
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Sets the active button index, wrapping around if necessary.
|
|
||||||
pub fn next_dialog_button(&mut self) {
|
pub fn next_dialog_button(&mut self) {
|
||||||
if !self.ui.dialog.dialog_buttons.is_empty() {
|
if !self.ui.dialog.dialog_buttons.is_empty() {
|
||||||
let next_index = (self.ui.dialog.dialog_active_button_index + 1)
|
let next_index = (self.ui.dialog.dialog_active_button_index + 1)
|
||||||
% self.ui.dialog.dialog_buttons.len();
|
% self.ui.dialog.dialog_buttons.len();
|
||||||
self.ui.dialog.dialog_active_button_index = next_index; // Use new name
|
self.ui.dialog.dialog_active_button_index = next_index;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Sets the active button index, wrapping around if necessary.
|
|
||||||
pub fn previous_dialog_button(&mut self) {
|
pub fn previous_dialog_button(&mut self) {
|
||||||
if !self.ui.dialog.dialog_buttons.is_empty() {
|
if !self.ui.dialog.dialog_buttons.is_empty() {
|
||||||
let len = self.ui.dialog.dialog_buttons.len();
|
let len = self.ui.dialog.dialog_buttons.len();
|
||||||
let prev_index =
|
let prev_index =
|
||||||
(self.ui.dialog.dialog_active_button_index + len - 1) % len;
|
(self.ui.dialog.dialog_active_button_index + len - 1) % len;
|
||||||
self.ui.dialog.dialog_active_button_index = prev_index; // Use new name
|
self.ui.dialog.dialog_active_button_index = prev_index;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Gets the label of the currently active button, if any.
|
|
||||||
pub fn get_active_dialog_button_label(&self) -> Option<&str> {
|
pub fn get_active_dialog_button_label(&self) -> Option<&str> {
|
||||||
self.ui.dialog
|
self.ui.dialog
|
||||||
.dialog_buttons // Use new name
|
.dialog_buttons
|
||||||
.get(self.ui.dialog.dialog_active_button_index) // Use new name
|
.get(self.ui.dialog.dialog_active_button_index)
|
||||||
.map(|s| s.as_str())
|
.map(|s| s.as_str())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -182,13 +194,13 @@ impl Default for UiState {
|
|||||||
show_login: false,
|
show_login: false,
|
||||||
show_register: false,
|
show_register: false,
|
||||||
show_buffer_list: true,
|
show_buffer_list: true,
|
||||||
|
show_search_palette: false, // ADDED
|
||||||
focus_outside_canvas: false,
|
focus_outside_canvas: false,
|
||||||
dialog: DialogState::default(),
|
dialog: DialogState::default(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Update the Default implementation for DialogState itself
|
|
||||||
impl Default for DialogState {
|
impl Default for DialogState {
|
||||||
fn default() -> Self {
|
fn default() -> Self {
|
||||||
Self {
|
Self {
|
||||||
|
|||||||
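A brief sketch of how the new `schema_cache` is intended to be read back: look up the `"{profile}.{table}"` key that `load_table_view` inserts and clone the `Arc`, so the cached schema is shared without another fetch. The lookup helper and the placeholder response type are assumptions for illustration; the real value is the prost-generated `TableStructureResponse`.

use std::collections::HashMap;
use std::sync::Arc;

// Placeholder for the prost-generated TableStructureResponse.
struct TableStructureResponse;

fn cached_schema(
    cache: &HashMap<String, Arc<TableStructureResponse>>,
    profile: &str,
    table: &str,
) -> Option<Arc<TableStructureResponse>> {
    // Cloning the Arc is cheap; the response itself is never copied.
    cache.get(&format!("{}.{}", profile, table)).cloned()
}

fn main() {
    let mut cache = HashMap::new();
    cache.insert(
        "default.2025_company_data1".to_string(),
        Arc::new(TableStructureResponse),
    );

    let hit = cached_schema(&cache, "default", "2025_company_data1");
    assert!(hit.is_some());
    let miss = cached_schema(&cache, "default", "unknown_table");
    assert!(miss.is_none());
    println!("schema cache lookup ok");
}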
@@ -1,7 +1,9 @@
-// src/state/canvas_state.rs
+// src/state/pages/canvas_state.rs
+
+use common::proto::multieko2::search::search_response::Hit;

 pub trait CanvasState {
+    // --- Existing methods (unchanged) ---
     fn current_field(&self) -> usize;
     fn current_cursor_pos(&self) -> usize;
     fn has_unsaved_changes(&self) -> bool;
|
|||||||
fn get_current_input(&self) -> &str;
|
fn get_current_input(&self) -> &str;
|
||||||
fn get_current_input_mut(&mut self) -> &mut String;
|
fn get_current_input_mut(&mut self) -> &mut String;
|
||||||
fn fields(&self) -> Vec<&str>;
|
fn fields(&self) -> Vec<&str>;
|
||||||
|
|
||||||
fn set_current_field(&mut self, index: usize);
|
fn set_current_field(&mut self, index: usize);
|
||||||
fn set_current_cursor_pos(&mut self, pos: usize);
|
fn set_current_cursor_pos(&mut self, pos: usize);
|
||||||
fn set_has_unsaved_changes(&mut self, changed: bool);
|
fn set_has_unsaved_changes(&mut self, changed: bool);
|
||||||
|
|
||||||
// --- Autocomplete Support ---
|
|
||||||
fn get_suggestions(&self) -> Option<&[String]>;
|
fn get_suggestions(&self) -> Option<&[String]>;
|
||||||
fn get_selected_suggestion_index(&self) -> Option<usize>;
|
fn get_selected_suggestion_index(&self) -> Option<usize>;
|
||||||
|
fn get_rich_suggestions(&self) -> Option<&[Hit]> {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_display_value_for_field(&self, index: usize) -> &str {
|
||||||
|
self.inputs()
|
||||||
|
.get(index)
|
||||||
|
.map(|s| s.as_str())
|
||||||
|
.unwrap_or("")
|
||||||
|
}
|
||||||
|
fn has_display_override(&self, _index: usize) -> bool {
|
||||||
|
false
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,49 +1,109 @@
 // src/state/pages/form.rs

-use std::collections::HashMap; // NEW
 use crate::config::colors::themes::Theme;
-use ratatui::layout::Rect;
-use ratatui::Frame;
 use crate::state::app::highlight::HighlightState;
 use crate::state::pages::canvas_state::CanvasState;
+use common::proto::multieko2::search::search_response::Hit;
+use ratatui::layout::Rect;
+use ratatui::Frame;
+use std::collections::HashMap;
+
+fn json_value_to_string(value: &serde_json::Value) -> String {
+    match value {
+        serde_json::Value::String(s) => s.clone(),
+        serde_json::Value::Number(n) => n.to_string(),
+        serde_json::Value::Bool(b) => b.to_string(),
+        _ => String::new(),
+    }
+}
+
+#[derive(Debug, Clone)]
+pub struct FieldDefinition {
+    pub display_name: String,
+    pub data_key: String,
+    pub is_link: bool,
+    pub link_target_table: Option<String>,
+}
+
+#[derive(Clone)]
 pub struct FormState {
     pub id: i64,
-    // NEW fields for dynamic table context
     pub profile_name: String,
     pub table_name: String,
     pub total_count: u64,
-    pub current_position: u64, // 1-based index, 0 or total_count + 1 for new entry
-    pub fields: Vec<String>, // Already dynamic, which is good
+    pub current_position: u64,
+    pub fields: Vec<FieldDefinition>,
     pub values: Vec<String>,
     pub current_field: usize,
     pub has_unsaved_changes: bool,
     pub current_cursor_pos: usize,
+    pub autocomplete_active: bool,
+    pub autocomplete_suggestions: Vec<Hit>,
+    pub selected_suggestion_index: Option<usize>,
+    pub autocomplete_loading: bool,
+    pub link_display_map: HashMap<usize, String>,
 }

 impl FormState {
-    /// Creates a new, empty FormState for a given table.
-    /// The position defaults to 1, representing either the first record
-    /// or the position for a new entry if the table is empty.
     pub fn new(
         profile_name: String,
         table_name: String,
-        fields: Vec<String>,
+        fields: Vec<FieldDefinition>,
     ) -> Self {
         let values = vec![String::new(); fields.len()];
         FormState {
-            id: 0, // Default to 0, indicating a new or unloaded record
+            id: 0,
             profile_name,
             table_name,
-            total_count: 0, // Will be fetched after initialization
-            // FIX: Default to 1. A position of 0 is an invalid state.
+            total_count: 0,
             current_position: 1,
             fields,
             values,
             current_field: 0,
             has_unsaved_changes: false,
             current_cursor_pos: 0,
+            autocomplete_active: false,
+            autocomplete_suggestions: Vec::new(),
+            selected_suggestion_index: None,
+            autocomplete_loading: false,
+            link_display_map: HashMap::new(),
+        }
+    }
+
+    pub fn get_display_name_for_hit(&self, hit: &Hit) -> String {
+        if let Ok(content_map) =
+            serde_json::from_str::<HashMap<String, serde_json::Value>>(
+                &hit.content_json,
+            )
+        {
+            const IGNORED_KEYS: &[&str] = &["id", "deleted", "created_at"];
+            let mut keys: Vec<_> = content_map
+                .keys()
+                .filter(|k| !IGNORED_KEYS.contains(&k.as_str()))
+                .cloned()
+                .collect();
+            keys.sort();
+
+            let values: Vec<_> = keys
+                .iter()
+                .map(|key| {
+                    content_map
+                        .get(key)
+                        .map(json_value_to_string)
+                        .unwrap_or_default()
+                })
+                .filter(|s| !s.is_empty())
+                .take(1)
+                .collect();
+
+            let display_part = values.first().cloned().unwrap_or_default();
+            if display_part.is_empty() {
+                format!("ID: {}", hit.id)
+            } else {
+                format!("{} | ID: {}", display_part, hit.id)
+            }
+        } else {
+            format!("ID: {} (parse error)", hit.id)
         }
     }

|
|||||||
highlight_state: &HighlightState,
|
highlight_state: &HighlightState,
|
||||||
) {
|
) {
|
||||||
let fields_str_slice: Vec<&str> =
|
let fields_str_slice: Vec<&str> =
|
||||||
self.fields.iter().map(|s| s.as_str()).collect();
|
self.fields().iter().map(|s| *s).collect();
|
||||||
let values_str_slice: Vec<&String> = self.values.iter().collect();
|
let values_str_slice: Vec<&String> = self.values.iter().collect();
|
||||||
|
|
||||||
crate::components::form::form::render_form(
|
crate::components::form::form::render_form(
|
||||||
f,
|
f,
|
||||||
area,
|
area,
|
||||||
self, // Pass self as CanvasState
|
self,
|
||||||
&fields_str_slice,
|
&fields_str_slice,
|
||||||
&self.current_field,
|
&self.current_field,
|
||||||
&values_str_slice,
|
&values_str_slice,
|
||||||
@@ -75,20 +135,19 @@ impl FormState {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Resets the form to a state for creating a new entry.
|
|
||||||
/// It clears all values and sets the position to be one after the last record.
|
|
||||||
pub fn reset_to_empty(&mut self) {
|
pub fn reset_to_empty(&mut self) {
|
||||||
self.id = 0;
|
self.id = 0;
|
||||||
self.values.iter_mut().for_each(|v| v.clear());
|
self.values.iter_mut().for_each(|v| v.clear());
|
||||||
self.current_field = 0;
|
self.current_field = 0;
|
||||||
self.current_cursor_pos = 0;
|
self.current_cursor_pos = 0;
|
||||||
self.has_unsaved_changes = false;
|
self.has_unsaved_changes = false;
|
||||||
// Set the position for a new entry.
|
|
||||||
if self.total_count > 0 {
|
if self.total_count > 0 {
|
||||||
self.current_position = self.total_count + 1;
|
self.current_position = self.total_count + 1;
|
||||||
} else {
|
} else {
|
||||||
self.current_position = 1; // If table is empty, new record is at position 1
|
self.current_position = 1;
|
||||||
}
|
}
|
||||||
|
self.deactivate_autocomplete();
|
||||||
|
self.link_display_map.clear();
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn get_current_input(&self) -> &str {
|
pub fn get_current_input(&self) -> &str {
|
||||||
@@ -99,31 +158,28 @@ impl FormState {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn get_current_input_mut(&mut self) -> &mut String {
|
pub fn get_current_input_mut(&mut self) -> &mut String {
|
||||||
|
self.link_display_map.remove(&self.current_field);
|
||||||
self.values
|
self.values
|
||||||
.get_mut(self.current_field)
|
.get_mut(self.current_field)
|
||||||
.expect("Invalid current_field index")
|
.expect("Invalid current_field index")
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Updates the form's values from a data response and sets its position.
|
|
||||||
/// This is the single source of truth for populating the form after a data fetch.
|
|
||||||
pub fn update_from_response(
|
pub fn update_from_response(
|
||||||
&mut self,
|
&mut self,
|
||||||
response_data: &HashMap<String, String>,
|
response_data: &HashMap<String, String>,
|
||||||
// FIX: Add new_position to make this method authoritative.
|
|
||||||
new_position: u64,
|
new_position: u64,
|
||||||
) {
|
) {
|
||||||
// Create a new vector for the values, ensuring they are in the correct order.
|
self.values = self
|
||||||
self.values = self.fields.iter().map(|field_from_schema| {
|
.fields
|
||||||
// For each field from our schema, find the corresponding key in the
|
.iter()
|
||||||
// response data by doing a case-insensitive comparison.
|
.map(|field_def| {
|
||||||
response_data
|
response_data
|
||||||
.iter()
|
.get(&field_def.data_key)
|
||||||
.find(|(key_from_data, _)| key_from_data.eq_ignore_ascii_case(field_from_schema))
|
.cloned()
|
||||||
.map(|(_, value)| value.clone()) // If found, clone its value.
|
.unwrap_or_default()
|
||||||
.unwrap_or_default() // If not found, use an empty string.
|
})
|
||||||
}).collect();
|
.collect();
|
||||||
|
|
||||||
// Now, do the same case-insensitive lookup for the 'id' field.
|
|
||||||
let id_str_opt = response_data
|
let id_str_opt = response_data
|
||||||
.iter()
|
.iter()
|
||||||
.find(|(k, _)| k.eq_ignore_ascii_case("id"))
|
.find(|(k, _)| k.eq_ignore_ascii_case("id"))
|
||||||
@@ -133,18 +189,31 @@ impl FormState {
|
|||||||
if let Ok(parsed_id) = id_str.parse::<i64>() {
|
if let Ok(parsed_id) = id_str.parse::<i64>() {
|
||||||
self.id = parsed_id;
|
self.id = parsed_id;
|
||||||
} else {
|
} else {
|
||||||
tracing::error!( "Failed to parse 'id' field '{}' for table {}.{}", id_str, self.profile_name, self.table_name);
|
tracing::error!(
|
||||||
|
"Failed to parse 'id' field '{}' for table {}.{}",
|
||||||
|
id_str,
|
||||||
|
self.profile_name,
|
||||||
|
self.table_name
|
||||||
|
);
|
||||||
self.id = 0;
|
self.id = 0;
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
self.id = 0;
|
self.id = 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
// FIX: Set the position from the provided parameter.
|
|
||||||
self.current_position = new_position;
|
self.current_position = new_position;
|
||||||
self.has_unsaved_changes = false;
|
self.has_unsaved_changes = false;
|
||||||
self.current_field = 0;
|
self.current_field = 0;
|
||||||
self.current_cursor_pos = 0;
|
self.current_cursor_pos = 0;
|
||||||
|
self.deactivate_autocomplete();
|
||||||
|
self.link_display_map.clear();
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn deactivate_autocomplete(&mut self) {
|
||||||
|
self.autocomplete_active = false;
|
||||||
|
self.autocomplete_suggestions.clear();
|
||||||
|
self.selected_suggestion_index = None;
|
||||||
|
self.autocomplete_loading = false;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -152,52 +221,69 @@ impl CanvasState for FormState {
|
|||||||
fn current_field(&self) -> usize {
|
fn current_field(&self) -> usize {
|
||||||
self.current_field
|
self.current_field
|
||||||
}
|
}
|
||||||
|
|
||||||
fn current_cursor_pos(&self) -> usize {
|
fn current_cursor_pos(&self) -> usize {
|
||||||
self.current_cursor_pos
|
self.current_cursor_pos
|
||||||
}
|
}
|
||||||
|
|
||||||
fn has_unsaved_changes(&self) -> bool {
|
fn has_unsaved_changes(&self) -> bool {
|
||||||
self.has_unsaved_changes
|
self.has_unsaved_changes
|
||||||
}
|
}
|
||||||
|
|
||||||
fn inputs(&self) -> Vec<&String> {
|
fn inputs(&self) -> Vec<&String> {
|
||||||
self.values.iter().collect()
|
self.values.iter().collect()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_current_input(&self) -> &str {
|
fn get_current_input(&self) -> &str {
|
||||||
// Re-use the struct's own method
|
|
||||||
FormState::get_current_input(self)
|
FormState::get_current_input(self)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_current_input_mut(&mut self) -> &mut String {
|
fn get_current_input_mut(&mut self) -> &mut String {
|
||||||
// Re-use the struct's own method
|
|
||||||
FormState::get_current_input_mut(self)
|
FormState::get_current_input_mut(self)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn fields(&self) -> Vec<&str> {
|
fn fields(&self) -> Vec<&str> {
|
||||||
self.fields.iter().map(|s| s.as_str()).collect()
|
self.fields
|
||||||
|
.iter()
|
||||||
|
.map(|f| f.display_name.as_str())
|
||||||
|
.collect()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn set_current_field(&mut self, index: usize) {
|
fn set_current_field(&mut self, index: usize) {
|
||||||
if index < self.fields.len() {
|
if index < self.fields.len() {
|
||||||
self.current_field = index;
|
self.current_field = index;
|
||||||
}
|
}
|
||||||
|
self.deactivate_autocomplete();
|
||||||
}
|
}
|
||||||
|
|
||||||
fn set_current_cursor_pos(&mut self, pos: usize) {
|
fn set_current_cursor_pos(&mut self, pos: usize) {
|
||||||
self.current_cursor_pos = pos;
|
self.current_cursor_pos = pos;
|
||||||
}
|
}
|
||||||
|
|
||||||
fn set_has_unsaved_changes(&mut self, changed: bool) {
|
fn set_has_unsaved_changes(&mut self, changed: bool) {
|
||||||
self.has_unsaved_changes = changed;
|
self.has_unsaved_changes = changed;
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_suggestions(&self) -> Option<&[String]> {
|
fn get_suggestions(&self) -> Option<&[String]> {
|
||||||
None
|
None
|
||||||
}
|
}
|
||||||
|
fn get_rich_suggestions(&self) -> Option<&[Hit]> {
|
||||||
|
if self.autocomplete_active {
|
||||||
|
Some(&self.autocomplete_suggestions)
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
fn get_selected_suggestion_index(&self) -> Option<usize> {
|
fn get_selected_suggestion_index(&self) -> Option<usize> {
|
||||||
None
|
if self.autocomplete_active {
|
||||||
|
self.selected_suggestion_index
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_display_value_for_field(&self, index: usize) -> &str {
|
||||||
|
if let Some(display_text) = self.link_display_map.get(&index) {
|
||||||
|
return display_text.as_str();
|
||||||
|
}
|
||||||
|
self.inputs()
|
||||||
|
.get(index)
|
||||||
|
.map(|s| s.as_str())
|
||||||
|
.unwrap_or("")
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- IMPLEMENT THE NEW TRAIT METHOD ---
|
||||||
|
fn has_display_override(&self, index: usize) -> bool {
|
||||||
|
self.link_display_map.contains_key(&index)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
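An illustrative sketch (not part of the diff) of how the reworked `FormState` is meant to be driven, based only on the signatures shown above; the profile, table, and field names are made up.

// Hypothetical usage of the FieldDefinition-based FormState.
use std::collections::HashMap;

let fields = vec![FieldDefinition {
    display_name: "Company".into(),
    data_key: "firma".into(),
    is_link: false,
    link_target_table: None,
}];
let mut form = FormState::new("demo_profile".into(), "demo_table".into(), fields);

// Populating from a fetched row: keys are matched by `data_key`, not display name.
let mut row = HashMap::new();
row.insert("id".to_string(), "42".to_string());
row.insert("firma".to_string(), "Acme s.r.o.".to_string());
form.update_from_response(&row, 1);
assert_eq!(form.id, 42);
assert_eq!(form.get_current_input(), "Acme s.r.o.");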
@@ -1,19 +1,22 @@
 // src/tui/functions/common/form.rs

 use crate::services::grpc_client::GrpcClient;
+use crate::state::app::state::AppState; // NEW: Import AppState
 use crate::state::pages::form::FormState;
-use anyhow::{Context, Result}; // Added Context
-use std::collections::HashMap; // NEW
+use crate::utils::data_converter; // NEW: Import our translator
+use anyhow::{anyhow, Context, Result};
+use std::collections::HashMap;

 #[derive(Debug, Clone, Copy, PartialEq, Eq)]
 pub enum SaveOutcome {
     NoChange,
     UpdatedExisting,
-    CreatedNew(i64), // Keep the ID
+    CreatedNew(i64),
 }

-// MODIFIED save function
+// MODIFIED save function signature and logic
 pub async fn save(
+    app_state: &AppState, // NEW: Pass in AppState
     form_state: &mut FormState,
     grpc_client: &mut GrpcClient,
 ) -> Result<SaveOutcome> {
@@ -21,42 +24,64 @@ pub async fn save(
         return Ok(SaveOutcome::NoChange);
     }

-    let data_map: HashMap<String, String> = form_state.fields.iter()
+    // --- NEW: VALIDATION & CONVERSION STEP ---
+    let cache_key =
+        format!("{}.{}", form_state.profile_name, form_state.table_name);
+    let schema = match app_state.schema_cache.get(&cache_key) {
+        Some(s) => s,
+        None => {
+            return Err(anyhow!(
+                "Schema for table '{}' not found in cache. Cannot save.",
+                form_state.table_name
+            ));
+        }
+    };
+
+    let data_map: HashMap<String, String> = form_state
+        .fields
+        .iter()
         .zip(form_state.values.iter())
-        .map(|(field, value)| (field.clone(), value.clone()))
+        .map(|(field_def, value)| (field_def.data_key.clone(), value.clone()))
         .collect();

+    // Use our new translator. It returns a user-friendly error on failure.
+    let converted_data =
+        match data_converter::convert_and_validate_data(&data_map, schema) {
+            Ok(data) => data,
+            Err(user_error) => return Err(anyhow!(user_error)),
+        };
+    // --- END OF NEW STEP ---

     let outcome: SaveOutcome;
-    let is_new_entry = form_state.id == 0 || (form_state.total_count > 0 && form_state.current_position > form_state.total_count) || (form_state.total_count == 0 && form_state.current_position == 1) ;
+    let is_new_entry = form_state.id == 0
+        || (form_state.total_count > 0
+            && form_state.current_position > form_state.total_count)
+        || (form_state.total_count == 0 && form_state.current_position == 1);

     if is_new_entry {
         let response = grpc_client
             .post_table_data(
                 form_state.profile_name.clone(),
                 form_state.table_name.clone(),
-                data_map,
+                converted_data, // Use the validated & converted data
             )
             .await
             .context("Failed to post new table data")?;

         if response.success {
             form_state.id = response.inserted_id;
-            // After creating a new entry, total_count increases, and current_position becomes this new total_count
             form_state.total_count += 1;
             form_state.current_position = form_state.total_count;
             outcome = SaveOutcome::CreatedNew(response.inserted_id);
         } else {
-            return Err(anyhow::anyhow!(
+            return Err(anyhow!(
                 "Server failed to insert data: {}",
                 response.message
             ));
         }
     } else {
-        // This assumes form_state.id is valid for an existing record
         if form_state.id == 0 {
-            return Err(anyhow::anyhow!(
+            return Err(anyhow!(
                 "Cannot update record: ID is 0, but not classified as new entry."
             ));
         }
@@ -65,7 +90,7 @@ pub async fn save(
                 form_state.profile_name.clone(),
                 form_state.table_name.clone(),
                 form_state.id,
-                data_map,
+                converted_data, // Use the validated & converted data
             )
             .await
             .context("Failed to put (update) table data")?;
@@ -73,7 +98,7 @@ pub async fn save(
         if response.success {
             outcome = SaveOutcome::UpdatedExisting;
         } else {
-            return Err(anyhow::anyhow!(
+            return Err(anyhow!(
                 "Server failed to update data: {}",
                 response.message
             ));
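For context (not part of the diff): a hedged sketch of how a caller might use the new `save` signature and react to `SaveOutcome`, based only on what is shown above; the surrounding state variables are assumed to already exist as in `run_ui()`.

// Hypothetical call site.
match save(&app_state, &mut form_state, &mut grpc_client).await {
    Ok(SaveOutcome::NoChange) => {}
    Ok(SaveOutcome::UpdatedExisting) => {
        // Record updated in place; position and totals are unchanged.
    }
    Ok(SaveOutcome::CreatedNew(id)) => {
        // A new row was inserted; form_state.id now holds `id`.
        tracing::info!("created record {}", id);
    }
    Err(e) => {
        // Validation errors from convert_and_validate_data also surface here.
        tracing::error!("save failed: {}", e);
    }
}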
@@ -1,34 +1,36 @@
-// client/src/ui/handlers/render.rs
+// src/ui/handlers/render.rs

 use crate::components::{
+    admin::add_logic::render_add_logic,
+    admin::render_add_table,
+    auth::{login::render_login, register::render_register},
+    common::dialog::render_dialog,
+    common::find_file_palette,
+    common::search_palette::render_search_palette,
+    form::form::render_form,
+    handlers::sidebar::{self, calculate_sidebar_layout},
+    intro::intro::render_intro,
     render_background,
     render_buffer_list,
     render_command_line,
     render_status_line,
-    intro::intro::render_intro,
-    handlers::sidebar::{self, calculate_sidebar_layout},
-    form::form::render_form,
-    admin::render_add_table,
-    admin::add_logic::render_add_logic,
-    auth::{login::render_login, register::render_register},
-    common::find_file_palette,
 };
 use crate::config::colors::themes::Theme;
+use crate::modes::general::command_navigation::NavigationState;
+use crate::state::pages::canvas_state::CanvasState;
+use crate::state::app::buffer::BufferState;
+use crate::state::app::highlight::HighlightState;
+use crate::state::app::state::AppState;
+use crate::state::pages::admin::AdminState;
+use crate::state::pages::auth::AuthState;
+use crate::state::pages::auth::LoginState;
+use crate::state::pages::auth::RegisterState;
+use crate::state::pages::form::FormState;
+use crate::state::pages::intro::IntroState;
 use ratatui::{
     layout::{Constraint, Direction, Layout},
     Frame,
 };
-use crate::state::pages::canvas_state::CanvasState;
-use crate::state::pages::form::FormState;
-use crate::state::pages::auth::AuthState;
-use crate::state::pages::auth::LoginState;
-use crate::state::pages::auth::RegisterState;
-use crate::state::pages::intro::IntroState;
-use crate::state::app::buffer::BufferState;
-use crate::state::app::state::AppState;
-use crate::state::pages::admin::AdminState;
-use crate::state::app::highlight::HighlightState;
-use crate::modes::general::command_navigation::NavigationState;

 #[allow(clippy::too_many_arguments)]
 pub fn render_ui(
@@ -53,16 +55,28 @@ pub fn render_ui(
 ) {
     render_background(f, f.area(), theme);

+    // --- START DYNAMIC LAYOUT LOGIC ---
+    let mut status_line_height = 1;
+    #[cfg(feature = "ui-debug")]
+    {
+        if let Some(debug_state) = &app_state.debug_state {
+            if debug_state.is_error {
+                status_line_height = 4;
+            }
+        }
+    }
+    // --- END DYNAMIC LAYOUT LOGIC ---

     const PALETTE_OPTIONS_HEIGHT_FOR_LAYOUT: u16 = 15;

-    let mut bottom_area_constraints: Vec<Constraint> = vec![Constraint::Length(1)];
+    let mut bottom_area_constraints: Vec<Constraint> = vec![Constraint::Length(status_line_height)];
     let command_palette_area_height = if navigation_state.active {
         1 + PALETTE_OPTIONS_HEIGHT_FOR_LAYOUT
     } else if event_handler_command_mode_active {
         1
     } else {
-        0 // Neither is active
+        0
     };

     if command_palette_area_height > 0 {
@@ -75,7 +89,6 @@ pub fn render_ui(
     }
     main_layout_constraints.extend(bottom_area_constraints);

     let root_chunks = Layout::default()
         .direction(Direction::Vertical)
         .constraints(main_layout_constraints)
@@ -106,77 +119,95 @@ pub fn render_ui(
         None
     };

     if app_state.ui.show_intro {
         render_intro(f, intro_state, main_content_area, theme);
     } else if app_state.ui.show_register {
         render_register(
-            f, main_content_area, theme, register_state, app_state,
+            f,
+            main_content_area,
+            theme,
+            register_state,
+            app_state,
             register_state.current_field() < 4,
             highlight_state,
         );
     } else if app_state.ui.show_add_table {
         render_add_table(
-            f, main_content_area, theme, app_state, &mut admin_state.add_table_state,
+            f,
+            main_content_area,
+            theme,
+            app_state,
+            &mut admin_state.add_table_state,
             is_event_handler_edit_mode,
             highlight_state,
         );
     } else if app_state.ui.show_add_logic {
         render_add_logic(
-            f, main_content_area, theme, app_state, &mut admin_state.add_logic_state,
-            is_event_handler_edit_mode, highlight_state,
+            f,
+            main_content_area,
+            theme,
+            app_state,
+            &mut admin_state.add_logic_state,
+            is_event_handler_edit_mode,
+            highlight_state,
         );
     } else if app_state.ui.show_login {
         render_login(
-            f, main_content_area, theme, login_state, app_state,
+            f,
+            main_content_area,
+            theme,
+            login_state,
+            app_state,
             login_state.current_field() < 2,
             highlight_state,
         );
     } else if app_state.ui.show_admin {
         crate::components::admin::admin_panel::render_admin_panel(
-            f, app_state, auth_state, admin_state, main_content_area, theme,
-            &app_state.profile_tree, &app_state.selected_profile,
+            f,
+            app_state,
+            auth_state,
+            admin_state,
+            main_content_area,
+            theme,
+            &app_state.profile_tree,
+            &app_state.selected_profile,
         );

     } else if app_state.ui.show_form {
-        let (sidebar_area, form_actual_area) = calculate_sidebar_layout(
-            app_state.ui.show_sidebar, main_content_area
-        );
+        let (sidebar_area, form_actual_area) =
+            calculate_sidebar_layout(app_state.ui.show_sidebar, main_content_area);
         if let Some(sidebar_rect) = sidebar_area {
             sidebar::render_sidebar(
-                f, sidebar_rect, theme, &app_state.profile_tree, &app_state.selected_profile
+                f,
+                sidebar_rect,
+                theme,
+                &app_state.profile_tree,
+                &app_state.selected_profile,
             );
         }
         let available_width = form_actual_area.width;
         let form_render_area = if available_width >= 80 {
-            Layout::default().direction(Direction::Horizontal)
+            Layout::default()
+                .direction(Direction::Horizontal)
                 .constraints([Constraint::Min(0), Constraint::Length(80), Constraint::Min(0)])
                 .split(form_actual_area)[1]
         } else {
-            Layout::default().direction(Direction::Horizontal)
-                .constraints([Constraint::Min(0), Constraint::Length(available_width), Constraint::Min(0)])
+            Layout::default()
+                .direction(Direction::Horizontal)
+                .constraints([
+                    Constraint::Min(0),
+                    Constraint::Length(available_width),
+                    Constraint::Min(0),
+                ])
                 .split(form_actual_area)[1]
         };
-        let fields_vec: Vec<&str> = form_state.fields.iter().map(AsRef::as_ref).collect();
-        let values_vec: Vec<&String> = form_state.values.iter().collect();
-
-        // --- START FIX ---
-        // Add the missing `&form_state.table_name` argument to this function call.
-        render_form(
+        form_state.render(
             f,
             form_render_area,
-            form_state,
-            &fields_vec,
-            &form_state.current_field,
-            &values_vec,
-            &form_state.table_name, // <-- THIS ARGUMENT WAS MISSING
             theme,
             is_event_handler_edit_mode,
             highlight_state,
-            form_state.total_count,
-            form_state.current_position,
         );
-        // --- END FIX ---
     }

     if let Some(area) = buffer_list_area {
@@ -193,23 +224,41 @@ pub fn render_ui(
         app_state,
     );

-    if let Some(palette_or_command_area) = command_render_area { // Use the calculated area
+    if let Some(palette_or_command_area) = command_render_area {
         if navigation_state.active {
             find_file_palette::render_find_file_palette(
                 f,
-                palette_or_command_area, // Use the correct area
+                palette_or_command_area,
                 theme,
-                navigation_state, // Pass the navigation_state directly
+                navigation_state,
             );
         } else if event_handler_command_mode_active {
             render_command_line(
                 f,
-                palette_or_command_area, // Use the correct area
+                palette_or_command_area,
                 event_handler_command_input,
-                true, // Assuming it's always active when this branch is hit
+                true,
                 theme,
                 event_handler_command_message,
             );
         }
     }
+
+    // This block now correctly handles drawing popups over any view.
+    if app_state.ui.show_search_palette {
+        if let Some(search_state) = &app_state.search_state {
+            render_search_palette(f, f.area(), theme, search_state);
+        }
+    } else if app_state.ui.dialog.dialog_show {
+        render_dialog(
+            f,
+            f.area(),
+            theme,
+            &app_state.ui.dialog.dialog_title,
+            &app_state.ui.dialog.dialog_message,
+            &app_state.ui.dialog.dialog_buttons,
+            app_state.ui.dialog.dialog_active_button_index,
+            app_state.ui.dialog.is_loading,
+        );
+    }
 }
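As an aside (not from the diff): the dynamic status-line sizing above amounts to swapping one `Constraint::Length` value before splitting the frame. A minimal, self-contained ratatui sketch of the same idea, with invented names and areas:

use ratatui::layout::{Constraint, Direction, Layout, Rect};

// Illustrative only: grow the bottom row when an error message needs more room.
fn split_main(area: Rect, error_visible: bool) -> (Rect, Rect) {
    let status_line_height: u16 = if error_visible { 4 } else { 1 };
    let chunks = Layout::default()
        .direction(Direction::Vertical)
        .constraints([Constraint::Min(0), Constraint::Length(status_line_height)])
        .split(area);
    (chunks[0], chunks[1]) // (main content, status line)
}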
@@ -9,7 +9,7 @@ use crate::modes::common::commands::CommandHandler;
 use crate::modes::handlers::event::{EventHandler, EventOutcome};
 use crate::modes::handlers::mode_manager::{AppMode, ModeManager};
 use crate::state::pages::canvas_state::CanvasState;
-use crate::state::pages::form::FormState;
+use crate::state::pages::form::{FormState, FieldDefinition}; // Import FieldDefinition
 use crate::state::pages::auth::AuthState;
 use crate::state::pages::auth::LoginState;
 use crate::state::pages::auth::RegisterState;
@@ -27,12 +27,16 @@ use crate::ui::handlers::context::DialogPurpose;
 use crate::tui::functions::common::login;
 use crate::tui::functions::common::register;
 use crate::utils::columns::filter_user_columns;
-use std::time::Instant;
 use anyhow::{anyhow, Context, Result};
 use crossterm::cursor::SetCursorStyle;
 use crossterm::event as crossterm_event;
 use tracing::{error, info, warn};
 use tokio::sync::mpsc;
+use std::time::{Duration, Instant};
+#[cfg(feature = "ui-debug")]
+use crate::state::app::state::DebugState;
+#[cfg(feature = "ui-debug")]
+use crate::utils::debug_logger::pop_next_debug_message;

 pub async fn run_ui() -> Result<()> {
     let config = Config::load().context("Failed to load configuration")?;
@@ -51,6 +55,7 @@ pub async fn run_ui() -> Result<()> {
         register_result_sender.clone(),
         save_table_result_sender.clone(),
         save_logic_result_sender.clone(),
+        grpc_client.clone(),
     )
     .await
     .context("Failed to create event handler")?;
@@ -87,12 +92,20 @@ pub async fn run_ui() -> Result<()> {
     .await
     .context("Failed to initialize app state and form")?;

-    let filtered_columns = filter_user_columns(initial_columns_from_service);
+    let initial_field_defs: Vec<FieldDefinition> = filter_user_columns(initial_columns_from_service)
+        .into_iter()
+        .map(|col_name| FieldDefinition {
+            display_name: col_name.clone(),
+            data_key: col_name,
+            is_link: false,
+            link_target_table: None,
+        })
+        .collect();

     let mut form_state = FormState::new(
         initial_profile.clone(),
         initial_table.clone(),
-        filtered_columns,
+        initial_field_defs,
     );

     UiService::fetch_and_set_table_count(&mut grpc_client, &mut form_state)
@@ -126,6 +139,51 @@ pub async fn run_ui() -> Result<()> {
     loop {
         let position_before_event = form_state.current_position;
         let mut event_processed = false;

+        // --- CHANNEL RECEIVERS ---
+
+        // For main search palette
+        match event_handler.search_result_receiver.try_recv() {
+            Ok(hits) => {
+                info!("--- 4. Main loop received message from channel. ---");
+                if let Some(search_state) = app_state.search_state.as_mut() {
+                    search_state.results = hits;
+                    search_state.is_loading = false;
+                }
+                needs_redraw = true;
+            }
+            Err(mpsc::error::TryRecvError::Empty) => {
+            }
+            Err(mpsc::error::TryRecvError::Disconnected) => {
+                error!("Search result channel disconnected!");
+            }
+        }
+
+        // --- ADDED: For live form autocomplete ---
+        match event_handler.autocomplete_result_receiver.try_recv() {
+            Ok(hits) => {
+                if form_state.autocomplete_active {
+                    form_state.autocomplete_suggestions = hits;
+                    form_state.autocomplete_loading = false;
+                    if !form_state.autocomplete_suggestions.is_empty() {
+                        form_state.selected_suggestion_index = Some(0);
+                    } else {
+                        form_state.selected_suggestion_index = None;
+                    }
+                    event_handler.command_message = format!("Found {} suggestions.", form_state.autocomplete_suggestions.len());
+                }
+                needs_redraw = true;
+            }
+            Err(mpsc::error::TryRecvError::Empty) => {}
+            Err(mpsc::error::TryRecvError::Disconnected) => {
+                error!("Autocomplete result channel disconnected!");
+            }
+        }
+
+        if app_state.ui.show_search_palette {
+            needs_redraw = true;
+        }
         if crossterm_event::poll(std::time::Duration::from_millis(1))? {
             let event = event_reader.read_event().context("Failed to read terminal event")?;
             event_processed = true;
@@ -133,7 +191,6 @@ pub async fn run_ui() -> Result<()> {
                 event,
                 &config,
                 &mut terminal,
-                &mut grpc_client,
                 &mut command_handler,
                 &mut form_state,
                 &mut auth_state,
@@ -293,83 +350,91 @@ pub async fn run_ui() -> Result<()> {
         let current_view_profile = app_state.current_view_profile_name.clone();
         let current_view_table = app_state.current_view_table_name.clone();

+        // This condition correctly detects a table switch.
         if prev_view_profile_name != current_view_profile
             || prev_view_table_name != current_view_table
         {
             if let (Some(prof_name), Some(tbl_name)) =
                 (current_view_profile.as_ref(), current_view_table.as_ref())
             {
+                // --- START OF REFACTORED LOGIC ---
                 app_state.show_loading_dialog(
                     "Loading Table",
                     &format!("Fetching data for {}.{}...", prof_name, tbl_name),
                 );
                 needs_redraw = true;

-                match grpc_client
-                    .get_table_structure(prof_name.clone(), tbl_name.clone())
-                    .await
+                // 1. Call our new, central function. It handles fetching AND caching.
+                match UiService::load_table_view(
+                    &mut grpc_client,
+                    &mut app_state,
+                    prof_name,
+                    tbl_name,
+                )
+                .await
                 {
-                    Ok(structure_response) => {
-                        let new_columns: Vec<String> = structure_response
-                            .columns
-                            .iter()
-                            .map(|c| c.name.clone())
-                            .collect();
-
-                        let filtered_columns = filter_user_columns(new_columns);
-                        form_state = FormState::new(
-                            prof_name.clone(),
-                            tbl_name.clone(),
-                            filtered_columns,
-                        );
-
+                    Ok(mut new_form_state) => {
+                        // 2. The function succeeded, we have a new FormState.
+                        // Now, fetch its data.
                         if let Err(e) = UiService::fetch_and_set_table_count(
                             &mut grpc_client,
-                            &mut form_state,
+                            &mut new_form_state,
                         )
                         .await
                         {
+                            // Handle count fetching error
                             app_state.update_dialog_content(
                                 &format!("Error fetching count: {}", e),
                                 vec!["OK".to_string()],
-                                DialogPurpose::LoginFailed,
+                                DialogPurpose::LoginFailed, // Or a more appropriate purpose
                             );
-                        } else if form_state.total_count > 0 {
+                        } else if new_form_state.total_count > 0 {
+                            // If there are records, load the first/last one
                             if let Err(e) = UiService::load_table_data_by_position(
                                 &mut grpc_client,
-                                &mut form_state,
+                                &mut new_form_state,
                             )
                             .await
                             {
+                                // Handle data loading error
                                 app_state.update_dialog_content(
                                     &format!("Error loading data: {}", e),
                                     vec!["OK".to_string()],
-                                    DialogPurpose::LoginFailed,
+                                    DialogPurpose::LoginFailed, // Or a more appropriate purpose
                                 );
                             } else {
+                                // Success! Hide the loading dialog.
                                 app_state.hide_dialog();
                             }
                         } else {
-                            form_state.reset_to_empty();
+                            // No records, so just reset to an empty form.
+                            new_form_state.reset_to_empty();
                             app_state.hide_dialog();
                         }

+                        // 3. CRITICAL: Replace the old form_state with the new one.
+                        form_state = new_form_state;
+
+                        // 4. Update our tracking variables.
                         prev_view_profile_name = current_view_profile;
                         prev_view_table_name = current_view_table;
                         table_just_switched = true;
                     }
                     Err(e) => {
+                        // This handles errors from load_table_view (e.g., schema fetch failed)
                         app_state.update_dialog_content(
-                            &format!("Error fetching table structure: {}", e),
+                            &format!("Error loading table: {}", e),
                             vec!["OK".to_string()],
-                            DialogPurpose::LoginFailed,
+                            DialogPurpose::LoginFailed, // Or a more appropriate purpose
                         );
+                        // Revert the view change in app_state to avoid a loop
                        app_state.current_view_profile_name =
                            prev_view_profile_name.clone();
                        app_state.current_view_table_name =
                            prev_view_table_name.clone();
                    }
                }
+                // --- END OF REFACTORED LOGIC ---
            }
            needs_redraw = true;
        }
@@ -499,10 +564,20 @@ pub async fn run_ui() -> Result<()> {

     #[cfg(feature = "ui-debug")]
     {
-        app_state.debug_info = format!(
-            "Redraw -> event: {}, needs_redraw: {}, pos_changed: {}",
-            event_processed, needs_redraw, position_changed
-        );
+        let can_display_next = match &app_state.debug_state {
+            Some(current) => current.display_start_time.elapsed() >= Duration::from_secs(2),
+            None => true,
+        };
+
+        if can_display_next {
+            if let Some((new_message, is_error)) = pop_next_debug_message() {
+                app_state.debug_state = Some(DebugState {
+                    displayed_message: new_message,
+                    is_error,
+                    display_start_time: Instant::now(),
+                });
+            }
+        }
     }

     if event_processed || needs_redraw || position_changed {
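A note on the channel handling above (illustrative, not from the diff): non-blocking draining of a tokio mpsc channel inside a render loop typically follows this shape; the receiver type and payload below are placeholders.

use tokio::sync::mpsc::{self, error::TryRecvError};

// Hypothetical receiver of background results; String stands in for Hit here.
fn drain_results(rx: &mut mpsc::UnboundedReceiver<Vec<String>>, needs_redraw: &mut bool) {
    loop {
        match rx.try_recv() {
            Ok(_hits) => {
                // Apply the results to UI state, then request a repaint.
                *needs_redraw = true;
            }
            Err(TryRecvError::Empty) => break,        // nothing pending this frame
            Err(TryRecvError::Disconnected) => break, // sender dropped; stop polling
        }
    }
}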
50 client/src/utils/data_converter.rs Normal file
@@ -0,0 +1,50 @@
+// src/utils/data_converter.rs
+
+use common::proto::multieko2::table_structure::TableStructureResponse;
+use prost_types::{value::Kind, NullValue, Value};
+use std::collections::HashMap;
+
+pub fn convert_and_validate_data(
+    data: &HashMap<String, String>,
+    schema: &TableStructureResponse,
+) -> Result<HashMap<String, Value>, String> {
+    let type_map: HashMap<_, _> = schema
+        .columns
+        .iter()
+        .map(|col| (col.name.as_str(), col.data_type.as_str()))
+        .collect();
+
+    data.iter()
+        .map(|(key, str_value)| {
+            let expected_type = type_map.get(key.as_str()).unwrap_or(&"TEXT");
+
+            let kind = if str_value.is_empty() {
+                // TODO: Use the correct enum variant
+                Kind::NullValue(NullValue::NullValue.into())
+            } else {
+                // Attempt to parse the string based on the expected type
+                match *expected_type {
+                    "BOOL" => match str_value.to_lowercase().parse::<bool>() {
+                        Ok(v) => Kind::BoolValue(v),
+                        Err(_) => return Err(format!("Invalid boolean for '{}': must be 'true' or 'false'", key)),
+                    },
+                    "INT8" | "INT4" | "INT2" | "SERIAL" | "BIGSERIAL" => {
+                        match str_value.parse::<f64>() {
+                            Ok(v) => Kind::NumberValue(v),
+                            Err(_) => return Err(format!("Invalid number for '{}': must be a whole number", key)),
+                        }
+                    }
+                    "NUMERIC" | "FLOAT4" | "FLOAT8" => match str_value.parse::<f64>() {
+                        Ok(v) => Kind::NumberValue(v),
+                        Err(_) => return Err(format!("Invalid decimal for '{}': must be a number", key)),
+                    },
+                    "TIMESTAMPTZ" | "DATE" | "TIME" | "TEXT" | "VARCHAR" | "UUID" => {
+                        Kind::StringValue(str_value.clone())
+                    }
+                    _ => Kind::StringValue(str_value.clone()),
+                }
+            };
+            Ok((key.clone(), Value { kind: Some(kind) }))
+        })
+        .collect()
+}
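A hedged usage sketch (not part of the PR): assuming a `TableStructureResponse` has already been fetched (for example via the gRPC `get_table_structure` call seen elsewhere in this diff), the converter can be exercised as below; the column names are invented for illustration.

use std::collections::HashMap;
// Path assumes the re-export added in utils/mod.rs; adjust to the crate's layout.
use client::utils::data_converter::convert_and_validate_data;

// `schema` is assumed to come from a prior get_table_structure() call.
fn build_payload(
    schema: &common::proto::multieko2::table_structure::TableStructureResponse,
) -> Result<HashMap<String, prost_types::Value>, String> {
    let mut data = HashMap::new();
    data.insert("firma".to_string(), "Acme s.r.o.".to_string()); // TEXT column
    data.insert("pocet".to_string(), "12".to_string());          // integer column
    data.insert("aktivny".to_string(), "true".to_string());      // BOOL column

    // Empty strings become protobuf null; bad numbers/bools return a user-facing error.
    convert_and_validate_data(&data, schema)
}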
46 client/src/utils/debug_logger.rs Normal file
@@ -0,0 +1,46 @@
+// client/src/utils/debug_logger.rs
+use lazy_static::lazy_static;
+use std::collections::VecDeque; // <-- FIX: Import VecDeque
+use std::io;
+use std::sync::{Arc, Mutex}; // <-- FIX: Import Mutex
+
+lazy_static! {
+    static ref UI_DEBUG_BUFFER: Arc<Mutex<VecDeque<(String, bool)>>> =
+        Arc::new(Mutex::new(VecDeque::from([(String::from("Logger initialized..."), false)])));
+}
+
+#[derive(Clone)]
+pub struct UiDebugWriter;
+
+impl Default for UiDebugWriter {
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
+impl UiDebugWriter {
+    pub fn new() -> Self {
+        Self
+    }
+}
+
+impl io::Write for UiDebugWriter {
+    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
+        let mut buffer = UI_DEBUG_BUFFER.lock().unwrap();
+        let message = String::from_utf8_lossy(buf);
+        let trimmed_message = message.trim().to_string();
+        let is_error = trimmed_message.starts_with("ERROR");
+        // Add the new message to the back of the queue
+        buffer.push_back((trimmed_message, is_error));
+        Ok(buf.len())
+    }
+
+    fn flush(&mut self) -> io::Result<()> {
+        Ok(())
+    }
+}
+
+// A public function to pop the next message from the front of the queue.
+pub fn pop_next_debug_message() -> Option<(String, bool)> {
+    UI_DEBUG_BUFFER.lock().unwrap().pop_front()
+}
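For orientation (not shown in the diff): one plausible way to route `tracing` output through `UiDebugWriter`, assuming `tracing-subscriber` with the `fmt` feature is available; the exact subscriber setup used by this project may differ.

use tracing_subscriber::fmt;

// Illustrative wiring only: every formatted log line lands in UI_DEBUG_BUFFER,
// and the UI loop later drains it with pop_next_debug_message().
fn init_ui_logging() {
    fmt()
        .with_ansi(false)                // plain text for the TUI status line
        .with_writer(UiDebugWriter::new) // fn returning an io::Write implements MakeWriter
        .init();
}

// In the render loop (see the run_ui changes above), something like:
// if let Some((msg, is_error)) = pop_next_debug_message() { /* show it */ }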
@@ -1,4 +1,9 @@
 // src/utils/mod.rs

 pub mod columns;
+pub mod debug_logger;
+pub mod data_converter;

 pub use columns::*;
+pub use debug_logger::*;
+pub use data_converter::*;
262
client/tests/form/gui/form_tests.rs
Normal file
262
client/tests/form/gui/form_tests.rs
Normal file
@@ -0,0 +1,262 @@
|
|||||||
|
// client/tests/form_tests.rs
|
||||||
|
use rstest::{fixture, rstest};
|
||||||
|
use std::collections::HashMap;
|
||||||
|
use client::state::pages::form::{FormState, FieldDefinition};
|
||||||
|
use client::state::pages::canvas_state::CanvasState;
|
||||||
|
|
||||||
|
#[fixture]
|
||||||
|
fn test_form_state() -> FormState {
|
||||||
|
let fields = vec![
|
||||||
|
FieldDefinition {
|
||||||
|
display_name: "Company".to_string(),
|
||||||
|
data_key: "firma".to_string(),
|
||||||
|
is_link: false,
|
||||||
|
link_target_table: None,
|
||||||
|
},
|
||||||
|
FieldDefinition {
|
||||||
|
display_name: "Phone".to_string(),
|
||||||
|
data_key: "telefon".to_string(),
|
||||||
|
is_link: false,
|
||||||
|
link_target_table: None,
|
||||||
|
},
|
||||||
|
FieldDefinition {
|
||||||
|
display_name: "Email".to_string(),
|
||||||
|
data_key: "email".to_string(),
|
||||||
|
is_link: false,
|
||||||
|
link_target_table: None,
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
|
FormState::new("test_profile".to_string(), "test_table".to_string(), fields)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[fixture]
|
||||||
|
fn test_form_data() -> HashMap<String, String> {
|
||||||
|
let mut data = HashMap::new();
|
||||||
|
data.insert("firma".to_string(), "Test Company".to_string());
|
||||||
|
data.insert("telefon".to_string(), "+421123456789".to_string());
|
||||||
|
data.insert("email".to_string(), "test@example.com".to_string());
|
||||||
|
data
|
||||||
|
}
|
||||||
|
|
||||||
|
#[rstest]
|
||||||
|
fn test_form_state_creation(test_form_state: FormState) {
|
||||||
|
assert_eq!(test_form_state.profile_name, "test_profile");
|
||||||
|
assert_eq!(test_form_state.table_name, "test_table");
|
||||||
|
assert_eq!(test_form_state.fields.len(), 3);
|
||||||
|
assert_eq!(test_form_state.current_field(), 0);
|
||||||
|
assert!(!test_form_state.has_unsaved_changes());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[rstest]
|
||||||
|
fn test_form_field_navigation(mut test_form_state: FormState) {
|
||||||
|
// Test initial field
|
||||||
|
assert_eq!(test_form_state.current_field(), 0);
|
||||||
|
|
||||||
|
// Test navigation to next field
|
||||||
|
test_form_state.set_current_field(1);
|
||||||
|
assert_eq!(test_form_state.current_field(), 1);
|
||||||
|
|
||||||
|
// Test navigation to last field
|
||||||
|
test_form_state.set_current_field(2);
|
||||||
|
assert_eq!(test_form_state.current_field(), 2);
|
||||||
|
|
||||||
|
// Test invalid field (should not crash)
|
||||||
|
test_form_state.set_current_field(999);
|
||||||
|
assert_eq!(test_form_state.current_field(), 2); // Should stay at valid field
|
||||||
|
}
|
||||||
|
|
||||||
|
#[rstest]
|
||||||
|
fn test_form_data_entry(mut test_form_state: FormState) {
|
||||||
|
// Test entering data in first field
|
||||||
|
*test_form_state.get_current_input_mut() = "Test Company".to_string();
|
||||||
|
test_form_state.set_has_unsaved_changes(true);
|
||||||
|
|
||||||
|
assert_eq!(test_form_state.get_current_input(), "Test Company");
|
||||||
|
assert!(test_form_state.has_unsaved_changes());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[rstest]
|
||||||
|
fn test_form_field_switching_with_data(mut test_form_state: FormState) {
|
||||||
|
// Enter data in first field
|
||||||
|
*test_form_state.get_current_input_mut() = "Company Name".to_string();
|
||||||
|
|
||||||
|
// Switch to second field
|
||||||
|
test_form_state.set_current_field(1);
|
||||||
|
*test_form_state.get_current_input_mut() = "+421123456789".to_string();
|
||||||
|
|
||||||
|
// Switch back to first field
|
||||||
|
test_form_state.set_current_field(0);
|
||||||
|
assert_eq!(test_form_state.get_current_input(), "Company Name");
|
||||||
|
|
||||||
|
// Switch to second field again
|
||||||
|
test_form_state.set_current_field(1);
|
||||||
|
assert_eq!(test_form_state.get_current_input(), "+421123456789");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[rstest]
|
||||||
|
fn test_form_reset_functionality(mut test_form_state: FormState) {
|
||||||
|
// Add some data
|
||||||
|
test_form_state.set_current_field(0);
|
||||||
|
*test_form_state.get_current_input_mut() = "Test Company".to_string();
|
||||||
|
test_form_state.set_current_field(1);
|
||||||
|
*test_form_state.get_current_input_mut() = "+421123456789".to_string();
|
||||||
|
test_form_state.set_has_unsaved_changes(true);
|
||||||
|
test_form_state.id = 123;
|
||||||
|
test_form_state.current_position = 5;
|
||||||
|
|
||||||
|
// Reset the form
|
||||||
|
test_form_state.reset_to_empty();
|
||||||
|
|
||||||
|
// Verify reset
|
||||||
|
assert_eq!(test_form_state.id, 0);
|
||||||
|
assert!(!test_form_state.has_unsaved_changes());
|
||||||
|
assert_eq!(test_form_state.current_field(), 0);
|
||||||
|
|
||||||
|
// Check all fields are empty
|
||||||
|
for i in 0..test_form_state.fields.len() {
|
||||||
|
test_form_state.set_current_field(i);
|
||||||
|
assert!(test_form_state.get_current_input().is_empty());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[rstest]
|
||||||
|
fn test_form_update_from_response(mut test_form_state: FormState, test_form_data: HashMap<String, String>) {
|
||||||
|
let position = 3;
|
||||||
|
|
||||||
|
// Update form with response data
|
||||||
|
test_form_state.update_from_response(&test_form_data, position);
|
||||||
|
|
||||||
|
// Verify data was loaded
|
||||||
|
assert_eq!(test_form_state.current_position, position);
|
||||||
|
assert!(!test_form_state.has_unsaved_changes());
|
||||||
|
assert_eq!(test_form_state.current_field(), 0);
|
||||||
|
|
||||||
|
// Check field values
|
||||||
|
test_form_state.set_current_field(0);
|
||||||
|
assert_eq!(test_form_state.get_current_input(), "Test Company");
|
||||||
|
|
||||||
|
test_form_state.set_current_field(1);
|
||||||
|
assert_eq!(test_form_state.get_current_input(), "+421123456789");
|
||||||
|
|
||||||
|
test_form_state.set_current_field(2);
|
||||||
|
assert_eq!(test_form_state.get_current_input(), "test@example.com");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[rstest]
|
||||||
|
fn test_form_cursor_position(mut test_form_state: FormState) {
|
||||||
|
// Test initial cursor position
|
||||||
|
    assert_eq!(test_form_state.current_cursor_pos(), 0);

    // Add some text
    *test_form_state.get_current_input_mut() = "Test Company".to_string();

    // Test cursor positioning
    test_form_state.set_current_cursor_pos(5);
    assert_eq!(test_form_state.current_cursor_pos(), 5);

    // Test cursor bounds
    test_form_state.set_current_cursor_pos(999);
    // Should be clamped to text length
    assert!(test_form_state.current_cursor_pos() <= "Test Company".len());
}

#[rstest]
fn test_form_field_display_names(test_form_state: FormState) {
    let field_names = test_form_state.fields();

    assert_eq!(field_names.len(), 3);
    assert_eq!(field_names[0], "Company");
    assert_eq!(field_names[1], "Phone");
    assert_eq!(field_names[2], "Email");
}

#[rstest]
fn test_form_inputs_vector(mut test_form_state: FormState) {
    // Add data to fields
    test_form_state.set_current_field(0);
    *test_form_state.get_current_input_mut() = "Company A".to_string();

    test_form_state.set_current_field(1);
    *test_form_state.get_current_input_mut() = "123456789".to_string();

    test_form_state.set_current_field(2);
    *test_form_state.get_current_input_mut() = "test@test.com".to_string();

    // Get inputs vector
    let inputs = test_form_state.inputs();

    assert_eq!(inputs.len(), 3);
    assert_eq!(inputs[0], "Company A");
    assert_eq!(inputs[1], "123456789");
    assert_eq!(inputs[2], "test@test.com");
}

#[rstest]
fn test_form_position_management(mut test_form_state: FormState) {
    // Test initial position
    assert_eq!(test_form_state.current_position, 1);
    assert_eq!(test_form_state.total_count, 0);

    // Set some values
    test_form_state.total_count = 10;
    test_form_state.current_position = 5;

    assert_eq!(test_form_state.current_position, 5);
    assert_eq!(test_form_state.total_count, 10);

    // Test reset affects position
    test_form_state.reset_to_empty();
    assert_eq!(test_form_state.current_position, 11); // total_count + 1
}

#[rstest]
fn test_form_autocomplete_state(mut test_form_state: FormState) {
    // Test initial autocomplete state
    assert!(!test_form_state.autocomplete_active);
    assert!(test_form_state.autocomplete_suggestions.is_empty());
    assert!(test_form_state.selected_suggestion_index.is_none());

    // Test deactivating autocomplete
    test_form_state.autocomplete_active = true;
    test_form_state.deactivate_autocomplete();

    assert!(!test_form_state.autocomplete_active);
    assert!(test_form_state.autocomplete_suggestions.is_empty());
    assert!(test_form_state.selected_suggestion_index.is_none());
    assert!(!test_form_state.autocomplete_loading);
}

#[rstest]
fn test_form_empty_data_handling(mut test_form_state: FormState) {
    let empty_data = HashMap::new();

    // Update with empty data
    test_form_state.update_from_response(&empty_data, 1);

    // All fields should be empty
    for i in 0..test_form_state.fields.len() {
        test_form_state.set_current_field(i);
        assert!(test_form_state.get_current_input().is_empty());
    }
}

#[rstest]
fn test_form_partial_data_handling(mut test_form_state: FormState) {
    let mut partial_data = HashMap::new();
    partial_data.insert("firma".to_string(), "Partial Company".to_string());
    // Intentionally missing telefon and email

    test_form_state.update_from_response(&partial_data, 1);

    // First field should have data
    test_form_state.set_current_field(0);
    assert_eq!(test_form_state.get_current_input(), "Partial Company");

    // Other fields should be empty
    test_form_state.set_current_field(1);
    assert!(test_form_state.get_current_input().is_empty());

    test_form_state.set_current_field(2);
    assert!(test_form_state.get_current_input().is_empty());
}
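The GUI tests above take a `test_form_state` rstest fixture that is defined in the shared test support code, not in this hunk. A minimal sketch of what such a fixture could look like, assuming a hypothetical `FormState::new(fields)` constructor that builds an empty three-field form:

use rstest::fixture;

// Hypothetical fixture for illustration only; the real one lives in the
// test support module. Assumes FormState::new takes the display names.
#[fixture]
fn test_form_state() -> FormState {
    FormState::new(vec![
        "Company".to_string(),
        "Phone".to_string(),
        "Email".to_string(),
    ])
}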
1 client/tests/form/gui/mod.rs Normal file
@@ -0,0 +1 @@
pub mod form_tests;

2 client/tests/form/mod.rs Normal file
@@ -0,0 +1,2 @@
pub mod gui;
pub mod requests;

1019 client/tests/form/requests/form_request_tests.rs Normal file
File diff suppressed because it is too large.

267 client/tests/form/requests/form_request_tests2.rs Normal file
@@ -0,0 +1,267 @@
// ========================================================================
// ROBUST WORKFLOW AND INTEGRATION TESTS
// ========================================================================

#[rstest]
#[tokio::test]
async fn test_partial_update_preserves_other_fields(
    #[future] populated_test_context: FormTestContext,
) {
    let mut context = populated_test_context.await;
    skip_if_backend_unavailable!();

    // 1. Create a record with multiple fields
    let mut initial_data = context.create_test_form_data();
    let original_email = "preserve.this@email.com";
    initial_data.insert(
        "email".to_string(),
        create_string_value(original_email),
    );

    let post_res = context
        .client
        .post_table_data(
            context.profile_name.clone(),
            context.table_name.clone(),
            initial_data,
        )
        .await
        .expect("Setup: Failed to create record for partial update test");
    let created_id = post_res.inserted_id;
    println!("Partial Update Test: Created record ID {}", created_id);

    // 2. Update only ONE field
    let mut partial_update = HashMap::new();
    let updated_firma = "Partially Updated Inc.";
    partial_update.insert(
        "firma".to_string(),
        create_string_value(updated_firma),
    );

    context
        .client
        .put_table_data(
            context.profile_name.clone(),
            context.table_name.clone(),
            created_id,
            partial_update,
        )
        .await
        .expect("Partial update failed");
    println!("Partial Update Test: Updated only 'firma' field");

    // 3. Get the record back and verify ALL fields
    let get_res = context
        .client
        .get_table_data(
            context.profile_name.clone(),
            context.table_name.clone(),
            created_id,
        )
        .await
        .expect("Failed to get record after partial update");

    let final_data = get_res.data;
    assert_eq!(
        final_data.get("firma").unwrap(),
        updated_firma,
        "The 'firma' field should be updated"
    );
    assert_eq!(
        final_data.get("email").unwrap(),
        original_email,
        "The 'email' field should have been preserved"
    );
    println!("Partial Update Test: Verified other fields were preserved. OK.");
}

#[rstest]
#[tokio::test]
async fn test_data_edge_cases_and_unicode(
    #[future] form_test_context: FormTestContext,
) {
    let mut context = form_test_context.await;
    skip_if_backend_unavailable!();

    let edge_case_strings = vec![
        ("Unicode", "José María González, Москва, 北京市"),
        ("Emoji", "🚀 Tech Company 🌟"),
        ("Quotes", "Quote\"Test'Apostrophe"),
        ("Symbols", "Price: $1,000.50 (50% off!)"),
        ("Empty", ""),
        ("Whitespace", " "),
    ];

    for (case_name, test_string) in edge_case_strings {
        let mut data = HashMap::new();
        data.insert("firma".to_string(), create_string_value(test_string));
        data.insert(
            "kz".to_string(),
            create_string_value(&format!("EDGE-{}", case_name)),
        );

        let post_res = context
            .client
            .post_table_data(
                context.profile_name.clone(),
                context.table_name.clone(),
                data,
            )
            .await
            .expect(&format!("POST should succeed for case: {}", case_name));
        let created_id = post_res.inserted_id;

        let get_res = context
            .client
            .get_table_data(
                context.profile_name.clone(),
                context.table_name.clone(),
                created_id,
            )
            .await
            .expect(&format!(
                "GET should succeed for case: {}",
                case_name
            ));

        assert_eq!(
            get_res.data.get("firma").unwrap(),
            test_string,
            "Data should be identical after round-trip for case: {}",
            case_name
        );
        println!("Edge Case Test: '{}' passed.", case_name);
    }
}

#[rstest]
#[tokio::test]
async fn test_numeric_and_null_edge_cases(
    #[future] form_test_context: FormTestContext,
) {
    let mut context = form_test_context.await;
    skip_if_backend_unavailable!();

    // 1. Test NULL value
    let mut null_data = HashMap::new();
    null_data.insert(
        "firma".to_string(),
        create_string_value("Company With Null Phone"),
    );
    null_data.insert("telefon".to_string(), create_null_value());
    let post_res_null = context
        .client
        .post_table_data(
            context.profile_name.clone(),
            context.table_name.clone(),
            null_data,
        )
        .await
        .expect("POST with NULL value should succeed");
    let get_res_null = context
        .client
        .get_table_data(
            context.profile_name.clone(),
            context.table_name.clone(),
            post_res_null.inserted_id,
        )
        .await
        .unwrap();
    // Depending on DB, NULL may come back as empty string or be absent.
    // The important part is that the operation doesn't fail.
    assert!(
        get_res_null.data.get("telefon").unwrap_or(&"".to_string()).is_empty(),
        "NULL value should result in an empty or absent field"
    );
    println!("Edge Case Test: NULL value handled correctly. OK.");

    // 2. Test Zero value for a numeric field (assuming 'age' is numeric)
    let mut zero_data = HashMap::new();
    zero_data.insert(
        "firma".to_string(),
        create_string_value("Newborn Company"),
    );
    // Assuming 'age' is a field in your actual table definition
    // zero_data.insert("age".to_string(), create_number_value(0.0));
    // let post_res_zero = context.client.post_table_data(...).await.expect("POST with zero should succeed");
    // ... then get and verify it's "0"
    println!("Edge Case Test: Zero value test skipped (uncomment if 'age' field exists).");
}

#[rstest]
#[tokio::test]
async fn test_concurrent_updates_on_same_record(
    #[future] populated_test_context: FormTestContext,
) {
    let mut context = populated_test_context.await;
    skip_if_backend_unavailable!();

    // 1. Create a single record to be updated by all tasks
    let initial_data = context.create_minimal_form_data();
    let post_res = context
        .client
        .post_table_data(
            context.profile_name.clone(),
            context.table_name.clone(),
            initial_data,
        )
        .await
        .expect("Setup: Failed to create record for concurrency test");
    let record_id = post_res.inserted_id;
    println!("Concurrency Test: Target record ID is {}", record_id);

    // 2. Spawn multiple concurrent UPDATE operations
    let mut handles = Vec::new();
    let num_concurrent_tasks = 5;
    let mut final_values = Vec::new();

    for i in 0..num_concurrent_tasks {
        let mut client_clone = context.client.clone();
        let profile_name = context.profile_name.clone();
        let table_name = context.table_name.clone();
        let final_value = format!("Concurrent Update {}", i);
        final_values.push(final_value.clone());

        let handle = tokio::spawn(async move {
            let mut update_data = HashMap::new();
            update_data.insert(
                "firma".to_string(),
                create_string_value(&final_value),
            );
            client_clone
                .put_table_data(profile_name, table_name, record_id, update_data)
                .await
        });
        handles.push(handle);
    }

    // 3. Wait for all tasks to complete and check for panics
    let results = futures::future::join_all(handles).await;
    assert!(
        results.iter().all(|r| r.is_ok()),
        "No concurrent task should panic"
    );
    println!("Concurrency Test: All update tasks completed without panicking.");

    // 4. Get the final state of the record
    let final_get_res = context
        .client
        .get_table_data(
            context.profile_name.clone(),
            context.table_name.clone(),
            record_id,
        )
        .await
        .expect("Should be able to get the record after concurrent updates");

    let final_firma = final_get_res.data.get("firma").unwrap();
    assert!(
        final_values.contains(final_firma),
        "The final state '{}' must be one of the states set by the tasks",
        final_firma
    );
    println!(
        "Concurrency Test: Final state is '{}', which is a valid outcome. OK.",
        final_firma
    );
}
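These request tests rely on helpers such as `create_string_value`, `create_number_value`, `create_bool_value` and `create_null_value`, which are defined in the suppressed form_request_tests.rs. A minimal sketch of how they could wrap plain values into the `map<string, google.protobuf.Value>` payload (assumed shapes, for illustration only):

use prost_types::{value::Kind, Value};

// Hypothetical helpers; the real definitions live in the shared test module.
fn create_string_value(s: &str) -> Value {
    Value { kind: Some(Kind::StringValue(s.to_string())) }
}

fn create_number_value(n: f64) -> Value {
    Value { kind: Some(Kind::NumberValue(n)) }
}

fn create_bool_value(b: bool) -> Value {
    Value { kind: Some(Kind::BoolValue(b)) }
}

fn create_null_value() -> Value {
    // NullValue is represented by its enum discriminant in prost.
    Value { kind: Some(Kind::NullValue(0)) }
}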
727 client/tests/form/requests/form_request_tests3.rs Normal file
@@ -0,0 +1,727 @@
// form_request_tests3.rs - Comprehensive and Robust Testing

// ========================================================================
// STEEL SCRIPT VALIDATION TESTS (HIGHEST PRIORITY)
// ========================================================================

#[rstest]
#[tokio::test]
async fn test_steel_script_validation_success(#[future] form_test_context: FormTestContext) {
    let mut context = form_test_context.await;
    skip_if_backend_unavailable!();

    // Test with data that should pass script validation
    // Assuming there's a script that validates 'kz' field to start with "KZ" and be 5 chars
    let mut valid_data = HashMap::new();
    valid_data.insert("firma".to_string(), create_string_value("Script Test Company"));
    valid_data.insert("kz".to_string(), create_string_value("KZ123"));
    valid_data.insert("telefon".to_string(), create_string_value("+421123456789"));

    let result = context.client.post_table_data(
        context.profile_name.clone(),
        context.table_name.clone(),
        valid_data,
    ).await;

    match result {
        Ok(response) => {
            assert!(response.success, "Valid data should pass script validation");
            println!("Script Validation Test: Valid data passed - ID {}", response.inserted_id);
        }
        Err(e) => {
            if let Some(status) = e.downcast_ref::<Status>() {
                if status.code() == tonic::Code::Unavailable {
                    println!("Script validation test skipped - backend not available");
                    return;
                }
                // If there are no scripts configured, this might still work
                println!("Script validation test: {}", status.message());
            }
        }
    }
}

#[rstest]
#[tokio::test]
async fn test_steel_script_validation_failure(#[future] form_test_context: FormTestContext) {
    let mut context = form_test_context.await;
    skip_if_backend_unavailable!();

    // Test with data that should fail script validation
    let invalid_script_data = vec![
        ("TooShort", "KZ12"),     // Too short
        ("TooLong", "KZ12345"),   // Too long
        ("WrongPrefix", "AB123"), // Wrong prefix
        ("NoPrefix", "12345"),    // No prefix
        ("Empty", ""),            // Empty
    ];

    for (test_case, invalid_kz) in invalid_script_data {
        let mut invalid_data = HashMap::new();
        invalid_data.insert("firma".to_string(), create_string_value("Script Fail Company"));
        invalid_data.insert("kz".to_string(), create_string_value(invalid_kz));

        let result = context.client.post_table_data(
            context.profile_name.clone(),
            context.table_name.clone(),
            invalid_data,
        ).await;

        match result {
            Ok(_) => {
                println!("Script Validation Test: {} passed (no validation script configured)", test_case);
            }
            Err(e) => {
                if let Some(status) = e.downcast_ref::<Status>() {
                    assert_eq!(status.code(), tonic::Code::InvalidArgument,
                        "Script validation failure should return InvalidArgument for case: {}", test_case);
                    println!("Script Validation Test: {} correctly failed - {}", test_case, status.message());
                }
            }
        }
    }
}

#[rstest]
#[tokio::test]
async fn test_steel_script_validation_on_update(#[future] form_test_context: FormTestContext) {
    let mut context = form_test_context.await;
    skip_if_backend_unavailable!();

    // 1. Create a valid record first
    let mut initial_data = HashMap::new();
    initial_data.insert("firma".to_string(), create_string_value("Update Script Test"));
    initial_data.insert("kz".to_string(), create_string_value("KZ123"));

    let post_result = context.client.post_table_data(
        context.profile_name.clone(),
        context.table_name.clone(),
        initial_data,
    ).await;

    if let Ok(post_response) = post_result {
        let record_id = post_response.inserted_id;

        // 2. Try to update with invalid data
        let mut invalid_update = HashMap::new();
        invalid_update.insert("kz".to_string(), create_string_value("INVALID"));

        let update_result = context.client.put_table_data(
            context.profile_name.clone(),
            context.table_name.clone(),
            record_id,
            invalid_update,
        ).await;

        match update_result {
            Ok(_) => {
                println!("Script Validation on Update: No validation script configured for updates");
            }
            Err(e) => {
                if let Some(status) = e.downcast_ref::<Status>() {
                    assert_eq!(status.code(), tonic::Code::InvalidArgument,
                        "Update with invalid data should fail script validation");
                    println!("Script Validation on Update: Correctly rejected invalid update");
                }
            }
        }
    }
}
// ========================================================================
// COMPREHENSIVE DATA TYPE TESTS
// ========================================================================

#[rstest]
#[tokio::test]
async fn test_boolean_data_type(#[future] form_test_context: FormTestContext) {
    let mut context = form_test_context.await;
    skip_if_backend_unavailable!();

    // Test valid boolean values
    let boolean_test_cases = vec![
        ("true", true),
        ("false", false),
    ];

    for (case_name, bool_value) in boolean_test_cases {
        let mut data = HashMap::new();
        data.insert("firma".to_string(), create_string_value("Boolean Test Company"));
        // Assuming there's a boolean field called 'active'
        data.insert("active".to_string(), create_bool_value(bool_value));

        let result = context.client.post_table_data(
            context.profile_name.clone(),
            context.table_name.clone(),
            data,
        ).await;

        match result {
            Ok(response) => {
                println!("Boolean Test: {} value succeeded", case_name);

                // Verify the value round-trip
                if let Ok(get_response) = context.client.get_table_data(
                    context.profile_name.clone(),
                    context.table_name.clone(),
                    response.inserted_id,
                ).await {
                    if let Some(retrieved_value) = get_response.data.get("active") {
                        println!("Boolean Test: {} round-trip value: {}", case_name, retrieved_value);
                    }
                }
            }
            Err(e) => {
                println!("Boolean Test: {} failed (field may not exist): {}", case_name, e);
            }
        }
    }
}

#[rstest]
#[tokio::test]
async fn test_numeric_data_types(#[future] form_test_context: FormTestContext) {
    let mut context = form_test_context.await;
    skip_if_backend_unavailable!();

    // Test various numeric values
    let numeric_test_cases = vec![
        ("Zero", 0.0),
        ("Positive", 123.45),
        ("Negative", -67.89),
        ("Large", 999999.99),
        ("SmallDecimal", 0.01),
    ];

    for (case_name, numeric_value) in numeric_test_cases {
        let mut data = HashMap::new();
        data.insert("firma".to_string(), create_string_value("Numeric Test Company"));
        // Assuming there's a numeric field called 'price' or 'amount'
        data.insert("amount".to_string(), create_number_value(numeric_value));

        let result = context.client.post_table_data(
            context.profile_name.clone(),
            context.table_name.clone(),
            data,
        ).await;

        match result {
            Ok(response) => {
                println!("Numeric Test: {} ({}) succeeded", case_name, numeric_value);

                // Verify round-trip
                if let Ok(get_response) = context.client.get_table_data(
                    context.profile_name.clone(),
                    context.table_name.clone(),
                    response.inserted_id,
                ).await {
                    if let Some(retrieved_value) = get_response.data.get("amount") {
                        println!("Numeric Test: {} round-trip value: {}", case_name, retrieved_value);
                    }
                }
            }
            Err(e) => {
                println!("Numeric Test: {} failed (field may not exist): {}", case_name, e);
            }
        }
    }
}

#[rstest]
#[tokio::test]
async fn test_timestamp_data_type(#[future] form_test_context: FormTestContext) {
    let mut context = form_test_context.await;
    skip_if_backend_unavailable!();

    // Test various timestamp formats
    let timestamp_test_cases = vec![
        ("ISO8601", "2024-01-15T10:30:00Z"),
        ("WithTimezone", "2024-01-15T10:30:00+01:00"),
        ("WithMilliseconds", "2024-01-15T10:30:00.123Z"),
    ];

    for (case_name, timestamp_str) in timestamp_test_cases {
        let mut data = HashMap::new();
        data.insert("firma".to_string(), create_string_value("Timestamp Test Company"));
        // Assuming there's a timestamp field called 'created_at'
        data.insert("created_at".to_string(), create_string_value(timestamp_str));

        let result = context.client.post_table_data(
            context.profile_name.clone(),
            context.table_name.clone(),
            data,
        ).await;

        match result {
            Ok(response) => {
                println!("Timestamp Test: {} succeeded", case_name);

                // Verify round-trip
                if let Ok(get_response) = context.client.get_table_data(
                    context.profile_name.clone(),
                    context.table_name.clone(),
                    response.inserted_id,
                ).await {
                    if let Some(retrieved_value) = get_response.data.get("created_at") {
                        println!("Timestamp Test: {} round-trip value: {}", case_name, retrieved_value);
                    }
                }
            }
            Err(e) => {
                println!("Timestamp Test: {} failed (field may not exist): {}", case_name, e);
            }
        }
    }
}

#[rstest]
#[tokio::test]
async fn test_invalid_data_types(#[future] form_test_context: FormTestContext) {
    let mut context = form_test_context.await;
    skip_if_backend_unavailable!();

    // Test invalid data type combinations
    let invalid_type_cases = vec![
        ("StringForNumber", "amount", create_string_value("not-a-number")),
        ("NumberForBoolean", "active", create_number_value(123.0)),
        ("StringForBoolean", "active", create_string_value("maybe")),
        ("InvalidTimestamp", "created_at", create_string_value("not-a-date")),
    ];

    for (case_name, field_name, invalid_value) in invalid_type_cases {
        let mut data = HashMap::new();
        data.insert("firma".to_string(), create_string_value("Invalid Type Test"));
        data.insert(field_name.to_string(), invalid_value);

        let result = context.client.post_table_data(
            context.profile_name.clone(),
            context.table_name.clone(),
            data,
        ).await;

        match result {
            Ok(_) => {
                println!("Invalid Type Test: {} passed (no type validation or field doesn't exist)", case_name);
            }
            Err(e) => {
                if let Some(status) = e.downcast_ref::<Status>() {
                    assert_eq!(status.code(), tonic::Code::InvalidArgument,
                        "Invalid data type should return InvalidArgument for case: {}", case_name);
                    println!("Invalid Type Test: {} correctly rejected - {}", case_name, status.message());
                }
            }
        }
    }
}
// ========================================================================
// FOREIGN KEY RELATIONSHIP TESTS
// ========================================================================

#[rstest]
#[tokio::test]
async fn test_foreign_key_valid_relationship(#[future] form_test_context: FormTestContext) {
    let mut context = form_test_context.await;
    skip_if_backend_unavailable!();

    // 1. Create a parent record first (e.g., company)
    let mut parent_data = HashMap::new();
    parent_data.insert("firma".to_string(), create_string_value("Parent Company"));

    let parent_result = context.client.post_table_data(
        context.profile_name.clone(),
        "companies".to_string(), // Assuming companies table exists
        parent_data,
    ).await;

    if let Ok(parent_response) = parent_result {
        let parent_id = parent_response.inserted_id;

        // 2. Create a child record that references the parent
        let mut child_data = HashMap::new();
        child_data.insert("name".to_string(), create_string_value("Child Record"));
        child_data.insert("company_id".to_string(), create_number_value(parent_id as f64));

        let child_result = context.client.post_table_data(
            context.profile_name.clone(),
            "contacts".to_string(), // Assuming contacts table exists
            child_data,
        ).await;

        match child_result {
            Ok(child_response) => {
                assert!(child_response.success, "Valid foreign key relationship should succeed");
                println!("Foreign Key Test: Valid relationship created - Parent ID: {}, Child ID: {}",
                    parent_id, child_response.inserted_id);
            }
            Err(e) => {
                println!("Foreign Key Test: Failed (tables may not exist or no FK constraint): {}", e);
            }
        }
    } else {
        println!("Foreign Key Test: Could not create parent record");
    }
}

#[rstest]
#[tokio::test]
async fn test_foreign_key_invalid_relationship(#[future] form_test_context: FormTestContext) {
    let mut context = form_test_context.await;
    skip_if_backend_unavailable!();

    // Try to create a child record with non-existent parent ID
    let mut invalid_child_data = HashMap::new();
    invalid_child_data.insert("name".to_string(), create_string_value("Orphan Record"));
    invalid_child_data.insert("company_id".to_string(), create_number_value(99999.0)); // Non-existent ID

    let result = context.client.post_table_data(
        context.profile_name.clone(),
        "contacts".to_string(),
        invalid_child_data,
    ).await;

    match result {
        Ok(_) => {
            println!("Foreign Key Test: Invalid relationship passed (no FK constraint configured)");
        }
        Err(e) => {
            if let Some(status) = e.downcast_ref::<Status>() {
                // Could be InvalidArgument or NotFound depending on implementation
                assert!(matches!(status.code(), tonic::Code::InvalidArgument | tonic::Code::NotFound),
                    "Invalid foreign key should return InvalidArgument or NotFound");
                println!("Foreign Key Test: Invalid relationship correctly rejected - {}", status.message());
            }
        }
    }
}
// ========================================================================
// DELETED RECORD INTERACTION TESTS
// ========================================================================

#[rstest]
#[tokio::test]
async fn test_update_deleted_record_behavior(#[future] form_test_context: FormTestContext) {
    let mut context = form_test_context.await;
    skip_if_backend_unavailable!();

    // 1. Create a record
    let initial_data = context.create_test_form_data();
    let post_result = context.client.post_table_data(
        context.profile_name.clone(),
        context.table_name.clone(),
        initial_data,
    ).await;

    if let Ok(post_response) = post_result {
        let record_id = post_response.inserted_id;
        println!("Deleted Record Test: Created record ID {}", record_id);

        // 2. Delete the record (soft delete)
        let delete_result = context.client.delete_table_data(
            context.profile_name.clone(),
            context.table_name.clone(),
            record_id,
        ).await;

        assert!(delete_result.is_ok(), "Delete should succeed");
        println!("Deleted Record Test: Soft-deleted record {}", record_id);

        // 3. Try to UPDATE the deleted record
        let mut update_data = HashMap::new();
        update_data.insert("firma".to_string(), create_string_value("Updated Deleted Record"));

        let update_result = context.client.put_table_data(
            context.profile_name.clone(),
            context.table_name.clone(),
            record_id,
            update_data,
        ).await;

        match update_result {
            Ok(_) => {
                // This might be a bug - updating deleted records should probably fail
                println!("Deleted Record Test: UPDATE on deleted record succeeded (potential bug?)");

                // Check if the record is still considered deleted
                let get_result = context.client.get_table_data(
                    context.profile_name.clone(),
                    context.table_name.clone(),
                    record_id,
                ).await;

                if get_result.is_err() {
                    println!("Deleted Record Test: Record still appears deleted after update");
                } else {
                    println!("Deleted Record Test: Record appears to be undeleted after update");
                }
            }
            Err(e) => {
                if let Some(status) = e.downcast_ref::<Status>() {
                    assert_eq!(status.code(), tonic::Code::NotFound,
                        "UPDATE on deleted record should return NotFound");
                    println!("Deleted Record Test: UPDATE correctly rejected on deleted record");
                }
            }
        }
    }
}

#[rstest]
#[tokio::test]
async fn test_delete_already_deleted_record(#[future] form_test_context: FormTestContext) {
    let mut context = form_test_context.await;
    skip_if_backend_unavailable!();

    // 1. Create and delete a record
    let initial_data = context.create_test_form_data();
    let post_result = context.client.post_table_data(
        context.profile_name.clone(),
        context.table_name.clone(),
        initial_data,
    ).await;

    if let Ok(post_response) = post_result {
        let record_id = post_response.inserted_id;

        // First deletion
        let delete_result1 = context.client.delete_table_data(
            context.profile_name.clone(),
            context.table_name.clone(),
            record_id,
        ).await;
        assert!(delete_result1.is_ok(), "First delete should succeed");

        // Second deletion (idempotent)
        let delete_result2 = context.client.delete_table_data(
            context.profile_name.clone(),
            context.table_name.clone(),
            record_id,
        ).await;

        assert!(delete_result2.is_ok(), "Second delete should succeed (idempotent)");
        if let Ok(response) = delete_result2 {
            assert!(response.success, "Delete should report success even for already-deleted record");
        }
        println!("Double Delete Test: Both deletions succeeded (idempotent behavior)");
    }
}
// ========================================================================
// VALIDATION AND BOUNDARY TESTS
// ========================================================================

#[rstest]
#[tokio::test]
async fn test_large_data_handling(#[future] form_test_context: FormTestContext) {
    let mut context = form_test_context.await;
    skip_if_backend_unavailable!();

    // Test with very large string values
    let large_string = "A".repeat(10000); // 10KB string
    let very_large_string = "B".repeat(100000); // 100KB string

    let test_cases = vec![
        ("Large", large_string),
        ("VeryLarge", very_large_string),
    ];

    for (case_name, large_value) in test_cases {
        let mut data = HashMap::new();
        data.insert("firma".to_string(), create_string_value(&large_value));

        let result = context.client.post_table_data(
            context.profile_name.clone(),
            context.table_name.clone(),
            data,
        ).await;

        match result {
            Ok(response) => {
                println!("Large Data Test: {} string handled successfully", case_name);

                // Verify round-trip
                if let Ok(get_response) = context.client.get_table_data(
                    context.profile_name.clone(),
                    context.table_name.clone(),
                    response.inserted_id,
                ).await {
                    if let Some(retrieved_value) = get_response.data.get("firma") {
                        assert_eq!(retrieved_value.len(), large_value.len(),
                            "Large string should survive round-trip for case: {}", case_name);
                    }
                }
            }
            Err(e) => {
                println!("Large Data Test: {} failed (may hit size limits): {}", case_name, e);
            }
        }
    }
}

#[rstest]
#[tokio::test]
async fn test_sql_injection_attempts(#[future] form_test_context: FormTestContext) {
    let mut context = form_test_context.await;
    skip_if_backend_unavailable!();

    // Test potential SQL injection strings
    let injection_attempts = vec![
        ("SingleQuote", "'; DROP TABLE users; --"),
        ("DoubleQuote", "\"; DROP TABLE users; --"),
        ("Union", "' UNION SELECT * FROM users --"),
        ("Comment", "/* malicious comment */"),
        ("Semicolon", "; DELETE FROM users;"),
    ];

    for (case_name, injection_string) in injection_attempts {
        let mut data = HashMap::new();
        data.insert("firma".to_string(), create_string_value(injection_string));
        data.insert("kz".to_string(), create_string_value("KZ123"));

        let result = context.client.post_table_data(
            context.profile_name.clone(),
            context.table_name.clone(),
            data,
        ).await;

        match result {
            Ok(response) => {
                println!("SQL Injection Test: {} handled safely (parameterized queries)", case_name);

                // Verify the malicious string was stored as-is (not executed)
                if let Ok(get_response) = context.client.get_table_data(
                    context.profile_name.clone(),
                    context.table_name.clone(),
                    response.inserted_id,
                ).await {
                    if let Some(retrieved_value) = get_response.data.get("firma") {
                        assert_eq!(retrieved_value, injection_string,
                            "Injection string should be stored literally for case: {}", case_name);
                    }
                }
            }
            Err(e) => {
                println!("SQL Injection Test: {} rejected: {}", case_name, e);
            }
        }
    }
}
#[rstest]
#[tokio::test]
async fn test_concurrent_operations_with_same_data(#[future] form_test_context: FormTestContext) {
    let context = form_test_context.await;
    skip_if_backend_unavailable!();

    // Test multiple concurrent operations with identical data
    let mut handles = Vec::new();
    let num_tasks = 10;

    for i in 0..num_tasks {
        let mut context_clone = context.clone();
        let handle = tokio::spawn(async move {
            let mut data = HashMap::new();
            data.insert("firma".to_string(), create_string_value("Concurrent Identical"));
            data.insert("kz".to_string(), create_string_value(&format!("SAME{:02}", i)));

            context_clone.client.post_table_data(
                context_clone.profile_name,
                context_clone.table_name,
                data,
            ).await
        });
        handles.push(handle);
    }

    // Wait for all to complete
    let mut success_count = 0;
    let mut inserted_ids = Vec::new();

    for (i, handle) in handles.into_iter().enumerate() {
        match handle.await {
            Ok(Ok(response)) => {
                success_count += 1;
                inserted_ids.push(response.inserted_id);
                println!("Concurrent Identical Data: Task {} succeeded with ID {}", i, response.inserted_id);
            }
            Ok(Err(e)) => {
                println!("Concurrent Identical Data: Task {} failed: {}", i, e);
            }
            Err(e) => {
                println!("Concurrent Identical Data: Task {} panicked: {}", i, e);
            }
        }
    }

    assert!(success_count > 0, "At least some concurrent operations should succeed");

    // Verify all IDs are unique
    let unique_ids: std::collections::HashSet<_> = inserted_ids.iter().collect();
    assert_eq!(unique_ids.len(), inserted_ids.len(), "All inserted IDs should be unique");

    println!("Concurrent Identical Data: {}/{} operations succeeded with unique IDs",
        success_count, num_tasks);
}

// ========================================================================
// PERFORMANCE AND STRESS TESTS
// ========================================================================

#[rstest]
#[tokio::test]
async fn test_bulk_operations_performance(#[future] form_test_context: FormTestContext) {
    let mut context = form_test_context.await;
    skip_if_backend_unavailable!();

    let operation_count = 50;
    let start_time = std::time::Instant::now();

    let mut successful_operations = 0;
    let mut created_ids = Vec::new();

    // Bulk create
    for i in 0..operation_count {
        let mut data = HashMap::new();
        data.insert("firma".to_string(), create_string_value(&format!("Bulk Company {}", i)));
        data.insert("kz".to_string(), create_string_value(&format!("BLK{:02}", i)));

        if let Ok(response) = context.client.post_table_data(
            context.profile_name.clone(),
            context.table_name.clone(),
            data,
        ).await {
            successful_operations += 1;
            created_ids.push(response.inserted_id);
        }
    }

    let create_duration = start_time.elapsed();
    println!("Bulk Performance: Created {} records in {:?}", successful_operations, create_duration);

    // Bulk read
    let read_start = std::time::Instant::now();
    let mut successful_reads = 0;

    for &record_id in &created_ids {
        if context.client.get_table_data(
            context.profile_name.clone(),
            context.table_name.clone(),
            record_id,
        ).await.is_ok() {
            successful_reads += 1;
        }
    }

    let read_duration = read_start.elapsed();
    println!("Bulk Performance: Read {} records in {:?}", successful_reads, read_duration);

    // Performance assertions
    assert!(successful_operations > operation_count * 8 / 10,
        "At least 80% of operations should succeed");
    assert!(create_duration.as_secs() < 60,
        "Bulk operations should complete in reasonable time");

    println!("Bulk Performance Test: {}/{} creates, {}/{} reads successful",
        successful_operations, operation_count, successful_reads, created_ids.len());
}
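Every test above starts with `skip_if_backend_unavailable!()`, a macro exported by the shared test support code that is not part of this diff. One possible shape, assuming availability is signalled through an environment variable (the variable name is hypothetical, shown for illustration only):

// Hypothetical sketch; the real macro is defined in the suppressed form_request_tests.rs.
macro_rules! skip_if_backend_unavailable {
    () => {
        if std::env::var("TEST_BACKEND_AVAILABLE").as_deref() != Ok("1") {
            println!("Backend not available; skipping test.");
            return;
        }
    };
}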
1 client/tests/form/requests/mod.rs Normal file
@@ -0,0 +1 @@
pub mod form_request_tests;

3 client/tests/mod.rs Normal file
@@ -0,0 +1,3 @@
// tests/mod.rs

pub mod form;
@@ -5,9 +5,14 @@ edition.workspace = true
 license.workspace = true
 
 [dependencies]
+prost-types = { workspace = true }
+
 tonic = "0.13.0"
 prost = "0.13.5"
 serde = { version = "1.0.219", features = ["derive"] }
 
+# Search
+tantivy = { workspace = true }
+
 [build-dependencies]
 tonic-build = "0.13.0"
@@ -10,11 +10,11 @@ message SearchRequest {
   string table_name = 1;
   string query = 2;
 }
 
 message SearchResponse {
   message Hit {
-    int64 id = 1; // The PostgreSQL row ID
+    int64 id = 1; // PostgreSQL row ID
     float score = 2;
+    string content_json = 3;
   }
   repeated Hit hits = 1;
 }
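The new content_json field carries the whole matched row as a JSON object, so a client can render a hit without a second lookup. A small client-side sketch (assumes the generated Hit type from common::proto is in scope; "firma" is just an example column):

use serde_json::Value as JsonValue;

// Sketch only: parse the row JSON attached to a search hit and pull one column out.
fn hit_summary(hit: &Hit) -> Option<String> {
    let record: JsonValue = serde_json::from_str(&hit.content_json).ok()?;
    Some(format!(
        "row {} (score {:.2}): {}",
        hit.id,
        hit.score,
        record.get("firma").and_then(|v| v.as_str()).unwrap_or("<missing>")
    ))
}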
@@ -3,6 +3,7 @@ syntax = "proto3";
 package multieko2.tables_data;
 
 import "common.proto";
+import "google/protobuf/struct.proto";
 
 service TablesData {
   rpc PostTableData (PostTableDataRequest) returns (PostTableDataResponse);
@@ -16,7 +17,7 @@ service TablesData {
 message PostTableDataRequest {
   string profile_name = 1;
   string table_name = 2;
-  map<string, string> data = 3;
+  map<string, google.protobuf.Value> data = 3;
 }
 
 message PostTableDataResponse {
@@ -29,7 +30,7 @@ message PutTableDataRequest {
   string profile_name = 1;
   string table_name = 2;
   int64 id = 3;
-  map<string, string> data = 4;
+  map<string, google.protobuf.Value> data = 4;
 }
 
 message PutTableDataResponse {
@@ -1,4 +1,7 @@
 // common/src/lib.rs
+
+pub mod search;
+
 pub mod proto {
     pub mod multieko2 {
         pub mod adresar {
Binary file not shown.

@@ -13,13 +13,15 @@ pub struct SearchResponse {
 }
 /// Nested message and enum types in `SearchResponse`.
 pub mod search_response {
-    #[derive(Clone, Copy, PartialEq, ::prost::Message)]
+    #[derive(Clone, PartialEq, ::prost::Message)]
     pub struct Hit {
-        /// The PostgreSQL row ID
+        /// PostgreSQL row ID
         #[prost(int64, tag = "1")]
        pub id: i64,
         #[prost(float, tag = "2")]
         pub score: f32,
+        #[prost(string, tag = "3")]
+        pub content_json: ::prost::alloc::string::String,
     }
 }
 /// Generated client implementations.
@@ -5,10 +5,10 @@ pub struct PostTableDataRequest {
     pub profile_name: ::prost::alloc::string::String,
     #[prost(string, tag = "2")]
     pub table_name: ::prost::alloc::string::String,
-    #[prost(map = "string, string", tag = "3")]
+    #[prost(map = "string, message", tag = "3")]
     pub data: ::std::collections::HashMap<
         ::prost::alloc::string::String,
-        ::prost::alloc::string::String,
+        ::prost_types::Value,
     >,
 }
 #[derive(Clone, PartialEq, ::prost::Message)]
@@ -28,10 +28,10 @@ pub struct PutTableDataRequest {
     pub table_name: ::prost::alloc::string::String,
     #[prost(int64, tag = "3")]
     pub id: i64,
-    #[prost(map = "string, string", tag = "4")]
+    #[prost(map = "string, message", tag = "4")]
     pub data: ::std::collections::HashMap<
         ::prost::alloc::string::String,
-        ::prost::alloc::string::String,
+        ::prost_types::Value,
     >,
 }
 #[derive(Clone, PartialEq, ::prost::Message)]
78 common/src/search.rs Normal file
@@ -0,0 +1,78 @@
// common/src/search.rs

use tantivy::schema::*;
use tantivy::tokenizer::*;
use tantivy::Index;

/// Creates a hybrid Slovak search schema with optimized prefix fields.
pub fn create_search_schema() -> Schema {
    let mut schema_builder = Schema::builder();

    schema_builder.add_u64_field("pg_id", INDEXED | STORED);

    // FIELD 1: For prefixes (1-4 chars).
    let short_prefix_indexing = TextFieldIndexing::default()
        .set_tokenizer("slovak_prefix_edge")
        .set_index_option(IndexRecordOption::WithFreqsAndPositions);
    let short_prefix_options = TextOptions::default()
        .set_indexing_options(short_prefix_indexing)
        .set_stored();
    schema_builder.add_text_field("prefix_edge", short_prefix_options);

    // FIELD 2: For the full word.
    let full_word_indexing = TextFieldIndexing::default()
        .set_tokenizer("slovak_prefix_full")
        .set_index_option(IndexRecordOption::WithFreqsAndPositions);
    let full_word_options = TextOptions::default()
        .set_indexing_options(full_word_indexing)
        .set_stored();
    schema_builder.add_text_field("prefix_full", full_word_options);

    // NGRAM FIELD: For substring matching.
    let ngram_field_indexing = TextFieldIndexing::default()
        .set_tokenizer("slovak_ngram")
        .set_index_option(IndexRecordOption::WithFreqsAndPositions);
    let ngram_options = TextOptions::default()
        .set_indexing_options(ngram_field_indexing)
        .set_stored();
    schema_builder.add_text_field("text_ngram", ngram_options);

    schema_builder.build()
}

/// Registers all necessary Slovak tokenizers with the index.
///
/// This must be called by ANY process that opens the index
/// to ensure the tokenizers are loaded into memory.
pub fn register_slovak_tokenizers(index: &Index) -> tantivy::Result<()> {
    let tokenizer_manager = index.tokenizers();

    // TOKENIZER for `prefix_edge`: Edge N-gram (1-4 chars)
    let edge_tokenizer =
        TextAnalyzer::builder(NgramTokenizer::new(1, 4, true)?)
            .filter(RemoveLongFilter::limit(40))
            .filter(LowerCaser)
            .filter(AsciiFoldingFilter)
            .build();
    tokenizer_manager.register("slovak_prefix_edge", edge_tokenizer);

    // TOKENIZER for `prefix_full`: Simple word tokenizer
    let full_tokenizer =
        TextAnalyzer::builder(SimpleTokenizer::default())
            .filter(RemoveLongFilter::limit(40))
            .filter(LowerCaser)
            .filter(AsciiFoldingFilter)
            .build();
    tokenizer_manager.register("slovak_prefix_full", full_tokenizer);

    // NGRAM TOKENIZER: For substring matching.
    let ngram_tokenizer =
        TextAnalyzer::builder(NgramTokenizer::new(3, 3, false)?)
            .filter(RemoveLongFilter::limit(40))
            .filter(LowerCaser)
            .filter(AsciiFoldingFilter)
            .build();
    tokenizer_manager.register("slovak_ngram", ngram_tokenizer);

    Ok(())
}
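Because Tantivy does not persist tokenizer definitions inside the index directory, any process that opens an index built with this schema has to call register_slovak_tokenizers again. A minimal usage sketch under that assumption (open_or_create_index is a hypothetical helper name; the ./tantivy_indexes/<table> layout mirrors what server/src/lib.rs uses):

use std::path::Path;
use common::search::{create_search_schema, register_slovak_tokenizers};
use tantivy::Index;

// Sketch only: open an existing per-table index or create a new one, then
// register the Slovak tokenizers so the stored tokenizer names resolve.
fn open_or_create_index(table_name: &str) -> tantivy::Result<Index> {
    let path = Path::new("./tantivy_indexes").join(table_name);
    let index = if path.exists() {
        Index::open_in_dir(&path)?
    } else {
        std::fs::create_dir_all(&path).map_err(tantivy::TantivyError::from)?;
        Index::create_in_dir(&path, create_search_schema())?
    };
    register_slovak_tokenizers(&index)?;
    Ok(index)
}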
@@ -16,3 +16,4 @@ tantivy = { workspace = true }
 
 common = { path = "../common" }
 tonic-reflection = "0.13.1"
+sqlx = { version = "0.8.6", features = ["postgres"] }
@@ -1,17 +1,67 @@
 // src/lib.rs
 
+use std::collections::HashMap;
 use std::path::Path;
-use tantivy::{collector::TopDocs, query::QueryParser, Index, TantivyDocument};
+use tantivy::collector::TopDocs;
+use tantivy::query::{
+    BooleanQuery, BoostQuery, FuzzyTermQuery, Occur, Query, QueryParser,
+    TermQuery,
+};
+use tantivy::schema::{IndexRecordOption, Value};
+use tantivy::{Index, TantivyDocument, Term};
 use tonic::{Request, Response, Status};
 
 use common::proto::multieko2::search::{
     search_response::Hit, SearchRequest, SearchResponse,
 };
-use common::proto::multieko2::search::searcher_server::Searcher;
 pub use common::proto::multieko2::search::searcher_server::SearcherServer;
-use tantivy::schema::Value;
+use common::proto::multieko2::search::searcher_server::Searcher;
+use common::search::register_slovak_tokenizers;
+use sqlx::{PgPool, Row};
+use tracing::info;
 
-pub struct SearcherService;
+// We need to hold the database pool in our service struct.
+pub struct SearcherService {
+    pub pool: PgPool,
+}
+
+// normalize_slovak_text function remains unchanged...
+fn normalize_slovak_text(text: &str) -> String {
+    // ... function content is unchanged ...
+    text.chars()
+        .map(|c| match c {
+            'á' | 'à' | 'â' | 'ä' | 'ă' | 'ā' => 'a',
+            'Á' | 'À' | 'Â' | 'Ä' | 'Ă' | 'Ā' => 'A',
+            'é' | 'è' | 'ê' | 'ë' | 'ě' | 'ē' => 'e',
+            'É' | 'È' | 'Ê' | 'Ë' | 'Ě' | 'Ē' => 'E',
+            'í' | 'ì' | 'î' | 'ï' | 'ī' => 'i',
+            'Í' | 'Ì' | 'Î' | 'Ï' | 'Ī' => 'I',
+            'ó' | 'ò' | 'ô' | 'ö' | 'ō' | 'ő' => 'o',
+            'Ó' | 'Ò' | 'Ô' | 'Ö' | 'Ō' | 'Ő' => 'O',
+            'ú' | 'ù' | 'û' | 'ü' | 'ū' | 'ű' => 'u',
+            'Ú' | 'Ù' | 'Û' | 'Ü' | 'Ū' | 'Ű' => 'U',
+            'ý' | 'ỳ' | 'ŷ' | 'ÿ' => 'y',
+            'Ý' | 'Ỳ' | 'Ŷ' | 'Ÿ' => 'Y',
+            'č' => 'c',
+            'Č' => 'C',
+            'ď' => 'd',
+            'Ď' => 'D',
+            'ľ' => 'l',
+            'Ľ' => 'L',
+            'ň' => 'n',
+            'Ň' => 'N',
+            'ř' => 'r',
+            'Ř' => 'R',
+            'š' => 's',
+            'Š' => 'S',
+            'ť' => 't',
+            'Ť' => 'T',
+            'ž' => 'z',
+            'Ž' => 'Z',
+            _ => c,
+        })
+        .collect()
+}
+
 #[tonic::async_trait]
 impl Searcher for SearcherService {
@@ -23,13 +73,50 @@ impl Searcher for SearcherService {
         let table_name = req.table_name;
         let query_str = req.query;
 
+        // --- MODIFIED LOGIC ---
+        // If the query is empty, fetch the 5 most recent records.
         if query_str.trim().is_empty() {
-            return Err(Status::invalid_argument("Query cannot be empty"));
+            info!(
+                "Empty query for table '{}'. Fetching default results.",
+                table_name
+            );
+            let qualified_table = format!("gen.\"{}\"", table_name);
+            let sql = format!(
+                "SELECT id, to_jsonb(t) AS data FROM {} t ORDER BY id DESC LIMIT 5",
+                qualified_table
+            );
+
+            let rows = sqlx::query(&sql)
+                .fetch_all(&self.pool)
+                .await
+                .map_err(|e| {
+                    Status::internal(format!(
+                        "DB query for default results failed: {}",
+                        e
+                    ))
+                })?;
+
+            let hits: Vec<Hit> = rows
+                .into_iter()
+                .map(|row| {
+                    let id: i64 = row.try_get("id").unwrap_or_default();
+                    let json_data: serde_json::Value =
+                        row.try_get("data").unwrap_or_default();
+                    Hit {
+                        id,
+                        // Score is 0.0 as this is not a relevance-ranked search
+                        score: 0.0,
+                        content_json: json_data.to_string(),
+                    }
+                })
+                .collect();
+
+            info!("--- SERVER: Successfully processed empty query. Returning {} default hits. ---", hits.len());
+            return Ok(Response::new(SearchResponse { hits }));
         }
+        // --- END OF MODIFIED LOGIC ---
 
-        // Open the index for this table
         let index_path = Path::new("./tantivy_indexes").join(&table_name);
 
         if !index_path.exists() {
             return Err(Status::not_found(format!(
                 "No search index found for table '{}'",
@@ -37,72 +124,178 @@ impl Searcher for SearcherService {
|
|||||||
)));
|
)));
|
||||||
}
|
}
|
||||||
|
|
||||||
// Open the index
|
let index = Index::open_in_dir(&index_path)
|
||||||
let index = Index::open_in_dir(&index_path).map_err(|e| {
|
.map_err(|e| Status::internal(format!("Failed to open index: {}", e)))?;
|
||||||
Status::internal(format!("Failed to open index: {}", e))
|
|
||||||
|
register_slovak_tokenizers(&index).map_err(|e| {
|
||||||
|
Status::internal(format!("Failed to register Slovak tokenizers: {}", e))
|
||||||
})?;
|
})?;
|
||||||
|
|
||||||
// Create reader and searcher
|
|
||||||
let reader = index.reader().map_err(|e| {
|
let reader = index.reader().map_err(|e| {
|
||||||
Status::internal(format!("Failed to create index reader: {}", e))
|
Status::internal(format!("Failed to create index reader: {}", e))
|
||||||
})?;
|
})?;
|
||||||
|
|
||||||
let searcher = reader.searcher();
|
let searcher = reader.searcher();
|
||||||
let schema = index.schema();
|
let schema = index.schema();
|
||||||
|
|
||||||
// Get the fields we need
|
let pg_id_field = schema.get_field("pg_id").map_err(|_| {
|
||||||
let all_text_field = match schema.get_field("all_text") {
|
Status::internal("Schema is missing the 'pg_id' field.")
|
||||||
Ok(field) => field,
|
|
||||||
Err(_) => {
|
|
||||||
return Err(Status::internal(
|
|
||||||
"Schema is missing the 'all_text' field.",
|
|
||||||
))
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
let pg_id_field = match schema.get_field("pg_id") {
|
|
||||||
Ok(field) => field,
|
|
||||||
Err(_) => {
|
|
||||||
return Err(Status::internal(
|
|
||||||
"Schema is missing the 'pg_id' field.",
|
|
||||||
))
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
// Parse the query
|
|
||||||
let query_parser =
|
|
||||||
QueryParser::for_index(&index, vec![all_text_field]);
|
|
||||||
let query = query_parser.parse_query(&query_str).map_err(|e| {
|
|
||||||
Status::invalid_argument(format!("Invalid query: {}", e))
|
|
||||||
})?;
|
})?;
|
||||||
|
|
||||||
// Perform the search
|
// --- Query Building Logic (no changes here) ---
|
||||||
+        let prefix_edge_field = schema.get_field("prefix_edge").unwrap();
+        let prefix_full_field = schema.get_field("prefix_full").unwrap();
+        let text_ngram_field = schema.get_field("text_ngram").unwrap();
+        let normalized_query = normalize_slovak_text(&query_str);
+        let words: Vec<&str> = normalized_query.split_whitespace().collect();
+        if words.is_empty() {
+            return Ok(Response::new(SearchResponse { hits: vec![] }));
+        }
+        let mut query_layers: Vec<(Occur, Box<dyn Query>)> = Vec::new();
+        // ... all your query building layers remain exactly the same ...
+        // ===============================
+        // LAYER 1: PREFIX MATCHING (HIGHEST PRIORITY, Boost: 4.0)
+        // ===============================
+        {
+            let mut must_clauses: Vec<(Occur, Box<dyn Query>)> = Vec::new();
+            for word in &words {
+                let edge_term =
+                    Term::from_field_text(prefix_edge_field, word);
+                let full_term =
+                    Term::from_field_text(prefix_full_field, word);
+
+                let per_word_query = BooleanQuery::new(vec![
+                    (
+                        Occur::Should,
+                        Box::new(TermQuery::new(
+                            edge_term,
+                            IndexRecordOption::Basic,
+                        )),
+                    ),
+                    (
+                        Occur::Should,
+                        Box::new(TermQuery::new(
+                            full_term,
+                            IndexRecordOption::Basic,
+                        )),
+                    ),
+                ]);
+                must_clauses.push((Occur::Must, Box::new(per_word_query) as Box<dyn Query>));
+            }
+
+            if !must_clauses.is_empty() {
+                let prefix_query = BooleanQuery::new(must_clauses);
+                let boosted_query =
+                    BoostQuery::new(Box::new(prefix_query), 4.0);
+                query_layers.push((Occur::Should, Box::new(boosted_query)));
+            }
+        }
+
+        // ===============================
+        // LAYER 2: FUZZY MATCHING (HIGH PRIORITY, Boost: 3.0)
+        // ===============================
+        {
+            let last_word = words.last().unwrap();
+            let fuzzy_term =
+                Term::from_field_text(prefix_full_field, last_word);
+            let fuzzy_query = FuzzyTermQuery::new(fuzzy_term, 2, true);
+            let boosted_query = BoostQuery::new(Box::new(fuzzy_query), 3.0);
+            query_layers.push((Occur::Should, Box::new(boosted_query)));
+        }
+
+        // ===============================
+        // LAYER 3: PHRASE MATCHING WITH SLOP (MEDIUM PRIORITY, Boost: 2.0)
+        // ===============================
+        if words.len() > 1 {
+            let slop_parser =
+                QueryParser::for_index(&index, vec![prefix_full_field]);
+            let slop_query_str = format!("\"{}\"~3", normalized_query);
+            if let Ok(slop_query) = slop_parser.parse_query(&slop_query_str) {
+                let boosted_query = BoostQuery::new(slop_query, 2.0);
+                query_layers.push((Occur::Should, Box::new(boosted_query)));
+            }
+        }
+
+        // ===============================
+        // LAYER 4: NGRAM SUBSTRING MATCHING (LOWEST PRIORITY, Boost: 1.0)
+        // ===============================
+        {
+            let ngram_parser =
+                QueryParser::for_index(&index, vec![text_ngram_field]);
+            if let Ok(ngram_query) =
+                ngram_parser.parse_query(&normalized_query)
+            {
+                let boosted_query = BoostQuery::new(ngram_query, 1.0);
+                query_layers.push((Occur::Should, Box::new(boosted_query)));
+            }
+        }
+        let master_query = BooleanQuery::new(query_layers);
+        // --- End of Query Building Logic ---
+
         let top_docs = searcher
-            .search(&query, &TopDocs::with_limit(100))
+            .search(&master_query, &TopDocs::with_limit(100))
             .map_err(|e| Status::internal(format!("Search failed: {}", e)))?;
 
-        // Convert results to our response format
-        let mut hits = Vec::new();
-        for (score, doc_address) in top_docs {
-            let doc: TantivyDocument = searcher.doc(doc_address).map_err(
-                |e| {
-                    Status::internal(format!(
-                        "Failed to retrieve document: {}",
-                        e
-                    ))
-                },
-            )?;
+        if top_docs.is_empty() {
+            return Ok(Response::new(SearchResponse { hits: vec![] }));
+        }
+
+        // --- NEW LOGIC: Fetch from DB and combine results ---
+
+        // Step 1: Extract (score, pg_id) from Tantivy results.
+        let mut scored_ids: Vec<(f32, u64)> = Vec::new();
+        for (score, doc_address) in top_docs {
+            let doc: TantivyDocument = searcher.doc(doc_address).map_err(|e| {
+                Status::internal(format!("Failed to retrieve document: {}", e))
+            })?;
             if let Some(pg_id_value) = doc.get_first(pg_id_field) {
                 if let Some(pg_id) = pg_id_value.as_u64() {
-                    hits.push(Hit {
-                        id: pg_id as i64,
-                        score,
-                    });
+                    scored_ids.push((score, pg_id));
                 }
             }
         }
 
+        // Step 2: Fetch all corresponding rows from Postgres in a single query.
+        let pg_ids: Vec<i64> =
+            scored_ids.iter().map(|(_, id)| *id as i64).collect();
+        let qualified_table = format!("gen.\"{}\"", table_name);
+        let query_str = format!(
+            "SELECT id, to_jsonb(t) AS data FROM {} t WHERE id = ANY($1)",
+            qualified_table
+        );
+
+        let rows = sqlx::query(&query_str)
+            .bind(&pg_ids)
+            .fetch_all(&self.pool)
+            .await
+            .map_err(|e| {
+                Status::internal(format!("Database query failed: {}", e))
+            })?;
+
+        // Step 3: Map the database results by ID for quick lookup.
+        let mut content_map: HashMap<i64, String> = HashMap::new();
+        for row in rows {
+            let id: i64 = row.try_get("id").unwrap_or(0);
+            let json_data: serde_json::Value =
+                row.try_get("data").unwrap_or(serde_json::Value::Null);
+            content_map.insert(id, json_data.to_string());
+        }
+
+        // Step 4: Build the final response, combining Tantivy scores with PG content.
+        let hits: Vec<Hit> = scored_ids
+            .into_iter()
+            .filter_map(|(score, pg_id)| {
+                content_map
+                    .get(&(pg_id as i64))
+                    .map(|content_json| Hit {
+                        id: pg_id as i64,
+                        score,
+                        content_json: content_json.clone(),
+                    })
+            })
+            .collect();
+
+        info!("--- SERVER: Successfully processed search. Returning {} hits. ---", hits.len());
+
         let response = SearchResponse { hits };
         Ok(Response::new(response))
     }
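Note: the following is a minimal standalone sketch of the boosted Should-layer pattern used in the search handler above. It is not part of the diff; it uses an in-memory tantivy index, a single illustrative "prefix_full" field, and a tantivy 0.22-style API, all of which are assumptions for demonstration only.

// Sketch: combine an exact TermQuery layer (boost 4.0) and a fuzzy layer (boost 3.0)
// into one BooleanQuery, mirroring the layering in the handler above.
use tantivy::collector::TopDocs;
use tantivy::query::{BooleanQuery, BoostQuery, FuzzyTermQuery, Occur, Query, TermQuery};
use tantivy::schema::{IndexRecordOption, Schema, Term, STORED, TEXT};
use tantivy::{doc, Index};

fn main() -> tantivy::Result<()> {
    let mut builder = Schema::builder();
    let field = builder.add_text_field("prefix_full", TEXT | STORED);
    let index = Index::create_in_ram(builder.build());

    let mut writer = index.writer(15_000_000)?;
    writer.add_document(doc!(field => "novak bratislava"))?;
    writer.commit()?;

    // Exact match layer, highest boost.
    let exact = TermQuery::new(Term::from_field_text(field, "novak"), IndexRecordOption::Basic);
    // Fuzzy match layer for the (misspelled) last word, lower boost.
    let fuzzy = FuzzyTermQuery::new(Term::from_field_text(field, "novk"), 2, true);

    let layers: Vec<(Occur, Box<dyn Query>)> = vec![
        (Occur::Should, Box::new(BoostQuery::new(Box::new(exact), 4.0))),
        (Occur::Should, Box::new(BoostQuery::new(Box::new(fuzzy), 3.0))),
    ];
    let master = BooleanQuery::new(layers);

    let reader = index.reader()?;
    let hits = reader.searcher().search(&master, &TopDocs::with_limit(10))?;
    assert!(!hits.is_empty());
    Ok(())
}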
@@ -10,12 +10,13 @@ search = { path = "../search" }
 
 anyhow = { workspace = true }
 tantivy = { workspace = true }
+prost-types = { workspace = true }
 chrono = { version = "0.4.40", features = ["serde"] }
 dotenvy = "0.15.7"
 prost = "0.13.5"
 serde = { version = "1.0.219", features = ["derive"] }
 serde_json = "1.0.140"
-sqlx = { version = "0.8.5", features = ["chrono", "postgres", "runtime-tokio", "runtime-tokio-native-tls", "time", "uuid"] }
+sqlx = { version = "0.8.5", features = ["chrono", "postgres", "runtime-tokio", "runtime-tokio-native-tls", "rust_decimal", "time", "uuid"] }
 tokio = { version = "1.44.2", features = ["full", "macros"] }
 tonic = "0.13.0"
 tonic-reflection = "0.13.0"
@@ -31,6 +32,9 @@ bcrypt = "0.17.0"
 validator = { version = "0.20.0", features = ["derive"] }
 uuid = { version = "1.16.0", features = ["serde", "v4"] }
 jsonwebtoken = "9.3.1"
+rust-stemmers = "1.2.0"
+rust_decimal = { version = "1.37.2", features = ["maths", "serde"] }
+rust_decimal_macros = "1.37.1"
 
 [lib]
 name = "server"
@@ -40,3 +44,5 @@ path = "src/lib.rs"
 tokio = { version = "1.44", features = ["full", "test-util"] }
 rstest = "0.25.0"
 lazy_static = "1.5.0"
+rand = "0.9.1"
+futures = "0.3.31"
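The "rust_decimal" feature added to the sqlx dependency above lets Postgres NUMERIC columns decode directly into Decimal. A minimal sketch follows; the gen."invoices" table and its amount column are purely illustrative and not part of this repository.

use rust_decimal::Decimal;
use rust_decimal_macros::dec;
use sqlx::{PgPool, Row};

// Sketch only: sums a hypothetical NUMERIC column using the sqlx "rust_decimal" feature.
async fn total_amount(pool: &PgPool) -> anyhow::Result<Decimal> {
    let rows = sqlx::query(r#"SELECT amount FROM gen."invoices""#)
        .fetch_all(pool)
        .await?;

    let mut total = dec!(0);
    for row in rows {
        // NUMERIC decodes into Decimal without going through f64.
        let amount: Decimal = row.try_get("amount")?;
        total += amount;
    }
    Ok(total)
}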
13 server/Makefile (new file)
@@ -0,0 +1,13 @@
+# Makefile
+
+test: reset_db run_tests
+
+reset_db:
+	@echo "Resetting test database..."
+	@./scripts/reset_test_db.sh
+
+run_tests:
+	@echo "Running tests..."
+	@cargo test --test mod -- --test-threads=1
+
+.PHONY: test
@@ -1,24 +0,0 @@
|
|||||||
-- Add migration script here
|
|
||||||
CREATE TABLE adresar (
|
|
||||||
id BIGSERIAL PRIMARY KEY,
|
|
||||||
deleted BOOLEAN NOT NULL DEFAULT FALSE,
|
|
||||||
firma TEXT NOT NULL,
|
|
||||||
kz TEXT,
|
|
||||||
drc TEXT,
|
|
||||||
ulica TEXT,
|
|
||||||
psc TEXT,
|
|
||||||
mesto TEXT,
|
|
||||||
stat TEXT,
|
|
||||||
banka TEXT,
|
|
||||||
ucet TEXT,
|
|
||||||
skladm TEXT,
|
|
||||||
ico TEXT,
|
|
||||||
kontakt TEXT,
|
|
||||||
telefon TEXT,
|
|
||||||
skladu TEXT,
|
|
||||||
fax TEXT,
|
|
||||||
created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP
|
|
||||||
);
|
|
||||||
|
|
||||||
CREATE INDEX idx_adresar_firma ON adresar (firma);
|
|
||||||
CREATE INDEX idx_adresar_mesto ON adresar (mesto);
|
|
||||||
@@ -1,22 +0,0 @@
|
|||||||
-- Add migration script here
|
|
||||||
CREATE TABLE uctovnictvo (
|
|
||||||
id BIGSERIAL PRIMARY KEY,
|
|
||||||
deleted BOOLEAN NOT NULL DEFAULT FALSE,
|
|
||||||
adresar_id BIGINT NOT NULL REFERENCES adresar(id), -- Link to adresar table
|
|
||||||
c_dokladu TEXT NOT NULL,
|
|
||||||
datum DATE NOT NULL,
|
|
||||||
c_faktury TEXT NOT NULL,
|
|
||||||
obsah TEXT,
|
|
||||||
stredisko TEXT,
|
|
||||||
c_uctu TEXT,
|
|
||||||
md TEXT,
|
|
||||||
identif TEXT,
|
|
||||||
poznanka TEXT,
|
|
||||||
firma TEXT NOT NULL,
|
|
||||||
created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP
|
|
||||||
);
|
|
||||||
|
|
||||||
CREATE INDEX idx_uctovnictvo_adresar_id ON uctovnictvo (adresar_id);
|
|
||||||
CREATE INDEX idx_uctovnictvo_firma ON uctovnictvo (firma);
|
|
||||||
CREATE INDEX idx_uctovnictvo_c_dokladu ON uctovnictvo (c_dokladu);
|
|
||||||
CREATE INDEX idx_uctovnictvo_poznanka ON uctovnictvo (poznanka);
|
|
||||||
@@ -1,9 +1,12 @@
 -- Add migration script here
-CREATE TABLE profiles (
+CREATE TABLE schemas (
     id BIGSERIAL PRIMARY KEY,
     name TEXT NOT NULL UNIQUE,
-    created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP
+    created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP,
+    description TEXT,
+    is_active BOOLEAN DEFAULT TRUE
 );
 
 -- Create default profile for existing data
-INSERT INTO profiles (name) VALUES ('default');
+INSERT INTO schemas (name) VALUES ('default');
+CREATE SCHEMA IF NOT EXISTS "default";
@@ -1,4 +1,5 @@
 -- Main table definitions
+
 CREATE TABLE table_definitions (
     id BIGSERIAL PRIMARY KEY,
     deleted BOOLEAN NOT NULL DEFAULT FALSE,
@@ -6,7 +7,7 @@ CREATE TABLE table_definitions (
     columns JSONB NOT NULL,
     indexes JSONB NOT NULL,
     created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP,
-    profile_id BIGINT NOT NULL REFERENCES profiles(id) DEFAULT 1
+    schema_id BIGINT NOT NULL REFERENCES schemas(id)
 );
 
 -- Relationship table for multiple links
@@ -18,9 +19,10 @@ CREATE TABLE table_definition_links (
     PRIMARY KEY (source_table_id, linked_table_id)
 );
 
--- Create composite unique index for profile+table combination
+-- Create composite unique index for schema+table combination
-CREATE UNIQUE INDEX idx_table_definitions_profile_table
+CREATE UNIQUE INDEX idx_table_definitions_schema_table
-ON table_definitions (profile_id, table_name);
+ON table_definitions (schema_id, table_name);
 
 CREATE INDEX idx_links_source ON table_definition_links (source_table_id);
 CREATE INDEX idx_links_target ON table_definition_links (linked_table_id);
 
@@ -8,7 +8,7 @@ CREATE TABLE table_scripts (
     script TEXT NOT NULL,
     description TEXT,
     created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
-    profile_id BIGINT NOT NULL REFERENCES profiles(id) DEFAULT 1,
+    schema_id BIGINT NOT NULL REFERENCES schemas(id),
     UNIQUE(table_definitions_id, target_column)
 );
 
@@ -1,3 +0,0 @@
--- Add migration script here
-
-CREATE SCHEMA IF NOT EXISTS gen;
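With profile_id renamed to schema_id above, lookups against table_definitions key on the new column. A hedged sketch follows; it only assumes the columns visible in the migration (id, table_name, schema_id, deleted) and is not a handler from this repository.

use sqlx::PgPool;

// Illustrative lookup against the renamed schema_id column.
async fn table_ids_for_schema(pool: &PgPool, schema_id: i64) -> sqlx::Result<Vec<(i64, String)>> {
    sqlx::query_as::<_, (i64, String)>(
        "SELECT id, table_name FROM table_definitions
         WHERE schema_id = $1 AND deleted = FALSE
         ORDER BY table_name",
    )
    .bind(schema_id)
    .fetch_all(pool)
    .await
}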
9 server/scripts/reset_test_db.sh (new executable file)
@@ -0,0 +1,9 @@
+#!/bin/bash
+# scripts/reset_test_db.sh
+
+DATABASE_URL=${TEST_DATABASE_URL:-"postgres://multi_psql_dev:3@localhost:5432/multi_rust_test"}
+
+echo "Reset db script"
+yes | sqlx database drop --database-url "$DATABASE_URL"
+sqlx database create --database-url "$DATABASE_URL"
+echo "Test database reset complete."
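The reset script above pairs with the `make test` target. A sketch of how an integration test might target the same database follows; the fallback URL simply mirrors the script and the test body is illustrative, not taken from the repository.

use sqlx::PgPool;

// Sketch of a test against the database prepared by scripts/reset_test_db.sh.
#[tokio::test]
async fn connects_to_reset_test_db() -> anyhow::Result<()> {
    let url = std::env::var("TEST_DATABASE_URL").unwrap_or_else(|_| {
        "postgres://multi_psql_dev:3@localhost:5432/multi_rust_test".to_string()
    });
    let pool = PgPool::connect(&url).await?;

    // The freshly reset database should accept a trivial query.
    let one: i32 = sqlx::query_scalar("SELECT 1").fetch_one(&pool).await?;
    assert_eq!(one, 1);
    Ok(())
}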
@@ -1,156 +0,0 @@
|
|||||||
❯ grpcurl -plaintext -d '{"id": 1}' localhost:50051 multieko2.adresar.Adresar/GetAdresar
|
|
||||||
{
|
|
||||||
"id": "1",
|
|
||||||
"firma": "Updated Firma",
|
|
||||||
"kz": "Updated KZ",
|
|
||||||
"drc": "Updated DRC",
|
|
||||||
"ulica": "Updated Ulica",
|
|
||||||
"psc": "Updated PSC",
|
|
||||||
"mesto": "Updated Mesto",
|
|
||||||
"stat": "Updated Stat",
|
|
||||||
"banka": "Updated Banka",
|
|
||||||
"ucet": "Updated Ucet",
|
|
||||||
"skladm": "Updated Skladm",
|
|
||||||
"ico": "Updated ICO",
|
|
||||||
"kontakt": "Updated Kontakt",
|
|
||||||
"telefon": "Updated Telefon",
|
|
||||||
"skladu": "Updated Skladu",
|
|
||||||
"fax": "Updated Fax"
|
|
||||||
}
|
|
||||||
❯ grpcurl -plaintext -d '{"id": 2}' localhost:50051 multieko2.adresar.Adresar/GetAdresar
|
|
||||||
{
|
|
||||||
"id": "2",
|
|
||||||
"firma": "asdfasf",
|
|
||||||
"kz": " ",
|
|
||||||
"drc": " ",
|
|
||||||
"ulica": " ",
|
|
||||||
"psc": "sdfasdf",
|
|
||||||
"mesto": "asf",
|
|
||||||
"stat": "as",
|
|
||||||
"banka": "df",
|
|
||||||
"ucet": "asf",
|
|
||||||
"skladm": "f",
|
|
||||||
"ico": "f",
|
|
||||||
"kontakt": "f",
|
|
||||||
"telefon": "f",
|
|
||||||
"skladu": "f",
|
|
||||||
"fax": " "
|
|
||||||
}
|
|
||||||
❯ grpcurl -plaintext -d '{"id": 1}' localhost:50051 multieko2.adresar.Adresar/DeleteAdresar
|
|
||||||
{
|
|
||||||
"success": true
|
|
||||||
}
|
|
||||||
❯ grpcurl -plaintext -d '{"id": 1}' localhost:50051 multieko2.adresar.Adresar/GetAdresar
|
|
||||||
ERROR:
|
|
||||||
Code: NotFound
|
|
||||||
Message: no rows returned by a query that expected to return at least one row
|
|
||||||
❯ grpcurl -plaintext -d '{"id": 2}' localhost:50051 multieko2.adresar.Adresar/GetAdresar
|
|
||||||
{
|
|
||||||
"id": "2",
|
|
||||||
"firma": "asdfasf",
|
|
||||||
"kz": " ",
|
|
||||||
"drc": " ",
|
|
||||||
"ulica": " ",
|
|
||||||
"psc": "sdfasdf",
|
|
||||||
"mesto": "asf",
|
|
||||||
"stat": "as",
|
|
||||||
"banka": "df",
|
|
||||||
"ucet": "asf",
|
|
||||||
"skladm": "f",
|
|
||||||
"ico": "f",
|
|
||||||
"kontakt": "f",
|
|
||||||
"telefon": "f",
|
|
||||||
"skladu": "f",
|
|
||||||
"fax": " "
|
|
||||||
}
|
|
||||||
|
|
||||||
❯ grpcurl -plaintext -d '{
|
|
||||||
"firma": "New Firma",
|
|
||||||
"kz": "New KZ",
|
|
||||||
"drc": "New DRC",
|
|
||||||
"ulica": "New Ulica",
|
|
||||||
"psc": "New PSC",
|
|
||||||
"mesto": "New Mesto",
|
|
||||||
"stat": "New Stat",
|
|
||||||
"banka": "New Banka",
|
|
||||||
"ucet": "New Ucet",
|
|
||||||
"skladm": "New Skladm",
|
|
||||||
"ico": "New ICO",
|
|
||||||
"kontakt": "New Kontakt",
|
|
||||||
"telefon": "New Telefon",
|
|
||||||
"skladu": "New Skladu",
|
|
||||||
"fax": "New Fax"
|
|
||||||
}' localhost:50051 multieko2.adresar.Adresar/PostAdresar
|
|
||||||
{
|
|
||||||
"id": "43",
|
|
||||||
"firma": "New Firma",
|
|
||||||
"kz": "New KZ",
|
|
||||||
"drc": "New DRC",
|
|
||||||
"ulica": "New Ulica",
|
|
||||||
"psc": "New PSC",
|
|
||||||
"mesto": "New Mesto",
|
|
||||||
"stat": "New Stat",
|
|
||||||
"banka": "New Banka",
|
|
||||||
"ucet": "New Ucet",
|
|
||||||
"skladm": "New Skladm",
|
|
||||||
"ico": "New ICO",
|
|
||||||
"kontakt": "New Kontakt",
|
|
||||||
"telefon": "New Telefon",
|
|
||||||
"skladu": "New Skladu",
|
|
||||||
"fax": "New Fax"
|
|
||||||
}
|
|
||||||
❯ grpcurl -plaintext -d '{
|
|
||||||
"id": 43,
|
|
||||||
"firma": "Updated Firma",
|
|
||||||
"kz": "Updated KZ",
|
|
||||||
"drc": "Updated DRC",
|
|
||||||
"ulica": "Updated Ulica",
|
|
||||||
"psc": "Updated PSC",
|
|
||||||
"mesto": "Updated Mesto",
|
|
||||||
"stat": "Updated Stat",
|
|
||||||
"banka": "Updated Banka",
|
|
||||||
"ucet": "Updated Ucet",
|
|
||||||
"skladm": "Updated Skladm",
|
|
||||||
"ico": "Updated ICO",
|
|
||||||
"kontakt": "Updated Kontakt",
|
|
||||||
"telefon": "Updated Telefon",
|
|
||||||
"skladu": "Updated Skladu",
|
|
||||||
"fax": "Updated Fax"
|
|
||||||
}' localhost:50051 multieko2.adresar.Adresar/PutAdresar
|
|
||||||
{
|
|
||||||
"id": "43",
|
|
||||||
"firma": "Updated Firma",
|
|
||||||
"kz": "Updated KZ",
|
|
||||||
"drc": "Updated DRC",
|
|
||||||
"ulica": "Updated Ulica",
|
|
||||||
"psc": "Updated PSC",
|
|
||||||
"mesto": "Updated Mesto",
|
|
||||||
"stat": "Updated Stat",
|
|
||||||
"banka": "Updated Banka",
|
|
||||||
"ucet": "Updated Ucet",
|
|
||||||
"skladm": "Updated Skladm",
|
|
||||||
"ico": "Updated ICO",
|
|
||||||
"kontakt": "Updated Kontakt",
|
|
||||||
"telefon": "Updated Telefon",
|
|
||||||
"skladu": "Updated Skladu",
|
|
||||||
"fax": "Updated Fax"
|
|
||||||
}
|
|
||||||
❯ grpcurl -plaintext -d '{"id": 43}' localhost:50051 multieko2.adresar.Adresar/GetAdresar
|
|
||||||
{
|
|
||||||
"id": "43",
|
|
||||||
"firma": "Updated Firma",
|
|
||||||
"kz": "Updated KZ",
|
|
||||||
"drc": "Updated DRC",
|
|
||||||
"ulica": "Updated Ulica",
|
|
||||||
"psc": "Updated PSC",
|
|
||||||
"mesto": "Updated Mesto",
|
|
||||||
"stat": "Updated Stat",
|
|
||||||
"banka": "Updated Banka",
|
|
||||||
"ucet": "Updated Ucet",
|
|
||||||
"skladm": "Updated Skladm",
|
|
||||||
"ico": "Updated ICO",
|
|
||||||
"kontakt": "Updated Kontakt",
|
|
||||||
"telefon": "Updated Telefon",
|
|
||||||
"skladu": "Updated Skladu",
|
|
||||||
"fax": "Updated Fax"
|
|
||||||
}
|
|
||||||
@@ -1,29 +0,0 @@
|
|||||||
|
|
||||||
# TOTAL items in the adresar
|
|
||||||
❯ grpcurl -plaintext localhost:50051 multieko2.adresar.Adresar/GetAdresarCount
|
|
||||||
{
|
|
||||||
"count": "5"
|
|
||||||
}
|
|
||||||
# Item at this count. If there are 43 items, number 1 is the first item
|
|
||||||
❯ grpcurl -plaintext -d '{"position": 1}' localhost:50051 multieko2.adresar.Adresar/GetAdresarByPosition
|
|
||||||
{
|
|
||||||
"id": "1",
|
|
||||||
"firma": "ks555",
|
|
||||||
"kz": "f",
|
|
||||||
"drc": "asdf",
|
|
||||||
"ulica": "as",
|
|
||||||
"psc": "f",
|
|
||||||
"mesto": "asf",
|
|
||||||
"stat": "as",
|
|
||||||
"banka": "fa",
|
|
||||||
"telefon": "a",
|
|
||||||
"skladu": "fd",
|
|
||||||
"fax": "asf"
|
|
||||||
}
|
|
||||||
# Item fetched by id. The first item was created and marked as deleted, therefore number 1 in ids shouldnt be fetched.
|
|
||||||
❯ grpcurl -plaintext -d '{"id": 1}' localhost:50051 multieko2.adresar.Adresar/GetAdresar
|
|
||||||
ERROR:
|
|
||||||
Code: NotFound
|
|
||||||
Message: no rows returned by a query that expected to return at least one row
|
|
||||||
╭─ ~ ············································· 69 ✘
|
|
||||||
╰─
|
|
||||||
@@ -1,15 +0,0 @@
|
|||||||
// src/adresar/handlers.rs
|
|
||||||
|
|
||||||
pub mod post_adresar;
|
|
||||||
pub mod get_adresar;
|
|
||||||
pub mod put_adresar;
|
|
||||||
pub mod delete_adresar;
|
|
||||||
pub mod get_adresar_count;
|
|
||||||
pub mod get_adresar_by_position;
|
|
||||||
|
|
||||||
pub use post_adresar::post_adresar;
|
|
||||||
pub use get_adresar::get_adresar;
|
|
||||||
pub use put_adresar::put_adresar;
|
|
||||||
pub use delete_adresar::delete_adresar;
|
|
||||||
pub use get_adresar_count::get_adresar_count;
|
|
||||||
pub use get_adresar_by_position::get_adresar_by_position;
|
|
||||||
@@ -1,27 +0,0 @@
|
|||||||
// src/adresar/handlers/delete_adresar.rs
|
|
||||||
use tonic::Status;
|
|
||||||
use sqlx::PgPool;
|
|
||||||
use common::proto::multieko2::adresar::{DeleteAdresarRequest, DeleteAdresarResponse};
|
|
||||||
|
|
||||||
pub async fn delete_adresar(
|
|
||||||
db_pool: &PgPool,
|
|
||||||
request: DeleteAdresarRequest,
|
|
||||||
) -> Result<DeleteAdresarResponse, Status> {
|
|
||||||
let rows_affected = sqlx::query!(
|
|
||||||
r#"
|
|
||||||
UPDATE adresar
|
|
||||||
SET deleted = true
|
|
||||||
WHERE id = $1 AND deleted = false
|
|
||||||
"#,
|
|
||||||
request.id
|
|
||||||
)
|
|
||||||
.execute(db_pool)
|
|
||||||
.await
|
|
||||||
.map_err(|e| Status::internal(e.to_string()))?
|
|
||||||
.rows_affected();
|
|
||||||
|
|
||||||
Ok(DeleteAdresarResponse {
|
|
||||||
success: rows_affected > 0,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
@@ -1,63 +0,0 @@
|
|||||||
// src/adresar/handlers/get_adresar.rs
|
|
||||||
use tonic::Status;
|
|
||||||
use sqlx::PgPool;
|
|
||||||
use crate::adresar::models::Adresar;
|
|
||||||
use common::proto::multieko2::adresar::{GetAdresarRequest, AdresarResponse};
|
|
||||||
|
|
||||||
pub async fn get_adresar(
|
|
||||||
db_pool: &PgPool,
|
|
||||||
request: GetAdresarRequest,
|
|
||||||
) -> Result<AdresarResponse, Status> {
|
|
||||||
let adresar = sqlx::query_as!(
|
|
||||||
Adresar,
|
|
||||||
r#"
|
|
||||||
SELECT
|
|
||||||
id,
|
|
||||||
deleted,
|
|
||||||
firma,
|
|
||||||
kz,
|
|
||||||
drc,
|
|
||||||
ulica,
|
|
||||||
psc,
|
|
||||||
mesto,
|
|
||||||
stat,
|
|
||||||
banka,
|
|
||||||
ucet,
|
|
||||||
skladm,
|
|
||||||
ico,
|
|
||||||
kontakt,
|
|
||||||
telefon,
|
|
||||||
skladu,
|
|
||||||
fax
|
|
||||||
FROM adresar
|
|
||||||
WHERE id = $1 AND deleted = false
|
|
||||||
"#,
|
|
||||||
request.id
|
|
||||||
)
|
|
||||||
.fetch_one(db_pool)
|
|
||||||
.await
|
|
||||||
.map_err(|e| match e {
|
|
||||||
sqlx::Error::RowNotFound => Status::not_found("Record not found"),
|
|
||||||
_ => Status::internal(format!("Database error: {}", e)),
|
|
||||||
})?;
|
|
||||||
|
|
||||||
Ok(AdresarResponse {
|
|
||||||
id: adresar.id,
|
|
||||||
firma: adresar.firma,
|
|
||||||
kz: adresar.kz.unwrap_or_default(),
|
|
||||||
drc: adresar.drc.unwrap_or_default(),
|
|
||||||
ulica: adresar.ulica.unwrap_or_default(),
|
|
||||||
psc: adresar.psc.unwrap_or_default(),
|
|
||||||
mesto: adresar.mesto.unwrap_or_default(),
|
|
||||||
stat: adresar.stat.unwrap_or_default(),
|
|
||||||
banka: adresar.banka.unwrap_or_default(),
|
|
||||||
ucet: adresar.ucet.unwrap_or_default(),
|
|
||||||
skladm: adresar.skladm.unwrap_or_default(),
|
|
||||||
ico: adresar.ico.unwrap_or_default(),
|
|
||||||
kontakt: adresar.kontakt.unwrap_or_default(),
|
|
||||||
telefon: adresar.telefon.unwrap_or_default(),
|
|
||||||
skladu: adresar.skladu.unwrap_or_default(),
|
|
||||||
fax: adresar.fax.unwrap_or_default(),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
@@ -1,35 +0,0 @@
|
|||||||
// src/adresar/handlers/get_adresar_by_position.rs
|
|
||||||
use tonic::{Status};
|
|
||||||
use sqlx::PgPool;
|
|
||||||
use common::proto::multieko2::adresar::{AdresarResponse, GetAdresarRequest};
|
|
||||||
use common::proto::multieko2::common::PositionRequest;
|
|
||||||
use super::get_adresar;
|
|
||||||
|
|
||||||
pub async fn get_adresar_by_position(
|
|
||||||
db_pool: &PgPool,
|
|
||||||
request: PositionRequest,
|
|
||||||
) -> Result<AdresarResponse, Status> {
|
|
||||||
if request.position < 1 {
|
|
||||||
return Err(Status::invalid_argument("Position must be at least 1"));
|
|
||||||
}
|
|
||||||
|
|
||||||
// Find the ID of the Nth non-deleted record
|
|
||||||
let id: i64 = sqlx::query_scalar!(
|
|
||||||
r#"
|
|
||||||
SELECT id
|
|
||||||
FROM adresar
|
|
||||||
WHERE deleted = FALSE
|
|
||||||
ORDER BY id ASC
|
|
||||||
OFFSET $1
|
|
||||||
LIMIT 1
|
|
||||||
"#,
|
|
||||||
request.position - 1
|
|
||||||
)
|
|
||||||
.fetch_optional(db_pool)
|
|
||||||
.await
|
|
||||||
.map_err(|e| Status::internal(e.to_string()))?
|
|
||||||
.ok_or_else(|| Status::not_found("Position out of bounds"))?;
|
|
||||||
|
|
||||||
// Now fetch the complete record using the existing get_adresar function
|
|
||||||
get_adresar(db_pool, GetAdresarRequest { id }).await
|
|
||||||
}
|
|
||||||
@@ -1,23 +0,0 @@
|
|||||||
// src/adresar/handlers/get_adresar_count.rs
|
|
||||||
use tonic::Status;
|
|
||||||
use sqlx::PgPool;
|
|
||||||
use common::proto::multieko2::common::{CountResponse, Empty};
|
|
||||||
|
|
||||||
pub async fn get_adresar_count(
|
|
||||||
db_pool: &PgPool,
|
|
||||||
_request: Empty,
|
|
||||||
) -> Result<CountResponse, Status> {
|
|
||||||
let count: i64 = sqlx::query_scalar!(
|
|
||||||
r#"
|
|
||||||
SELECT COUNT(*) AS count
|
|
||||||
FROM adresar
|
|
||||||
WHERE deleted = FALSE
|
|
||||||
"#
|
|
||||||
)
|
|
||||||
.fetch_one(db_pool)
|
|
||||||
.await
|
|
||||||
.map_err(|e| Status::internal(e.to_string()))?
|
|
||||||
.unwrap_or(0);
|
|
||||||
|
|
||||||
Ok(CountResponse { count })
|
|
||||||
}
|
|
||||||
@@ -1,99 +0,0 @@
|
|||||||
// src/adresar/handlers/post_adresar.rs
|
|
||||||
use tonic::Status;
|
|
||||||
use sqlx::PgPool;
|
|
||||||
use crate::adresar::models::Adresar;
|
|
||||||
use common::proto::multieko2::adresar::{PostAdresarRequest, AdresarResponse};
|
|
||||||
|
|
||||||
// Helper function to sanitize inputs
|
|
||||||
fn sanitize_input(input: &str) -> Option<String> {
|
|
||||||
let trimmed = input.trim().to_string();
|
|
||||||
if trimmed.is_empty() {
|
|
||||||
None
|
|
||||||
} else {
|
|
||||||
Some(trimmed)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn post_adresar(
|
|
||||||
db_pool: &PgPool,
|
|
||||||
mut request: PostAdresarRequest,
|
|
||||||
) -> Result<AdresarResponse, Status> {
|
|
||||||
request.firma = request.firma.trim().to_string();
|
|
||||||
if request.firma.is_empty() {
|
|
||||||
return Err(Status::invalid_argument("Firma je povinne pole"));
|
|
||||||
}
|
|
||||||
|
|
||||||
// Sanitize optional fields
|
|
||||||
let kz = sanitize_input(&request.kz);
|
|
||||||
let drc = sanitize_input(&request.drc);
|
|
||||||
let ulica = sanitize_input(&request.ulica);
|
|
||||||
let psc = sanitize_input(&request.psc);
|
|
||||||
let mesto = sanitize_input(&request.mesto);
|
|
||||||
let stat = sanitize_input(&request.stat);
|
|
||||||
let banka = sanitize_input(&request.banka);
|
|
||||||
let ucet = sanitize_input(&request.ucet);
|
|
||||||
let skladm = sanitize_input(&request.skladm);
|
|
||||||
let ico = sanitize_input(&request.ico);
|
|
||||||
let kontakt = sanitize_input(&request.kontakt);
|
|
||||||
let telefon = sanitize_input(&request.telefon);
|
|
||||||
let skladu = sanitize_input(&request.skladu);
|
|
||||||
let fax = sanitize_input(&request.fax);
|
|
||||||
|
|
||||||
let adresar = sqlx::query_as!(
|
|
||||||
Adresar,
|
|
||||||
r#"
|
|
||||||
INSERT INTO adresar (
|
|
||||||
firma, kz, drc, ulica, psc, mesto, stat, banka, ucet,
|
|
||||||
skladm, ico, kontakt, telefon, skladu, fax, deleted
|
|
||||||
)
|
|
||||||
VALUES (
|
|
||||||
$1, $2, $3, $4, $5, $6, $7, $8, $9,
|
|
||||||
$10, $11, $12, $13, $14, $15, $16
|
|
||||||
)
|
|
||||||
RETURNING
|
|
||||||
id, deleted, firma, kz, drc, ulica, psc, mesto, stat,
|
|
||||||
banka, ucet, skladm, ico, kontakt, telefon, skladu, fax
|
|
||||||
"#,
|
|
||||||
request.firma,
|
|
||||||
kz,
|
|
||||||
drc,
|
|
||||||
ulica,
|
|
||||||
psc,
|
|
||||||
mesto,
|
|
||||||
stat,
|
|
||||||
banka,
|
|
||||||
ucet,
|
|
||||||
skladm,
|
|
||||||
ico,
|
|
||||||
kontakt,
|
|
||||||
telefon,
|
|
||||||
skladu,
|
|
||||||
fax,
|
|
||||||
false
|
|
||||||
)
|
|
||||||
.fetch_one(db_pool)
|
|
||||||
.await
|
|
||||||
.map_err(|e| Status::internal(e.to_string()))?;
|
|
||||||
|
|
||||||
Ok(AdresarResponse {
|
|
||||||
id: adresar.id,
|
|
||||||
// Do not include `deleted` in the response since it's not
|
|
||||||
// defined in the proto message.
|
|
||||||
firma: adresar.firma,
|
|
||||||
kz: adresar.kz.unwrap_or_default(),
|
|
||||||
drc: adresar.drc.unwrap_or_default(),
|
|
||||||
ulica: adresar.ulica.unwrap_or_default(),
|
|
||||||
psc: adresar.psc.unwrap_or_default(),
|
|
||||||
mesto: adresar.mesto.unwrap_or_default(),
|
|
||||||
stat: adresar.stat.unwrap_or_default(),
|
|
||||||
banka: adresar.banka.unwrap_or_default(),
|
|
||||||
ucet: adresar.ucet.unwrap_or_default(),
|
|
||||||
skladm: adresar.skladm.unwrap_or_default(),
|
|
||||||
ico: adresar.ico.unwrap_or_default(),
|
|
||||||
kontakt: adresar.kontakt.unwrap_or_default(),
|
|
||||||
telefon: adresar.telefon.unwrap_or_default(),
|
|
||||||
skladu: adresar.skladu.unwrap_or_default(),
|
|
||||||
fax: adresar.fax.unwrap_or_default(),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
@@ -1,122 +0,0 @@
|
|||||||
// src/adresar/handlers/put_adresar.rs
|
|
||||||
use tonic::Status;
|
|
||||||
use sqlx::PgPool;
|
|
||||||
use crate::adresar::models::Adresar;
|
|
||||||
use common::proto::multieko2::adresar::{PutAdresarRequest, AdresarResponse};
|
|
||||||
|
|
||||||
// Add the same sanitize_input helper as in POST handler
|
|
||||||
fn sanitize_input(input: &str) -> Option<String> {
|
|
||||||
let trimmed = input.trim().to_string();
|
|
||||||
if trimmed.is_empty() {
|
|
||||||
None
|
|
||||||
} else {
|
|
||||||
Some(trimmed)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn put_adresar(
|
|
||||||
db_pool: &PgPool,
|
|
||||||
mut request: PutAdresarRequest,
|
|
||||||
) -> Result<AdresarResponse, Status> {
|
|
||||||
// Add validation for required fields like in POST
|
|
||||||
request.firma = request.firma.trim().to_string();
|
|
||||||
if request.firma.is_empty() {
|
|
||||||
return Err(Status::invalid_argument("Firma je povinne pole"));
|
|
||||||
}
|
|
||||||
|
|
||||||
// Sanitize optional fields like in POST
|
|
||||||
let kz = sanitize_input(&request.kz);
|
|
||||||
let drc = sanitize_input(&request.drc);
|
|
||||||
let ulica = sanitize_input(&request.ulica);
|
|
||||||
let psc = sanitize_input(&request.psc);
|
|
||||||
let mesto = sanitize_input(&request.mesto);
|
|
||||||
let stat = sanitize_input(&request.stat);
|
|
||||||
let banka = sanitize_input(&request.banka);
|
|
||||||
let ucet = sanitize_input(&request.ucet);
|
|
||||||
let skladm = sanitize_input(&request.skladm);
|
|
||||||
let ico = sanitize_input(&request.ico);
|
|
||||||
let kontakt = sanitize_input(&request.kontakt);
|
|
||||||
let telefon = sanitize_input(&request.telefon);
|
|
||||||
let skladu = sanitize_input(&request.skladu);
|
|
||||||
let fax = sanitize_input(&request.fax);
|
|
||||||
|
|
||||||
let adresar = sqlx::query_as!(
|
|
||||||
Adresar,
|
|
||||||
r#"
|
|
||||||
UPDATE adresar
|
|
||||||
SET
|
|
||||||
firma = $2,
|
|
||||||
kz = $3,
|
|
||||||
drc = $4,
|
|
||||||
ulica = $5,
|
|
||||||
psc = $6,
|
|
||||||
mesto = $7,
|
|
||||||
stat = $8,
|
|
||||||
banka = $9,
|
|
||||||
ucet = $10,
|
|
||||||
skladm = $11,
|
|
||||||
ico = $12,
|
|
||||||
kontakt = $13,
|
|
||||||
telefon = $14,
|
|
||||||
skladu = $15,
|
|
||||||
fax = $16
|
|
||||||
WHERE id = $1 AND deleted = FALSE
|
|
||||||
RETURNING
|
|
||||||
id,
|
|
||||||
deleted,
|
|
||||||
firma,
|
|
||||||
kz,
|
|
||||||
drc,
|
|
||||||
ulica,
|
|
||||||
psc,
|
|
||||||
mesto,
|
|
||||||
stat,
|
|
||||||
banka,
|
|
||||||
ucet,
|
|
||||||
skladm,
|
|
||||||
ico,
|
|
||||||
kontakt,
|
|
||||||
telefon,
|
|
||||||
skladu,
|
|
||||||
fax
|
|
||||||
"#,
|
|
||||||
request.id,
|
|
||||||
request.firma,
|
|
||||||
kz,
|
|
||||||
drc,
|
|
||||||
ulica,
|
|
||||||
psc,
|
|
||||||
mesto,
|
|
||||||
stat,
|
|
||||||
banka,
|
|
||||||
ucet,
|
|
||||||
skladm,
|
|
||||||
ico,
|
|
||||||
kontakt,
|
|
||||||
telefon,
|
|
||||||
skladu,
|
|
||||||
fax
|
|
||||||
)
|
|
||||||
.fetch_one(db_pool)
|
|
||||||
.await
|
|
||||||
.map_err(|e| Status::internal(e.to_string()))?;
|
|
||||||
|
|
||||||
Ok(AdresarResponse {
|
|
||||||
id: adresar.id,
|
|
||||||
firma: adresar.firma,
|
|
||||||
kz: adresar.kz.unwrap_or_default(),
|
|
||||||
drc: adresar.drc.unwrap_or_default(),
|
|
||||||
ulica: adresar.ulica.unwrap_or_default(),
|
|
||||||
psc: adresar.psc.unwrap_or_default(),
|
|
||||||
mesto: adresar.mesto.unwrap_or_default(),
|
|
||||||
stat: adresar.stat.unwrap_or_default(),
|
|
||||||
banka: adresar.banka.unwrap_or_default(),
|
|
||||||
ucet: adresar.ucet.unwrap_or_default(),
|
|
||||||
skladm: adresar.skladm.unwrap_or_default(),
|
|
||||||
ico: adresar.ico.unwrap_or_default(),
|
|
||||||
kontakt: adresar.kontakt.unwrap_or_default(),
|
|
||||||
telefon: adresar.telefon.unwrap_or_default(),
|
|
||||||
skladu: adresar.skladu.unwrap_or_default(),
|
|
||||||
fax: adresar.fax.unwrap_or_default(),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
@@ -1,7 +0,0 @@
|
|||||||
// src/adresar/mod.rs
|
|
||||||
|
|
||||||
pub mod models;
|
|
||||||
pub mod handlers;
|
|
||||||
|
|
||||||
// #[cfg(test)]
|
|
||||||
// pub mod tests;
|
|
||||||
@@ -1,23 +0,0 @@
|
|||||||
// src/adresar/models.rs
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
|
|
||||||
#[derive(Debug, Serialize, Deserialize)]
|
|
||||||
pub struct Adresar {
|
|
||||||
pub id: i64,
|
|
||||||
pub deleted: bool,
|
|
||||||
pub firma: String,
|
|
||||||
pub kz: Option<String>,
|
|
||||||
pub drc: Option<String>,
|
|
||||||
pub ulica: Option<String>,
|
|
||||||
pub psc: Option<String>,
|
|
||||||
pub mesto: Option<String>,
|
|
||||||
pub stat: Option<String>,
|
|
||||||
pub banka: Option<String>,
|
|
||||||
pub ucet: Option<String>,
|
|
||||||
pub skladm: Option<String>,
|
|
||||||
pub ico: Option<String>,
|
|
||||||
pub kontakt: Option<String>,
|
|
||||||
pub telefon: Option<String>,
|
|
||||||
pub skladu: Option<String>,
|
|
||||||
pub fax: Option<String>,
|
|
||||||
}
|
|
||||||
@@ -3,6 +3,8 @@
|
|||||||
use tower::ServiceBuilder;
|
use tower::ServiceBuilder;
|
||||||
use crate::auth::logic::rbac;
|
use crate::auth::logic::rbac;
|
||||||
|
|
||||||
|
// TODO redesign this, adresar and uctovnictvo are nonexistent, but we are keeping this code for
|
||||||
|
// the reference. Please adjust in the future rbac.
|
||||||
pub async fn run_server(db_pool: sqlx::PgPool) -> Result<(), Box<dyn std::error::Error>> {
|
pub async fn run_server(db_pool: sqlx::PgPool) -> Result<(), Box<dyn std::error::Error>> {
|
||||||
// ... existing setup code ...
|
// ... existing setup code ...
|
||||||
|
|
||||||
|
|||||||
@@ -1,83 +0,0 @@
|
|||||||
// In server/src/bin/manual_indexer.rs
|
|
||||||
|
|
||||||
use sqlx::{PgPool, Row};
|
|
||||||
use tantivy::schema::*;
|
|
||||||
use tantivy::{doc, Index};
|
|
||||||
use std::path::Path;
|
|
||||||
|
|
||||||
// --- CONFIGURATION ---
|
|
||||||
// IMPORTANT: Change this to a table name that actually exists and has data in your test DB.
|
|
||||||
// From your grpcurl output, "2025_test_post" is a good candidate.
|
|
||||||
const TABLE_TO_INDEX: &str = "2025_test_post2";
|
|
||||||
const INDEX_DIR: &str = "./tantivy_indexes";
|
|
||||||
|
|
||||||
#[tokio::main]
|
|
||||||
async fn main() -> anyhow::Result<()> {
|
|
||||||
// --- Database Connection ---
|
|
||||||
// This assumes you have a .env file with DATABASE_URL
|
|
||||||
dotenvy::dotenv().ok();
|
|
||||||
let database_url = std::env::var("DATABASE_URL")
|
|
||||||
.expect("DATABASE_URL must be set in your .env file");
|
|
||||||
let pool = PgPool::connect(&database_url).await?;
|
|
||||||
println!("Connected to database.");
|
|
||||||
|
|
||||||
// --- Tantivy Schema Definition ---
|
|
||||||
let mut schema_builder = Schema::builder();
|
|
||||||
// This field will store the original Postgres row ID. It's crucial.
|
|
||||||
schema_builder.add_u64_field("pg_id", INDEXED | STORED);
|
|
||||||
// This field will contain ALL text data from the row, concatenated.
|
|
||||||
schema_builder.add_text_field("all_text", TEXT | STORED);
|
|
||||||
let schema = schema_builder.build();
|
|
||||||
|
|
||||||
// --- Index Creation ---
|
|
||||||
let index_path = Path::new(INDEX_DIR).join(TABLE_TO_INDEX);
|
|
||||||
if index_path.exists() {
|
|
||||||
println!("Removing existing index at: {}", index_path.display());
|
|
||||||
std::fs::remove_dir_all(&index_path)?;
|
|
||||||
}
|
|
||||||
std::fs::create_dir_all(&index_path)?;
|
|
||||||
let index = Index::create_in_dir(&index_path, schema.clone())?;
|
|
||||||
let mut index_writer = index.writer(100_000_000)?; // 100MB heap
|
|
||||||
|
|
||||||
println!("Indexing table: {}", TABLE_TO_INDEX);
|
|
||||||
|
|
||||||
// --- Data Fetching and Indexing ---
|
|
||||||
let qualified_table = format!("gen.\"{}\"", TABLE_TO_INDEX);
|
|
||||||
let query_str = format!("SELECT id, to_jsonb(t) AS data FROM {} t", qualified_table);
|
|
||||||
let rows = sqlx::query(&query_str).fetch_all(&pool).await?;
|
|
||||||
|
|
||||||
if rows.is_empty() {
|
|
||||||
println!("Warning: No rows found in table '{}'. Index will be empty.", TABLE_TO_INDEX);
|
|
||||||
}
|
|
||||||
|
|
||||||
let pg_id_field = schema.get_field("pg_id").unwrap();
|
|
||||||
let all_text_field = schema.get_field("all_text").unwrap();
|
|
||||||
|
|
||||||
for row in &rows {
|
|
||||||
let id: i64 = row.try_get("id")?;
|
|
||||||
let data: serde_json::Value = row.try_get("data")?;
|
|
||||||
|
|
||||||
// Concatenate all text values from the JSON into one big string.
|
|
||||||
let mut full_text = String::new();
|
|
||||||
if let Some(obj) = data.as_object() {
|
|
||||||
for value in obj.values() {
|
|
||||||
if let Some(s) = value.as_str() {
|
|
||||||
full_text.push_str(s);
|
|
||||||
full_text.push(' ');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Add the document to Tantivy
|
|
||||||
index_writer.add_document(doc!(
|
|
||||||
pg_id_field => id as u64,
|
|
||||||
all_text_field => full_text
|
|
||||||
))?;
|
|
||||||
}
|
|
||||||
|
|
||||||
// --- Finalize ---
|
|
||||||
index_writer.commit()?;
|
|
||||||
println!("Successfully indexed {} documents into '{}'", rows.len(), index_path.display());
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
@@ -1,13 +1,12 @@
-// src/indexer.rs
+// server/src/indexer.rs
 
-use std::path::Path;
 use sqlx::{PgPool, Row};
-use tantivy::schema::{Schema, Term, TEXT, STORED, INDEXED};
-use tantivy::{doc, Index, IndexWriter};
+use tantivy::schema::Term;
+use tantivy::{doc, IndexWriter};
 use tokio::sync::mpsc::Receiver;
 use tracing::{error, info, warn};
+use tantivy::schema::Schema;
-const INDEX_DIR: &str = "./tantivy_indexes";
+use crate::search_schema;
 
 /// Defines the commands that can be sent to the indexer task.
 #[derive(Debug)]
@@ -25,7 +24,6 @@ pub struct IndexCommandData {
 }
 
 /// The main loop for the background indexer task.
-/// It listens for commands on the receiver and updates the Tantivy index.
 pub async fn indexer_task(pool: PgPool, mut receiver: Receiver<IndexCommand>) {
     info!("Background indexer task started.");
     while let Some(command) = receiver.recv().await {
@@ -49,52 +47,39 @@ async fn handle_add_or_update(
     pool: &PgPool,
     data: IndexCommandData,
 ) -> anyhow::Result<()> {
-    // 1. Fetch the full row data from PostgreSQL
     let qualified_table = format!("gen.\"{}\"", data.table_name);
     let query_str = format!(
         "SELECT to_jsonb(t) AS data FROM {} t WHERE id = $1",
         qualified_table
     );
 
     let row = sqlx::query(&query_str)
         .bind(data.row_id)
         .fetch_one(pool)
         .await?;
     let json_data: serde_json::Value = row.try_get("data")?;
+    let slovak_text = extract_text_content(&json_data);
 
-    // 2. Prepare the Tantivy document
-    let mut full_text = String::new();
-    if let Some(obj) = json_data.as_object() {
-        for value in obj.values() {
-            if let Some(s) = value.as_str() {
-                full_text.push_str(s);
-                full_text.push(' ');
-            }
-        }
-    }
-
-    // 3. Open the index and write the document
     let (mut writer, schema) = get_index_writer(&data.table_name)?;
     let pg_id_field = schema.get_field("pg_id").unwrap();
-    let all_text_field = schema.get_field("all_text").unwrap();
+    let prefix_edge_field = schema.get_field("prefix_edge").unwrap();
+    let prefix_full_field = schema.get_field("prefix_full").unwrap();
+    let text_ngram_field = schema.get_field("text_ngram").unwrap();
 
-    // First, delete any existing document with this ID to handle updates
     let id_term = Term::from_field_u64(pg_id_field, data.row_id as u64);
     writer.delete_term(id_term);
 
-    // Add the new document
     writer.add_document(doc!(
         pg_id_field => data.row_id as u64,
-        all_text_field => full_text
+        prefix_edge_field => slovak_text.clone(),
+        prefix_full_field => slovak_text.clone(),
+        text_ngram_field => slovak_text
    ))?;
 
-    // 4. Commit changes
     writer.commit()?;
     info!(
         "Successfully indexed document id:{} for table:{}",
         data.row_id, data.table_name
     );
 
     Ok(())
 }
 
@@ -122,19 +107,31 @@ async fn handle_delete(
 fn get_index_writer(
     table_name: &str,
 ) -> anyhow::Result<(IndexWriter, Schema)> {
-    let index_path = Path::new(INDEX_DIR).join(table_name);
-    std::fs::create_dir_all(&index_path)?;
-
-    let index = Index::open_in_dir(&index_path).or_else(|_| {
-        // If it doesn't exist, create it with the standard schema
-        let mut schema_builder = Schema::builder();
-        schema_builder.add_u64_field("pg_id", INDEXED | STORED);
-        schema_builder.add_text_field("all_text", TEXT | STORED);
-        let schema = schema_builder.build();
-        Index::create_in_dir(&index_path, schema)
-    })?;
-
+    let index = search_schema::get_or_create_index(table_name)?;
     let schema = index.schema();
     let writer = index.writer(100_000_000)?; // 100MB heap
     Ok((writer, schema))
 }
 
+/// Extract all text content from a JSON object for indexing
+fn extract_text_content(json_data: &serde_json::Value) -> String {
+    let mut full_text = String::new();
+
+    if let Some(obj) = json_data.as_object() {
+        for value in obj.values() {
+            match value {
+                serde_json::Value::String(s) => {
+                    full_text.push_str(s);
+                    full_text.push(' ');
+                }
+                serde_json::Value::Number(n) => {
+                    full_text.push_str(&n.to_string());
+                    full_text.push(' ');
+                }
+                _ => {}
+            }
+        }
+    }
+
+    full_text.trim().to_string()
+}
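Since extract_text_content above now also folds numbers into the indexed text while skipping other JSON value types, a small unit-test sketch (illustrative, not present in the diff) pins that behaviour down:

// Sketch of a unit test for extract_text_content as defined above.
#[cfg(test)]
mod tests {
    use super::extract_text_content;
    use serde_json::json;

    #[test]
    fn strings_and_numbers_are_indexed_booleans_are_skipped() {
        let row = json!({
            "firma": "Novak s.r.o.",
            "ico": 12345678,
            "deleted": false
        });
        let text = extract_text_content(&row);
        assert!(text.contains("Novak s.r.o."));
        assert!(text.contains("12345678"));
        assert!(!text.contains("false"));
    }
}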
@@ -2,9 +2,8 @@
 pub mod db;
 pub mod auth;
 pub mod indexer;
+pub mod search_schema;
 pub mod server;
-pub mod adresar;
-pub mod uctovnictvo;
 pub mod shared;
 pub mod table_structure;
 pub mod table_definition;
26 server/src/search_schema.rs (new file)
@@ -0,0 +1,26 @@
+// server/src/search_schema.rs
+
+use std::path::Path;
+use tantivy::Index;
+
+// Re-export the functions from the common crate.
+// This makes them available as `crate::search_schema::create_search_schema`, etc.
+pub use common::search::{create_search_schema, register_slovak_tokenizers};
+
+/// Gets an existing index or creates a new one.
+/// This function now uses the shared logic from the `common` crate.
+pub fn get_or_create_index(table_name: &str) -> tantivy::Result<Index> {
+    let index_path = Path::new("./tantivy_indexes").join(table_name);
+    std::fs::create_dir_all(&index_path)?;
+
+    let index = if index_path.join("meta.json").exists() {
+        Index::open_in_dir(&index_path)?
+    } else {
+        let schema = create_search_schema();
+        Index::create_in_dir(&index_path, schema)?
+    };
+
+    // This now calls the single, authoritative function from `common`.
+    register_slovak_tokenizers(&index)?;
+    Ok(index)
+}
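A short usage sketch for get_or_create_index follows. The field names match what the indexer expects (pg_id, prefix_full), but the table name is made up and the writer call assumes a tantivy 0.22-style generic IndexWriter; treat it as an illustration only.

use tantivy::{doc, TantivyDocument};

// Illustrative only: "example_table" is not a real table in this repo.
fn index_one_row() -> tantivy::Result<()> {
    let index = crate::search_schema::get_or_create_index("example_table")?;
    let schema = index.schema();
    let pg_id = schema.get_field("pg_id")?;
    let prefix_full = schema.get_field("prefix_full")?;

    let mut writer = index.writer::<TantivyDocument>(100_000_000)?;
    writer.add_document(doc!(
        pg_id => 1u64,
        prefix_full => "novak bratislava"
    ))?;
    writer.commit()?;
    Ok(())
}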
@@ -1,4 +1,2 @@
 // src/server/handlers.rs
-pub use crate::server::services::adresar_service::AdresarService;
-pub use crate::server::services::uctovnictvo_service::UctovnictvoService;
 pub use crate::server::services::table_structure_service::TableStructureHandler;
@@ -1,4 +1,3 @@
|
|||||||
// src/server/run.rs
|
|
||||||
use tonic::transport::Server;
|
use tonic::transport::Server;
|
||||||
use tonic_reflection::server::Builder as ReflectionBuilder;
|
use tonic_reflection::server::Builder as ReflectionBuilder;
|
||||||
|
|
||||||
@@ -7,8 +6,6 @@ use crate::indexer::{indexer_task, IndexCommand};
|
|||||||
|
|
||||||
use common::proto::multieko2::FILE_DESCRIPTOR_SET;
|
use common::proto::multieko2::FILE_DESCRIPTOR_SET;
|
||||||
use crate::server::services::{
|
use crate::server::services::{
|
||||||
AdresarService,
|
|
||||||
UctovnictvoService,
|
|
||||||
TableStructureHandler,
|
TableStructureHandler,
|
||||||
TableDefinitionService,
|
TableDefinitionService,
|
||||||
TablesDataService,
|
TablesDataService,
|
||||||
@@ -16,8 +13,6 @@ use crate::server::services::{
|
|||||||
AuthServiceImpl
|
AuthServiceImpl
|
||||||
};
|
};
|
||||||
use common::proto::multieko2::{
|
use common::proto::multieko2::{
|
||||||
adresar::adresar_server::AdresarServer,
|
|
||||||
uctovnictvo::uctovnictvo_server::UctovnictvoServer,
|
|
||||||
table_structure::table_structure_service_server::TableStructureServiceServer,
|
table_structure::table_structure_service_server::TableStructureServiceServer,
|
||||||
table_definition::table_definition_server::TableDefinitionServer,
|
table_definition::table_definition_server::TableDefinitionServer,
|
||||||
tables_data::tables_data_server::TablesDataServer,
|
tables_data::tables_data_server::TablesDataServer,
|
||||||
@@ -48,15 +43,15 @@ pub async fn run_server(db_pool: sqlx::PgPool) -> Result<(), Box<dyn std::error:
|
|||||||
let table_definition_service = TableDefinitionService { db_pool: db_pool.clone() };
|
let table_definition_service = TableDefinitionService { db_pool: db_pool.clone() };
|
||||||
let tables_data_service = TablesDataService {
|
let tables_data_service = TablesDataService {
|
||||||
db_pool: db_pool.clone(),
|
db_pool: db_pool.clone(),
|
||||||
indexer_tx: indexer_tx.clone(), // Pass the sender
|
indexer_tx: indexer_tx.clone(),
|
||||||
};
|
};
|
||||||
let table_script_service = TableScriptService { db_pool: db_pool.clone() };
|
let table_script_service = TableScriptService { db_pool: db_pool.clone() };
|
||||||
let auth_service = AuthServiceImpl { db_pool: db_pool.clone() };
|
let auth_service = AuthServiceImpl { db_pool: db_pool.clone() };
|
||||||
let search_service = SearcherService;
|
|
||||||
|
// MODIFIED: Instantiate SearcherService with the database pool
|
||||||
|
let search_service = SearcherService { pool: db_pool.clone() };
|
||||||
|
|
||||||
Server::builder()
|
Server::builder()
|
||||||
.add_service(AdresarServer::new(AdresarService { db_pool: db_pool.clone() }))
|
|
||||||
.add_service(UctovnictvoServer::new(UctovnictvoService { db_pool: db_pool.clone() }))
|
|
||||||
.add_service(TableStructureServiceServer::new(TableStructureHandler { db_pool: db_pool.clone() }))
|
.add_service(TableStructureServiceServer::new(TableStructureHandler { db_pool: db_pool.clone() }))
|
||||||
.add_service(TableDefinitionServer::new(table_definition_service))
|
.add_service(TableDefinitionServer::new(table_definition_service))
|
||||||
.add_service(TablesDataServer::new(tables_data_service))
|
.add_service(TablesDataServer::new(tables_data_service))
|
||||||
|
|||||||
@@ -1,69 +0,0 @@
|
|||||||
// src/server/services/adresar_service.rs
|
|
||||||
use tonic::{Request, Response, Status};
|
|
||||||
use common::proto::multieko2::adresar::{
|
|
||||||
adresar_server::Adresar,
|
|
||||||
PostAdresarRequest, AdresarResponse, GetAdresarRequest, PutAdresarRequest,
|
|
||||||
DeleteAdresarRequest, DeleteAdresarResponse,
|
|
||||||
};
|
|
||||||
use common::proto::multieko2::common::{Empty, CountResponse, PositionRequest};
|
|
||||||
use crate::adresar::handlers::{
|
|
||||||
post_adresar, get_adresar, put_adresar, delete_adresar,
|
|
||||||
get_adresar_count, get_adresar_by_position,
|
|
||||||
};
|
|
||||||
use sqlx::PgPool;
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct AdresarService {
|
|
||||||
pub db_pool: PgPool,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tonic::async_trait]
|
|
||||||
impl Adresar for AdresarService {
|
|
||||||
async fn post_adresar(
|
|
||||||
&self,
|
|
||||||
request: Request<PostAdresarRequest>,
|
|
||||||
) -> Result<Response<AdresarResponse>, Status> {
|
|
||||||
let response = post_adresar(&self.db_pool, request.into_inner()).await?;
|
|
||||||
Ok(Response::new(response))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn get_adresar(
|
|
||||||
&self,
|
|
||||||
request: Request<GetAdresarRequest>,
|
|
||||||
) -> Result<Response<AdresarResponse>, Status> {
|
|
||||||
let response = get_adresar(&self.db_pool, request.into_inner()).await?;
|
|
||||||
Ok(Response::new(response))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn put_adresar(
|
|
||||||
&self,
|
|
||||||
request: Request<PutAdresarRequest>,
|
|
||||||
) -> Result<Response<AdresarResponse>, Status> {
|
|
||||||
let response = put_adresar(&self.db_pool, request.into_inner()).await?;
|
|
||||||
Ok(Response::new(response))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn delete_adresar(
|
|
||||||
&self,
|
|
||||||
request: Request<DeleteAdresarRequest>,
|
|
||||||
) -> Result<Response<DeleteAdresarResponse>, Status> {
|
|
||||||
let response = delete_adresar(&self.db_pool, request.into_inner()).await?;
|
|
||||||
Ok(Response::new(response))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn get_adresar_count(
|
|
||||||
&self,
|
|
||||||
request: Request<Empty>,
|
|
||||||
) -> Result<Response<CountResponse>, Status> {
|
|
||||||
let response = get_adresar_count(&self.db_pool, request.into_inner()).await?;
|
|
||||||
Ok(Response::new(response))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn get_adresar_by_position(
|
|
||||||
&self,
|
|
||||||
request: Request<PositionRequest>,
|
|
||||||
) -> Result<Response<AdresarResponse>, Status> {
|
|
||||||
let response = get_adresar_by_position(&self.db_pool, request.into_inner()).await?;
|
|
||||||
Ok(Response::new(response))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,16 +1,12 @@
 // src/server/services/mod.rs
 
-pub mod adresar_service;
 pub mod table_structure_service;
-pub mod uctovnictvo_service;
 pub mod table_definition_service;
 pub mod tables_data_service;
 pub mod table_script_service;
 pub mod auth_service;
 
-pub use adresar_service::AdresarService;
 pub use table_structure_service::TableStructureHandler;
-pub use uctovnictvo_service::UctovnictvoService;
 pub use table_definition_service::TableDefinitionService;
 pub use tables_data_service::TablesDataService;
 pub use table_script_service::TableScriptService;
@@ -41,14 +41,17 @@ impl TablesData for TablesDataService {
         Ok(Response::new(response))
     }
 
-    // You will later apply the same pattern to put_table_data...
     async fn put_table_data(
         &self,
         request: Request<PutTableDataRequest>,
     ) -> Result<Response<PutTableDataResponse>, Status> {
         let request = request.into_inner();
-        // TODO: Update put_table_data handler to accept and use indexer_tx
-        let response = put_table_data(&self.db_pool, request).await?;
+        let response = put_table_data(
+            &self.db_pool,
+            request,
+            &self.indexer_tx,
+        )
+        .await?;
         Ok(Response::new(response))
     }
 
@@ -1,60 +0,0 @@ src/server/services/uctovnictvo_service.rs (deleted)
-// src/server/services/uctovnictvo_service.rs
-use tonic::{Request, Response, Status};
-use common::proto::multieko2::uctovnictvo::{
-    uctovnictvo_server::Uctovnictvo,
-    PostUctovnictvoRequest, UctovnictvoResponse, GetUctovnictvoRequest, PutUctovnictvoRequest,
-};
-use crate::uctovnictvo::handlers::{
-    post_uctovnictvo, get_uctovnictvo, get_uctovnictvo_count,
-    get_uctovnictvo_by_position, put_uctovnictvo,
-};
-use common::proto::multieko2::common::{Empty, CountResponse, PositionRequest};
-use sqlx::PgPool;
-
-#[derive(Debug)]
-pub struct UctovnictvoService {
-    pub db_pool: PgPool,
-}
-
-#[tonic::async_trait]
-impl Uctovnictvo for UctovnictvoService {
-    async fn post_uctovnictvo(
-        &self,
-        request: Request<PostUctovnictvoRequest>,
-    ) -> Result<Response<UctovnictvoResponse>, Status> {
-        let response = post_uctovnictvo(&self.db_pool, request.into_inner()).await?;
-        Ok(Response::new(response))
-    }
-
-    async fn get_uctovnictvo(
-        &self,
-        request: Request<GetUctovnictvoRequest>,
-    ) -> Result<Response<UctovnictvoResponse>, Status> {
-        let response = get_uctovnictvo(&self.db_pool, request.into_inner()).await?;
-        Ok(Response::new(response))
-    }
-
-    async fn get_uctovnictvo_count(
-        &self,
-        request: Request<Empty>,
-    ) -> Result<Response<CountResponse>, Status> {
-        let response = get_uctovnictvo_count(&self.db_pool, request.into_inner()).await?;
-        Ok(Response::new(response))
-    }
-
-    async fn get_uctovnictvo_by_position(
-        &self,
-        request: Request<PositionRequest>,
-    ) -> Result<Response<UctovnictvoResponse>, Status> {
-        let response = get_uctovnictvo_by_position(&self.db_pool, request.into_inner()).await?;
-        Ok(Response::new(response))
-    }
-
-    async fn put_uctovnictvo(
-        &self,
-        request: Request<PutUctovnictvoRequest>,
-    ) -> Result<Response<UctovnictvoResponse>, Status> {
-        let response = put_uctovnictvo(&self.db_pool, request.into_inner()).await?;
-        Ok(Response::new(response))
-    }
-}
@@ -1,34 +1,50 @@ src/shared/schema_qualifier.rs
 // src/shared/schema_qualifier.rs
+use sqlx::PgPool;
 use tonic::Status;
 
-/// Qualifies table names with the appropriate schema
-///
+// TODO in the future, remove database query on every request and implement caching for scalable
+// solution with many data and requests
+
+/// Qualifies a table name by checking for its existence in the table_definitions table.
+/// This is the robust, "source of truth" approach.
+///
 /// Rules:
-/// - Tables created via PostTableDefinition (dynamically created tables) are in 'gen' schema
-/// - System tables (like users, profiles) remain in 'public' schema
-pub fn qualify_table_name(table_name: &str) -> String {
-    // Check if table matches the pattern of dynamically created tables (e.g., 2025_something)
-    if table_name.starts_with(|c: char| c.is_ascii_digit()) && table_name.contains('_') {
-        format!("gen.\"{}\"", table_name)
+/// - If a table is found in `table_definitions`, it is qualified with the 'gen' schema.
+/// - Otherwise, it is assumed to be a system table in the 'public' schema.
+pub async fn qualify_table_name(
+    db_pool: &PgPool,
+    profile_name: &str,
+    table_name: &str,
+) -> Result<String, Status> {
+    // Check if a definition exists for this table in the given profile.
+    let definition_exists = sqlx::query!(
+        r#"SELECT EXISTS (
+            SELECT 1 FROM table_definitions td
+            JOIN schemas s ON td.schema_id = s.id
+            WHERE s.name = $1 AND td.table_name = $2
+        )"#,
+        profile_name,
+        table_name
+    )
+    .fetch_one(db_pool)
+    .await
+    .map_err(|e| Status::internal(format!("Schema lookup failed: {}", e)))?
+    .exists
+    .unwrap_or(false);
+
+    if definition_exists {
+        Ok(format!("\"{}\".\"{}\"", profile_name, table_name))
     } else {
-        format!("\"{}\"", table_name)
+        // It's not a user-defined table, so it must be a system table in 'public'.
+        Ok(format!("\"{}\"", table_name))
     }
 }
 
 /// Qualifies table names for data operations
-pub fn qualify_table_name_for_data(table_name: &str) -> Result<String, Status> {
-    Ok(qualify_table_name(table_name))
-}
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-
-    #[test]
-    fn test_qualify_table_name() {
-        assert_eq!(qualify_table_name("2025_test_schema3"), "gen.\"2025_test_schema3\"");
-        assert_eq!(qualify_table_name("users"), "\"users\"");
-        assert_eq!(qualify_table_name("profiles"), "\"profiles\"");
-        assert_eq!(qualify_table_name("adresar"), "\"adresar\"");
-    }
-}
+pub async fn qualify_table_name_for_data(
+    db_pool: &PgPool,
+    profile_name: &str,
+    table_name: &str,
+) -> Result<String, Status> {
+    qualify_table_name(db_pool, profile_name, table_name).await
+}
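As a usage sketch (not code from this commit): a handler would now resolve the qualified name asynchronously before building its SQL. The pool setup and the profile/table names below are illustrative assumptions.

```rust
use sqlx::PgPool;
use tonic::Status;

// Hypothetical caller of the new async qualifier.
async fn count_rows(db_pool: &PgPool) -> Result<String, Status> {
    // A table registered in table_definitions resolves to "<profile>"."<table>";
    // anything else falls back to a plain quoted identifier in public.
    let qualified = qualify_table_name(db_pool, "acme", "2025_orders").await?;
    Ok(format!("SELECT COUNT(*) FROM {} WHERE deleted = FALSE", qualified))
}
```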
server/src/steel/server/decimal_math.rs (new file, 190 lines)
@@ -0,0 +1,190 @@
+// src/steel/server/decimal_math.rs
+use rust_decimal::prelude::*;
+use rust_decimal::MathematicalOps;
+use steel::rvals::SteelVal;
+use std::str::FromStr;
+use thiserror::Error;
+
+#[derive(Debug, Error)]
+pub enum DecimalMathError {
+    #[error("Invalid decimal format: {0}")]
+    InvalidDecimal(String),
+    #[error("Math operation failed: {0}")]
+    MathError(String),
+    #[error("Division by zero")]
+    DivisionByZero,
+}
+
+/// Converts a SteelVal to a Decimal
+fn steel_val_to_decimal(val: &SteelVal) -> Result<Decimal, DecimalMathError> {
+    match val {
+        SteelVal::StringV(s) => {
+            Decimal::from_str(&s.to_string())
+                .map_err(|e| DecimalMathError::InvalidDecimal(format!("{}: {}", s, e)))
+        }
+        SteelVal::NumV(n) => {
+            Decimal::try_from(*n)
+                .map_err(|e| DecimalMathError::InvalidDecimal(format!("{}: {}", n, e)))
+        }
+        SteelVal::IntV(i) => {
+            Ok(Decimal::from(*i))
+        }
+        _ => Err(DecimalMathError::InvalidDecimal(format!("Unsupported type: {:?}", val)))
+    }
+}
+
+/// Converts a Decimal back to a SteelVal string
+fn decimal_to_steel_val(decimal: Decimal) -> SteelVal {
+    SteelVal::StringV(decimal.to_string().into())
+}
+
+// Basic arithmetic operations
+pub fn decimal_add(a: String, b: String) -> Result<String, String> {
+    let a_dec = Decimal::from_str(&a).map_err(|e| format!("Invalid decimal '{}': {}", a, e))?;
+    let b_dec = Decimal::from_str(&b).map_err(|e| format!("Invalid decimal '{}': {}", b, e))?;
+    Ok((a_dec + b_dec).to_string())
+}
+
+pub fn decimal_sub(a: String, b: String) -> Result<String, String> {
+    let a_dec = Decimal::from_str(&a).map_err(|e| format!("Invalid decimal '{}': {}", a, e))?;
+    let b_dec = Decimal::from_str(&b).map_err(|e| format!("Invalid decimal '{}': {}", b, e))?;
+    Ok((a_dec - b_dec).to_string())
+}
+
+pub fn decimal_mul(a: String, b: String) -> Result<String, String> {
+    let a_dec = Decimal::from_str(&a).map_err(|e| format!("Invalid decimal '{}': {}", a, e))?;
+    let b_dec = Decimal::from_str(&b).map_err(|e| format!("Invalid decimal '{}': {}", b, e))?;
+    Ok((a_dec * b_dec).to_string())
+}
+
+pub fn decimal_div(a: String, b: String) -> Result<String, String> {
+    let a_dec = Decimal::from_str(&a).map_err(|e| format!("Invalid decimal '{}': {}", a, e))?;
+    let b_dec = Decimal::from_str(&b).map_err(|e| format!("Invalid decimal '{}': {}", b, e))?;
+
+    if b_dec.is_zero() {
+        return Err("Division by zero".to_string());
+    }
+
+    Ok((a_dec / b_dec).to_string())
+}
+
+// Advanced mathematical functions (requires maths feature)
+pub fn decimal_pow(base: String, exp: String) -> Result<String, String> {
+    let base_dec = Decimal::from_str(&base).map_err(|e| format!("Invalid decimal '{}': {}", base, e))?;
+    let exp_dec = Decimal::from_str(&exp).map_err(|e| format!("Invalid decimal '{}': {}", exp, e))?;
+
+    base_dec.checked_powd(exp_dec)
+        .map(|result| result.to_string())
+        .ok_or_else(|| "Power operation failed or overflowed".to_string())
+}
+
+pub fn decimal_sqrt(a: String) -> Result<String, String> {
+    let a_dec = Decimal::from_str(&a).map_err(|e| format!("Invalid decimal '{}': {}", a, e))?;
+
+    a_dec.sqrt()
+        .map(|result| result.to_string())
+        .ok_or_else(|| "Square root failed (negative number?)".to_string())
+}
+
+pub fn decimal_ln(a: String) -> Result<String, String> {
+    let a_dec = Decimal::from_str(&a).map_err(|e| format!("Invalid decimal '{}': {}", a, e))?;
+
+    a_dec.checked_ln()
+        .map(|result| result.to_string())
+        .ok_or_else(|| "Natural log failed (non-positive number?)".to_string())
+}
+
+pub fn decimal_log10(a: String) -> Result<String, String> {
+    let a_dec = Decimal::from_str(&a).map_err(|e| format!("Invalid decimal '{}': {}", a, e))?;
+
+    a_dec.checked_log10()
+        .map(|result| result.to_string())
+        .ok_or_else(|| "Log10 failed (non-positive number?)".to_string())
+}
+
+pub fn decimal_exp(a: String) -> Result<String, String> {
+    let a_dec = Decimal::from_str(&a).map_err(|e| format!("Invalid decimal '{}': {}", a, e))?;
+
+    a_dec.checked_exp()
+        .map(|result| result.to_string())
+        .ok_or_else(|| "Exponential failed or overflowed".to_string())
+}
+
+// Trigonometric functions (input in radians)
+pub fn decimal_sin(a: String) -> Result<String, String> {
+    let a_dec = Decimal::from_str(&a).map_err(|e| format!("Invalid decimal '{}': {}", a, e))?;
+
+    a_dec.checked_sin()
+        .map(|result| result.to_string())
+        .ok_or_else(|| "Sine calculation failed or overflowed".to_string())
+}
+
+pub fn decimal_cos(a: String) -> Result<String, String> {
+    let a_dec = Decimal::from_str(&a).map_err(|e| format!("Invalid decimal '{}': {}", a, e))?;
+
+    a_dec.checked_cos()
+        .map(|result| result.to_string())
+        .ok_or_else(|| "Cosine calculation failed or overflowed".to_string())
+}
+
+pub fn decimal_tan(a: String) -> Result<String, String> {
+    let a_dec = Decimal::from_str(&a).map_err(|e| format!("Invalid decimal '{}': {}", a, e))?;
+
+    a_dec.checked_tan()
+        .map(|result| result.to_string())
+        .ok_or_else(|| "Tangent calculation failed or overflowed".to_string())
+}
+
+// Comparison functions
+pub fn decimal_gt(a: String, b: String) -> Result<bool, String> {
+    let a_dec = Decimal::from_str(&a).map_err(|e| format!("Invalid decimal '{}': {}", a, e))?;
+    let b_dec = Decimal::from_str(&b).map_err(|e| format!("Invalid decimal '{}': {}", b, e))?;
+    Ok(a_dec > b_dec)
+}
+
+pub fn decimal_lt(a: String, b: String) -> Result<bool, String> {
+    let a_dec = Decimal::from_str(&a).map_err(|e| format!("Invalid decimal '{}': {}", a, e))?;
+    let b_dec = Decimal::from_str(&b).map_err(|e| format!("Invalid decimal '{}': {}", b, e))?;
+    Ok(a_dec < b_dec)
+}
+
+pub fn decimal_eq(a: String, b: String) -> Result<bool, String> {
+    let a_dec = Decimal::from_str(&a).map_err(|e| format!("Invalid decimal '{}': {}", a, e))?;
+    let b_dec = Decimal::from_str(&b).map_err(|e| format!("Invalid decimal '{}': {}", b, e))?;
+    Ok(a_dec == b_dec)
+}
+
+// Utility functions
+pub fn decimal_abs(a: String) -> Result<String, String> {
+    let a_dec = Decimal::from_str(&a).map_err(|e| format!("Invalid decimal '{}': {}", a, e))?;
+    Ok(a_dec.abs().to_string())
+}
+
+pub fn decimal_round(a: String, places: i32) -> Result<String, String> {
+    let a_dec = Decimal::from_str(&a).map_err(|e| format!("Invalid decimal '{}': {}", a, e))?;
+    Ok(a_dec.round_dp(places as u32).to_string())
+}
+
+pub fn decimal_min(a: String, b: String) -> Result<String, String> {
+    let a_dec = Decimal::from_str(&a).map_err(|e| format!("Invalid decimal '{}': {}", a, e))?;
+    let b_dec = Decimal::from_str(&b).map_err(|e| format!("Invalid decimal '{}': {}", b, e))?;
+    Ok(a_dec.min(b_dec).to_string())
+}
+
+pub fn decimal_max(a: String, b: String) -> Result<String, String> {
+    let a_dec = Decimal::from_str(&a).map_err(|e| format!("Invalid decimal '{}': {}", a, e))?;
+    let b_dec = Decimal::from_str(&b).map_err(|e| format!("Invalid decimal '{}': {}", b, e))?;
+    Ok(a_dec.max(b_dec).to_string())
+}
+
+pub fn decimal_gte(a: String, b: String) -> Result<bool, String> {
+    let a_dec = Decimal::from_str(&a).map_err(|e| format!("Invalid decimal '{}': {}", a, e))?;
+    let b_dec = Decimal::from_str(&b).map_err(|e| format!("Invalid decimal '{}': {}", b, e))?;
+    Ok(a_dec >= b_dec)
+}
+
+pub fn decimal_lte(a: String, b: String) -> Result<bool, String> {
+    let a_dec = Decimal::from_str(&a).map_err(|e| format!("Invalid decimal '{}': {}", a, e))?;
+    let b_dec = Decimal::from_str(&b).map_err(|e| format!("Invalid decimal '{}': {}", b, e))?;
+    Ok(a_dec <= b_dec)
+}
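Because every operation takes and returns decimal strings, results stay exact where f64 would drift. A small illustrative check, assuming these functions are in scope:

```rust
fn main() {
    // 0.1 + 0.2 is exactly 0.3 in decimal arithmetic, unlike binary floats.
    assert_eq!(decimal_add("0.1".into(), "0.2".into()).unwrap(), "0.3");
    // Division by zero is reported as an error string instead of panicking.
    assert!(decimal_div("1".into(), "0".into()).is_err());
    // Comparisons parse both operands and compare numerically, so 2.50 is not > 2.5.
    assert_eq!(decimal_gt("2.50".into(), "2.5".into()).unwrap(), false);
}
```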
@@ -1,8 +1,9 @@ src/steel/server/execution.rs
-// src/steel/server/execution.rs
+// Updated src/steel/server/execution.rs
 use steel::steel_vm::engine::Engine;
 use steel::steel_vm::register_fn::RegisterFn;
 use steel::rvals::SteelVal;
 use super::functions::SteelContext;
+use super::decimal_math::*;
 use sqlx::PgPool;
 use std::sync::Arc;
 use thiserror::Error;
@@ -33,6 +34,24 @@ pub fn execute_script(
     let mut vm = Engine::new();
     let context = Arc::new(context);
 
+    // Register existing Steel functions
+    register_steel_functions(&mut vm, context.clone());
+
+    // Register all decimal math functions
+    register_decimal_math_functions(&mut vm);
+
+    // Execute script and process results
+    let results = vm.compile_and_run_raw_program(script)
+        .map_err(|e| ExecutionError::RuntimeError(e.to_string()))?;
+
+    // Convert results to target type
+    match target_type {
+        "STRINGS" => process_string_results(results),
+        _ => Err(ExecutionError::UnsupportedType(target_type.into()))
+    }
+}
+
+fn register_steel_functions(vm: &mut Engine, context: Arc<SteelContext>) {
     // Register steel_get_column with row context
     vm.register_fn("steel_get_column", {
         let ctx = context.clone();
@@ -59,27 +78,101 @@ pub fn execute_script(
             .map_err(|e| e.to_string())
         }
     });
+}
 
-    // Execute script and process results
-    let results = vm.compile_and_run_raw_program(script)
-        .map_err(|e| ExecutionError::RuntimeError(e.to_string()))?;
-
-    // Convert results to target type
-    match target_type {
-        "STRINGS" => process_string_results(results),
-        _ => Err(ExecutionError::UnsupportedType(target_type.into()))
-    }
+fn register_decimal_math_functions(vm: &mut Engine) {
+    // Basic arithmetic operations
+    vm.register_fn("decimal-add", decimal_add);
+    vm.register_fn("decimal-sub", decimal_sub);
+    vm.register_fn("decimal-mul", decimal_mul);
+    vm.register_fn("decimal-div", decimal_div);
+
+    // Advanced mathematical functions
+    vm.register_fn("decimal-pow", decimal_pow);
+    vm.register_fn("decimal-sqrt", decimal_sqrt);
+    vm.register_fn("decimal-ln", decimal_ln);
+    vm.register_fn("decimal-log10", decimal_log10);
+    vm.register_fn("decimal-exp", decimal_exp);
+
+    // Trigonometric functions
+    vm.register_fn("decimal-sin", decimal_sin);
+    vm.register_fn("decimal-cos", decimal_cos);
+    vm.register_fn("decimal-tan", decimal_tan);
+
+    // Comparison functions
+    vm.register_fn("decimal-gt", decimal_gt);
+    vm.register_fn("decimal-lt", decimal_lt);
+    vm.register_fn("decimal-eq", decimal_eq);
+
+    // Utility functions
+    vm.register_fn("decimal-abs", decimal_abs);
+    vm.register_fn("decimal-round", decimal_round);
+    vm.register_fn("decimal-min", decimal_min);
+    vm.register_fn("decimal-max", decimal_max);
+
+    // Additional convenience functions
+    vm.register_fn("decimal-zero", || "0".to_string());
+    vm.register_fn("decimal-one", || "1".to_string());
+    vm.register_fn("decimal-pi", || "3.1415926535897932384626433833".to_string());
+    vm.register_fn("decimal-e", || "2.7182818284590452353602874714".to_string());
+
+    // Type conversion helpers
+    vm.register_fn("to-decimal", |s: String| -> Result<String, String> {
+        use rust_decimal::prelude::*;
+        use std::str::FromStr;
+
+        Decimal::from_str(&s)
+            .map(|d| d.to_string())
+            .map_err(|e| format!("Invalid decimal: {}", e))
+    });
+
+    // Financial functions
+    vm.register_fn("decimal-percentage", |amount: String, percentage: String| -> Result<String, String> {
+        use rust_decimal::prelude::*;
+        use std::str::FromStr;
+
+        let amount_dec = Decimal::from_str(&amount)
+            .map_err(|e| format!("Invalid amount: {}", e))?;
+        let percentage_dec = Decimal::from_str(&percentage)
+            .map_err(|e| format!("Invalid percentage: {}", e))?;
+        let hundred = Decimal::from(100);
+
+        Ok((amount_dec * percentage_dec / hundred).to_string())
+    });
+
+    vm.register_fn("decimal-compound", |principal: String, rate: String, time: String| -> Result<String, String> {
+        use rust_decimal::prelude::*;
+        use rust_decimal::MathematicalOps;
+        use std::str::FromStr;
+
+        let principal_dec = Decimal::from_str(&principal)
+            .map_err(|e| format!("Invalid principal: {}", e))?;
+        let rate_dec = Decimal::from_str(&rate)
+            .map_err(|e| format!("Invalid rate: {}", e))?;
+        let time_dec = Decimal::from_str(&time)
+            .map_err(|e| format!("Invalid time: {}", e))?;
+
+        let one = Decimal::ONE;
+        let compound_factor = (one + rate_dec).checked_powd(time_dec)
+            .ok_or("Compound calculation overflow")?;
+
+        Ok((principal_dec * compound_factor).to_string())
+    });
 }
 
 fn process_string_results(results: Vec<SteelVal>) -> Result<Value, ExecutionError> {
     let mut strings = Vec::new();
     for result in results {
-        if let SteelVal::StringV(s) = result {
-            strings.push(s.to_string());
-        } else {
-            return Err(ExecutionError::TypeConversionError(
-                format!("Expected string, got {:?}", result)
-            ));
+        match result {
+            SteelVal::StringV(s) => strings.push(s.to_string()),
+            SteelVal::NumV(n) => strings.push(n.to_string()),
+            SteelVal::IntV(i) => strings.push(i.to_string()),
+            SteelVal::BoolV(b) => strings.push(b.to_string()),
+            _ => {
+                return Err(ExecutionError::TypeConversionError(
+                    format!("Expected string-convertible type, got {:?}", result)
+                ));
+            }
         }
     }
     Ok(Value::Strings(strings))
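For orientation, the registration calls above follow the standard Steel embedding pattern: each Rust function becomes a Scheme-callable symbol. A minimal sketch using only the Engine APIs that appear in this diff; exact method signatures may differ slightly between steel versions, and the helper here is a stand-in rather than one of the project's decimal functions.

```rust
use steel::steel_vm::engine::Engine;
use steel::steel_vm::register_fn::RegisterFn;

// Stand-in helper; the real project registers the decimal_* functions instead.
fn shout(s: String) -> String {
    s.to_uppercase()
}

fn main() {
    let mut vm = Engine::new();
    vm.register_fn("shout", shout);

    // A script can now call the registered function by name, e.g. (shout "hello").
    let script = r#"(shout "hello")"#;
    let results = vm.compile_and_run_raw_program(script).expect("script runs");
    println!("{:?}", results);
}
```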
@@ -21,7 +21,8 @@ pub enum FunctionError {
 #[derive(Clone)]
 pub struct SteelContext {
     pub current_table: String,
-    pub profile_id: i64,
+    pub schema_id: i64,
+    pub schema_name: String,
     pub row_data: HashMap<String, String>,
     pub db_pool: Arc<PgPool>,
 }
@@ -30,8 +31,8 @@ impl SteelContext {
     pub async fn get_related_table_name(&self, base_name: &str) -> Result<String, FunctionError> {
         let table_def = sqlx::query!(
             r#"SELECT table_name FROM table_definitions
-            WHERE profile_id = $1 AND table_name LIKE $2"#,
-            self.profile_id,
+            WHERE schema_id = $1 AND table_name LIKE $2"#,
+            self.schema_id,
             format!("%_{}", base_name)
         )
         .fetch_optional(&*self.db_pool)
@@ -66,7 +67,7 @@ impl SteelContext {
 
         // Add quotes around the table name
         sqlx::query_scalar::<_, String>(
-            &format!("SELECT {} FROM \"{}\" WHERE id = $1", column, actual_table)
+            &format!("SELECT {} FROM \"{}\".\"{}\" WHERE id = $1", column, self.schema_name, actual_table)
         )
         .bind(fk_value.parse::<i64>().map_err(|_|
             SteelVal::StringV("Invalid foreign key format".into()))?)
@@ -2,7 +2,9 @@ src/steel/server/mod.rs
 pub mod execution;
 pub mod syntax_parser;
 pub mod functions;
+pub mod decimal_math;
 
 pub use execution::*;
 pub use syntax_parser::*;
 pub use functions::*;
+pub use decimal_math::*;
@@ -1,27 +1,111 @@ src/steel/server/syntax_parser.rs
-// src/steel/server/syntax_parser.rs
 use regex::Regex;
 use std::collections::HashSet;
 
 pub struct SyntaxParser {
+    // Existing patterns for column/SQL integration
     current_table_column_re: Regex,
     different_table_column_re: Regex,
     one_to_many_indexed_re: Regex,
     sql_integration_re: Regex,
+
+    // Simple math operation replacement patterns
+    math_operators: Vec<(Regex, &'static str)>,
+    number_literal_re: Regex,
 }
 
 impl SyntaxParser {
     pub fn new() -> Self {
+        // Define math operator replacements
+        let math_operators = vec![
+            // Basic arithmetic
+            (Regex::new(r"\(\s*\+\s+").unwrap(), "(decimal-add "),
+            (Regex::new(r"\(\s*-\s+").unwrap(), "(decimal-sub "),
+            (Regex::new(r"\(\s*\*\s+").unwrap(), "(decimal-mul "),
+            (Regex::new(r"\(\s*/\s+").unwrap(), "(decimal-div "),
+
+            // Power and advanced operations
+            (Regex::new(r"\(\s*\^\s+").unwrap(), "(decimal-pow "),
+            (Regex::new(r"\(\s*\*\*\s+").unwrap(), "(decimal-pow "),
+            (Regex::new(r"\(\s*pow\s+").unwrap(), "(decimal-pow "),
+            (Regex::new(r"\(\s*sqrt\s+").unwrap(), "(decimal-sqrt "),
+
+            // Logarithmic functions
+            (Regex::new(r"\(\s*ln\s+").unwrap(), "(decimal-ln "),
+            (Regex::new(r"\(\s*log\s+").unwrap(), "(decimal-ln "),
+            (Regex::new(r"\(\s*log10\s+").unwrap(), "(decimal-log10 "),
+            (Regex::new(r"\(\s*exp\s+").unwrap(), "(decimal-exp "),
+
+            // Trigonometric functions
+            (Regex::new(r"\(\s*sin\s+").unwrap(), "(decimal-sin "),
+            (Regex::new(r"\(\s*cos\s+").unwrap(), "(decimal-cos "),
+            (Regex::new(r"\(\s*tan\s+").unwrap(), "(decimal-tan "),
+
+            // Comparison operators
+            (Regex::new(r"\(\s*>\s+").unwrap(), "(decimal-gt "),
+            (Regex::new(r"\(\s*<\s+").unwrap(), "(decimal-lt "),
+            (Regex::new(r"\(\s*=\s+").unwrap(), "(decimal-eq "),
+            (Regex::new(r"\(\s*>=\s+").unwrap(), "(decimal-gte "),
+            (Regex::new(r"\(\s*<=\s+").unwrap(), "(decimal-lte "),
+
+            // Utility functions
+            (Regex::new(r"\(\s*abs\s+").unwrap(), "(decimal-abs "),
+            (Regex::new(r"\(\s*min\s+").unwrap(), "(decimal-min "),
+            (Regex::new(r"\(\s*max\s+").unwrap(), "(decimal-max "),
+            (Regex::new(r"\(\s*round\s+").unwrap(), "(decimal-round "),
+        ];
+
         SyntaxParser {
             current_table_column_re: Regex::new(r"@(\w+)").unwrap(),
             different_table_column_re: Regex::new(r"@(\w+)\.(\w+)").unwrap(),
             one_to_many_indexed_re: Regex::new(r"@(\w+)\[(\d+)\]\.(\w+)").unwrap(),
             sql_integration_re: Regex::new(r#"@sql\((['"])(.*?)['"]\)"#).unwrap(),
+
+            // FIXED: Match negative numbers and avoid already quoted strings
+            number_literal_re: Regex::new(r#"(?<!")(-?\d+\.?\d*(?:[eE][+-]?\d+)?)(?!")"#).unwrap(),
+
+            math_operators,
         }
     }
 
     pub fn parse(&self, script: &str, current_table: &str) -> String {
         let mut transformed = script.to_string();
+
+        // Step 1: Convert all numeric literals to strings (FIXED to handle negative numbers)
+        transformed = self.convert_numbers_to_strings(&transformed);
+
+        // Step 2: Replace math function calls with decimal equivalents (SIMPLIFIED)
+        transformed = self.replace_math_functions(&transformed);
+
+        // Step 3: Handle existing column and SQL integrations (unchanged)
+        transformed = self.process_column_integrations(&transformed, current_table);
+
+        transformed
+    }
+
+    /// Convert all unquoted numeric literals to quoted strings
+    fn convert_numbers_to_strings(&self, script: &str) -> String {
+        // This regex matches numbers that are NOT already inside quotes
+        self.number_literal_re.replace_all(script, |caps: &regex::Captures| {
+            format!("\"{}\"", &caps[1])
+        }).to_string()
+    }
+
+    /// Replace math function calls with decimal equivalents (SIMPLIFIED)
+    fn replace_math_functions(&self, script: &str) -> String {
+        let mut result = script.to_string();
+
+        // Apply all math operator replacements
+        for (pattern, replacement) in &self.math_operators {
+            result = pattern.replace_all(&result, *replacement).to_string();
+        }
+
+        result
+    }
+
+    /// Process existing column and SQL integrations (unchanged logic)
+    fn process_column_integrations(&self, script: &str, current_table: &str) -> String {
+        let mut transformed = script.to_string();
 
         // Process indexed access first to avoid overlap with relationship matches
         transformed = self.one_to_many_indexed_re.replace_all(&transformed, |caps: &regex::Captures| {
             format!("(steel_get_column_with_index \"{}\" {} \"{}\")",
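Taken together, the parser quotes numeric literals and then maps plain Scheme operators onto the registered decimal-* functions before the script ever reaches the VM. A hedged illustration of the intended rewriting, using only the SyntaxParser API shown above (the exact output spacing depends on the regexes, and the table name is an invented placeholder):

```rust
fn main() {
    let parser = SyntaxParser::new();
    // Numeric literals are quoted first, then operators become decimal-* calls,
    // so a script like this...
    let script = "(* (+ 1.5 2) 3)";
    let rewritten = parser.parse(script, "2025_invoices");
    // ...is expected to come out roughly as:
    //   (decimal-mul (decimal-add "1.5" "2") "3")
    println!("{}", rewritten);
}
```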
@@ -1,4 +1,4 @@ server/src/table_definition/handlers/delete_table.rs
-// server/src/table_definition/handlers/delete_table.rs
+// src/table_definition/handlers/delete_table.rs
 use tonic::Status;
 use sqlx::PgPool;
 use common::proto::multieko2::table_definition::{DeleteTableRequest, DeleteTableResponse};
@@ -10,25 +10,25 @@ pub async fn delete_table(
     let mut transaction = db_pool.begin().await
         .map_err(|e| Status::internal(format!("Failed to start transaction: {}", e)))?;
 
-    // Step 1: Get profile and validate existence
-    let profile = sqlx::query!(
-        "SELECT id FROM profiles WHERE name = $1",
+    // Step 1: Get schema and validate existence
+    let schema = sqlx::query!(
+        "SELECT id, name FROM schemas WHERE name = $1",
         request.profile_name
     )
     .fetch_optional(&mut *transaction)
     .await
-    .map_err(|e| Status::internal(format!("Profile lookup failed: {}", e)))?;
+    .map_err(|e| Status::internal(format!("Schema lookup failed: {}", e)))?;
 
-    let profile_id = match profile {
-        Some(p) => p.id,
+    let (schema_id, schema_name) = match schema {
+        Some(s) => (s.id, s.name),
         None => return Err(Status::not_found("Profile not found")),
     };
 
     // Step 2: Get table definition and validate existence
     let table_def = sqlx::query!(
         "SELECT id FROM table_definitions
-        WHERE profile_id = $1 AND table_name = $2",
-        profile_id,
+        WHERE schema_id = $1 AND table_name = $2",
+        schema_id,
         request.table_name
     )
     .fetch_optional(&mut *transaction)
@@ -40,8 +40,9 @@ pub async fn delete_table(
         None => return Err(Status::not_found("Table not found in profile")),
     };
 
-    // Step 3: Drop the actual PostgreSQL table with CASCADE
-    sqlx::query(&format!(r#"DROP TABLE IF EXISTS "{}" CASCADE"#, request.table_name))
+    // Step 3: Drop the actual PostgreSQL table with CASCADE (schema-qualified)
+    let drop_table_sql = format!(r#"DROP TABLE IF EXISTS "{}"."{}" CASCADE"#, schema_name, request.table_name);
+    sqlx::query(&drop_table_sql)
         .execute(&mut *transaction)
         .await
         .map_err(|e| Status::internal(format!("Table drop failed: {}", e)))?;
@@ -55,23 +56,31 @@ pub async fn delete_table(
         .await
         .map_err(|e| Status::internal(format!("Definition deletion failed: {}", e)))?;
 
-    // Step 5: Check and clean up profile if empty
+    // Step 5: Check and clean up schema if empty
     let remaining = sqlx::query!(
-        "SELECT COUNT(*) as count FROM table_definitions WHERE profile_id = $1",
-        profile_id
+        "SELECT COUNT(*) as count FROM table_definitions WHERE schema_id = $1",
+        schema_id
     )
     .fetch_one(&mut *transaction)
    .await
    .map_err(|e| Status::internal(format!("Count query failed: {}", e)))?;
 
     if remaining.count.unwrap_or(1) == 0 {
+        // Drop the PostgreSQL schema if empty
+        let drop_schema_sql = format!(r#"DROP SCHEMA IF EXISTS "{}" CASCADE"#, schema_name);
+        sqlx::query(&drop_schema_sql)
+            .execute(&mut *transaction)
+            .await
+            .map_err(|e| Status::internal(format!("Schema drop failed: {}", e)))?;
+
+        // Delete the schema record
         sqlx::query!(
-            "DELETE FROM profiles WHERE id = $1",
-            profile_id
+            "DELETE FROM schemas WHERE id = $1",
+            schema_id
         )
         .execute(&mut *transaction)
         .await
-        .map_err(|e| Status::internal(format!("Profile cleanup failed: {}", e)))?;
+        .map_err(|e| Status::internal(format!("Schema cleanup failed: {}", e)))?;
     }
 
     transaction.commit().await
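The drop statement now quotes both the schema and the table identifier, which keeps generated names such as ones starting with a digit valid and pins the statement to the profile's schema instead of relying on search_path. A tiny sketch of the same string construction used above (the names are illustrative):

```rust
fn drop_table_sql(schema: &str, table: &str) -> String {
    // Double-quoting both identifiers mirrors the handler's schema-qualified DROP.
    format!(r#"DROP TABLE IF EXISTS "{}"."{}" CASCADE"#, schema, table)
}

fn main() {
    assert_eq!(
        drop_table_sql("acme", "2025_orders"),
        r#"DROP TABLE IF EXISTS "acme"."2025_orders" CASCADE"#
    );
}
```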
@@ -15,13 +15,15 @@ pub async fn get_profile_tree(
 ) -> Result<Response<ProfileTreeResponse>, Status> {
     let mut profiles = Vec::new();
 
-    // Get all profiles
-    let profile_records = sqlx::query!("SELECT id, name FROM profiles")
-        .fetch_all(db_pool)
-        .await
-        .map_err(|e| Status::internal(format!("Failed to fetch profiles: {}", e)))?;
+    // Get all schemas (internally changed from profiles to schemas)
+    let schema_records = sqlx::query!(
+        "SELECT id, name FROM schemas ORDER BY name"
+    )
+    .fetch_all(db_pool)
+    .await
+    .map_err(|e| Status::internal(format!("Failed to fetch schemas: {}", e)))?;
 
-    for profile in profile_records {
+    for schema in schema_records {
         // Get all tables with their dependencies from the links table
         let tables = sqlx::query!(
             r#"
@@ -35,15 +37,16 @@ pub async fn get_profile_tree(
                         'required', tdl.is_required
                     )
                 ) FILTER (WHERE ltd.id IS NOT NULL),
-                '[]'
+                '[]'::json
             ) as dependencies
             FROM table_definitions td
             LEFT JOIN table_definition_links tdl ON td.id = tdl.source_table_id
             LEFT JOIN table_definitions ltd ON tdl.linked_table_id = ltd.id
-            WHERE td.profile_id = $1
+            WHERE td.schema_id = $1
             GROUP BY td.id, td.table_name
+            ORDER BY td.table_name
             "#,
-            profile.id
+            schema.id
         )
         .fetch_all(db_pool)
         .await
@@ -70,8 +73,9 @@ pub async fn get_profile_tree(
             })
             .collect();
 
+        // External API still returns "profiles" for compatibility
         profiles.push(Profile {
-            name: profile.name,
+            name: schema.name,
             tables: proto_tables
         });
     }
@@ -1,48 +1,170 @@ src/table_definition/handlers/post_table_definition.rs
+// src/table_definition/handlers/post_table_definition.rs
+
 use tonic::Status;
 use sqlx::{PgPool, Transaction, Postgres};
 use serde_json::json;
-use time::OffsetDateTime;
 use common::proto::multieko2::table_definition::{PostTableDefinitionRequest, TableDefinitionResponse};
 
-const GENERATED_SCHEMA_NAME: &str = "gen";
-
 const PREDEFINED_FIELD_TYPES: &[(&str, &str)] = &[
     ("text", "TEXT"),
-    ("psc", "TEXT"),
-    ("phone", "VARCHAR(15)"),
-    ("address", "TEXT"),
-    ("email", "VARCHAR(255)"),
+    ("string", "TEXT"),
     ("boolean", "BOOLEAN"),
     ("timestamp", "TIMESTAMPTZ"),
+    ("timestamptz", "TIMESTAMPTZ"),
+    ("time", "TIMESTAMPTZ"),
+    ("money", "NUMERIC(14, 4)"),
+    ("integer", "INTEGER"),
+    ("int", "INTEGER"),
+    ("biginteger", "BIGINT"),
+    ("bigint", "BIGINT"),
+    ("date", "DATE"),
 ];
 
-fn is_valid_identifier(s: &str) -> bool {
-    !s.is_empty() &&
-    s.chars().all(|c| c.is_ascii_alphanumeric() || c == '_') &&
-    !s.starts_with('_') &&
-    !s.chars().next().unwrap().is_ascii_digit()
+// NEW: Helper function to provide detailed error messages
+fn validate_identifier_format(s: &str, identifier_type: &str) -> Result<(), Status> {
+    if s.is_empty() {
+        return Err(Status::invalid_argument(format!("{} cannot be empty", identifier_type)));
+    }
+
+    if s.starts_with('_') {
+        return Err(Status::invalid_argument(format!("{} cannot start with underscore", identifier_type)));
+    }
+
+    if s.chars().next().unwrap().is_ascii_digit() {
+        return Err(Status::invalid_argument(format!("{} cannot start with a number", identifier_type)));
+    }
+
+    // Check for invalid characters
+    let invalid_chars: Vec<char> = s.chars()
+        .filter(|c| !c.is_ascii_lowercase() && !c.is_ascii_digit() && *c != '_')
+        .collect();
+
+    if !invalid_chars.is_empty() {
+        return Err(Status::invalid_argument(format!(
+            "{} contains invalid characters: {:?}. Only lowercase letters, numbers, and underscores are allowed",
+            identifier_type, invalid_chars
+        )));
+    }
+
+    // Check for uppercase letters specifically to give a helpful message
+    if s.chars().any(|c| c.is_ascii_uppercase()) {
+        return Err(Status::invalid_argument(format!(
+            "{} contains uppercase letters. Only lowercase letters are allowed",
+            identifier_type
+        )));
+    }
+
+    Ok(())
 }
 
-fn sanitize_table_name(s: &str) -> String {
-    let year = OffsetDateTime::now_utc().year();
-    let cleaned = s.replace(|c: char| !c.is_ascii_alphanumeric() && c != '_', "")
-        .trim()
-        .to_lowercase();
-    format!("{}_{}", year, cleaned)
+fn validate_decimal_number_format(num_str: &str, param_name: &str) -> Result<(), Status> {
+    if num_str.is_empty() {
+        return Err(Status::invalid_argument(format!(
+            "{} cannot be empty",
+            param_name
+        )));
+    }
+
+    // Check for explicit signs
+    if num_str.starts_with('+') || num_str.starts_with('-') {
+        return Err(Status::invalid_argument(format!(
+            "{} cannot have explicit positive or negative signs",
+            param_name
+        )));
+    }
+
+    // Check for decimal points
+    if num_str.contains('.') {
+        return Err(Status::invalid_argument(format!(
+            "{} must be a whole number (no decimal points)",
+            param_name
+        )));
+    }
+
+    // Check for leading zeros (but allow "0" itself)
+    if num_str.len() > 1 && num_str.starts_with('0') {
+        let trimmed = num_str.trim_start_matches('0');
+        let suggestion = if trimmed.is_empty() { "0" } else { trimmed };
+        return Err(Status::invalid_argument(format!(
+            "{} cannot have leading zeros (use '{}' instead of '{}')",
+            param_name,
+            suggestion,
+            num_str
+        )));
+    }
+
+    // Check that all characters are digits
+    if !num_str.chars().all(|c| c.is_ascii_digit()) {
+        return Err(Status::invalid_argument(format!(
+            "{} contains invalid characters. Only digits 0-9 are allowed",
+            param_name
+        )));
+    }
+
+    Ok(())
 }
 
-fn sanitize_identifier(s: &str) -> String {
-    s.replace(|c: char| !c.is_ascii_alphanumeric() && c != '_', "")
-        .trim()
-        .to_lowercase()
-}
-
-fn map_field_type(field_type: &str) -> Result<&str, Status> {
+fn map_field_type(field_type: &str) -> Result<String, Status> {
+    let lower_field_type = field_type.to_lowercase();
+
+    // Special handling for "decimal(precision, scale)"
+    if lower_field_type.starts_with("decimal(") && lower_field_type.ends_with(')') {
+        // Extract the part inside the parentheses, e.g., "10, 2"
+        let args = lower_field_type
+            .strip_prefix("decimal(")
+            .and_then(|s| s.strip_suffix(')'))
+            .unwrap_or(""); // Should always succeed due to the checks above
+
+        // Split into precision and scale parts
+        if let Some((p_str, s_str)) = args.split_once(',') {
+            let precision_str = p_str.trim();
+            let scale_str = s_str.trim();
+
+            // NEW: Validate format BEFORE parsing
+            validate_decimal_number_format(precision_str, "precision")?;
+            validate_decimal_number_format(scale_str, "scale")?;
+
+            // Parse precision, returning an error if it's not a valid number
+            let precision = precision_str.parse::<u32>().map_err(|_| {
+                Status::invalid_argument("Invalid precision in decimal type")
+            })?;
+
+            // Parse scale, returning an error if it's not a valid number
+            let scale = scale_str.parse::<u32>().map_err(|_| {
+                Status::invalid_argument("Invalid scale in decimal type")
+            })?;
+
+            // Add validation based on PostgreSQL rules
+            if precision < 1 {
+                return Err(Status::invalid_argument("Precision must be at least 1"));
+            }
+            if scale > precision {
+                return Err(Status::invalid_argument(
+                    "Scale cannot be greater than precision",
+                ));
+            }
+
+            // If everything is valid, build and return the NUMERIC type string
+            return Ok(format!("NUMERIC({}, {})", precision, scale));
+        } else {
+            // The format was wrong, e.g., "decimal(10)" or "decimal()"
+            return Err(Status::invalid_argument(
+                "Invalid decimal format. Expected: decimal(precision, scale)",
+            ));
+        }
+    }
+
+    // If not a decimal, fall back to the predefined list
     PREDEFINED_FIELD_TYPES
         .iter()
-        .find(|(key, _)| *key == field_type.to_lowercase().as_str())
-        .map(|(_, sql_type)| *sql_type)
-        .ok_or_else(|| Status::invalid_argument(format!("Invalid field type: {}", field_type)))
+        .find(|(key, _)| *key == lower_field_type.as_str())
+        .map(|(_, sql_type)| sql_type.to_string()) // Convert to an owned String
+        .ok_or_else(|| {
+            Status::invalid_argument(format!(
+                "Invalid field type: {}",
+                field_type
+            ))
+        })
 }
 
 fn is_invalid_table_name(table_name: &str) -> bool {
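The decimal branch accepts only the strict decimal(precision, scale) spelling and everything else falls back to the fixed type list. Illustrative expectations, assuming map_field_type is in scope:

```rust
fn main() {
    // Parametrised decimals are translated to PostgreSQL NUMERIC.
    assert_eq!(map_field_type("decimal(10, 2)").unwrap(), "NUMERIC(10, 2)");
    // Scale larger than precision is rejected, matching the PostgreSQL rule.
    assert!(map_field_type("decimal(2, 4)").is_err());
    // Leading zeros and explicit signs are rejected before parsing.
    assert!(map_field_type("decimal(010, 2)").is_err());
    // Plain names still come from PREDEFINED_FIELD_TYPES.
    assert_eq!(map_field_type("money").unwrap(), "NUMERIC(14, 4)");
}
```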
@@ -52,33 +174,65 @@ fn is_invalid_table_name(table_name: &str) -> bool {
|
|||||||
table_name == "created_at"
|
table_name == "created_at"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn is_reserved_schema(schema_name: &str) -> bool {
|
||||||
|
let lower = schema_name.to_lowercase();
|
||||||
|
lower == "public" ||
|
||||||
|
lower == "information_schema" ||
|
||||||
|
lower.starts_with("pg_")
|
||||||
|
}
|
||||||
|
|
||||||
pub async fn post_table_definition(
|
pub async fn post_table_definition(
|
||||||
db_pool: &PgPool,
|
db_pool: &PgPool,
|
||||||
request: PostTableDefinitionRequest,
|
request: PostTableDefinitionRequest,
|
||||||
) -> Result<TableDefinitionResponse, Status> {
|
) -> Result<TableDefinitionResponse, Status> {
|
||||||
let base_name = sanitize_table_name(&request.table_name);
|
// Create owned copies of the strings after validation
|
||||||
let user_part_cleaned = request.table_name
|
let profile_name = {
|
||||||
.replace(|c: char| !c.is_ascii_alphanumeric() && c != '_', "")
|
let trimmed = request.profile_name.trim();
|
||||||
.trim_matches('_')
|
validate_identifier_format(trimmed, "Profile name")?;
|
||||||
.to_lowercase();
|
trimmed.to_string()
|
||||||
|
};
|
||||||
|
|
||||||
// New validation check
|
// Add validation to prevent reserved schemas
|
||||||
if is_invalid_table_name(&user_part_cleaned) {
|
if is_reserved_schema(&profile_name) {
|
||||||
return Err(Status::invalid_argument(
|
return Err(Status::invalid_argument("Profile name is reserved and cannot be used"));
|
||||||
"Table name cannot be 'id', 'deleted', 'created_at' or end with '_id'"
|
|
||||||
));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if !user_part_cleaned.is_empty() && !is_valid_identifier(&user_part_cleaned) {
|
const MAX_IDENTIFIER_LENGTH: usize = 63;
|
||||||
return Err(Status::invalid_argument("Invalid table name"));
|
|
||||||
} else if user_part_cleaned.is_empty() {
|
if profile_name.len() > MAX_IDENTIFIER_LENGTH {
|
||||||
return Err(Status::invalid_argument("Table name cannot be empty"));
|
return Err(Status::invalid_argument(format!(
|
||||||
|
"Profile name '{}' exceeds the {} character limit.",
|
||||||
|
profile_name,
|
||||||
|
MAX_IDENTIFIER_LENGTH
|
||||||
|
)));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
let table_name = {
|
||||||
|
let trimmed = request.table_name.trim();
|
||||||
|
validate_identifier_format(trimmed, "Table name")?;
|
||||||
|
|
||||||
|
if trimmed.len() > MAX_IDENTIFIER_LENGTH {
|
||||||
|
return Err(Status::invalid_argument(format!(
|
||||||
|
"Table name '{}' exceeds the {} character limit.",
|
||||||
|
trimmed,
|
||||||
|
MAX_IDENTIFIER_LENGTH
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check invalid table names on the original input
|
||||||
|
if is_invalid_table_name(trimmed) {
|
||||||
|
return Err(Status::invalid_argument(
|
||||||
|
"Table name cannot be 'id', 'deleted', 'created_at' or end with '_id'"
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
trimmed.to_string()
|
||||||
|
};
|
||||||
|
|
||||||
let mut tx = db_pool.begin().await
|
let mut tx = db_pool.begin().await
|
||||||
.map_err(|e| Status::internal(format!("Failed to start transaction: {}", e)))?;
|
.map_err(|e| Status::internal(format!("Failed to start transaction: {}", e)))?;
|
||||||
|
|
||||||
match execute_table_definition(&mut tx, request, base_name).await {
|
match execute_table_definition(&mut tx, request, table_name, profile_name).await {
|
||||||
Ok(response) => {
|
Ok(response) => {
|
||||||
tx.commit().await
|
tx.commit().await
|
||||||
.map_err(|e| Status::internal(format!("Failed to commit transaction: {}", e)))?;
|
.map_err(|e| Status::internal(format!("Failed to commit transaction: {}", e)))?;
|
||||||
@@ -95,23 +249,42 @@ async fn execute_table_definition(
|
|||||||
tx: &mut Transaction<'_, Postgres>,
|
tx: &mut Transaction<'_, Postgres>,
|
||||||
mut request: PostTableDefinitionRequest,
|
mut request: PostTableDefinitionRequest,
|
||||||
table_name: String,
|
table_name: String,
|
||||||
|
profile_name: String,
|
||||||
) -> Result<TableDefinitionResponse, Status> {
|
) -> Result<TableDefinitionResponse, Status> {
|
||||||
let profile = sqlx::query!(
|
// Use the validated profile_name for schema insertion
|
||||||
"INSERT INTO profiles (name) VALUES ($1)
|
let schema = sqlx::query!(
|
||||||
|
"INSERT INTO schemas (name) VALUES ($1)
|
||||||
ON CONFLICT (name) DO UPDATE SET name = EXCLUDED.name
|
ON CONFLICT (name) DO UPDATE SET name = EXCLUDED.name
|
||||||
RETURNING id",
|
RETURNING id",
|
||||||
request.profile_name
|
profile_name // Use the validated profile name
|
||||||
)
|
)
|
||||||
.fetch_one(&mut **tx)
|
.fetch_one(&mut **tx)
|
||||||
.await
|
.await
|
||||||
.map_err(|e| Status::internal(format!("Profile error: {}", e)))?;
|
.map_err(|e| Status::internal(format!("Schema error: {}", e)))?;
|
||||||
|
|
||||||
|
// Create PostgreSQL schema if it doesn't exist
|
||||||
|
let create_schema_sql = format!("CREATE SCHEMA IF NOT EXISTS \"{}\"", profile_name);
|
||||||
|
sqlx::query(&create_schema_sql)
|
||||||
|
.execute(&mut **tx)
|
||||||
|
.await
|
||||||
|
.map_err(|e| Status::internal(format!("Schema creation failed: {}", e)))?;
|
||||||
|
|
||||||
let mut links = Vec::new();
|
let mut links = Vec::new();
|
||||||
|
let mut seen_tables = std::collections::HashSet::new();
|
||||||
|
|
||||||
for link in request.links.drain(..) {
|
for link in request.links.drain(..) {
|
||||||
|
// Check for duplicate link
|
||||||
|
if !seen_tables.insert(link.linked_table_name.clone()) {
|
||||||
|
return Err(Status::invalid_argument(format!(
|
||||||
|
"Duplicate link to table '{}'",
|
||||||
|
link.linked_table_name
|
||||||
|
)));
|
||||||
|
}
|
        let linked_table = sqlx::query!(
            "SELECT id FROM table_definitions
-            WHERE profile_id = $1 AND table_name = $2",
+            WHERE schema_id = $1 AND table_name = $2",
-           profile.id,
+           schema.id,
            link.linked_table_name
        )
        .fetch_optional(&mut **tx)
@@ -127,34 +300,40 @@ async fn execute_table_definition(

    let mut columns = Vec::new();
    for col_def in request.columns.drain(..) {
-       let col_name = sanitize_identifier(&col_def.name);
+       let col_name = col_def.name.trim().to_string();
-       if !is_valid_identifier(&col_def.name) {
+       validate_identifier_format(&col_name, "Column name")?;
-           return Err(Status::invalid_argument("Invalid column name"));
+       if col_name.ends_with("_id") || col_name == "id" || col_name == "deleted" || col_name == "created_at" {
+           return Err(Status::invalid_argument(format!(
+               "Column name '{}' cannot be 'id', 'deleted', 'created_at' or end with '_id'",
+               col_name
+           )));
        }

        let sql_type = map_field_type(&col_def.field_type)?;
        columns.push(format!("\"{}\" {}", col_name, sql_type));
    }

    let mut indexes = Vec::new();
    for idx in request.indexes.drain(..) {
-       let idx_name = sanitize_identifier(&idx);
+       let idx_name = idx.trim().to_string();
-       if !is_valid_identifier(&idx) {
+       validate_identifier_format(&idx_name, "Index name")?;
-           return Err(Status::invalid_argument(format!("Invalid index name: {}", idx)));
-       }
        if !columns.iter().any(|c| c.starts_with(&format!("\"{}\"", idx_name))) {
-           return Err(Status::invalid_argument(format!("Index column {} not found", idx_name)));
+           return Err(Status::invalid_argument(format!("Index column '{}' not found", idx_name)));
        }
        indexes.push(idx_name);
    }

-   let (create_sql, index_sql) = generate_table_sql(tx, &table_name, &columns, &indexes, &links).await?;
+   let (create_sql, index_sql) = generate_table_sql(tx, &profile_name, &table_name, &columns, &indexes, &links).await?;

+   // Use schema_id instead of profile_id
    let table_def = sqlx::query!(
        r#"INSERT INTO table_definitions
-           (profile_id, table_name, columns, indexes)
+           (schema_id, table_name, columns, indexes)
        VALUES ($1, $2, $3, $4)
        RETURNING id"#,
-       profile.id,
+       schema.id,
        &table_name,
        json!(columns),
        json!(indexes)
@@ -163,7 +342,8 @@ async fn execute_table_definition(
    .await
    .map_err(|e| {
        if let Some(db_err) = e.as_database_error() {
-           if db_err.constraint() == Some("idx_table_definitions_profile_table") {
+           // Update constraint name to match new schema
+           if db_err.constraint() == Some("idx_table_definitions_schema_table") {
                return Status::already_exists("Table already exists in this profile");
            }
        }
@@ -204,13 +384,13 @@ async fn execute_table_definition(

async fn generate_table_sql(
    tx: &mut Transaction<'_, Postgres>,
+   profile_name: &str,
    table_name: &str,
    columns: &[String],
    indexes: &[String],
    links: &[(i64, bool)],
) -> Result<(String, Vec<String>), Status> {
-   let qualified_table = format!("{}.\"{}\"", GENERATED_SCHEMA_NAME, table_name);
+   let qualified_table = format!("\"{}\".\"{}\"", profile_name, table_name);

    let mut system_columns = vec![
        "id BIGSERIAL PRIMARY KEY".to_string(),
        "deleted BOOLEAN NOT NULL DEFAULT FALSE".to_string(),
@@ -218,16 +398,13 @@ async fn generate_table_sql(

    for (linked_id, required) in links {
        let linked_table = get_table_name_by_id(tx, *linked_id).await?;
-       let qualified_linked_table = format!("{}.\"{}\"", GENERATED_SCHEMA_NAME, linked_table);
+       let qualified_linked_table = format!("\"{}\".\"{}\"", profile_name, linked_table);
-       let base_name = linked_table.split_once('_')
-           .map(|(_, rest)| rest)
-           .unwrap_or(&linked_table)
-           .to_string();
-       let null_clause = if *required { "NOT NULL" } else { "" };

+       // Simply use the full table name - no truncation!
+       let null_clause = if *required { "NOT NULL" } else { "" };
        system_columns.push(
-           format!("\"{0}_id\" BIGINT {1} REFERENCES {2}(id)",
+           format!("\"{}_id\" BIGINT {} REFERENCES {}(id)",
-               base_name, null_clause, qualified_linked_table
+               linked_table, null_clause, qualified_linked_table
            )
        );
    }
@@ -247,13 +424,9 @@ async fn generate_table_sql(
    let mut all_indexes = Vec::new();
    for (linked_id, _) in links {
        let linked_table = get_table_name_by_id(tx, *linked_id).await?;
-       let base_name = linked_table.split_once('_')
-           .map(|(_, rest)| rest)
-           .unwrap_or(&linked_table)
-           .to_string();
        all_indexes.push(format!(
            "CREATE INDEX \"idx_{}_{}_fk\" ON {} (\"{}_id\")",
-           table_name, base_name, qualified_table, base_name
+           table_name, linked_table, qualified_table, linked_table
        ));
    }

@@ -49,7 +49,7 @@ pub async fn post_table_script(
) -> Result<TableScriptResponse, Status> {
    // Fetch the table definition
    let table_def = sqlx::query!(
-       r#"SELECT id, table_name, columns, profile_id
+       r#"SELECT id, table_name, columns, schema_id
        FROM table_definitions WHERE id = $1"#,
        request.table_definition_id
    )
@@ -76,7 +76,7 @@ pub async fn post_table_script(
    let script_record = sqlx::query!(
        r#"INSERT INTO table_scripts
            (table_definitions_id, target_table, target_column,
-            target_column_type, script, description, profile_id)
+            target_column_type, script, description, schema_id)
        VALUES ($1, $2, $3, $4, $5, $6, $7)
        RETURNING id"#,
        request.table_definition_id,
@@ -85,7 +85,7 @@ pub async fn post_table_script(
        column_type,
        parsed_script,
        request.description,
-       table_def.profile_id
+       table_def.schema_id
    )
    .fetch_one(db_pool)
    .await
@@ -20,11 +20,11 @@ pub async fn get_table_structure(
) -> Result<TableStructureResponse, Status> {
    let profile_name = request.profile_name;
    let table_name = request.table_name;
-   let table_schema = "gen";
+   let table_schema = &profile_name;

    // 1. Validate Profile
-   let profile = sqlx::query!(
+   let schema = sqlx::query!(
-       "SELECT id FROM profiles WHERE name = $1",
+       "SELECT id FROM schemas WHERE name = $1",
        profile_name
    )
    .fetch_optional(db_pool)
@@ -36,8 +36,8 @@ pub async fn get_table_structure(
        ))
    })?;

-   let profile_id = match profile {
+   let schema_id = match schema {
-       Some(p) => p.id,
+       Some(s) => s.id,
        None => {
            return Err(Status::not_found(format!(
                "Profile '{}' not found",
@@ -48,8 +48,8 @@ pub async fn get_table_structure(

    // 2. Validate Table within Profile
    sqlx::query!(
-       "SELECT id FROM table_definitions WHERE profile_id = $1 AND table_name = $2",
+       "SELECT id FROM table_definitions WHERE schema_id = $1 AND table_name = $2",
-       profile_id,
+       schema_id,
        table_name
    )
    .fetch_optional(db_pool)
@@ -9,24 +9,24 @@ pub async fn delete_table_data(
    request: DeleteTableDataRequest,
) -> Result<DeleteTableDataResponse, Status> {
    // Lookup profile
-   let profile = sqlx::query!(
+   let schema = sqlx::query!(
-       "SELECT id FROM profiles WHERE name = $1",
+       "SELECT id FROM schemas WHERE name = $1",
        request.profile_name
    )
    .fetch_optional(db_pool)
    .await
    .map_err(|e| Status::internal(format!("Profile lookup error: {}", e)))?;

-   let profile_id = match profile {
+   let schema_id = match schema {
-       Some(p) => p.id,
+       Some(s) => s.id,
        None => return Err(Status::not_found("Profile not found")),
    };

    // Verify table exists in profile
    let table_exists = sqlx::query!(
        "SELECT 1 AS exists FROM table_definitions
-        WHERE profile_id = $1 AND table_name = $2",
+        WHERE schema_id = $1 AND table_name = $2",
-       profile_id,
+       schema_id,
        request.table_name
    )
    .fetch_optional(db_pool)
@@ -38,7 +38,12 @@ pub async fn delete_table_data(
    }

    // Qualify table name with schema
-   let qualified_table = qualify_table_name_for_data(&request.table_name)?;
+   let qualified_table = qualify_table_name_for_data(
+       db_pool,
+       &request.profile_name,
+       &request.table_name,
+   )
+   .await?;

    // Perform soft delete using qualified table name
    let query = format!(
@@ -1,9 +1,10 @@
// src/tables_data/handlers/get_table_data.rs

use tonic::Status;
use sqlx::{PgPool, Row};
use std::collections::HashMap;
use common::proto::multieko2::tables_data::{GetTableDataRequest, GetTableDataResponse};
-use crate::shared::schema_qualifier::qualify_table_name_for_data; // Import schema qualifier
+use crate::shared::schema_qualifier::qualify_table_name_for_data;

pub async fn get_table_data(
    db_pool: &PgPool,
@@ -14,21 +15,21 @@ pub async fn get_table_data(
    let record_id = request.id;

    // Lookup profile
-   let profile = sqlx::query!(
+   let schema = sqlx::query!(
-       "SELECT id FROM profiles WHERE name = $1",
+       "SELECT id FROM schemas WHERE name = $1",
        profile_name
    )
    .fetch_optional(db_pool)
    .await
    .map_err(|e| Status::internal(format!("Profile lookup error: {}", e)))?;

-   let profile_id = profile.ok_or_else(|| Status::not_found("Profile not found"))?.id;
+   let schema_id = schema.ok_or_else(|| Status::not_found("Profile not found"))?.id;

    // Lookup table_definition
    let table_def = sqlx::query!(
        r#"SELECT id, columns FROM table_definitions
-        WHERE profile_id = $1 AND table_name = $2"#,
+        WHERE schema_id = $1 AND table_name = $2"#,
-       profile_id,
+       schema_id,
        table_name
    )
    .fetch_optional(db_pool)
@@ -48,29 +49,51 @@ pub async fn get_table_data(
            return Err(Status::internal("Invalid column format"));
        }
        let name = parts[0].trim_matches('"').to_string();
-       let sql_type = parts[1].to_string();
+       user_columns.push(name);
-       user_columns.push((name, sql_type));
    }

-   // Prepare all columns (system + user-defined)
+   // --- START OF FIX ---
-   let system_columns = vec![
-       ("id".to_string(), "BIGINT".to_string()),
-       ("deleted".to_string(), "BOOLEAN".to_string()),
-   ];
-   let all_columns: Vec<(String, String)> = system_columns
-       .into_iter()
-       .chain(user_columns.into_iter())
-       .collect();

-   // Build SELECT clause with COALESCE and type casting
+   // 1. Get all foreign key columns for this table
-   let columns_clause = all_columns
+   let fk_columns_query = sqlx::query!(
+       r#"SELECT ltd.table_name
+       FROM table_definition_links tdl
+       JOIN table_definitions ltd ON tdl.linked_table_id = ltd.id
+       WHERE tdl.source_table_id = $1"#,
+       table_def.id
+   )
+   .fetch_all(db_pool)
+   .await
+   .map_err(|e| Status::internal(format!("Foreign key lookup error: {}", e)))?;
+
+   // 2. Build the list of foreign key column names using full table names
+   let mut foreign_key_columns = Vec::new();
+   for fk in fk_columns_query {
+       // Use the full table name, not a stripped version
+       foreign_key_columns.push(format!("{}_id", fk.table_name));
+   }
+
+   // 3. Prepare a complete list of all columns to select
+   let mut all_column_names = vec!["id".to_string(), "deleted".to_string()];
+   all_column_names.extend(user_columns);
+   all_column_names.extend(foreign_key_columns);
+
+   // 4. Build the SELECT clause with all columns
+   let columns_clause = all_column_names
        .iter()
-       .map(|(name, _)| format!("COALESCE(\"{0}\"::TEXT, '') AS \"{0}\"", name))
+       .map(|name| format!("COALESCE(\"{0}\"::TEXT, '') AS \"{0}\"", name))
        .collect::<Vec<_>>()
        .join(", ");

+   // --- END OF FIX ---
+
    // Qualify table name with schema
-   let qualified_table = qualify_table_name_for_data(&table_name)?;
+   let qualified_table = qualify_table_name_for_data(
+       db_pool,
+       &profile_name,
+       &table_name,
+   )
+   .await?;

    let sql = format!(
        "SELECT {} FROM {} WHERE id = $1 AND deleted = false",
@@ -87,7 +110,6 @@ pub async fn get_table_data(
        Ok(row) => row,
        Err(sqlx::Error::RowNotFound) => return Err(Status::not_found("Record not found")),
        Err(e) => {
-           // Handle "relation does not exist" error specifically
            if let Some(db_err) = e.as_database_error() {
                if db_err.code() == Some(std::borrow::Cow::Borrowed("42P01")) {
                    return Err(Status::internal(format!(
@@ -100,9 +122,9 @@ pub async fn get_table_data(
        }
    };

-   // Build response data
+   // Build response data from the complete list of columns
    let mut data = HashMap::new();
-   for (column_name, _) in &all_columns {
+   for column_name in &all_column_names {
        let value: String = row
            .try_get(column_name.as_str())
            .map_err(|e| Status::internal(format!("Failed to get column {}: {}", column_name, e)))?;
@@ -18,22 +18,22 @@ pub async fn get_table_data_by_position(
        return Err(Status::invalid_argument("Position must be at least 1"));
    }

-   let profile = sqlx::query!(
+   let schema = sqlx::query!(
-       "SELECT id FROM profiles WHERE name = $1",
+       "SELECT id FROM schemas WHERE name = $1",
        profile_name
    )
    .fetch_optional(db_pool)
    .await
    .map_err(|e| Status::internal(format!("Profile lookup error: {}", e)))?;

-   let profile_id = profile.ok_or_else(|| Status::not_found("Profile not found"))?.id;
+   let schema_id = schema.ok_or_else(|| Status::not_found("Profile not found"))?.id;

    let table_exists = sqlx::query_scalar!(
        r#"SELECT EXISTS(
            SELECT 1 FROM table_definitions
-           WHERE profile_id = $1 AND table_name = $2
+           WHERE schema_id = $1 AND table_name = $2
        ) AS "exists!""#,
-       profile_id,
+       schema_id,
        table_name
    )
    .fetch_one(db_pool)
@@ -45,7 +45,12 @@ pub async fn get_table_data_by_position(
    }

    // Qualify table name with schema
-   let qualified_table = qualify_table_name_for_data(&table_name)?;
+   let qualified_table = qualify_table_name_for_data(
+       db_pool,
+       &profile_name,
+       &table_name,
+   )
+   .await?;

    let id_result = sqlx::query_scalar(
        &format!(
@@ -12,15 +12,15 @@ pub async fn get_table_data_count(
    // We still need to verify that the table is logically defined for the profile.
    // The schema qualifier handles *how* to access it physically, but this check
    // ensures the request is valid in the context of the application's definitions.
-   let profile = sqlx::query!(
+   let schema = sqlx::query!(
-       "SELECT id FROM profiles WHERE name = $1",
+       "SELECT id FROM schemas WHERE name = $1",
        request.profile_name
    )
    .fetch_optional(db_pool)
    .await
    .map_err(|e| Status::internal(format!("Profile lookup error for '{}': {}", request.profile_name, e)))?;

-   let profile_id = match profile {
+   let schema_id = match schema {
        Some(p) => p.id,
        None => return Err(Status::not_found(format!("Profile '{}' not found", request.profile_name))),
    };
@@ -28,9 +28,9 @@ pub async fn get_table_data_count(
    let table_defined_for_profile = sqlx::query_scalar!(
        r#"SELECT EXISTS(
            SELECT 1 FROM table_definitions
-           WHERE profile_id = $1 AND table_name = $2
+           WHERE schema_id = $1 AND table_name = $2
-       ) AS "exists!" "#, // Added AS "exists!" for clarity with sqlx macro
+       ) AS "exists!" "#,
-       profile_id,
+       schema_id,
        request.table_name
    )
    .fetch_one(db_pool)
@@ -47,7 +47,12 @@ pub async fn get_table_data_count(
    }

    // 2. QUALIFY THE TABLE NAME using the imported function
-   let qualified_table_name = qualify_table_name_for_data(&request.table_name)?;
+   let qualified_table = qualify_table_name_for_data(
+       db_pool,
+       &request.profile_name,
+       &request.table_name,
+   )
+   .await?;

    // 3. USE THE QUALIFIED NAME in the SQL query
    let query_sql = format!(
@@ -56,7 +61,7 @@ pub async fn get_table_data_count(
        FROM {}
        WHERE deleted = FALSE
        "#,
-       qualified_table_name // Use the schema-qualified name here
+       qualified_table
    );

    // The rest of the logic remains largely the same, but error messages can be more specific.
@@ -81,14 +86,14 @@ pub async fn get_table_data_count(
                // even though it was defined in table_definitions. This is an inconsistency.
                return Err(Status::internal(format!(
                    "Table '{}' is defined but does not physically exist in the database as {}.",
-                   request.table_name, qualified_table_name
+                   request.table_name, qualified_table
                )));
            }
        }
        // For other errors, provide a general message.
        Err(Status::internal(format!(
            "Count query failed for table {}: {}",
-           qualified_table_name, e
+           qualified_table, e
        )))
    }
}
@@ -7,17 +7,17 @@ use chrono::{DateTime, Utc};
use common::proto::multieko2::tables_data::{PostTableDataRequest, PostTableDataResponse};
use std::collections::HashMap;
use std::sync::Arc;
-use crate::shared::schema_qualifier::qualify_table_name_for_data;
+use prost_types::value::Kind;
+use rust_decimal::Decimal;
+use std::str::FromStr;

use crate::steel::server::execution::{self, Value};
use crate::steel::server::functions::SteelContext;

-// Add these imports
use crate::indexer::{IndexCommand, IndexCommandData};
use tokio::sync::mpsc;
use tracing::error;

-// MODIFIED: Function signature now accepts the indexer sender
pub async fn post_table_data(
    db_pool: &PgPool,
    request: PostTableDataRequest,
@@ -25,28 +25,21 @@ pub async fn post_table_data(
) -> Result<PostTableDataResponse, Status> {
    let profile_name = request.profile_name;
    let table_name = request.table_name;
-   let mut data = HashMap::new();

-   for (key, value) in request.data {
+   let schema = sqlx::query!(
-       data.insert(key, value.trim().to_string());
+       "SELECT id FROM schemas WHERE name = $1",
-   }
-
-   // Lookup profile
-   let profile = sqlx::query!(
-       "SELECT id FROM profiles WHERE name = $1",
        profile_name
    )
    .fetch_optional(db_pool)
    .await
    .map_err(|e| Status::internal(format!("Profile lookup error: {}", e)))?;

-   let profile_id = profile.ok_or_else(|| Status::not_found("Profile not found"))?.id;
+   let schema_id = schema.ok_or_else(|| Status::not_found("Profile not found"))?.id;

-   // Lookup table_definition
    let table_def = sqlx::query!(
        r#"SELECT id, columns FROM table_definitions
-        WHERE profile_id = $1 AND table_name = $2"#,
+        WHERE schema_id = $1 AND table_name = $2"#,
-       profile_id,
+       schema_id,
        table_name
    )
    .fetch_optional(db_pool)
@@ -55,7 +48,6 @@ pub async fn post_table_data(

    let table_def = table_def.ok_or_else(|| Status::not_found("Table not found"))?;

-   // Parse columns from JSON
    let columns_json: Vec<String> = serde_json::from_value(table_def.columns.clone())
        .map_err(|e| Status::internal(format!("Column parsing error: {}", e)))?;

@@ -70,7 +62,6 @@ pub async fn post_table_data(
        columns.push((name, sql_type));
    }

-   // Get all foreign key columns for this table
    let fk_columns = sqlx::query!(
        r#"SELECT ltd.table_name
        FROM table_definition_links tdl
@@ -82,26 +73,41 @@ pub async fn post_table_data(
    .await
    .map_err(|e| Status::internal(format!("Foreign key lookup error: {}", e)))?;

-   // Build system columns with foreign keys
    let mut system_columns = vec!["deleted".to_string()];
    for fk in fk_columns {
-       let base_name = fk.table_name.split('_').last().unwrap_or(&fk.table_name);
+       system_columns.push(format!("{}_id", fk.table_name));
-       system_columns.push(format!("{}_id", base_name));
    }

-   // Convert to HashSet for faster lookups
    let system_columns_set: std::collections::HashSet<_> = system_columns.iter().map(|s| s.as_str()).collect();

-   // Validate all data columns
    let user_columns: Vec<&String> = columns.iter().map(|(name, _)| name).collect();
-   for key in data.keys() {
+   for key in request.data.keys() {
        if !system_columns_set.contains(key.as_str()) &&
            !user_columns.contains(&&key.to_string()) {
            return Err(Status::invalid_argument(format!("Invalid column: {}", key)));
        }
    }

-   // Validate Steel scripts
+   let mut string_data_for_scripts = HashMap::new();
+   for (key, proto_value) in &request.data {
+       let str_val = match &proto_value.kind {
+           Some(Kind::StringValue(s)) => {
+               let trimmed = s.trim();
+               if trimmed.is_empty() {
+                   continue;
+               }
+               trimmed.to_string()
+           },
+           Some(Kind::NumberValue(n)) => n.to_string(),
+           Some(Kind::BoolValue(b)) => b.to_string(),
+           Some(Kind::NullValue(_)) | None => continue,
+           Some(Kind::StructValue(_)) | Some(Kind::ListValue(_)) => {
+               return Err(Status::invalid_argument(format!("Unsupported type for script validation in column '{}'", key)));
+           }
+       };
+       string_data_for_scripts.insert(key.clone(), str_val);
+   }

    let scripts = sqlx::query!(
        "SELECT target_column, script FROM table_scripts WHERE table_definitions_id = $1",
        table_def.id
@@ -113,21 +119,19 @@ pub async fn post_table_data(
    for script_record in scripts {
        let target_column = script_record.target_column;

-       // Ensure target column exists in submitted data
+       let user_value = string_data_for_scripts.get(&target_column)
-       let user_value = data.get(&target_column)
            .ok_or_else(|| Status::invalid_argument(
                format!("Script target column '{}' is required", target_column)
            ))?;

-       // Create execution context
        let context = SteelContext {
-           current_table: table_name.clone(), // Keep base name for scripts
+           current_table: table_name.clone(),
-           profile_id,
+           schema_id,
-           row_data: data.clone(),
+           schema_name: profile_name.clone(),
+           row_data: string_data_for_scripts.clone(),
            db_pool: Arc::new(db_pool.clone()),
        };

-       // Execute validation script
        let script_result = execution::execute_script(
            script_record.script,
            "STRINGS",
@@ -138,7 +142,6 @@ pub async fn post_table_data(
            format!("Script execution failed for '{}': {}", target_column, e)
        ))?;

-       // Validate script output
        let Value::Strings(mut script_output) = script_result else {
            return Err(Status::internal("Script must return string values"));
        };
@@ -154,17 +157,16 @@ pub async fn post_table_data(
        }
    }

-   // Prepare SQL parameters
    let mut params = PgArguments::default();
    let mut columns_list = Vec::new();
    let mut placeholders = Vec::new();
    let mut param_idx = 1;

-   for (col, value) in data {
+   for (col, proto_value) in request.data {
        let sql_type = if system_columns_set.contains(col.as_str()) {
            match col.as_str() {
                "deleted" => "BOOLEAN",
-               _ if col.ends_with("_id") => "BIGINT", // Handle foreign keys
+               _ if col.ends_with("_id") => "BIGINT",
                _ => return Err(Status::invalid_argument("Invalid system column")),
            }
        } else {
@@ -174,38 +176,122 @@ pub async fn post_table_data(
                .ok_or_else(|| Status::invalid_argument(format!("Column not found: {}", col)))?
        };

-       match sql_type {
+       let kind = match &proto_value.kind {
-           "TEXT" | "VARCHAR(15)" | "VARCHAR(255)" => {
+           None | Some(Kind::NullValue(_)) => {
-               if let Some(max_len) = sql_type.strip_prefix("VARCHAR("
+               match sql_type {
-                   .and_then(|s| s.strip_suffix(')'))
+                   "BOOLEAN" => params.add(None::<bool>),
-                   .and_then(|s| s.parse::<usize>().ok())
+                   "TEXT" => params.add(None::<String>),
-               {
+                   "TIMESTAMPTZ" => params.add(None::<DateTime<Utc>>),
-                   if value.len() > max_len {
+                   "BIGINT" => params.add(None::<i64>),
+                   "INTEGER" => params.add(None::<i32>),
+                   s if s.starts_with("NUMERIC") => params.add(None::<Decimal>),
+                   _ => return Err(Status::invalid_argument(format!("Unsupported type for null value: {}", sql_type))),
+               }.map_err(|e| Status::internal(format!("Failed to add null parameter for {}: {}", col, e)))?;
+
+               columns_list.push(format!("\"{}\"", col));
+               placeholders.push(format!("${}", param_idx));
+               param_idx += 1;
+               continue;
+           }
+           Some(k) => k,
+       };
+
+       if sql_type == "TEXT" {
+           if let Kind::StringValue(value) = kind {
+               let trimmed_value = value.trim();
+
+               if trimmed_value.is_empty() {
+                   params.add(None::<String>).map_err(|e| Status::internal(format!("Failed to add null parameter for {}: {}", col, e)))?;
+               } else {
+                   if col == "telefon" && trimmed_value.len() > 15 {
                        return Err(Status::internal(format!("Value too long for {}", col)));
                    }
+                   params.add(trimmed_value).map_err(|e| Status::invalid_argument(format!("Failed to add text parameter for {}: {}", col, e)))?;
                }
-               params.add(value)
+           } else {
-                   .map_err(|e| Status::invalid_argument(format!("Failed to add text parameter for {}: {}", col, e)))?;
+               return Err(Status::invalid_argument(format!("Expected string for column '{}'", col)));
-           },
+           }
-           "BOOLEAN" => {
+       } else if sql_type == "BOOLEAN" {
-               let val = value.parse::<bool>()
+           if let Kind::BoolValue(val) = kind {
-                   .map_err(|_| Status::invalid_argument(format!("Invalid boolean for {}", col)))?;
+               params.add(val).map_err(|e| Status::invalid_argument(format!("Failed to add boolean parameter for {}: {}", col, e)))?;
-               params.add(val)
+           } else {
-                   .map_err(|e| Status::invalid_argument(format!("Failed to add boolean parameter for {}: {}", col, e)))?;
+               return Err(Status::invalid_argument(format!("Expected boolean for column '{}'", col)));
-           },
+           }
-           "TIMESTAMPTZ" => {
+       } else if sql_type == "TIMESTAMPTZ" {
-               let dt = DateTime::parse_from_rfc3339(&value)
+           if let Kind::StringValue(value) = kind {
-                   .map_err(|_| Status::invalid_argument(format!("Invalid timestamp for {}", col)))?;
+               let dt = DateTime::parse_from_rfc3339(value).map_err(|_| Status::invalid_argument(format!("Invalid timestamp for {}", col)))?;
-               params.add(dt.with_timezone(&Utc))
+               params.add(dt.with_timezone(&Utc)).map_err(|e| Status::invalid_argument(format!("Failed to add timestamp parameter for {}: {}", col, e)))?;
-                   .map_err(|e| Status::invalid_argument(format!("Failed to add timestamp parameter for {}: {}", col, e)))?;
+           } else {
-           },
+               return Err(Status::invalid_argument(format!("Expected ISO 8601 string for column '{}'", col)));
-           "BIGINT" => {
+           }
-               let val = value.parse::<i64>()
+       } else if sql_type == "BIGINT" {
-                   .map_err(|_| Status::invalid_argument(format!("Invalid integer for {}", col)))?;
+           if let Kind::NumberValue(val) = kind {
-               params.add(val)
+               if val.fract() != 0.0 {
-                   .map_err(|e| Status::invalid_argument(format!("Failed to add integer parameter for {}: {}", col, e)))?;
+                   return Err(Status::invalid_argument(format!("Expected integer for column '{}', but got a float", col)));
-           },
+               }
-           _ => return Err(Status::invalid_argument(format!("Unsupported type {}", sql_type))),
+
+               // Simple universal check: try the conversion and verify it's reversible
+               // This handles ALL edge cases: infinity, NaN, overflow, underflow, precision loss
+               let as_i64 = *val as i64;
+               if (as_i64 as f64) != *val {
+                   return Err(Status::invalid_argument(format!("Integer value out of range for BIGINT column '{}'", col)));
+               }
+
+               params.add(as_i64).map_err(|e| Status::invalid_argument(format!("Failed to add bigint parameter for {}: {}", col, e)))?;
+           } else {
+               return Err(Status::invalid_argument(format!("Expected number for column '{}'", col)));
+           }
+       } else if sql_type == "INTEGER" {
+           if let Kind::NumberValue(val) = kind {
+               if val.fract() != 0.0 {
+                   return Err(Status::invalid_argument(format!("Expected integer for column '{}', but got a float", col)));
+               }
+
+               // Simple universal check: try the conversion and verify it's reversible
+               // This handles ALL edge cases: infinity, NaN, overflow, underflow, precision loss
+               let as_i32 = *val as i32;
+               if (as_i32 as f64) != *val {
+                   return Err(Status::invalid_argument(format!("Integer value out of range for INTEGER column '{}'", col)));
+               }
+
+               params.add(as_i32).map_err(|e| Status::invalid_argument(format!("Failed to add integer parameter for {}: {}", col, e)))?;
+           } else {
+               return Err(Status::invalid_argument(format!("Expected number for column '{}'", col)));
+           }
+       } else if sql_type.starts_with("NUMERIC") {
+           // MODIFIED: This block is now stricter.
+           let decimal_val = match kind {
+               Kind::StringValue(s) => {
+                   let trimmed = s.trim();
+                   if trimmed.is_empty() {
+                       None // Treat empty string as NULL
+                   } else {
+                       // This is the only valid path: parse from a string.
+                       Some(Decimal::from_str(trimmed).map_err(|_| {
+                           Status::invalid_argument(format!(
+                               "Invalid decimal string format for column '{}': {}",
+                               col, s
+                           ))
+                       })?)
+                   }
+               }
+               // CATCH-ALL: Reject NumberValue, BoolValue, etc. for NUMERIC fields.
+               _ => {
+                   return Err(Status::invalid_argument(format!(
+                       "Expected a string representation for decimal column '{}', but received a different type.",
+                       col
+                   )));
+               }
+           };
+
+           params.add(decimal_val).map_err(|e| {
+               Status::invalid_argument(format!(
+                   "Failed to add decimal parameter for {}: {}",
+                   col, e
+               ))
+           })?;
+       } else {
+           return Err(Status::invalid_argument(format!("Unsupported type {}", sql_type)));
        }

        columns_list.push(format!("\"{}\"", col));
@@ -217,8 +303,12 @@ pub async fn post_table_data(
        return Err(Status::invalid_argument("No valid columns to insert"));
    }

-   // Qualify table name with schema
-   let qualified_table = qualify_table_name_for_data(&table_name)?;
+   let qualified_table = crate::shared::schema_qualifier::qualify_table_name_for_data(
+       db_pool,
+       &profile_name,
+       &table_name,
+   )
+   .await?;

    let sql = format!(
        "INSERT INTO {} ({}) VALUES ({}) RETURNING id",
@@ -227,7 +317,6 @@ pub async fn post_table_data(
        placeholders.join(", ")
    );

-   // Execute query with enhanced error handling
    let result = sqlx::query_scalar_with::<_, i64, _>(&sql, params)
        .fetch_one(db_pool)
        .await;
@@ -235,8 +324,13 @@ pub async fn post_table_data(
    let inserted_id = match result {
        Ok(id) => id,
        Err(e) => {
-           // Handle "relation does not exist" error specifically
            if let Some(db_err) = e.as_database_error() {
+               if db_err.code() == Some(std::borrow::Cow::Borrowed("22P02")) ||
+                   db_err.code() == Some(std::borrow::Cow::Borrowed("22003")) {
+                   return Err(Status::invalid_argument(format!(
+                       "Numeric field overflow or invalid format. Check precision and scale. Details: {}", db_err.message()
+                   )));
+               }
                if db_err.code() == Some(std::borrow::Cow::Borrowed("42P01")) {
                    return Err(Status::internal(format!(
                        "Table '{}' is defined but does not physically exist in the database as {}",
@@ -248,15 +342,12 @@ pub async fn post_table_data(
        }
    };

-   // After a successful insert, send a command to the indexer.
    let command = IndexCommand::AddOrUpdate(IndexCommandData {
        table_name: table_name.clone(),
        row_id: inserted_id,
    });

    if let Err(e) = indexer_tx.send(command).await {
-       // If sending fails, the DB is updated but the index will be stale.
-       // This is a critical situation to log and monitor.
        error!(
            "CRITICAL: DB insert for table '{}' (id: {}) succeeded but failed to queue for indexing: {}. Search index is now inconsistent.",
            table_name, inserted_id, e
@@ -1,52 +1,56 @@
// src/tables_data/handlers/put_table_data.rs

use tonic::Status;
-use sqlx::{PgPool, Arguments, Postgres};
+use sqlx::{PgPool, Arguments};
use sqlx::postgres::PgArguments;
use chrono::{DateTime, Utc};
use common::proto::multieko2::tables_data::{PutTableDataRequest, PutTableDataResponse};
use std::collections::HashMap;
-use crate::shared::schema_qualifier::qualify_table_name_for_data; // Import schema qualifier
+use std::sync::Arc;
+use prost_types::value::Kind;
+use rust_decimal::Decimal;
+use std::str::FromStr;
+
+use crate::steel::server::execution::{self, Value};
+use crate::steel::server::functions::SteelContext;
+use crate::indexer::{IndexCommand, IndexCommandData};
+use tokio::sync::mpsc;
+use tracing::error;

pub async fn put_table_data(
    db_pool: &PgPool,
    request: PutTableDataRequest,
+   indexer_tx: &mpsc::Sender<IndexCommand>,
) -> Result<PutTableDataResponse, Status> {
    let profile_name = request.profile_name;
    let table_name = request.table_name;
    let record_id = request.id;

-   // Preprocess and validate data
+   // An update with no fields is a no-op; we can return success early.
-   let mut processed_data = HashMap::new();
+   if request.data.is_empty() {
-   let mut null_fields = Vec::new();
+       return Ok(PutTableDataResponse {
+           success: true,
-   // CORRECTED: Generic handling for all fields.
+           message: "No fields to update.".into(),
-   // Any field with an empty string will be added to the null_fields list.
+           updated_id: record_id,
-   // The special, hardcoded logic for "firma" has been removed.
+       });
-   for (key, value) in request.data {
-       let trimmed = value.trim().to_string();
-       if trimmed.is_empty() {
-           null_fields.push(key);
-       } else {
-           processed_data.insert(key, trimmed);
-       }
    }

-   // Lookup profile
+   // --- Start of logic copied and adapted from post_table_data ---
-   let profile = sqlx::query!(
-       "SELECT id FROM profiles WHERE name = $1",
+   let schema = sqlx::query!(
+       "SELECT id FROM schemas WHERE name = $1",
        profile_name
    )
    .fetch_optional(db_pool)
    .await
    .map_err(|e| Status::internal(format!("Profile lookup error: {}", e)))?;

-   let profile_id = profile.ok_or_else(|| Status::not_found("Profile not found"))?.id;
+   let schema_id = schema.ok_or_else(|| Status::not_found("Profile not found"))?.id;

-   // Lookup table_definition
    let table_def = sqlx::query!(
        r#"SELECT id, columns FROM table_definitions
-        WHERE profile_id = $1 AND table_name = $2"#,
+        WHERE schema_id = $1 AND table_name = $2"#,
-       profile_id,
+       schema_id,
        table_name
    )
    .fetch_optional(db_pool)
@@ -55,7 +59,6 @@ pub async fn put_table_data(

    let table_def = table_def.ok_or_else(|| Status::not_found("Table not found"))?;

-   // Parse columns from JSON
    let columns_json: Vec<String> = serde_json::from_value(table_def.columns.clone())
        .map_err(|e| Status::internal(format!("Column parsing error: {}", e)))?;

@@ -70,130 +73,287 @@ pub async fn put_table_data(
        columns.push((name, sql_type));
    }

-   // CORRECTED: "firma" is not a system column.
+   let fk_columns = sqlx::query!(
-   // It should be treated as a user-defined column.
+       r#"SELECT ltd.table_name
-   let system_columns = ["deleted"];
+       FROM table_definition_links tdl
-   let user_columns: Vec<&String> = columns.iter().map(|(name, _)| name).collect();
+       JOIN table_definitions ltd ON tdl.linked_table_id = ltd.id
+       WHERE tdl.source_table_id = $1"#,
+       table_def.id
+   )
+   .fetch_all(db_pool)
+   .await
+   .map_err(|e| Status::internal(format!("Foreign key lookup error: {}", e)))?;

-   // Validate input columns
+   let mut system_columns = vec!["deleted".to_string()];
-   for key in processed_data.keys() {
+   for fk in fk_columns {
-       if !system_columns.contains(&key.as_str()) && !user_columns.contains(&key) {
+       system_columns.push(format!("{}_id", fk.table_name));
+   }
+
+   let system_columns_set: std::collections::HashSet<_> = system_columns.iter().map(|s| s.as_str()).collect();
+
+   let user_columns: Vec<&String> = columns.iter().map(|(name, _)| name).collect();
+   for key in request.data.keys() {
+       if !system_columns_set.contains(key.as_str()) &&
+           !user_columns.contains(&&key.to_string()) {
            return Err(Status::invalid_argument(format!("Invalid column: {}", key)));
        }
    }

-   // Prepare SQL parameters
+   let mut string_data_for_scripts = HashMap::new();
+   for (key, proto_value) in &request.data {
+       let str_val = match &proto_value.kind {
+           Some(Kind::StringValue(s)) => {
+               let trimmed = s.trim();
+               if trimmed.is_empty() {
+                   continue;
+               }
+               trimmed.to_string()
+           },
+           Some(Kind::NumberValue(n)) => n.to_string(),
+           Some(Kind::BoolValue(b)) => b.to_string(),
+           Some(Kind::NullValue(_)) | None => continue,
+           Some(Kind::StructValue(_)) | Some(Kind::ListValue(_)) => {
+               return Err(Status::invalid_argument(format!("Unsupported type for script validation in column '{}'", key)));
+           }
+       };
+       string_data_for_scripts.insert(key.clone(), str_val);
+   }
+
+   let scripts = sqlx::query!(
+       "SELECT target_column, script FROM table_scripts WHERE table_definitions_id = $1",
+       table_def.id
+   )
+   .fetch_all(db_pool)
+   .await
+   .map_err(|e| Status::internal(format!("Failed to fetch scripts: {}", e)))?;
+
+   for script_record in scripts {
+       let target_column = script_record.target_column;
+
+       if let Some(user_value) = string_data_for_scripts.get(&target_column) {
+           let context = SteelContext {
+               current_table: table_name.clone(),
+               schema_id,
+               schema_name: profile_name.clone(),
+               row_data: string_data_for_scripts.clone(),
+               db_pool: Arc::new(db_pool.clone()),
+           };
+
+           let script_result = execution::execute_script(
+               script_record.script,
+               "STRINGS",
+               Arc::new(db_pool.clone()),
+               context,
+           )
+           .map_err(|e| Status::invalid_argument(
+               format!("Script execution failed for '{}': {}", target_column, e)
+           ))?;
+
+           let Value::Strings(mut script_output) = script_result else {
+               return Err(Status::internal("Script must return string values"));
+           };
+
+           let expected_value = script_output.pop()
+               .ok_or_else(|| Status::internal("Script returned no values"))?;
+
+           if user_value != &expected_value {
+               return Err(Status::invalid_argument(format!(
+                   "Validation failed for column '{}': Expected '{}', Got '{}'",
+                   target_column, expected_value, user_value
+               )));
+           }
+       }
+   }

    let mut params = PgArguments::default();
    let mut set_clauses = Vec::new();
    let mut param_idx = 1;

-   // Add data parameters for non-empty fields
+   for (col, proto_value) in request.data {
-   for (col, value) in &processed_data {
+       let sql_type = if system_columns_set.contains(col.as_str()) {
-       // CORRECTED: The logic for "firma" is removed from this match.
-       // It will now fall through to the `else` block and have its type
-       // correctly looked up from the `columns` vector.
-       let sql_type = if system_columns.contains(&col.as_str()) {
            match col.as_str() {
                "deleted" => "BOOLEAN",
+               _ if col.ends_with("_id") => "BIGINT",
                _ => return Err(Status::invalid_argument("Invalid system column")),
            }
        } else {
            columns.iter()
-               .find(|(name, _)| name == col)
+               .find(|(name, _)| name == &col)
                .map(|(_, sql_type)| sql_type.as_str())
                .ok_or_else(|| Status::invalid_argument(format!("Column not found: {}", col)))?
        };

-       match sql_type {
+       let kind = match &proto_value.kind {
-           "TEXT" | "VARCHAR(15)" | "VARCHAR(255)" => {
+           None | Some(Kind::NullValue(_)) => {
-               if let Some(max_len) = sql_type.strip_prefix("VARCHAR("
+               match sql_type {
-                   .and_then(|s| s.strip_suffix(')'))
+                   "BOOLEAN" => params.add(None::<bool>),
-                   .and_then(|s| s.parse::<usize>().ok())
+                   "TEXT" => params.add(None::<String>),
-               {
+                   "TIMESTAMPTZ" => params.add(None::<DateTime<Utc>>),
-                   if value.len() > max_len {
+                   "BIGINT" => params.add(None::<i64>),
+                   "INTEGER" => params.add(None::<i32>),
+                   s if s.starts_with("NUMERIC") => params.add(None::<Decimal>),
+                   _ => return Err(Status::invalid_argument(format!("Unsupported type for null value: {}", sql_type))),
+               }.map_err(|e| Status::internal(format!("Failed to add null parameter for {}: {}", col, e)))?;
+
+               set_clauses.push(format!("\"{}\" = ${}", col, param_idx));
+               param_idx += 1;
+               continue;
+           }
+           Some(k) => k,
+       };
+
+       if sql_type == "TEXT" {
+           if let Kind::StringValue(value) = kind {
+               let trimmed_value = value.trim();
+
+               if trimmed_value.is_empty() {
+                   params.add(None::<String>).map_err(|e| Status::internal(format!("Failed to add null parameter for {}: {}", col, e)))?;
+               } else {
+                   if col == "telefon" && trimmed_value.len() > 15 {
                        return Err(Status::internal(format!("Value too long for {}", col)));
                    }
+                   params.add(trimmed_value).map_err(|e| Status::invalid_argument(format!("Failed to add text parameter for {}: {}", col, e)))?;
                }
-               params.add(value)
+           } else {
-                   .map_err(|e| Status::internal(format!("Failed to add text parameter for {}: {}", col, e)))?;
+               return Err(Status::invalid_argument(format!("Expected string for column '{}'", col)));
-           },
+           }
-           "BOOLEAN" => {
+       } else if sql_type == "BOOLEAN" {
-               let val = value.parse::<bool>()
+           if let Kind::BoolValue(val) = kind {
-                   .map_err(|_| Status::invalid_argument(format!("Invalid boolean for {}", col)))?;
+               params.add(val).map_err(|e| Status::invalid_argument(format!("Failed to add boolean parameter for {}: {}", col, e)))?;
-               params.add(val)
+           } else {
-                   .map_err(|e| Status::internal(format!("Failed to add boolean parameter for {}: {}", col, e)))?;
+               return Err(Status::invalid_argument(format!("Expected boolean for column '{}'", col)));
-           },
+           }
-           "TIMESTAMPTZ" => {
+       } else if sql_type == "TIMESTAMPTZ" {
-               let dt = DateTime::parse_from_rfc3339(value)
+           if let Kind::StringValue(value) = kind {
-                   .map_err(|_| Status::invalid_argument(format!("Invalid timestamp for {}", col)))?;
+               let dt = DateTime::parse_from_rfc3339(value).map_err(|_| Status::invalid_argument(format!("Invalid timestamp for {}", col)))?;
-               params.add(dt.with_timezone(&Utc))
+               params.add(dt.with_timezone(&Utc)).map_err(|e| Status::invalid_argument(format!("Failed to add timestamp parameter for {}: {}", col, e)))?;
-                   .map_err(|e| Status::internal(format!("Failed to add timestamp parameter for {}: {}", col, e)))?;
+           } else {
-           },
+               return Err(Status::invalid_argument(format!("Expected ISO 8601 string for column '{}'", col)));
-           // ADDED: BIGINT handling for completeness, if needed for other columns.
+           }
-           "BIGINT" => {
+       } else if sql_type == "BIGINT" {
-               let val = value.parse::<i64>()
+           if let Kind::NumberValue(val) = kind {
-                   .map_err(|_| Status::invalid_argument(format!("Invalid integer for {}", col)))?;
+               if val.fract() != 0.0 {
-               params.add(val)
+                   return Err(Status::invalid_argument(format!("Expected integer for column '{}', but got a float", col)));
-                   .map_err(|e| Status::internal(format!("Failed to add integer parameter for {}: {}", col, e)))?;
+               }
-           },
+               let as_i64 = *val as i64;
-           _ => return Err(Status::invalid_argument(format!("Unsupported type {}", sql_type))),
+               if (as_i64 as f64) != *val {
+                   return Err(Status::invalid_argument(format!("Integer value out of range for BIGINT column '{}'", col)));
+               }
+               params.add(as_i64).map_err(|e| Status::invalid_argument(format!("Failed to add bigint parameter for {}: {}", col, e)))?;
+           } else {
+               return Err(Status::invalid_argument(format!("Expected number for column '{}'", col)));
+           }
+       } else if sql_type == "INTEGER" {
+           if let Kind::NumberValue(val) = kind {
+               if val.fract() != 0.0 {
+                   return Err(Status::invalid_argument(format!("Expected integer for column '{}', but got a float", col)));
+               }
+               let as_i32 = *val as i32;
+               if (as_i32 as f64) != *val {
+                   return Err(Status::invalid_argument(format!("Integer value out of range for INTEGER column '{}'", col)));
+               }
+               params.add(as_i32).map_err(|e| Status::invalid_argument(format!("Failed to add integer parameter for {}: {}", col, e)))?;
+           } else {
+               return Err(Status::invalid_argument(format!("Expected number for column '{}'", col)));
+           }
+       } else if sql_type.starts_with("NUMERIC") {
+           let decimal_val = match kind {
+               Kind::StringValue(s) => {
+                   let trimmed = s.trim();
+                   if trimmed.is_empty() {
+                       None
+                   } else {
+                       Some(Decimal::from_str(trimmed).map_err(|_| {
+                           Status::invalid_argument(format!(
+                               "Invalid decimal string format for column '{}': {}",
+                               col, s
+                           ))
+                       })?)
+                   }
+               }
+               _ => {
+                   return Err(Status::invalid_argument(format!(
+                       "Expected a string representation for decimal column '{}', but received a different type.",
+                       col
+                   )));
+               }
+           };
+           params.add(decimal_val).map_err(|e| {
+               Status::invalid_argument(format!(
+                   "Failed to add decimal parameter for {}: {}",
+                   col, e
+               ))
+           })?;
+       } else {
+           return Err(Status::invalid_argument(format!("Unsupported type {}", sql_type)));
        }

        set_clauses.push(format!("\"{}\" = ${}", col, param_idx));
        param_idx += 1;
    }

-   // Add NULL clauses for empty fields
+   // --- End of copied logic ---
-   for field in null_fields {
-       // Make sure the field is valid
-       if !system_columns.contains(&field.as_str()) && !user_columns.contains(&&field) {
-           return Err(Status::invalid_argument(format!("Invalid column to set NULL: {}", field)));
-       }
-       set_clauses.push(format!("\"{}\" = NULL", field));
-   }

-   // Ensure we have at least one field to update
    if set_clauses.is_empty() {
-       return Err(Status::invalid_argument("No valid fields to update"));
+       return Ok(PutTableDataResponse {
+           success: true,
+           message: "No valid fields to update after processing.".into(),
+           updated_id: record_id,
+       });
    }

-   // Add ID parameter at the end
+   let qualified_table = crate::shared::schema_qualifier::qualify_table_name_for_data(
-   params.add(record_id)
+       db_pool,
-       .map_err(|e| Status::internal(format!("Failed to add record_id parameter: {}", e)))?;
+       &profile_name,
+       &table_name,
-   // Qualify table name with schema
+   )
-   let qualified_table = qualify_table_name_for_data(&table_name)?;
+   .await?;

    let set_clause = set_clauses.join(", ");
    let sql = format!(
-       "UPDATE {} SET {} WHERE id = ${} AND deleted = FALSE RETURNING id",
+       "UPDATE {} SET {} WHERE id = ${} RETURNING id",
qualified_table,
|
qualified_table,
|
||||||
set_clause,
|
set_clause,
|
||||||
param_idx
|
param_idx
|
||||||
);
|
);
|
||||||
|
|
||||||
let result = sqlx::query_scalar_with::<Postgres, i64, _>(&sql, params)
|
params.add(record_id).map_err(|e| Status::internal(format!("Failed to add record_id parameter: {}", e)))?;
|
||||||
|
|
||||||
|
let result = sqlx::query_scalar_with::<_, i64, _>(&sql, params)
|
||||||
.fetch_optional(db_pool)
|
.fetch_optional(db_pool)
|
||||||
.await;
|
.await;
|
||||||
|
|
||||||
match result {
|
let updated_id = match result {
|
||||||
Ok(Some(updated_id)) => Ok(PutTableDataResponse {
|
Ok(Some(id)) => id,
|
||||||
success: true,
|
Ok(None) => return Err(Status::not_found("Record not found")),
|
||||||
message: "Data updated successfully".into(),
|
|
||||||
updated_id,
|
|
||||||
}),
|
|
||||||
Ok(None) => Err(Status::not_found("Record not found or already deleted")),
|
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
// Handle "relation does not exist" error specifically
|
|
||||||
if let Some(db_err) = e.as_database_error() {
|
if let Some(db_err) = e.as_database_error() {
|
||||||
if db_err.code() == Some(std::borrow::Cow::Borrowed("42P01")) {
|
if db_err.code() == Some(std::borrow::Cow::Borrowed("22P02")) ||
|
||||||
return Err(Status::internal(format!(
|
db_err.code() == Some(std::borrow::Cow::Borrowed("22003")) {
|
||||||
"Table '{}' is defined but does not physically exist in the database as {}",
|
return Err(Status::invalid_argument(format!(
|
||||||
table_name, qualified_table
|
"Numeric field overflow or invalid format. Check precision and scale. Details: {}", db_err.message()
|
||||||
)));
|
)));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Err(Status::internal(format!("Update failed: {}", e)))
|
return Err(Status::internal(format!("Update failed: {}", e)));
|
||||||
}
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let command = IndexCommand::AddOrUpdate(IndexCommandData {
|
||||||
|
table_name: table_name.clone(),
|
||||||
|
row_id: updated_id,
|
||||||
|
});
|
||||||
|
|
||||||
|
if let Err(e) = indexer_tx.send(command).await {
|
||||||
|
error!(
|
||||||
|
"CRITICAL: DB update for table '{}' (id: {}) succeeded but failed to queue for indexing: {}. Search index is now inconsistent.",
|
||||||
|
table_name, updated_id, e
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
Ok(PutTableDataResponse {
|
||||||
|
success: true,
|
||||||
|
message: "Data updated successfully".into(),
|
||||||
|
updated_id,
|
||||||
|
})
|
||||||
}
|
}
|
||||||
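The BIGINT and INTEGER branches in the new handler reject any protobuf NumberValue that cannot be mapped exactly to an integer, since gRPC's Value type carries numbers as 64-bit floats. The sketch below isolates that round-trip guard; coerce_to_i64 is a hypothetical helper written for illustration, not a function from this repository.

// Minimal sketch (not repository code) of the f64 -> i64 guard used by the
// BIGINT branch above. Protobuf Value numbers arrive as f64, so fractional
// input and out-of-range magnitudes must be rejected before binding.
fn coerce_to_i64(val: f64) -> Result<i64, String> {
    // Reject fractional input outright, mirroring the val.fract() check.
    if val.fract() != 0.0 {
        return Err("expected an integer, got a float".into());
    }
    let as_i64 = val as i64;
    // Casting back and comparing catches magnitudes outside the i64 range,
    // where the saturating `as` cast changes the value.
    if (as_i64 as f64) != val {
        return Err("integer value out of range".into());
    }
    Ok(as_i64)
}

fn main() {
    assert_eq!(coerce_to_i64(42.0), Ok(42));
    assert!(coerce_to_i64(1.5).is_err());  // fractional input
    assert!(coerce_to_i64(1e19).is_err()); // larger than i64::MAX
    println!("round-trip checks passed");
}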
@@ -1,58 +0,0 @@
POST
❯ grpcurl -plaintext -d '{
  "adresar_id": 1,
  "c_dokladu": "DOC123",
  "datum": "01:10:2023",
  "c_faktury": "INV123",
  "obsah": "Sample content",
  "stredisko": "Center A",
  "c_uctu": "ACC123",
  "md": "MD123",
  "identif": "ID123",
  "poznanka": "Sample note",
  "firma": "AAA"
}' localhost:50051 multieko2.uctovnictvo.Uctovnictvo/PostUctovnictvo
{
  "id": "3",
  "adresarId": "1",
  "cDokladu": "DOC123",
  "datum": "2023-10-01",
  "cFaktury": "INV123",
  "obsah": "Sample content",
  "stredisko": "Center A",
  "cUctu": "ACC123",
  "md": "MD123",
  "identif": "ID123",
  "poznanka": "Sample note",
  "firma": "AAA"
}

PUT
❯ grpcurl -plaintext -d '{
  "id": '1',
  "adresar_id": 1,
  "c_dokladu": "UPDATED-DOC",
  "datum": "15.11.2023",
  "c_faktury": "UPDATED-INV",
  "obsah": "Updated content",
  "stredisko": "Updated Center",
  "c_uctu": "UPD-ACC",
  "md": "UPD-MD",
  "identif": "UPD-ID",
  "poznanka": "Updated note",
  "firma": "UPD"
}' localhost:50051 multieko2.uctovnictvo.Uctovnictvo/PutUctovnictvo
{
  "id": "1",
  "adresarId": "1",
  "cDokladu": "UPDATED-DOC",
  "datum": "15.11.2023",
  "cFaktury": "UPDATED-INV",
  "obsah": "Updated content",
  "stredisko": "Updated Center",
  "cUctu": "UPD-ACC",
  "md": "UPD-MD",
  "identif": "UPD-ID",
  "poznanka": "Updated note",
  "firma": "UPD"
}
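For comparison with the grpcurl transcript above, a tonic client could issue the same PutUctovnictvo call from Rust. This is a hypothetical sketch: the generated module path, the UctovnictvoClient and PutUctovnictvoRequest names, and the field types are assumed from the multieko2.uctovnictvo proto package and the JSON shown above, not taken from this diff.

// Hypothetical client sketch; assumes tonic-build has generated code for the
// multieko2.uctovnictvo package into OUT_DIR (all names below are assumptions).
use tonic::Request;

pub mod uctovnictvo {
    tonic::include_proto!("multieko2.uctovnictvo");
}
use uctovnictvo::{uctovnictvo_client::UctovnictvoClient, PutUctovnictvoRequest};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let mut client = UctovnictvoClient::connect("http://localhost:50051").await?;
    let request = Request::new(PutUctovnictvoRequest {
        id: 1,
        adresar_id: 1,
        c_dokladu: "UPDATED-DOC".into(),
        datum: "15.11.2023".into(),
        c_faktury: "UPDATED-INV".into(),
        obsah: "Updated content".into(),
        stredisko: "Updated Center".into(),
        c_uctu: "UPD-ACC".into(),
        md: "UPD-MD".into(),
        identif: "UPD-ID".into(),
        poznanka: "Updated note".into(),
        firma: "UPD".into(),
    });
    let response = client.put_uctovnictvo(request).await?;
    println!("{:?}", response.into_inner());
    Ok(())
}

The snake_case request fields mirror the JSON keys grpcurl accepts on input; the camelCase keys in the transcript are simply grpcurl re-serializing the protobuf response with the default JSON field names.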
@@ -1,41 +0,0 @@
❯ grpcurl -plaintext -d '{}' localhost:50051 multieko2.uctovnictvo.Uctovnictvo/GetUctovnictvoCount

{
  "count": "4"
}
❯ grpcurl -plaintext -d '{
  "position": 2
}' localhost:50051 multieko2.uctovnictvo.Uctovnictvo/GetUctovnictvoByPosition

{
  "id": "2",
  "adresarId": "1",
  "cDokladu": "DOC123",
  "datum": "01.10.2023",
  "cFaktury": "INV123",
  "obsah": "Sample content",
  "stredisko": "Center A",
  "cUctu": "ACC123",
  "md": "MD123",
  "identif": "ID123",
  "poznanka": "Sample note",
  "firma": "AAA"
}
❯ grpcurl -plaintext -d '{
  "id": 1
}' localhost:50051 multieko2.uctovnictvo.Uctovnictvo/GetUctovnictvo
{
  "id": "1",
  "adresarId": "1",
  "cDokladu": "DOC123",
  "datum": "01.10.2023",
  "cFaktury": "INV123",
  "obsah": "Sample content",
  "stredisko": "Center A",
  "cUctu": "ACC123",
  "md": "MD123",
  "identif": "ID123",
  "poznanka": "Sample note",
  "firma": "AAA"
}
@@ -1,12 +0,0 @@
// src/uctovnictvo/handlers.rs
pub mod post_uctovnictvo;
pub mod get_uctovnictvo;
pub mod get_uctovnictvo_count;
pub mod get_uctovnictvo_by_position;
pub mod put_uctovnictvo;

pub use post_uctovnictvo::post_uctovnictvo;
pub use get_uctovnictvo::get_uctovnictvo;
pub use get_uctovnictvo_count::get_uctovnictvo_count;
pub use get_uctovnictvo_by_position::get_uctovnictvo_by_position;
pub use put_uctovnictvo::put_uctovnictvo;
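The pub use re-exports let a service implementation call each handler as handlers::<name> without spelling out the submodule. Below is a shape-only sketch of that delegation pattern with stand-in types; apart from the handler name put_uctovnictvo, none of these names are taken from the repository, and the real code would implement the trait generated by tonic for the Uctovnictvo service.

// Shape-only sketch of delegating an RPC method to a handler function.
// All request/response types here are stand-ins, not the generated messages.
use tonic::{Request, Response, Status};

#[derive(Debug, Default)]
pub struct PutUctovnictvoRequest { pub id: i64 }

#[derive(Debug, Default)]
pub struct UctovnictvoResponse { pub id: i64 }

// Stand-in for the handler re-exported as handlers::put_uctovnictvo.
async fn put_uctovnictvo(req: PutUctovnictvoRequest) -> Result<UctovnictvoResponse, Status> {
    Ok(UctovnictvoResponse { id: req.id })
}

pub struct UctovnictvoService;

impl UctovnictvoService {
    // In the real service this would be a generated trait method; it unwraps
    // the tonic Request, delegates to the handler, and wraps the result back up.
    pub async fn put_uctovnictvo(
        &self,
        request: Request<PutUctovnictvoRequest>,
    ) -> Result<Response<UctovnictvoResponse>, Status> {
        put_uctovnictvo(request.into_inner()).await.map(Response::new)
    }
}

#[tokio::main]
async fn main() -> Result<(), Status> {
    let service = UctovnictvoService;
    let reply = service
        .put_uctovnictvo(Request::new(PutUctovnictvoRequest { id: 1 }))
        .await?;
    println!("{:?}", reply.into_inner());
    Ok(())
}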
Some files were not shown because too many files have changed in this diff.