search crate created
@@ -14,6 +14,7 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
                 "proto/table_definition.proto",
                 "proto/tables_data.proto",
                 "proto/table_script.proto",
+                "proto/search.proto",
             ],
             &["proto"],
         )?;
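For context, the hunk above extends the list of inputs handed to the proto compilation step in the build script. The sketch below is a hypothetical reconstruction of what the surrounding build.rs plausibly looks like, not part of this commit: the out_dir, the descriptor path, and the compile_protos method name are assumptions (the exact builder calls depend on the tonic-build version); only the proto file list and the "proto" include path come from the diff.

// Hypothetical sketch of the surrounding build.rs, not part of this commit.
// out_dir and file_descriptor_set_path are guesses inferred from the committed
// files under common/src/proto/; only the proto list matches the diff above.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    tonic_build::configure()
        .out_dir("src/proto")
        .file_descriptor_set_path("src/proto/descriptor.bin")
        .compile_protos(
            &[
                "proto/table_definition.proto",
                "proto/tables_data.proto",
                "proto/table_script.proto",
                "proto/search.proto",
            ],
            &["proto"],
        )?;
    Ok(())
}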
common/proto/search.proto (new file, 20 lines)
@@ -0,0 +1,20 @@
// In common/proto/search.proto
syntax = "proto3";
package multieko2.search;

service Searcher {
    rpc SearchTable(SearchRequest) returns (SearchResponse);
}

message SearchRequest {
    string table_name = 1;
    string query = 2;
}

message SearchResponse {
    message Hit {
        int64 id = 1; // The PostgreSQL row ID
        float score = 2;
    }
    repeated Hit hits = 1;
}
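The service above is a single unary RPC: the caller names a table and a query string, and gets back scored PostgreSQL row IDs. As a hedged illustration of how a server might implement the generated Searcher trait (not part of this commit), see the sketch below; the crate path `common::proto::search`, the struct name SearcherService, and the placeholder hit are assumptions.

// Hypothetical server-side implementation sketch, assuming the generated
// module is re-exported as common::proto::search (see the lib.rs hunk below).
use tonic::{Request, Response, Status};
use common::proto::search::searcher_server::Searcher;
use common::proto::search::{search_response::Hit, SearchRequest, SearchResponse};

#[derive(Debug, Default)]
pub struct SearcherService;

#[tonic::async_trait]
impl Searcher for SearcherService {
    async fn search_table(
        &self,
        request: Request<SearchRequest>,
    ) -> Result<Response<SearchResponse>, Status> {
        let SearchRequest { table_name, query } = request.into_inner();
        // Placeholder result: a real backend would run `query` against the
        // index for `table_name` and return the matching PostgreSQL row IDs.
        let _ = (table_name, query);
        let hits = vec![Hit { id: 1, score: 1.0 }];
        Ok(Response::new(SearchResponse { hits }))
    }
}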
@@ -25,6 +25,9 @@ pub mod proto {
     pub mod table_script {
         include!("proto/multieko2.table_script.rs");
     }
+    pub mod search {
+        include!("proto/multieko2.search.rs");
+    }
     pub const FILE_DESCRIPTOR_SET: &[u8] =
         include_bytes!("proto/descriptor.bin");
 }
Binary file not shown.
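With the new `search` submodule exported alongside the existing ones in the lib.rs hunk above, downstream crates can name the request and response types directly. A trivial hedged example, assuming the crate is depended on under the name `common`; the table name and search term are placeholders:

// Hypothetical downstream usage, not part of this commit.
use common::proto::search::SearchRequest;

fn build_request() -> SearchRequest {
    SearchRequest {
        table_name: "invoices".to_string(), // hypothetical table name
        query: "acme".to_string(),          // hypothetical search term
    }
}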
common/src/proto/multieko2.search.rs (new file, 315 lines)
@@ -0,0 +1,315 @@
// This file is @generated by prost-build.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SearchRequest {
    #[prost(string, tag = "1")]
    pub table_name: ::prost::alloc::string::String,
    #[prost(string, tag = "2")]
    pub query: ::prost::alloc::string::String,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SearchResponse {
    #[prost(message, repeated, tag = "1")]
    pub hits: ::prost::alloc::vec::Vec<search_response::Hit>,
}
/// Nested message and enum types in `SearchResponse`.
pub mod search_response {
    #[derive(Clone, Copy, PartialEq, ::prost::Message)]
    pub struct Hit {
        /// The PostgreSQL row ID
        #[prost(int64, tag = "1")]
        pub id: i64,
        #[prost(float, tag = "2")]
        pub score: f32,
    }
}
/// Generated client implementations.
pub mod searcher_client {
    #![allow(
        unused_variables,
        dead_code,
        missing_docs,
        clippy::wildcard_imports,
        clippy::let_unit_value,
    )]
    use tonic::codegen::*;
    use tonic::codegen::http::Uri;
    #[derive(Debug, Clone)]
    pub struct SearcherClient<T> {
        inner: tonic::client::Grpc<T>,
    }
    impl SearcherClient<tonic::transport::Channel> {
        /// Attempt to create a new client by connecting to a given endpoint.
        pub async fn connect<D>(dst: D) -> Result<Self, tonic::transport::Error>
        where
            D: TryInto<tonic::transport::Endpoint>,
            D::Error: Into<StdError>,
        {
            let conn = tonic::transport::Endpoint::new(dst)?.connect().await?;
            Ok(Self::new(conn))
        }
    }
    impl<T> SearcherClient<T>
    where
        T: tonic::client::GrpcService<tonic::body::Body>,
        T::Error: Into<StdError>,
        T::ResponseBody: Body<Data = Bytes> + std::marker::Send + 'static,
        <T::ResponseBody as Body>::Error: Into<StdError> + std::marker::Send,
    {
        pub fn new(inner: T) -> Self {
            let inner = tonic::client::Grpc::new(inner);
            Self { inner }
        }
        pub fn with_origin(inner: T, origin: Uri) -> Self {
            let inner = tonic::client::Grpc::with_origin(inner, origin);
            Self { inner }
        }
        pub fn with_interceptor<F>(
            inner: T,
            interceptor: F,
        ) -> SearcherClient<InterceptedService<T, F>>
        where
            F: tonic::service::Interceptor,
            T::ResponseBody: Default,
            T: tonic::codegen::Service<
                http::Request<tonic::body::Body>,
                Response = http::Response<
                    <T as tonic::client::GrpcService<tonic::body::Body>>::ResponseBody,
                >,
            >,
            <T as tonic::codegen::Service<
                http::Request<tonic::body::Body>,
            >>::Error: Into<StdError> + std::marker::Send + std::marker::Sync,
        {
            SearcherClient::new(InterceptedService::new(inner, interceptor))
        }
        /// Compress requests with the given encoding.
        ///
        /// This requires the server to support it otherwise it might respond with an
        /// error.
        #[must_use]
        pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self {
            self.inner = self.inner.send_compressed(encoding);
            self
        }
        /// Enable decompressing responses.
        #[must_use]
        pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self {
            self.inner = self.inner.accept_compressed(encoding);
            self
        }
        /// Limits the maximum size of a decoded message.
        ///
        /// Default: `4MB`
        #[must_use]
        pub fn max_decoding_message_size(mut self, limit: usize) -> Self {
            self.inner = self.inner.max_decoding_message_size(limit);
            self
        }
        /// Limits the maximum size of an encoded message.
        ///
        /// Default: `usize::MAX`
        #[must_use]
        pub fn max_encoding_message_size(mut self, limit: usize) -> Self {
            self.inner = self.inner.max_encoding_message_size(limit);
            self
        }
        pub async fn search_table(
            &mut self,
            request: impl tonic::IntoRequest<super::SearchRequest>,
        ) -> std::result::Result<tonic::Response<super::SearchResponse>, tonic::Status> {
            self.inner
                .ready()
                .await
                .map_err(|e| {
                    tonic::Status::unknown(
                        format!("Service was not ready: {}", e.into()),
                    )
                })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/multieko2.search.Searcher/SearchTable",
            );
            let mut req = request.into_request();
            req.extensions_mut()
                .insert(GrpcMethod::new("multieko2.search.Searcher", "SearchTable"));
            self.inner.unary(req, path, codec).await
        }
    }
}
/// Generated server implementations.
pub mod searcher_server {
    #![allow(
        unused_variables,
        dead_code,
        missing_docs,
        clippy::wildcard_imports,
        clippy::let_unit_value,
    )]
    use tonic::codegen::*;
    /// Generated trait containing gRPC methods that should be implemented for use with SearcherServer.
    #[async_trait]
    pub trait Searcher: std::marker::Send + std::marker::Sync + 'static {
        async fn search_table(
            &self,
            request: tonic::Request<super::SearchRequest>,
        ) -> std::result::Result<tonic::Response<super::SearchResponse>, tonic::Status>;
    }
    #[derive(Debug)]
    pub struct SearcherServer<T> {
        inner: Arc<T>,
        accept_compression_encodings: EnabledCompressionEncodings,
        send_compression_encodings: EnabledCompressionEncodings,
        max_decoding_message_size: Option<usize>,
        max_encoding_message_size: Option<usize>,
    }
    impl<T> SearcherServer<T> {
        pub fn new(inner: T) -> Self {
            Self::from_arc(Arc::new(inner))
        }
        pub fn from_arc(inner: Arc<T>) -> Self {
            Self {
                inner,
                accept_compression_encodings: Default::default(),
                send_compression_encodings: Default::default(),
                max_decoding_message_size: None,
                max_encoding_message_size: None,
            }
        }
        pub fn with_interceptor<F>(
            inner: T,
            interceptor: F,
        ) -> InterceptedService<Self, F>
        where
            F: tonic::service::Interceptor,
        {
            InterceptedService::new(Self::new(inner), interceptor)
        }
        /// Enable decompressing requests with the given encoding.
        #[must_use]
        pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self {
            self.accept_compression_encodings.enable(encoding);
            self
        }
        /// Compress responses with the given encoding, if the client supports it.
        #[must_use]
        pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self {
            self.send_compression_encodings.enable(encoding);
            self
        }
        /// Limits the maximum size of a decoded message.
        ///
        /// Default: `4MB`
        #[must_use]
        pub fn max_decoding_message_size(mut self, limit: usize) -> Self {
            self.max_decoding_message_size = Some(limit);
            self
        }
        /// Limits the maximum size of an encoded message.
        ///
        /// Default: `usize::MAX`
        #[must_use]
        pub fn max_encoding_message_size(mut self, limit: usize) -> Self {
            self.max_encoding_message_size = Some(limit);
            self
        }
    }
    impl<T, B> tonic::codegen::Service<http::Request<B>> for SearcherServer<T>
    where
        T: Searcher,
        B: Body + std::marker::Send + 'static,
        B::Error: Into<StdError> + std::marker::Send + 'static,
    {
        type Response = http::Response<tonic::body::Body>;
        type Error = std::convert::Infallible;
        type Future = BoxFuture<Self::Response, Self::Error>;
        fn poll_ready(
            &mut self,
            _cx: &mut Context<'_>,
        ) -> Poll<std::result::Result<(), Self::Error>> {
            Poll::Ready(Ok(()))
        }
        fn call(&mut self, req: http::Request<B>) -> Self::Future {
            match req.uri().path() {
                "/multieko2.search.Searcher/SearchTable" => {
                    #[allow(non_camel_case_types)]
                    struct SearchTableSvc<T: Searcher>(pub Arc<T>);
                    impl<T: Searcher> tonic::server::UnaryService<super::SearchRequest>
                    for SearchTableSvc<T> {
                        type Response = super::SearchResponse;
                        type Future = BoxFuture<
                            tonic::Response<Self::Response>,
                            tonic::Status,
                        >;
                        fn call(
                            &mut self,
                            request: tonic::Request<super::SearchRequest>,
                        ) -> Self::Future {
                            let inner = Arc::clone(&self.0);
                            let fut = async move {
                                <T as Searcher>::search_table(&inner, request).await
                            };
                            Box::pin(fut)
                        }
                    }
                    let accept_compression_encodings = self.accept_compression_encodings;
                    let send_compression_encodings = self.send_compression_encodings;
                    let max_decoding_message_size = self.max_decoding_message_size;
                    let max_encoding_message_size = self.max_encoding_message_size;
                    let inner = self.inner.clone();
                    let fut = async move {
                        let method = SearchTableSvc(inner);
                        let codec = tonic::codec::ProstCodec::default();
                        let mut grpc = tonic::server::Grpc::new(codec)
                            .apply_compression_config(
                                accept_compression_encodings,
                                send_compression_encodings,
                            )
                            .apply_max_message_size_config(
                                max_decoding_message_size,
                                max_encoding_message_size,
                            );
                        let res = grpc.unary(method, req).await;
                        Ok(res)
                    };
                    Box::pin(fut)
                }
                _ => {
                    Box::pin(async move {
                        let mut response = http::Response::new(
                            tonic::body::Body::default(),
                        );
                        let headers = response.headers_mut();
                        headers
                            .insert(
                                tonic::Status::GRPC_STATUS,
                                (tonic::Code::Unimplemented as i32).into(),
                            );
                        headers
                            .insert(
                                http::header::CONTENT_TYPE,
                                tonic::metadata::GRPC_CONTENT_TYPE,
                            );
                        Ok(response)
                    })
                }
            }
        }
    }
    impl<T> Clone for SearcherServer<T> {
        fn clone(&self) -> Self {
            let inner = self.inner.clone();
            Self {
                inner,
                accept_compression_encodings: self.accept_compression_encodings,
                send_compression_encodings: self.send_compression_encodings,
                max_decoding_message_size: self.max_decoding_message_size,
                max_encoding_message_size: self.max_encoding_message_size,
            }
        }
    }
    /// Generated gRPC service name
    pub const SERVICE_NAME: &str = "multieko2.search.Searcher";
    impl<T> tonic::server::NamedService for SearcherServer<T> {
        const NAME: &'static str = SERVICE_NAME;
    }
}
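To round out the picture, calling the generated client end to end might look like the hedged sketch below; it is not part of this commit. The endpoint address is a placeholder, tokio is assumed as the async runtime, and the module path assumes the common crate layout shown in the lib.rs hunk above.

// Hypothetical client-side usage, not part of this commit.
use common::proto::search::searcher_client::SearcherClient;
use common::proto::search::SearchRequest;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Placeholder endpoint; point this at wherever the Searcher service runs.
    let mut client = SearcherClient::connect("http://127.0.0.1:50051").await?;
    let response = client
        .search_table(SearchRequest {
            table_name: "invoices".to_string(), // hypothetical table name
            query: "acme".to_string(),          // hypothetical search term
        })
        .await?;
    for hit in response.into_inner().hits {
        println!("row id {} (score {})", hit.id, hit.score);
    }
    Ok(())
}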