summaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
authorJari Vetoniemi <jari.vetoniemi@indooratlas.com>2022-02-17 05:31:06 +0900
committerJari Vetoniemi <mailroxas@gmail.com>2022-02-18 02:10:42 +0900
commit47d0f971fcf64d1bb036f0081d2ff209ec5007ee (patch)
tree7756cad0c4c915e277aba2c42e3110b0a2bf4e6f /src
initial commitHEADmaster
Diffstat (limited to 'src')
-rw-r--r--src/cors.rs25
-rw-r--r--src/main.rs63
-rw-r--r--src/pagination.rs103
3 files changed, 191 insertions, 0 deletions
diff --git a/src/cors.rs b/src/cors.rs
new file mode 100644
index 0000000..700e4bb
--- /dev/null
+++ b/src/cors.rs
@@ -0,0 +1,25 @@
+use rocket::fairing::{Fairing, Info, Kind};
+use rocket::http::Header;
+use rocket::{Request, Response};
+
+pub struct CORS;
+
+#[rocket::async_trait]
+impl Fairing for CORS {
+ fn info(&self) -> Info {
+ Info {
+ name: "Add CORS headers to responses",
+ kind: Kind::Response,
+ }
+ }
+
+ async fn on_response<'r>(&self, _req: &'r Request<'_>, response: &mut Response<'r>) {
+ response.set_header(Header::new("Access-Control-Allow-Origin", "*"));
+ response.set_header(Header::new(
+ "Access-Control-Allow-Methods",
+ "POST, GET, PATCH, OPTIONS",
+ ));
+ response.set_header(Header::new("Access-Control-Allow-Headers", "*"));
+ response.set_header(Header::new("Access-Control-Allow-Credentials", "true"));
+ }
+}
diff --git a/src/main.rs b/src/main.rs
new file mode 100644
index 0000000..f28d6df
--- /dev/null
+++ b/src/main.rs
@@ -0,0 +1,63 @@
+#[macro_use]
+extern crate rocket;
+#[macro_use]
+extern crate diesel;
+
+mod cors;
+mod kanta;
+mod pagination;
+use crate::diesel::QueryDsl;
+use crate::diesel::TextExpressionMethods;
+use crate::pagination::SortingAndPaging;
+use cors::CORS;
+use rocket::serde::json::Json as InOut;
+// use rocket::serde::msgpack::MsgPack as InOut;
+
/// Query-string parameters accepted by the `/products` endpoint.
#[derive(FromForm)]
struct ProductFilter {
    // Substring matched against the product title (rendered as LIKE %prefix%).
    prefix: Option<String>,
    // Column name to order by; the handler defaults it to "created_at".
    sort_by: Option<String>,
    // Sort direction string; empty means no explicit direction.
    sort_direction: Option<String>,
    // Page index. NOTE(review): the handler defaults this to 0, but
    // pagination computes OFFSET as (page - 1) * page_size, which suggests
    // a 1-based index — confirm the intended base.
    page_num: Option<i64>,
    // Rows per page; the handler defaults it to 100.
    page_size: Option<i64>,
}
+
+#[get("/products?<filter..>")]
+async fn filter_products(
+ filter: ProductFilter,
+ db: kanta::Db,
+) -> Option<InOut<pagination::Page<kanta::Product>>> {
+ info!("prefix: {:?}", Some(filter.prefix.clone()));
+ db.run(move |conn| {
+ let mut query = kanta::products::table.into_boxed();
+ if let Some(i) = filter.prefix {
+ query = query.filter(kanta::products::title.like(format!("%{}%", i)));
+ }
+ query
+ .paginate(filter.page_num.unwrap_or(0))
+ .per_page(filter.page_size.unwrap_or(100))
+ .sort(
+ filter.sort_by.unwrap_or("created_at".to_string()),
+ filter.sort_direction.unwrap_or("".to_string()),
+ )
+ .load_and_count_items::<kanta::Product>(conn)
+ })
+ .await
+ .map(InOut)
+ .ok()
+}
+
+#[get("/")]
+async fn index() -> String {
+ "Server is running".to_string()
+}
+
+#[launch]
+fn rocket() -> _ {
+ kanta::mount_at(
+ rocket::build()
+ .attach(CORS)
+ .mount("/", routes![index, filter_products]),
+ "/",
+ )
+}
diff --git a/src/pagination.rs b/src/pagination.rs
new file mode 100644
index 0000000..961b079
--- /dev/null
+++ b/src/pagination.rs
@@ -0,0 +1,103 @@
+use diesel::prelude::*;
+use diesel::query_builder::*;
+use diesel::query_dsl::methods::LoadQuery;
+use diesel::sql_types::BigInt;
+use diesel::sqlite::Sqlite;
+use rocket::serde::Serialize;
+
/// Extension trait that lets any diesel query be wrapped with sorting and
/// pagination via `.paginate(page)`.
pub trait SortingAndPaging: Sized {
    // `page` feeds OFFSET = (page - 1) * per_page when the query is rendered,
    // i.e. it is treated as 1-based.
    fn paginate(self, page: i64) -> SortedAndPaginated<Self>;
}
+
+impl<T> SortingAndPaging for T {
+ fn paginate(self, page: i64) -> SortedAndPaginated<Self> {
+ SortedAndPaginated {
+ query: self,
+ sort_by: "".to_string(),
+ sort_direction: "".to_string(),
+ per_page: 100,
+ page,
+ }
+ }
+}
+
/// One page of results plus the paging metadata, serialized to clients.
#[derive(Serialize)]
#[serde(crate = "rocket::serde")]
pub struct Page<T> {
    // The rows belonging to this page.
    pub data: Vec<T>,
    // Page index exactly as requested by the caller.
    pub page_num: i64,
    // Requested page size (may exceed data.len() on the last page).
    pub page_size: i64,
    // Total row count across all pages (the COUNT(*) OVER () column).
    pub total_elements: i64,
}
+
+impl<T> Page<T> {
+ pub fn new(data: Vec<T>, page_num: i64, page_size: i64, total_elements: i64) -> Page<T> {
+ Page {
+ data,
+ page_num,
+ page_size,
+ total_elements,
+ }
+ }
+}
+
/// A wrapped diesel query that, when rendered to SQL, is placed in a
/// subselect with an added COUNT(*) OVER () column plus ORDER BY /
/// LIMIT / OFFSET clauses.
#[derive(Debug, Clone, QueryId)]
pub struct SortedAndPaginated<T> {
    // The inner query, rendered as the subselect body.
    query: T,
    // Column to order by; empty string disables the ORDER BY clause.
    sort_by: String,
    // Direction string emitted after the column name.
    sort_direction: String,
    // Page index (OFFSET = (page - 1) * per_page, i.e. 1-based).
    page: i64,
    // Maximum rows per page (SQL LIMIT).
    per_page: i64,
}
+
+impl<T> SortedAndPaginated<T> {
+ pub fn per_page(self, per_page: i64) -> Self {
+ SortedAndPaginated { per_page, ..self }
+ }
+
+ pub fn sort(self, sort_by: String, sort_direction: String) -> Self {
+ SortedAndPaginated {
+ sort_by,
+ sort_direction,
+ ..self
+ }
+ }
+
+ pub fn load_and_count_items<U>(self, conn: &SqliteConnection) -> QueryResult<Page<U>>
+ where
+ Self: LoadQuery<SqliteConnection, (U, i64)>,
+ {
+ let page = self.page;
+ let per_page = self.per_page;
+ let results = self.load::<(U, i64)>(conn)?;
+ let total = results.get(0).map(|x| x.1).unwrap_or(0);
+ let records = results.into_iter().map(|x| x.0).collect();
+ Ok(Page::new(records, page, per_page, total))
+ }
+}
+
// The wrapper's SQL type is the inner query's row plus a trailing BigInt
// column holding the COUNT(*) OVER () total.
impl<T: Query> Query for SortedAndPaginated<T> {
    type SqlType = (T::SqlType, BigInt);
}
+
// Allow `.load()` and friends to be called directly on the wrapper.
impl<T> RunQueryDsl<SqliteConnection> for SortedAndPaginated<T> {}
+
+impl<T> QueryFragment<Sqlite> for SortedAndPaginated<T>
+where
+ T: QueryFragment<Sqlite>,
+{
+ fn walk_ast(&self, mut out: AstPass<Sqlite>) -> QueryResult<()> {
+ out.push_sql("SELECT *, COUNT(*) OVER () FROM (");
+ self.query.walk_ast(out.reborrow())?;
+ out.push_sql(") t ");
+ if &self.sort_by.as_str().len() > &0 {
+ out.push_sql(format!(" ORDER BY {} {}", &self.sort_by, &self.sort_direction).as_str());
+ }
+ out.push_sql(" LIMIT ");
+ out.push_bind_param::<BigInt, _>(&self.per_page)?;
+ out.push_sql(" OFFSET ");
+ let offset = (self.page - 1) * self.per_page;
+ out.push_bind_param::<BigInt, _>(&offset)?;
+ Ok(())
+ }
+}