Mirror of https://github.com/flibusta-apps/book_library_server.git (synced 2025-12-08 01:20:44 +01:00)
Rewrite to rust init
324
src/views/authors.rs
Normal file
@@ -0,0 +1,324 @@
use std::collections::HashSet;

use axum::{Router, extract::{Query, Path}, Json, response::IntoResponse, routing::get, http::StatusCode};
use rand::Rng;

use crate::{prisma::{author, author_annotation, book, book_author, translator, book_sequence}, serializers::{pagination::{Pagination, Page, PageWithParent}, author::{Author, AuthorBook}, author_annotation::AuthorAnnotation, allowed_langs::AllowedLangs}, meilisearch::{get_meili_client, AuthorMeili}};

use super::Database;


async fn get_authors(
    db: Database,
    pagination: Query<Pagination>,
) -> impl IntoResponse {
    let authors_count = db
        .author()
        .count(vec![])
        .exec()
        .await
        .unwrap();

    let authors = db
        .author()
        .find_many(vec![])
        .with(author::author_annotation::fetch())
        .order_by(author::id::order(prisma_client_rust::Direction::Asc))
        .skip((pagination.page - 1) * pagination.size)
        .take(pagination.size)
        .exec()
        .await
        .unwrap();

    let page: Page<Author> = Page::new(
        authors.iter().map(|item| item.clone().into()).collect(),
        authors_count,
        &pagination,
    );

    Json(page)
}


async fn get_random_author(
    db: Database,
    axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>,
) -> impl IntoResponse {
    let client = get_meili_client();

    let authors_index = client.index("authors");

    let filter = format!("author_langs IN [{}]", allowed_langs.join(", "));

    let result = authors_index
        .search()
        .with_filter(&filter)
        .execute::<AuthorMeili>()
        .await
        .unwrap();

    let author_id = {
        let offset: usize = rand::thread_rng().gen_range(0..result.estimated_total_hits.unwrap().try_into().unwrap());

        let result = authors_index
            .search()
            .with_limit(1)
            .with_offset(offset)
            .execute::<AuthorMeili>()
            .await
            .unwrap();

        let author = &result.hits.get(0).unwrap().result;

        author.id
    };

    let author = db
        .author()
        .find_unique(author::id::equals(author_id))
        .with(author::author_annotation::fetch())
        .exec()
        .await
        .unwrap()
        .unwrap();

    Json::<Author>(author.into())
}


async fn get_author(
    db: Database,
    Path(author_id): Path<i32>,
) -> impl IntoResponse {
    let author = db
        .author()
        .find_unique(author::id::equals(author_id))
        .with(author::author_annotation::fetch())
        .exec()
        .await
        .unwrap();

    match author {
        Some(author) => Json::<Author>(author.into()).into_response(),
        None => StatusCode::NOT_FOUND.into_response(),
    }
}


async fn get_author_annotation(
    db: Database,
    Path(author_id): Path<i32>,
) -> impl IntoResponse {
    let author_annotation = db
        .author_annotation()
        .find_unique(author_annotation::author_id::equals(author_id))
        .exec()
        .await
        .unwrap();

    match author_annotation {
        Some(annotation) => Json::<AuthorAnnotation>(annotation.into()).into_response(),
        None => StatusCode::NOT_FOUND.into_response(),
    }
}


async fn get_author_books(
    db: Database,
    Path(author_id): Path<i32>,
    axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>,
    pagination: Query<Pagination>,
) -> impl IntoResponse {
    let author = db
        .author()
        .find_unique(author::id::equals(author_id))
        .with(author::author_annotation::fetch())
        .exec()
        .await
        .unwrap();

    let author = match author {
        Some(author) => author,
        None => return StatusCode::NOT_FOUND.into_response(),
    };

    let books_count = db
        .book()
        .count(vec![
            book::book_authors::some(vec![book_author::author_id::equals(author_id)]),
            book::lang::in_vec(allowed_langs.clone()),
        ])
        .exec()
        .await
        .unwrap();

    let books = db
        .book()
        .find_many(vec![
            book::book_authors::some(vec![book_author::author_id::equals(author_id)]),
            book::lang::in_vec(allowed_langs),
        ])
        .with(book::source::fetch())
        .with(book::book_annotation::fetch())
        .with(
            book::translations::fetch(vec![])
                .with(translator::author::fetch().with(author::author_annotation::fetch())),
        )
        .with(book::book_sequences::fetch(vec![]).with(book_sequence::sequence::fetch()))
        .order_by(book::id::order(prisma_client_rust::Direction::Asc))
        .skip((pagination.page - 1) * pagination.size)
        .take(pagination.size)
        .exec()
        .await
        .unwrap();

    let page: PageWithParent<AuthorBook, Author> = PageWithParent::new(
        author.into(),
        books.iter().map(|item| item.clone().into()).collect(),
        books_count,
        &pagination,
    );

    Json(page).into_response()
}


async fn get_author_books_available_types(
    db: Database,
    Path(author_id): Path<i32>,
    axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>,
) -> impl IntoResponse {
    let books = db
        .book()
        .find_many(vec![
            book::book_authors::some(vec![book_author::author_id::equals(author_id)]),
            book::lang::in_vec(allowed_langs),
        ])
        .exec()
        .await
        .unwrap();

    let mut file_types: HashSet<String> = HashSet::new();

    for book in books {
        file_types.insert(book.file_type.clone());
    }

    if file_types.contains(&"fb2".to_string()) {
        file_types.insert("epub".to_string());
        file_types.insert("mobi".to_string());
        file_types.insert("fb2zip".to_string());
    }

    Json::<Vec<String>>(file_types.into_iter().collect())
}


async fn search_authors(
    db: Database,
    Path(query): Path<String>,
    axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>,
    pagination: Query<Pagination>,
) -> impl IntoResponse {
    let client = get_meili_client();

    let authors_index = client.index("authors");

    let filter = format!("author_langs IN [{}]", allowed_langs.join(", "));

    let result = authors_index
        .search()
        .with_query(&query)
        .with_filter(&filter)
        .with_offset(((pagination.page - 1) * pagination.size).try_into().unwrap())
        .with_limit(pagination.size.try_into().unwrap())
        .execute::<AuthorMeili>()
        .await
        .unwrap();

    let total = result.estimated_total_hits.unwrap();
    let author_ids: Vec<i32> = result.hits.iter().map(|a| a.result.id).collect();

    let mut authors = db
        .author()
        .find_many(vec![author::id::in_vec(author_ids.clone())])
        .with(author::author_annotation::fetch())
        .order_by(author::id::order(prisma_client_rust::Direction::Asc))
        .exec()
        .await
        .unwrap();

    authors.sort_by(|a, b| {
        let a_pos = author_ids.iter().position(|i| *i == a.id).unwrap();
        let b_pos = author_ids.iter().position(|i| *i == b.id).unwrap();

        a_pos.cmp(&b_pos)
    });

    let page: Page<Author> = Page::new(
        authors.iter().map(|item| item.clone().into()).collect(),
        total.try_into().unwrap(),
        &pagination,
    );

    Json(page)
}


pub async fn get_authors_router() -> Router {
    Router::new()
        .route("/", get(get_authors))
        .route("/random", get(get_random_author))
        .route("/:author_id", get(get_author))
        .route("/:author_id/annotation", get(get_author_annotation))
        .route("/:author_id/books", get(get_author_books))
        .route("/:author_id/available_types", get(get_author_books_available_types))
        .route("/search/:query", get(search_authors))
}
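The handlers above lean on the Pagination and Page serializers, which are not part of this diff. A minimal sketch of what they are assumed to look like, inferred only from how they are used here (a 1-based page, skip = (page - 1) * size, and a Page::new(items, total, &pagination) constructor); every field name other than page and size is hypothetical:

use serde::{Deserialize, Serialize};

// Hypothetical sketch, not part of this commit's diff.
#[derive(Deserialize)]
pub struct Pagination {
    pub page: i64,
    pub size: i64,
}

#[derive(Serialize)]
pub struct Page<T> {
    pub items: Vec<T>,
    pub total: i64,
    pub page: i64,
    pub size: i64,
    // Hypothetical: total number of pages, rounded up.
    pub pages: i64,
}

impl<T> Page<T> {
    pub fn new(items: Vec<T>, total: i64, pagination: &Pagination) -> Self {
        Self {
            items,
            total,
            page: pagination.page,
            size: pagination.size,
            pages: (total + pagination.size - 1) / pagination.size,
        }
    }
}

Because axum's Query<T> dereferences to T, passing &pagination from the handlers would coerce to &Pagination under this shape.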
424
src/views/books.rs
Normal file
@@ -0,0 +1,424 @@
use axum::{Router, routing::get, extract::{Query, Path}, Json, response::IntoResponse, http::StatusCode};
use prisma_client_rust::Direction;
use rand::Rng;

use crate::{serializers::{book::{BookFilter, RemoteBook, BaseBook, DetailBook, RandomBookFilter, Book}, pagination::{Pagination, Page}, book_annotation::BookAnnotation, allowed_langs::AllowedLangs}, prisma::{book, book_author, author, translator, book_sequence, book_genre, book_annotation, genre}, meilisearch::{get_meili_client, BookMeili}};

use super::Database;


pub async fn get_books(
    db: Database,
    axum_extra::extract::Query(book_filter): axum_extra::extract::Query<BookFilter>,
    pagination: Query<Pagination>,
) -> impl IntoResponse {
    let filter = book_filter.get_filter_vec();

    let books_count = db
        .book()
        .count(filter.clone())
        .exec()
        .await
        .unwrap();

    let books = db
        .book()
        .find_many(filter)
        .with(book::book_annotation::fetch())
        .with(book::source::fetch())
        .with(
            book::book_authors::fetch(vec![])
                .with(book_author::author::fetch().with(author::author_annotation::fetch())),
        )
        .with(
            book::translations::fetch(vec![])
                .with(translator::author::fetch().with(author::author_annotation::fetch())),
        )
        .with(book::book_sequences::fetch(vec![]).with(book_sequence::sequence::fetch()))
        .order_by(book::id::order(Direction::Asc))
        .skip((pagination.page - 1) * pagination.size)
        .take(pagination.size)
        .exec()
        .await
        .unwrap();

    let page: Page<RemoteBook> = Page::new(
        books.iter().map(|item| item.clone().into()).collect(),
        books_count,
        &pagination,
    );

    Json(page)
}


pub async fn get_base_books(
    db: Database,
    axum_extra::extract::Query(book_filter): axum_extra::extract::Query<BookFilter>,
    pagination: Query<Pagination>,
) -> impl IntoResponse {
    let filter = book_filter.get_filter_vec();

    let books_count = db
        .book()
        .count(filter.clone())
        .exec()
        .await
        .unwrap();

    let books = db
        .book()
        .find_many(filter)
        .with(book::source::fetch())
        .order_by(book::id::order(Direction::Asc))
        .skip((pagination.page - 1) * pagination.size)
        .take(pagination.size)
        .exec()
        .await
        .unwrap();

    let page: Page<BaseBook> = Page::new(
        books.iter().map(|item| item.clone().into()).collect(),
        books_count,
        &pagination,
    );

    Json(page)
}


pub async fn get_random_book(
    db: Database,
    axum_extra::extract::Query(book_filter): axum_extra::extract::Query<RandomBookFilter>,
) -> impl IntoResponse {
    let client = get_meili_client();

    let books_index = client.index("books");

    let filter = {
        let langs_filter = format!("lang IN [{}]", book_filter.allowed_langs.join(", "));
        let genre_filter = match book_filter.genre {
            Some(v) => format!(" AND genres = {v}"),
            None => "".to_string(),
        };

        format!("{langs_filter}{genre_filter}")
    };

    let result = books_index
        .search()
        .with_filter(&filter)
        .execute::<BookMeili>()
        .await
        .unwrap();

    let book_id = {
        let offset: usize = rand::thread_rng().gen_range(0..result.estimated_total_hits.unwrap().try_into().unwrap());

        let result = books_index
            .search()
            .with_limit(1)
            .with_offset(offset)
            .execute::<BookMeili>()
            .await
            .unwrap();

        let book = &result.hits.get(0).unwrap().result;

        book.id
    };

    let book = db
        .book()
        .find_unique(book::id::equals(book_id))
        .with(book::book_annotation::fetch())
        .with(book::source::fetch())
        .with(
            book::book_authors::fetch(vec![])
                .with(book_author::author::fetch().with(author::author_annotation::fetch())),
        )
        .with(
            book::translations::fetch(vec![])
                .with(translator::author::fetch().with(author::author_annotation::fetch())),
        )
        .with(book::book_sequences::fetch(vec![]).with(book_sequence::sequence::fetch()))
        .with(
            book::book_genres::fetch(vec![])
                .with(book_genre::genre::fetch().with(genre::source::fetch())),
        )
        .exec()
        .await
        .unwrap()
        .unwrap();

    Json::<DetailBook>(book.into()).into_response()
}


pub async fn get_remote_book(
    db: Database,
    Path((source_id, remote_id)): Path<(i32, i32)>,
) -> impl IntoResponse {
    let book = db
        .book()
        .find_unique(book::source_id_remote_id(source_id, remote_id))
        .with(book::book_annotation::fetch())
        .with(book::source::fetch())
        .with(
            book::book_authors::fetch(vec![])
                .with(book_author::author::fetch().with(author::author_annotation::fetch())),
        )
        .with(
            book::translations::fetch(vec![])
                .with(translator::author::fetch().with(author::author_annotation::fetch())),
        )
        .with(book::book_sequences::fetch(vec![]).with(book_sequence::sequence::fetch()))
        .with(
            book::book_genres::fetch(vec![])
                .with(book_genre::genre::fetch().with(genre::source::fetch())),
        )
        .exec()
        .await
        .unwrap();

    match book {
        Some(book) => Json::<DetailBook>(book.into()).into_response(),
        None => StatusCode::NOT_FOUND.into_response(),
    }
}


pub async fn search_books(
    db: Database,
    Path(query): Path<String>,
    axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>,
    pagination: Query<Pagination>,
) -> impl IntoResponse {
    let client = get_meili_client();

    let book_index = client.index("books");

    let filter = format!("lang IN [{}]", allowed_langs.join(", "));

    let result = book_index
        .search()
        .with_query(&query)
        .with_filter(&filter)
        .with_offset(((pagination.page - 1) * pagination.size).try_into().unwrap())
        .with_limit(pagination.size.try_into().unwrap())
        .execute::<BookMeili>()
        .await
        .unwrap();

    let total = result.estimated_total_hits.unwrap();
    let book_ids: Vec<i32> = result.hits.iter().map(|a| a.result.id).collect();

    let mut books = db
        .book()
        .find_many(vec![book::id::in_vec(book_ids.clone())])
        .with(book::book_annotation::fetch())
        .with(book::source::fetch())
        .with(
            book::book_authors::fetch(vec![])
                .with(book_author::author::fetch().with(author::author_annotation::fetch())),
        )
        .with(
            book::translations::fetch(vec![])
                .with(translator::author::fetch().with(author::author_annotation::fetch())),
        )
        .with(book::book_sequences::fetch(vec![]).with(book_sequence::sequence::fetch()))
        .exec()
        .await
        .unwrap();

    books.sort_by(|a, b| {
        let a_pos = book_ids.iter().position(|i| *i == a.id).unwrap();
        let b_pos = book_ids.iter().position(|i| *i == b.id).unwrap();

        a_pos.cmp(&b_pos)
    });

    let page: Page<Book> = Page::new(
        books.iter().map(|item| item.clone().into()).collect(),
        total.try_into().unwrap(),
        &pagination,
    );

    Json(page)
}


pub async fn get_book(
    db: Database,
    Path(book_id): Path<i32>,
) -> impl IntoResponse {
    let book = db
        .book()
        .find_unique(book::id::equals(book_id))
        .with(book::book_annotation::fetch())
        .with(book::source::fetch())
        .with(
            book::book_authors::fetch(vec![])
                .with(book_author::author::fetch().with(author::author_annotation::fetch())),
        )
        .with(
            book::translations::fetch(vec![])
                .with(translator::author::fetch().with(author::author_annotation::fetch())),
        )
        .with(book::book_sequences::fetch(vec![]).with(book_sequence::sequence::fetch()))
        .with(
            book::book_genres::fetch(vec![])
                .with(book_genre::genre::fetch().with(genre::source::fetch())),
        )
        .exec()
        .await
        .unwrap();

    match book {
        Some(book) => Json::<DetailBook>(book.into()).into_response(),
        None => StatusCode::NOT_FOUND.into_response(),
    }
}


pub async fn get_book_annotation(
    db: Database,
    Path(book_id): Path<i32>,
) -> impl IntoResponse {
    let book_annotation = db
        .book_annotation()
        .find_unique(book_annotation::book_id::equals(book_id))
        .exec()
        .await
        .unwrap();

    match book_annotation {
        Some(book_annotation) => Json::<BookAnnotation>(book_annotation.into()).into_response(),
        None => StatusCode::NOT_FOUND.into_response(),
    }
}


pub async fn get_books_router() -> Router {
    Router::new()
        .route("/", get(get_books))
        .route("/base/", get(get_base_books))
        .route("/random", get(get_random_book))
        .route("/remote/:source_id/:remote_id", get(get_remote_book))
        .route("/search/:query", get(search_books))
        .route("/:book_id", get(get_book))
        .route("/:book_id/annotation", get(get_book_annotation))
}
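get_meili_client and the *Meili document types come from the crate's meilisearch module, which is outside this diff. A rough sketch of the assumed shape, based only on what the handlers use (an index handle, search() with filter/offset/limit, and hits whose result.id is an i32); the environment variable names and the exact Client::new signature depend on the meilisearch-sdk version and are assumptions:

use meilisearch_sdk::client::Client;
use serde::{Deserialize, Serialize};

// Hypothetical sketch; the real crate presumably reads the host and key from its
// own CONFIG rather than raw environment variables, and newer SDK versions return
// a Result from Client::new.
pub fn get_meili_client() -> Client {
    Client::new(
        std::env::var("MEILI_HOST").unwrap(),
        Some(std::env::var("MEILI_MASTER_KEY").unwrap()),
    )
}

// Only the field the handlers actually read is sketched here.
#[derive(Serialize, Deserialize)]
pub struct AuthorMeili {
    pub id: i32,
}

#[derive(Serialize, Deserialize)]
pub struct BookMeili {
    pub id: i32,
}

#[derive(Serialize, Deserialize)]
pub struct SequenceMeili {
    pub id: i32,
}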
80
src/views/genres.rs
Normal file
@@ -0,0 +1,80 @@
use std::collections::HashSet;

use axum::{Router, routing::get, extract::Query, Json, response::IntoResponse};
use prisma_client_rust::Direction;

use crate::{serializers::{pagination::{Pagination, Page}, genre::{Genre, GenreFilter}}, prisma::genre};

use super::Database;


pub async fn get_genres(
    db: Database,
    pagination: Query<Pagination>,
    Query(GenreFilter { meta }): Query<GenreFilter>,
) -> impl IntoResponse {
    let filter = match meta {
        Some(meta) => vec![genre::meta::equals(meta)],
        None => vec![],
    };

    let genres_count = db
        .genre()
        .count(filter.clone())
        .exec()
        .await
        .unwrap();

    let genres = db
        .genre()
        .find_many(filter)
        .with(genre::source::fetch())
        .order_by(genre::id::order(Direction::Asc))
        .skip((pagination.page - 1) * pagination.size)
        .take(pagination.size)
        .exec()
        .await
        .unwrap();

    let page: Page<Genre> = Page::new(
        genres.iter().map(|item| item.clone().into()).collect(),
        genres_count,
        &pagination,
    );

    Json(page)
}


pub async fn get_genre_metas(
    db: Database,
) -> impl IntoResponse {
    let genres = db
        .genre()
        .find_many(vec![])
        .order_by(genre::id::order(Direction::Asc))
        .exec()
        .await
        .unwrap();

    let mut metas: HashSet<String> = HashSet::new();

    for genre in genres {
        metas.insert(genre.meta.clone());
    }

    Json::<Vec<String>>(metas.into_iter().collect())
}


pub async fn get_genres_router() -> Router {
    Router::new()
        .route("/", get(get_genres))
        .route("/metas", get(get_genre_metas))
}
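GenreFilter is defined in the serializers module rather than in this diff; from the destructuring and the Some/None match above it is assumed to be nothing more than an optional meta query parameter:

use serde::Deserialize;

// Hypothetical sketch: meta is optional, so /api/v1/genres works with or without
// a ?meta=... query string.
#[derive(Deserialize)]
pub struct GenreFilter {
    pub meta: Option<String>,
}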
72
src/views/mod.rs
Normal file
@@ -0,0 +1,72 @@
use std::sync::Arc;

use axum::{Router, routing::get, middleware::{self, Next}, Extension, http::{self, Request, StatusCode}, response::Response};
use axum_prometheus::PrometheusMetricLayer;
use tower_http::trace::{self, TraceLayer};
use tracing::Level;

use crate::{config::CONFIG, db::get_prisma_client, prisma::PrismaClient};

use self::{authors::get_authors_router, genres::get_genres_router, books::get_books_router, sequences::get_sequences_router};
use self::translators::get_translators_router;

pub mod authors;
pub mod books;
pub mod genres;
pub mod sequences;
pub mod translators;


pub type Database = Extension<Arc<PrismaClient>>;


async fn auth<B>(req: Request<B>, next: Next<B>) -> Result<Response, StatusCode> {
    let auth_header = req.headers()
        .get(http::header::AUTHORIZATION)
        .and_then(|header| header.to_str().ok());

    let auth_header = if let Some(auth_header) = auth_header {
        auth_header
    } else {
        return Err(StatusCode::UNAUTHORIZED);
    };

    if auth_header != CONFIG.api_key {
        return Err(StatusCode::UNAUTHORIZED);
    }

    Ok(next.run(req).await)
}


pub async fn get_router() -> Router {
    let client = Arc::new(get_prisma_client().await);

    let (prometheus_layer, metric_handle) = PrometheusMetricLayer::pair();

    let app_router = Router::new()
        .nest("/api/v1/authors", get_authors_router().await)
        .nest("/api/v1/translators", get_translators_router().await)
        .nest("/api/v1/genres", get_genres_router().await)
        .nest("/api/v1/books", get_books_router().await)
        .nest("/api/v1/sequences", get_sequences_router().await)
        .layer(middleware::from_fn(auth))
        .layer(Extension(client))
        .layer(prometheus_layer);

    let metric_router = Router::new()
        .route("/metrics", get(|| async move { metric_handle.render() }));

    Router::new()
        .nest("/", app_router)
        .nest("/", metric_router)
        .layer(
            TraceLayer::new_for_http()
                .make_span_with(trace::DefaultMakeSpan::new().level(Level::INFO))
                .on_response(trace::DefaultOnResponse::new().level(Level::INFO)),
        )
}
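get_router() is the composition root for the service, but the binary entry point is not part of this commit's diff. A minimal sketch of how it might be served, assuming axum 0.6 (which matches the Next<B> middleware signature above), the tokio runtime, and tracing-subscriber; the views module path and the listen address are assumptions:

use std::net::SocketAddr;

#[tokio::main]
async fn main() {
    tracing_subscriber::fmt::init();

    // get_router() wires the Prisma client, the auth middleware, Prometheus
    // metrics and the per-resource routers together.
    let app = views::get_router().await;

    // Hypothetical bind address; the real service presumably takes it from CONFIG.
    let addr = SocketAddr::from(([0, 0, 0, 0], 8080));
    axum::Server::bind(&addr)
        .serve(app.into_make_service())
        .await
        .unwrap();
}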
253
src/views/sequences.rs
Normal file
@@ -0,0 +1,253 @@
use std::collections::HashSet;

use axum::{Router, routing::get, extract::{Path, Query}, http::StatusCode, response::IntoResponse, Json};
use rand::Rng;

use crate::{prisma::{sequence, book_sequence, book, book_author, author, translator}, serializers::{sequence::{Sequence, SequenceBook}, allowed_langs::AllowedLangs, pagination::{PageWithParent, Pagination, Page}}, meilisearch::{get_meili_client, SequenceMeili}};

use super::Database;


async fn get_random_sequence(
    db: Database,
    axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>,
) -> impl IntoResponse {
    let client = get_meili_client();

    let sequences_index = client.index("sequences");

    let filter = format!("langs IN [{}]", allowed_langs.join(", "));

    let result = sequences_index
        .search()
        .with_filter(&filter)
        .execute::<SequenceMeili>()
        .await
        .unwrap();

    let sequence_id = {
        let offset: usize = rand::thread_rng().gen_range(0..result.estimated_total_hits.unwrap().try_into().unwrap());

        let result = sequences_index
            .search()
            .with_limit(1)
            .with_offset(offset)
            .execute::<SequenceMeili>()
            .await
            .unwrap();

        let sequence = &result.hits.get(0).unwrap().result;

        sequence.id
    };

    let sequence = db
        .sequence()
        .find_unique(sequence::id::equals(sequence_id))
        .exec()
        .await
        .unwrap()
        .unwrap();

    Json::<Sequence>(sequence.into())
}


async fn search_sequence(
    db: Database,
    Path(query): Path<String>,
    axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>,
    pagination: Query<Pagination>,
) -> impl IntoResponse {
    let client = get_meili_client();

    let sequence_index = client.index("sequences");

    let filter = format!("langs IN [{}]", allowed_langs.join(", "));

    let result = sequence_index
        .search()
        .with_query(&query)
        .with_filter(&filter)
        .with_offset(((pagination.page - 1) * pagination.size).try_into().unwrap())
        .with_limit(pagination.size.try_into().unwrap())
        .execute::<SequenceMeili>()
        .await
        .unwrap();

    let total = result.estimated_total_hits.unwrap();
    let sequence_ids: Vec<i32> = result.hits.iter().map(|a| a.result.id).collect();

    let mut sequences = db
        .sequence()
        .find_many(vec![sequence::id::in_vec(sequence_ids.clone())])
        .exec()
        .await
        .unwrap();

    sequences.sort_by(|a, b| {
        let a_pos = sequence_ids.iter().position(|i| *i == a.id).unwrap();
        let b_pos = sequence_ids.iter().position(|i| *i == b.id).unwrap();

        a_pos.cmp(&b_pos)
    });

    let page: Page<Sequence> = Page::new(
        sequences.iter().map(|item| item.clone().into()).collect(),
        total.try_into().unwrap(),
        &pagination,
    );

    Json(page)
}


async fn get_sequence(
    db: Database,
    Path(sequence_id): Path<i32>,
) -> impl IntoResponse {
    let sequence = db
        .sequence()
        .find_unique(sequence::id::equals(sequence_id))
        .exec()
        .await
        .unwrap();

    match sequence {
        Some(sequence) => Json::<Sequence>(sequence.into()).into_response(),
        None => StatusCode::NOT_FOUND.into_response(),
    }
}


async fn get_sequence_available_types(
    db: Database,
    Path(sequence_id): Path<i32>,
    axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>,
) -> impl IntoResponse {
    let books = db
        .book()
        .find_many(vec![
            book::book_sequences::some(vec![book_sequence::sequence_id::equals(sequence_id)]),
            book::lang::in_vec(allowed_langs),
        ])
        .exec()
        .await
        .unwrap();

    let mut file_types: HashSet<String> = HashSet::new();

    for book in books {
        file_types.insert(book.file_type.clone());
    }

    if file_types.contains(&"fb2".to_string()) {
        file_types.insert("epub".to_string());
        file_types.insert("mobi".to_string());
        file_types.insert("fb2zip".to_string());
    }

    Json::<Vec<String>>(file_types.into_iter().collect())
}


async fn get_sequence_books(
    db: Database,
    Path(sequence_id): Path<i32>,
    axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>,
    pagination: Query<Pagination>,
) -> impl IntoResponse {
    let sequence = db
        .sequence()
        .find_unique(sequence::id::equals(sequence_id))
        .exec()
        .await
        .unwrap();

    let sequence = match sequence {
        Some(v) => v,
        None => return StatusCode::NOT_FOUND.into_response(),
    };

    let books_count = db
        .book()
        .count(vec![
            book::book_sequences::some(vec![book_sequence::sequence_id::equals(sequence_id)]),
            book::lang::in_vec(allowed_langs.clone()),
        ])
        .exec()
        .await
        .unwrap();

    let books = db
        .book()
        .find_many(vec![
            book::book_sequences::some(vec![book_sequence::sequence_id::equals(sequence_id)]),
            book::lang::in_vec(allowed_langs.clone()),
        ])
        .with(book::source::fetch())
        .with(book::book_annotation::fetch())
        .with(
            book::book_authors::fetch(vec![])
                .with(book_author::author::fetch().with(author::author_annotation::fetch())),
        )
        .with(
            book::translations::fetch(vec![])
                .with(translator::author::fetch().with(author::author_annotation::fetch())),
        )
        .order_by(book::id::order(prisma_client_rust::Direction::Asc))
        .skip((pagination.page - 1) * pagination.size)
        .take(pagination.size)
        .exec()
        .await
        .unwrap();

    let page: PageWithParent<SequenceBook, Sequence> = PageWithParent::new(
        sequence.into(),
        books.iter().map(|item| item.clone().into()).collect(),
        books_count,
        &pagination,
    );

    Json(page).into_response()
}


pub async fn get_sequences_router() -> Router {
    Router::new()
        .route("/random", get(get_random_sequence))
        .route("/search/:query", get(search_sequence))
        .route("/:sequence_id", get(get_sequence))
        .route("/:sequence_id/available_types", get(get_sequence_available_types))
        .route("/:sequence_id/books", get(get_sequence_books))
}
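For reference, a hypothetical client-side call against the search endpoint above, written with reqwest. The base URL, port, API key and search term are illustrative only; the plain value in the Authorization header is what the auth middleware in mod.rs compares against CONFIG.api_key, and allowed_langs is repeated once per language as axum_extra's Query expects:

use reqwest::Client;

#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    let response = Client::new()
        .get("http://localhost:8080/api/v1/sequences/search/metro")
        // Hypothetical key; must match CONFIG.api_key on the server.
        .header("Authorization", "some-api-key")
        .query(&[
            ("allowed_langs", "ru"),
            ("allowed_langs", "be"),
            ("page", "1"),
            ("size", "10"),
        ])
        .send()
        .await?
        .error_for_status()?;

    println!("{}", response.text().await?);
    Ok(())
}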
188
src/views/translators.rs
Normal file
@@ -0,0 +1,188 @@
use std::collections::HashSet;

use axum::{Router, routing::get, extract::{Path, Query}, response::IntoResponse, Json, http::StatusCode};

use crate::{serializers::{pagination::{Pagination, Page, PageWithParent}, author::Author, translator::TranslatorBook, allowed_langs::AllowedLangs}, meilisearch::{get_meili_client, AuthorMeili}, prisma::{author, book, translator, book_author, book_sequence}};

use super::Database;


async fn get_translated_books(
    db: Database,
    Path(translator_id): Path<i32>,
    axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>,
    pagination: Query<Pagination>,
) -> impl IntoResponse {
    let translator = db
        .author()
        .find_unique(author::id::equals(translator_id))
        .with(author::author_annotation::fetch())
        .exec()
        .await
        .unwrap();

    let translator = match translator {
        Some(translator) => translator,
        None => return StatusCode::NOT_FOUND.into_response(),
    };

    let books_count = db
        .book()
        .count(vec![
            book::translations::some(vec![translator::author_id::equals(translator_id)]),
            book::lang::in_vec(allowed_langs.clone()),
        ])
        .exec()
        .await
        .unwrap();

    let books = db
        .book()
        .find_many(vec![
            book::translations::some(vec![translator::author_id::equals(translator_id)]),
            book::lang::in_vec(allowed_langs),
        ])
        .with(book::source::fetch())
        .with(book::book_annotation::fetch())
        .with(
            book::book_authors::fetch(vec![])
                .with(book_author::author::fetch().with(author::author_annotation::fetch())),
        )
        .with(book::book_sequences::fetch(vec![]).with(book_sequence::sequence::fetch()))
        .order_by(book::id::order(prisma_client_rust::Direction::Asc))
        .skip((pagination.page - 1) * pagination.size)
        .take(pagination.size)
        .exec()
        .await
        .unwrap();

    let page: PageWithParent<TranslatorBook, Author> = PageWithParent::new(
        translator.into(),
        books.iter().map(|item| item.clone().into()).collect(),
        books_count,
        &pagination,
    );

    Json(page).into_response()
}


async fn get_translated_books_available_types(
    db: Database,
    Path(translator_id): Path<i32>,
    axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>,
) -> impl IntoResponse {
    let books = db
        .book()
        .find_many(vec![
            book::translations::some(vec![translator::author_id::equals(translator_id)]),
            book::lang::in_vec(allowed_langs),
        ])
        .exec()
        .await
        .unwrap();

    let mut file_types: HashSet<String> = HashSet::new();

    for book in books {
        file_types.insert(book.file_type.clone());
    }

    if file_types.contains(&"fb2".to_string()) {
        file_types.insert("epub".to_string());
        file_types.insert("mobi".to_string());
        file_types.insert("fb2zip".to_string());
    }

    Json::<Vec<String>>(file_types.into_iter().collect())
}


async fn search_translators(
    db: Database,
    Path(query): Path<String>,
    axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>,
    pagination: Query<Pagination>,
) -> impl IntoResponse {
    let client = get_meili_client();

    let authors_index = client.index("authors");

    let filter = format!("translator_langs IN [{}]", allowed_langs.join(", "));

    let result = authors_index
        .search()
        .with_query(&query)
        .with_filter(&filter)
        .with_offset(((pagination.page - 1) * pagination.size).try_into().unwrap())
        .with_limit(pagination.size.try_into().unwrap())
        .execute::<AuthorMeili>()
        .await
        .unwrap();

    let total = result.estimated_total_hits.unwrap();
    let translator_ids: Vec<i32> = result.hits.iter().map(|a| a.result.id).collect();

    let mut translators = db
        .author()
        .find_many(vec![author::id::in_vec(translator_ids.clone())])
        .with(author::author_annotation::fetch())
        .order_by(author::id::order(prisma_client_rust::Direction::Asc))
        .exec()
        .await
        .unwrap();

    translators.sort_by(|a, b| {
        let a_pos = translator_ids.iter().position(|i| *i == a.id).unwrap();
        let b_pos = translator_ids.iter().position(|i| *i == b.id).unwrap();

        a_pos.cmp(&b_pos)
    });

    let page: Page<Author> = Page::new(
        translators.iter().map(|item| item.clone().into()).collect(),
        total.try_into().unwrap(),
        &pagination,
    );

    Json(page)
}


pub async fn get_translators_router() -> Router {
    Router::new()
        .route("/:translator_id/books", get(get_translated_books))
        .route("/:translator_id/available_types", get(get_translated_books_available_types))
        .route("/search/:query", get(search_translators))
}
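The fb2 expansion in get_translated_books_available_types appears in identical form in authors.rs, sequences.rs and this file. A hypothetical shared helper, not part of this commit, that all three available_types handlers could call instead:

use std::collections::HashSet;

// Collect the distinct source file types and add the formats that are always
// offered alongside fb2 in these handlers.
pub fn available_file_types(file_types: impl IntoIterator<Item = String>) -> Vec<String> {
    let mut file_types: HashSet<String> = file_types.into_iter().collect();

    if file_types.contains("fb2") {
        file_types.insert("epub".to_string());
        file_types.insert("mobi".to_string());
        file_types.insert("fb2zip".to_string());
    }

    file_types.into_iter().collect()
}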