Rewrite to rust init

2023-08-11 01:11:27 +02:00
parent 22d8b33bf4
commit 9acdb20463
84 changed files with 28739 additions and 4196 deletions

43
src/config.rs Normal file

@@ -0,0 +1,43 @@
use once_cell::sync::Lazy;
fn get_env(env: &'static str) -> String {
std::env::var(env).unwrap_or_else(|_| panic!("Cannot get the {} env variable", env))
}
pub struct Config {
pub api_key: String,
pub postgres_user: String,
pub postgres_password: String,
pub postgres_host: String,
pub postgres_port: u32,
pub postgres_db: String,
pub meili_host: String,
pub meili_master_key: String,
pub sentry_dsn: String
}
impl Config {
pub fn load() -> Config {
Config {
api_key: get_env("API_KEY"),
postgres_user: get_env("POSTGRES_USER"),
postgres_password: get_env("POSTGRES_PASSWORD"),
postgres_host: get_env("POSTGRES_HOST"),
postgres_port: get_env("POSTGRES_PORT").parse().unwrap(),
postgres_db: get_env("POSTGRES_DB"),
meili_host: get_env("MEILI_HOST"),
meili_master_key: get_env("MEILI_MASTER_KEY"),
sentry_dsn: get_env("SENTRY_DSN")
}
}
}
pub static CONFIG: Lazy<Config> = Lazy::new(|| {
Config::load()
});
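
A note on usage: CONFIG is a process-wide lazy static, so the first access runs Config::load() and panics if any variable is missing. A minimal sketch of reading it (the crate-internal path is the only assumption):

use crate::config::CONFIG;

// Lazy<Config> derefs to Config, so fields read like a plain struct;
// the first touch of CONFIG anywhere in the process triggers Config::load().
fn postgres_target() -> String {
    format!("{}:{}/{}", CONFIG.postgres_host, CONFIG.postgres_port, CONFIG.postgres_db)
}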

19
src/db.rs Normal file

@@ -0,0 +1,19 @@
use crate::{prisma::PrismaClient, config::CONFIG};
pub async fn get_prisma_client() -> PrismaClient {
let database_url: String = format!(
"postgresql://{}:{}@{}:{}/{}?connection_limit=4",
CONFIG.postgres_user,
CONFIG.postgres_password,
CONFIG.postgres_host,
CONFIG.postgres_port,
CONFIG.postgres_db
);
PrismaClient::_builder()
.with_url(database_url)
.build()
.await
.unwrap()
}
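
For orientation, the URL this builds has the following shape; a small sketch with hypothetical values (the connection_limit=4 suffix caps Prisma's connection pool):

#[test]
fn database_url_shape() {
    // Hypothetical credentials, purely to show the format get_prisma_client produces.
    let url = format!(
        "postgresql://{}:{}@{}:{}/{}?connection_limit=4",
        "app", "secret", "localhost", 5432, "library"
    );
    assert_eq!(url, "postgresql://app:secret@localhost:5432/library?connection_limit=4");
}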

41
src/main.rs Normal file

@@ -0,0 +1,41 @@
pub mod config;
pub mod views;
pub mod prisma;
pub mod db;
pub mod serializers;
pub mod meilisearch;
use std::{net::SocketAddr, str::FromStr};
use sentry::{ClientOptions, types::Dsn, integrations::debug_images::DebugImagesIntegration};
use tracing::info;
use crate::views::get_router;
#[tokio::main]
async fn main() {
let options = ClientOptions {
dsn: Some(Dsn::from_str(&config::CONFIG.sentry_dsn).unwrap()),
default_integrations: false,
..Default::default()
}
.add_integration(DebugImagesIntegration::new());
let _guard = sentry::init(options);
tracing_subscriber::fmt()
.with_target(false)
.compact()
.init();
let addr = SocketAddr::from(([0, 0, 0, 0], 8080));
let app = get_router().await;
info!("Start webserver...");
axum::Server::bind(&addr)
.serve(app.into_make_service())
.await
.unwrap();
info!("Webserver shutdown...")
}
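
The server runs until the serve future resolves; nothing here wires up a shutdown signal. A hedged sketch of how graceful shutdown could be layered on with axum 0.6's hyper-based Server (not part of this commit):

use tokio::signal;

// Hypothetical extension: resolve once Ctrl-C is received.
async fn shutdown_signal() {
    signal::ctrl_c().await.expect("failed to listen for Ctrl-C");
}

// The serve call in main() would then become:
// axum::Server::bind(&addr)
//     .serve(app.into_make_service())
//     .with_graceful_shutdown(shutdown_signal())
//     .await
//     .unwrap();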

48
src/meilisearch.rs Normal file

@@ -0,0 +1,48 @@
use meilisearch_sdk::Client;
use serde::Deserialize;
use crate::config::CONFIG;
pub fn get_meili_client() -> Client {
Client::new(
&CONFIG.meili_host,
Some(CONFIG.meili_master_key.clone())
)
}
#[derive(Deserialize)]
pub struct AuthorMeili {
pub id: i32,
pub first_name: String,
pub last_name: String,
pub middle_name: String,
pub author_langs: Vec<String>,
pub translator_langs: Vec<String>,
pub books_count: i32
}
#[derive(Deserialize)]
pub struct BookMeili {
pub id: i32,
pub title: String,
pub lang: String,
pub genres: Vec<i32>
}
#[derive(Deserialize)]
pub struct GenreMeili {
pub id: i32,
pub description: String,
pub meta: String,
pub langs: Vec<String>,
pub books_count: i32
}
#[derive(Deserialize)]
pub struct SequenceMeili {
pub id: i32,
pub name: String,
pub langs: Vec<String>,
pub books_count: i32
}
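
The handlers in src/views filter on author_langs, translator_langs, lang, genres and langs, which only works if those fields are declared filterable on the Meilisearch side. A hedged one-off setup sketch, assuming the SDK's settings API (not part of this commit):

use meilisearch_sdk::errors::Error;

// Hypothetical setup helper: declare the attributes used by the IN / = filters in src/views.
pub async fn configure_filterable_attributes() -> Result<(), Error> {
    let client = get_meili_client();
    client.index("authors").set_filterable_attributes(["author_langs", "translator_langs"]).await?;
    client.index("books").set_filterable_attributes(["lang", "genres"]).await?;
    client.index("sequences").set_filterable_attributes(["langs"]).await?;
    Ok(())
}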

16099
src/prisma.rs Normal file

File diff suppressed because one or more lines are too long

6
src/serializers/allowed_langs.rs Normal file

@@ -0,0 +1,6 @@
use serde::Deserialize;
#[derive(Deserialize)]
pub struct AllowedLangs {
pub allowed_langs: Vec<String>
}

78
src/serializers/author.rs Normal file

@@ -0,0 +1,78 @@
use serde::Serialize;
use crate::prisma::{author, book};
use super::{sequence::Sequence, utils::{get_available_types, get_translators, get_sequences}};
#[derive(Serialize)]
pub struct Author {
pub id: i32,
pub first_name: String,
pub last_name: String,
pub middle_name: String,
pub annotation_exists: bool,
}
impl From<author::Data> for Author {
fn from(val: author::Data) -> Self {
let author::Data {
id,
first_name,
last_name,
middle_name,
author_annotation,
..
} = val;
Author {
id,
first_name,
last_name,
middle_name: middle_name.unwrap_or("".to_string()),
annotation_exists: author_annotation.unwrap().is_some(),
}
}
}
#[derive(Serialize)]
pub struct AuthorBook {
pub id: i32,
pub title: String,
pub lang: String,
pub file_type: String,
pub available_types: Vec<String>,
pub uploaded: String,
pub translators: Vec<Author>,
pub sequences: Vec<Sequence>,
pub annotation_exists: bool,
}
impl From<book::Data> for AuthorBook {
fn from(val: book::Data) -> Self {
let book::Data {
id,
title,
lang,
file_type,
uploaded,
translations,
book_sequences,
book_annotation,
source,
..
} = val;
AuthorBook {
id,
title,
lang,
file_type: file_type.clone(),
available_types: get_available_types(file_type, source.unwrap().name),
uploaded: uploaded.format("%Y-%m-%d").to_string(),
translators: get_translators(translations),
sequences: get_sequences(book_sequences),
annotation_exists: book_annotation.unwrap().is_some(),
}
}
}

24
src/serializers/author_annotation.rs Normal file

@@ -0,0 +1,24 @@
use serde::Serialize;
use crate::prisma::author_annotation;
#[derive(Serialize)]
pub struct AuthorAnnotation {
pub id: i32,
pub title: String,
pub text: String,
pub file: Option<String>
}
impl From<author_annotation::Data> for AuthorAnnotation {
fn from(val: author_annotation::Data) -> Self {
let author_annotation::Data { id, title, text, file, .. } = val;
AuthorAnnotation {
id,
title,
text,
file
}
}
}

248
src/serializers/book.rs Normal file

@@ -0,0 +1,248 @@
use chrono::{DateTime, Utc};
use serde::{Serialize, Deserialize};
use crate::prisma::book::{self};
use super::{source::Source, utils::{get_available_types, get_translators, get_sequences, get_authors, get_genres}, author::Author, sequence::Sequence, genre::Genre};
#[derive(Deserialize)]
pub struct BookFilter {
pub allowed_langs: Vec<String>,
pub is_deleted: Option<bool>,
pub uploaded_gte: Option<DateTime<Utc>>,
pub uploaded_lte: Option<DateTime<Utc>>,
pub id_gte: Option<i32>,
pub id_lte: Option<i32>,
}
impl BookFilter {
pub fn get_filter_vec(self) -> Vec<book::WhereParam> {
let mut result = vec![];
result.push(
book::lang::in_vec(self.allowed_langs)
);
match self.is_deleted {
Some(v) => {
result.push(
book::is_deleted::equals(v)
);
},
None => {
result.push(
book::is_deleted::equals(false)
);
},
};
if let Some(uploaded_gte) = self.uploaded_gte {
result.push(
book::uploaded::gte(uploaded_gte.into())
);
};
if let Some(uploaded_lte) = self.uploaded_lte {
result.push(
book::uploaded::lte(uploaded_lte.into())
);
};
if let Some(id_gte) = self.id_gte {
result.push(
book::id::gte(id_gte)
);
};
if let Some(id_lte) = self.id_lte {
result.push(
book::id::lte(id_lte)
);
};
result
}
}
#[derive(Serialize)]
pub struct RemoteBook {
pub id: i32,
pub title: String,
pub lang: String,
pub file_type: String,
pub available_types: Vec<String>,
pub uploaded: String,
pub authors: Vec<Author>,
pub translators: Vec<Author>,
pub sequences: Vec<Sequence>,
pub annotation_exists: bool,
pub source: Source,
pub remote_id: i32,
}
impl From<book::Data> for RemoteBook {
fn from(value: book::Data) -> Self {
let book::Data {
id,
title,
lang,
file_type,
uploaded,
book_authors,
translations,
book_sequences,
book_annotation,
source,
remote_id,
..
} = value;
Self {
id,
title,
lang,
file_type: file_type.clone(),
available_types: get_available_types(file_type, source.clone().unwrap().name),
uploaded: uploaded.format("%Y-%m-%d").to_string(),
authors: get_authors(book_authors),
translators: get_translators(translations),
sequences: get_sequences(book_sequences),
annotation_exists: book_annotation.unwrap().is_some(),
source: source.unwrap().as_ref().clone().into(),
remote_id
}
}
}
#[derive(Serialize)]
pub struct BaseBook {
pub id: i32,
pub available_types: Vec<String>,
}
impl From<book::Data> for BaseBook {
fn from(value: book::Data) -> Self {
let book::Data {
id,
file_type,
source,
..
} = value;
Self {
id,
available_types: get_available_types(file_type, source.clone().unwrap().name),
}
}
}
#[derive(Serialize)]
pub struct DetailBook {
pub id: i32,
pub title: String,
pub lang: String,
pub file_type: String,
pub available_types: Vec<String>,
pub uploaded: String,
pub authors: Vec<Author>,
pub translators: Vec<Author>,
pub sequences: Vec<Sequence>,
pub annotation_exists: bool,
pub source: Source,
pub remote_id: i32,
pub genres: Vec<Genre>,
pub is_deleted: bool,
pub pages: Option<i32>
}
impl From<book::Data> for DetailBook {
fn from(value: book::Data) -> Self {
let book::Data {
id,
title,
lang,
file_type,
uploaded,
book_authors,
translations,
book_sequences,
book_annotation,
source,
remote_id,
book_genres,
is_deleted,
pages,
..
} = value;
Self {
id,
title,
lang,
file_type: file_type.clone(),
available_types: get_available_types(file_type, source.clone().unwrap().name),
uploaded: uploaded.format("%Y-%m-%d").to_string(),
authors: get_authors(book_authors),
translators: get_translators(translations),
sequences: get_sequences(book_sequences),
annotation_exists: book_annotation.unwrap().is_some(),
source: source.unwrap().as_ref().clone().into(),
remote_id,
genres: get_genres(book_genres),
is_deleted,
pages,
}
}
}
#[derive(Deserialize)]
pub struct RandomBookFilter {
pub allowed_langs: Vec<String>,
pub genre: Option<i32>
}
#[derive(Serialize)]
pub struct Book {
pub id: i32,
pub title: String,
pub lang: String,
pub file_type: String,
pub available_types: Vec<String>,
pub uploaded: String,
pub authors: Vec<Author>,
pub translators: Vec<Author>,
pub sequences: Vec<Sequence>,
pub annotation_exists: bool,
}
impl From<book::Data> for Book {
fn from(value: book::Data) -> Self {
let book::Data {
id,
title,
lang,
file_type,
uploaded,
book_authors,
translations,
book_sequences,
book_annotation,
source,
..
} = value;
Self {
id,
title,
lang,
file_type: file_type.clone(),
available_types: get_available_types(file_type, source.clone().unwrap().name),
uploaded: uploaded.format("%Y-%m-%d").to_string(),
authors: get_authors(book_authors),
translators: get_translators(translations),
sequences: get_sequences(book_sequences),
annotation_exists: book_annotation.unwrap().is_some(),
}
}
}

31
src/serializers/book_annotation.rs Normal file

@@ -0,0 +1,31 @@
use serde::Serialize;
use crate::prisma::book_annotation;
#[derive(Serialize)]
pub struct BookAnnotation {
pub id: i32,
pub title: String,
pub text: String,
pub file: Option<String>
}
impl From<book_annotation::Data> for BookAnnotation {
fn from(value: book_annotation::Data) -> Self {
let book_annotation::Data {
id,
title,
text,
file,
..
} = value;
Self {
id,
title,
text,
file
}
}
}

44
src/serializers/genre.rs Normal file

@@ -0,0 +1,44 @@
use serde::{Serialize, Deserialize};
use crate::prisma::genre;
use super::source::Source;
#[derive(Serialize)]
pub struct Genre {
pub id: i32,
pub source: Source,
pub remote_id: i32,
pub code: String,
pub description: String,
pub meta: String
}
impl From<genre::Data> for Genre {
fn from(val: genre::Data) -> Self {
let genre::Data {
id,
remote_id,
code,
description,
meta,
source,
..
} = val;
Genre {
id,
remote_id,
code,
description,
meta,
source: source.unwrap().as_ref().clone().into()
}
}
}
#[derive(Deserialize)]
pub struct GenreFilter {
pub meta: Option<String>,
}

11
src/serializers/mod.rs Normal file

@@ -0,0 +1,11 @@
pub mod pagination;
pub mod author;
pub mod author_annotation;
pub mod genre;
pub mod source;
pub mod book;
pub mod sequence;
pub mod utils;
pub mod translator;
pub mod allowed_langs;
pub mod book_annotation;

63
src/serializers/pagination.rs Normal file

@@ -0,0 +1,63 @@
use serde::{Deserialize, Serialize};
fn default_page() -> i64 {
1
}
fn default_size() -> i64 {
50
}
#[derive(Deserialize)]
pub struct Pagination {
#[serde(default = "default_page")]
pub page: i64,
#[serde(default = "default_size")]
pub size: i64
}
#[derive(Serialize)]
pub struct Page<T> {
pub items: Vec<T>,
pub total: i64,
pub page: i64,
pub size: i64,
pub pages: i64
}
#[derive(Serialize)]
pub struct PageWithParent<T, P> {
pub items: Vec<T>,
pub total: i64,
pub page: i64,
pub size: i64,
pub pages: i64,
pub parent_item: P
}
impl<T> Page<T> {
pub fn new(items: Vec<T>, total: i64, pagination: &Pagination) -> Self {
Self {
items,
total,
page: pagination.page,
size: pagination.size,
pages: (total + pagination.size - 1) / pagination.size
}
}
}
impl<T, P> PageWithParent<T, P> {
pub fn new(parent_item: P, items: Vec<T>, total: i64, pagination: &Pagination) -> Self {
Self {
items,
total,
page: pagination.page,
size: pagination.size,
pages: (total + pagination.size - 1) / pagination.size,
parent_item
}
}
}
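
The pages field is ceiling division of total by size. A quick check as a test sketch (could sit at the bottom of this file):

#[cfg(test)]
mod tests {
    use super::{Page, Pagination};

    #[test]
    fn pages_is_ceiling_of_total_over_size() {
        let pagination = Pagination { page: 1, size: 50 };
        // 101 items at 50 per page -> (101 + 50 - 1) / 50 = 3 pages.
        let page: Page<i32> = Page::new(vec![], 101, &pagination);
        assert_eq!(page.pages, 3);
    }
}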

62
src/serializers/sequence.rs Normal file

@@ -0,0 +1,62 @@
use serde::Serialize;
use crate::prisma::{sequence, book};
use super::{author::Author, utils::{get_available_types, get_authors, get_translators}};
#[derive(Serialize)]
pub struct Sequence {
pub id: i32,
pub name: String,
}
impl From<sequence::Data> for Sequence {
fn from(val: sequence::Data) -> Self {
let sequence::Data { id, name, .. } = val;
Sequence { id, name }
}
}
#[derive(Serialize)]
pub struct SequenceBook {
pub id: i32,
pub title: String,
pub lang: String,
pub file_type: String,
pub available_types: Vec<String>,
pub uploaded: String,
pub authors: Vec<Author>,
pub translators: Vec<Author>,
pub annotation_exists: bool,
}
impl From<book::Data> for SequenceBook {
fn from(value: book::Data) -> Self {
let book::Data {
id,
title,
lang,
file_type,
uploaded,
book_authors,
translations,
book_annotation,
source,
..
} = value;
Self {
id,
title,
lang,
file_type: file_type.clone(),
available_types: get_available_types(file_type, source.clone().unwrap().name),
uploaded: uploaded.format("%Y-%m-%d").to_string(),
authors: get_authors(book_authors),
translators: get_translators(translations),
annotation_exists: book_annotation.unwrap().is_some(),
}
}
}

25
src/serializers/source.rs Normal file

@@ -0,0 +1,25 @@
use serde::Serialize;
use crate::prisma::source;
#[derive(Serialize)]
pub struct Source {
pub id: i32,
pub name: String
}
impl From<source::Data> for Source {
fn from(val: source::Data) -> Self {
let source::Data {
id,
name,
..
} = val;
Source {
id,
name
}
}
}

47
src/serializers/translator.rs Normal file

@@ -0,0 +1,47 @@
use serde::Serialize;
use crate::prisma::book;
use super::{author::Author, sequence::Sequence, utils::{get_available_types, get_authors, get_sequences}};
#[derive(Serialize)]
pub struct TranslatorBook {
pub id: i32,
pub title: String,
pub lang: String,
pub file_type: String,
pub available_types: Vec<String>,
pub uploaded: String,
pub authors: Vec<Author>,
pub sequences: Vec<Sequence>,
pub annotation_exists: bool,
}
impl From<book::Data> for TranslatorBook {
fn from(val: book::Data) -> Self {
let book::Data {
id,
title,
lang,
file_type,
uploaded,
book_authors,
book_sequences,
book_annotation,
source,
..
} = val;
TranslatorBook {
id,
title,
lang,
file_type: file_type.clone(),
available_types: get_available_types(file_type, source.unwrap().name),
uploaded: uploaded.format("%Y-%m-%d").to_string(),
authors: get_authors(book_authors),
sequences: get_sequences(book_sequences),
annotation_exists: book_annotation.unwrap().is_some(),
}
}
}

56
src/serializers/utils.rs Normal file

@@ -0,0 +1,56 @@
use crate::prisma::{translator, book_sequence, book_author, book_genre};
use super::{author::Author, sequence::Sequence, genre::Genre};
pub fn get_available_types(file_type: String, source_name: String) -> Vec<String> {
if file_type == "fb2" && source_name == "flibusta" {
vec![
"fb2".to_string(),
"fb2zip".to_string(),
"epub".to_string(),
"mobi".to_string(),
]
} else {
vec![file_type]
}
}
pub fn get_authors(
book_authors: Option<Vec<book_author::Data>>
) -> Vec<Author> {
book_authors
.unwrap()
.iter()
.map(|item| item.author.clone().unwrap().as_ref().clone().into())
.collect()
}
pub fn get_translators(
translations: Option<Vec<translator::Data>>
) -> Vec<Author> {
translations
.unwrap()
.iter()
.map(|item| item.author.clone().unwrap().as_ref().clone().into())
.collect()
}
pub fn get_sequences(
book_sequences: Option<Vec<book_sequence::Data>>
) -> Vec<Sequence> {
book_sequences
.unwrap()
.iter()
.map(|item| item.sequence.clone().unwrap().as_ref().clone().into())
.collect()
}
pub fn get_genres(
book_genres: Option<Vec<book_genre::Data>>
) -> Vec<Genre> {
book_genres
.unwrap()
.iter()
.map(|item| item.genre.clone().unwrap().as_ref().clone().into())
.collect()
}
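
The fb2/flibusta branch is what later lets the API offer converted formats. A test sketch of both branches (could sit at the bottom of this file):

#[cfg(test)]
mod tests {
    use super::get_available_types;

    #[test]
    fn fb2_from_flibusta_expands_to_converted_formats() {
        let types = get_available_types("fb2".to_string(), "flibusta".to_string());
        assert_eq!(types, vec!["fb2", "fb2zip", "epub", "mobi"]);
    }

    #[test]
    fn non_fb2_passes_through_unchanged() {
        assert_eq!(
            get_available_types("epub".to_string(), "flibusta".to_string()),
            vec!["epub"]
        );
    }
}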

324
src/views/authors.rs Normal file

@@ -0,0 +1,324 @@
use std::collections::HashSet;
use axum::{Router, extract::{Query, Path}, Json, response::IntoResponse, routing::get, http::StatusCode};
use rand::Rng;
use crate::{prisma::{author, author_annotation::{self}, book, book_author, translator, book_sequence}, serializers::{pagination::{Pagination, Page, PageWithParent}, author::{Author, AuthorBook}, author_annotation::AuthorAnnotation, allowed_langs::AllowedLangs}, meilisearch::{get_meili_client, AuthorMeili}};
use super::Database;
async fn get_authors(
db: Database,
pagination: Query<Pagination>
) -> impl IntoResponse {
let authors_count = db
.author()
.count(vec![])
.exec()
.await
.unwrap();
let authors = db
.author()
.find_many(vec![])
.with(
author::author_annotation::fetch()
)
.order_by(author::id::order(prisma_client_rust::Direction::Asc))
.skip((pagination.page - 1) * pagination.size)
.take(pagination.size)
.exec()
.await
.unwrap();
let page: Page<Author> = Page::new(
authors.iter().map(|item| item.clone().into()).collect(),
authors_count,
&pagination
);
Json(page)
}
async fn get_random_author(
db: Database,
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>
) -> impl IntoResponse {
let client = get_meili_client();
let authors_index = client.index("authors");
let filter = format!(
"author_langs IN [{}]",
allowed_langs.join(", ")
);
let result = authors_index
.search()
.with_filter(&filter)
.execute::<AuthorMeili>()
.await
.unwrap();
let author_id = {
let offset: usize = rand::thread_rng().gen_range(0..result.estimated_total_hits.unwrap().try_into().unwrap());
let result = authors_index
.search()
.with_limit(1)
.with_offset(offset)
.execute::<AuthorMeili>()
.await
.unwrap();
let author = &result.hits.get(0).unwrap().result;
author.id
};
let author = db
.author()
.find_unique(
author::id::equals(author_id)
)
.with(
author::author_annotation::fetch()
)
.exec()
.await
.unwrap()
.unwrap();
Json::<Author>(author.into())
}
async fn get_author(
db: Database,
Path(author_id): Path<i32>
) -> impl IntoResponse {
let author = db
.author()
.find_unique(
author::id::equals(author_id)
)
.with(
author::author_annotation::fetch()
)
.exec()
.await
.unwrap();
match author {
Some(author) => Json::<Author>(author.into()).into_response(),
None => StatusCode::NOT_FOUND.into_response(),
}
}
async fn get_author_annotation(
db: Database,
Path(author_id): Path<i32>,
) -> impl IntoResponse {
let author_annotation = db
.author_annotation()
.find_unique(
author_annotation::author_id::equals(author_id)
)
.exec()
.await
.unwrap();
match author_annotation {
Some(annotation) => Json::<AuthorAnnotation>(annotation.into()).into_response(),
None => StatusCode::NOT_FOUND.into_response(),
}
}
async fn get_author_books(
db: Database,
Path(author_id): Path<i32>,
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>,
pagination: Query<Pagination>
) -> impl IntoResponse {
let author = db
.author()
.find_unique(
author::id::equals(author_id)
)
.with(
author::author_annotation::fetch()
)
.exec()
.await
.unwrap();
let author = match author {
Some(author) => author,
None => return StatusCode::NOT_FOUND.into_response(),
};
let books_count = db
.book()
.count(vec![
book::book_authors::some(vec![
book_author::author_id::equals(author_id)
]),
book::lang::in_vec(allowed_langs.clone())
])
.exec()
.await
.unwrap();
let books = db
.book()
.find_many(vec![
book::book_authors::some(vec![
book_author::author_id::equals(author_id)
]),
book::lang::in_vec(allowed_langs)
])
.with(
book::source::fetch()
)
.with(
book::book_annotation::fetch()
)
.with(
book::translations::fetch(vec![])
.with(
translator::author::fetch()
.with(
author::author_annotation::fetch()
)
)
)
.with(
book::book_sequences::fetch(vec![])
.with(
book_sequence::sequence::fetch()
)
)
.order_by(book::id::order(prisma_client_rust::Direction::Asc))
.skip((pagination.page - 1) * pagination.size)
.take(pagination.size)
.exec()
.await
.unwrap();
let page: PageWithParent<AuthorBook, Author> = PageWithParent::new(
author.into(),
books.iter().map(|item| item.clone().into()).collect(),
books_count,
&pagination
);
Json(page).into_response()
}
async fn get_author_books_available_types(
db: Database,
Path(author_id): Path<i32>,
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>
) -> impl IntoResponse {
let books = db
.book()
.find_many(vec![
book::book_authors::some(vec![
book_author::author_id::equals(author_id)
]),
book::lang::in_vec(allowed_langs)
])
.exec()
.await
.unwrap();
let mut file_types: HashSet<String> = HashSet::new();
for book in books {
file_types.insert(book.file_type.clone());
}
if file_types.contains(&"fb2".to_string()) {
file_types.insert("epub".to_string());
file_types.insert("mobi".to_string());
file_types.insert("fb2zip".to_string());
}
Json::<Vec<String>>(file_types.into_iter().collect())
}
async fn search_authors(
db: Database,
Path(query): Path<String>,
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>,
pagination: Query<Pagination>
) -> impl IntoResponse {
let client = get_meili_client();
let authors_index = client.index("authors");
let filter = format!(
"author_langs IN [{}]",
allowed_langs.join(", ")
);
let result = authors_index
.search()
.with_query(&query)
.with_filter(&filter)
.with_offset(((pagination.page - 1) * pagination.size).try_into().unwrap())
.with_limit(pagination.size.try_into().unwrap())
.execute::<AuthorMeili>()
.await
.unwrap();
let total = result.estimated_total_hits.unwrap();
let author_ids: Vec<i32> = result.hits.iter().map(|a| a.result.id).collect();
let mut authors = db
.author()
.find_many(vec![
author::id::in_vec(author_ids.clone())
])
.with(
author::author_annotation::fetch()
)
.order_by(author::id::order(prisma_client_rust::Direction::Asc))
.exec()
.await
.unwrap();
authors.sort_by(|a, b| {
let a_pos = author_ids.iter().position(|i| *i == a.id).unwrap();
let b_pos = author_ids.iter().position(|i| *i == b.id).unwrap();
a_pos.cmp(&b_pos)
});
let page: Page<Author> = Page::new(
authors.iter().map(|item| item.clone().into()).collect(),
total.try_into().unwrap(),
&pagination
);
Json(page)
}
pub async fn get_authors_router() -> Router {
Router::new()
.route("/", get(get_authors))
.route("/random", get(get_random_author))
.route("/:author_id", get(get_author))
.route("/:author_id/annotation", get(get_author_annotation))
.route("/:author_id/books", get(get_author_books))
.route("/:author_id/available_types", get(get_author_books_available_types))
.route("/search/:query", get(search_authors))
}
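
One sharp edge in get_random_author: the offset is drawn from 0..estimated_total_hits, and rand's gen_range panics on an empty range, so a language filter that matches nothing brings the handler down; the same pattern appears in get_random_book and get_random_sequence. A hedged sketch of a guard (helper name hypothetical; a handler returning NOT_FOUND on None would also need .into_response() on its success branch):

use rand::Rng;

// Hypothetical helper: pick a random offset into a Meilisearch result set,
// or None when the filter matched nothing (gen_range(0..0) panics).
fn random_offset(estimated_total_hits: Option<usize>) -> Option<usize> {
    match estimated_total_hits {
        Some(total) if total > 0 => Some(rand::thread_rng().gen_range(0..total)),
        _ => None,
    }
}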

424
src/views/books.rs Normal file

@@ -0,0 +1,424 @@
use axum::{Router, routing::get, extract::{Query, Path}, Json, response::IntoResponse, http::StatusCode};
use prisma_client_rust::Direction;
use rand::Rng;
use crate::{serializers::{book::{BookFilter, RemoteBook, BaseBook, DetailBook, RandomBookFilter, Book}, pagination::{Pagination, Page}, book_annotation::BookAnnotation, allowed_langs::AllowedLangs}, prisma::{book::{self}, book_author, author, translator, book_sequence, book_genre, book_annotation, genre}, meilisearch::{get_meili_client, BookMeili}};
use super::Database;
pub async fn get_books(
db: Database,
axum_extra::extract::Query(book_filter): axum_extra::extract::Query<BookFilter>,
pagination: Query<Pagination>
) -> impl IntoResponse {
let filter = book_filter.get_filter_vec();
let books_count = db
.book()
.count(filter.clone())
.exec()
.await
.unwrap();
let books = db
.book()
.find_many(filter)
.with(
book::book_annotation::fetch()
)
.with(
book::source::fetch()
)
.with(
book::book_authors::fetch(vec![])
.with(
book_author::author::fetch()
.with(
author::author_annotation::fetch()
)
)
)
.with(
book::translations::fetch(vec![])
.with(
translator::author::fetch()
.with(
author::author_annotation::fetch()
)
)
)
.with(
book::book_sequences::fetch(vec![])
.with(
book_sequence::sequence::fetch()
)
)
.order_by(book::id::order(Direction::Asc))
.skip((pagination.page - 1) * pagination.size)
.take(pagination.size)
.exec()
.await
.unwrap();
let page: Page<RemoteBook> = Page::new(
books.iter().map(|item| item.clone().into()).collect(),
books_count,
&pagination
);
Json(page)
}
pub async fn get_base_books(
db: Database,
axum_extra::extract::Query(book_filter): axum_extra::extract::Query<BookFilter>,
pagination: Query<Pagination>
) -> impl IntoResponse {
let filter = book_filter.get_filter_vec();
let books_count = db
.book()
.count(filter.clone())
.exec()
.await
.unwrap();
let books = db
.book()
.find_many(filter)
.with(
book::source::fetch()
)
.order_by(book::id::order(Direction::Asc))
.skip((pagination.page - 1) * pagination.size)
.take(pagination.size)
.exec()
.await
.unwrap();
let page: Page<BaseBook> = Page::new(
books.iter().map(|item| item.clone().into()).collect(),
books_count,
&pagination
);
Json(page)
}
pub async fn get_random_book(
db: Database,
axum_extra::extract::Query(book_filter): axum_extra::extract::Query<RandomBookFilter>,
) -> impl IntoResponse {
let client = get_meili_client();
let authors_index = client.index("books");
let filter = {
let langs_filter = format!(
"lang IN [{}]",
book_filter.allowed_langs.join(", ")
);
let genre_filter = match book_filter.genre {
Some(v) => format!(" AND genres = {v}"),
None => "".to_string(),
};
format!("{langs_filter}{genre_filter}")
};
let result = authors_index
.search()
.with_filter(&filter)
.execute::<BookMeili>()
.await
.unwrap();
let book_id = {
let offset: usize = rand::thread_rng().gen_range(0..result.estimated_total_hits.unwrap().try_into().unwrap());
let result = authors_index
.search()
.with_limit(1)
.with_offset(offset)
.execute::<BookMeili>()
.await
.unwrap();
let book = &result.hits.get(0).unwrap().result;
book.id
};
let book = db
.book()
.find_unique(book::id::equals(book_id))
.with(
book::book_annotation::fetch()
)
.with(
book::source::fetch()
)
.with(
book::book_authors::fetch(vec![])
.with(
book_author::author::fetch()
.with(
author::author_annotation::fetch()
)
)
)
.with(
book::translations::fetch(vec![])
.with(
translator::author::fetch()
.with(
author::author_annotation::fetch()
)
)
)
.with(
book::book_sequences::fetch(vec![])
.with(
book_sequence::sequence::fetch()
)
)
.with(
book::book_genres::fetch(vec![])
.with(
book_genre::genre::fetch()
.with(
genre::source::fetch()
)
)
)
.exec()
.await
.unwrap()
.unwrap();
Json::<DetailBook>(book.into()).into_response()
}
pub async fn get_remote_book(
db: Database,
Path((source_id, remote_id)): Path<(i32, i32)>,
) -> impl IntoResponse {
let book = db
.book()
.find_unique(book::source_id_remote_id(source_id, remote_id))
.with(
book::book_annotation::fetch()
)
.with(
book::source::fetch()
)
.with(
book::book_authors::fetch(vec![])
.with(
book_author::author::fetch()
.with(
author::author_annotation::fetch()
)
)
)
.with(
book::translations::fetch(vec![])
.with(
translator::author::fetch()
.with(
author::author_annotation::fetch()
)
)
)
.with(
book::book_sequences::fetch(vec![])
.with(
book_sequence::sequence::fetch()
)
)
.with(
book::book_genres::fetch(vec![])
.with(
book_genre::genre::fetch()
.with(
genre::source::fetch()
)
)
)
.exec()
.await
.unwrap();
match book {
Some(book) => Json::<DetailBook>(book.into()).into_response(),
None => StatusCode::NOT_FOUND.into_response(),
}
}
pub async fn search_books(
db: Database,
Path(query): Path<String>,
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>,
pagination: Query<Pagination>
) -> impl IntoResponse {
let client = get_meili_client();
let book_index = client.index("books");
let filter = format!(
"lang IN [{}]",
allowed_langs.join(", ")
);
let result = book_index
.search()
.with_query(&query)
.with_filter(&filter)
.with_offset(((pagination.page - 1) * pagination.size).try_into().unwrap())
.with_limit(pagination.size.try_into().unwrap())
.execute::<BookMeili>()
.await
.unwrap();
let total = result.estimated_total_hits.unwrap();
let book_ids: Vec<i32> = result.hits.iter().map(|a| a.result.id).collect();
let mut books = db
.book()
.find_many(vec![book::id::in_vec(book_ids.clone())])
.with(
book::book_annotation::fetch()
)
.with(
book::source::fetch()
)
.with(
book::book_authors::fetch(vec![])
.with(
book_author::author::fetch()
.with(
author::author_annotation::fetch()
)
)
)
.with(
book::translations::fetch(vec![])
.with(
translator::author::fetch()
.with(
author::author_annotation::fetch()
)
)
)
.with(
book::book_sequences::fetch(vec![])
.with(
book_sequence::sequence::fetch()
)
)
.exec()
.await
.unwrap();
books.sort_by(|a, b| {
let a_pos = book_ids.iter().position(|i| *i == a.id).unwrap();
let b_pos = book_ids.iter().position(|i| *i == b.id).unwrap();
a_pos.cmp(&b_pos)
});
let page: Page<Book> = Page::new(
books.iter().map(|item| item.clone().into()).collect(),
total.try_into().unwrap(),
&pagination
);
Json(page)
}
pub async fn get_book(
db: Database,
Path(book_id): Path<i32>,
) -> impl IntoResponse {
let book = db
.book()
.find_unique(book::id::equals(book_id))
.with(
book::book_annotation::fetch()
)
.with(
book::source::fetch()
)
.with(
book::book_authors::fetch(vec![])
.with(
book_author::author::fetch()
.with(
author::author_annotation::fetch()
)
)
)
.with(
book::translations::fetch(vec![])
.with(
translator::author::fetch()
.with(
author::author_annotation::fetch()
)
)
)
.with(
book::book_sequences::fetch(vec![])
.with(
book_sequence::sequence::fetch()
)
)
.with(
book::book_genres::fetch(vec![])
.with(
book_genre::genre::fetch()
.with(
genre::source::fetch()
)
)
)
.exec()
.await
.unwrap();
match book {
Some(book) => Json::<DetailBook>(book.into()).into_response(),
None => StatusCode::NOT_FOUND.into_response(),
}
}
pub async fn get_book_annotation(
db: Database,
Path(book_id): Path<i32>,
) -> impl IntoResponse {
let book_annotation = db
.book_annotation()
.find_unique(
book_annotation::book_id::equals(book_id)
)
.exec()
.await
.unwrap();
match book_annotation {
Some(book_annotation) => Json::<BookAnnotation>(book_annotation.into()).into_response(),
None => StatusCode::NOT_FOUND.into_response(),
}
}
pub async fn get_books_router() -> Router {
Router::new()
.route("/", get(get_books))
.route("/base/", get(get_base_books))
.route("/random", get(get_random_book))
.route("/remote/:source_id/:remote_id", get(get_remote_book))
.route("/search/:query", get(search_books))
.route("/:book_id", get(get_book))
.route("/:book_id/annotation", get(get_book_annotation))
}
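
For reference, the filter string built in get_random_book combines a languages clause with an optional genre clause. A sketch of its concrete shape with hypothetical inputs:

#[test]
fn random_book_filter_string_shape() {
    // Hypothetical inputs, mirroring the construction in get_random_book.
    let allowed_langs = vec!["ru".to_string(), "be".to_string()];
    let genre: Option<i32> = Some(15);
    let langs_filter = format!("lang IN [{}]", allowed_langs.join(", "));
    let genre_filter = match genre {
        Some(v) => format!(" AND genres = {v}"),
        None => String::new(),
    };
    assert_eq!(
        format!("{langs_filter}{genre_filter}"),
        "lang IN [ru, be] AND genres = 15"
    );
}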

80
src/views/genres.rs Normal file

@@ -0,0 +1,80 @@
use std::collections::HashSet;
use axum::{Router, routing::get, extract::Query, Json, response::IntoResponse};
use prisma_client_rust::Direction;
use crate::{serializers::{pagination::{Pagination, Page}, genre::{Genre, GenreFilter}}, prisma::genre};
use super::Database;
pub async fn get_genres(
db: Database,
pagination: Query<Pagination>,
Query(GenreFilter { meta }): Query<GenreFilter>
) -> impl IntoResponse {
let filter = {
match meta {
Some(meta) => vec![
genre::meta::equals(meta)
],
None => vec![],
}
};
let genres_count = db
.genre()
.count(filter.clone())
.exec()
.await
.unwrap();
let genres = db
.genre()
.find_many(filter)
.with(
genre::source::fetch()
)
.order_by(genre::id::order(Direction::Asc))
.skip((pagination.page - 1) * pagination.size)
.take(pagination.size)
.exec()
.await
.unwrap();
let page: Page<Genre> = Page::new(
genres.iter().map(|item| item.clone().into()).collect(),
genres_count,
&pagination
);
Json(page)
}
pub async fn get_genre_metas(
db: Database
) -> impl IntoResponse {
let genres = db
.genre()
.find_many(vec![])
.order_by(genre::id::order(Direction::Asc))
.exec()
.await
.unwrap();
let mut metas: HashSet<String> = HashSet::new();
for genre in genres {
metas.insert(genre.meta.clone());
}
Json::<Vec<String>>(metas.into_iter().collect())
}
pub async fn get_genres_router() -> Router {
Router::new()
.route("/", get(get_genres))
.route("/metas", get(get_genre_metas))
}

72
src/views/mod.rs Normal file

@@ -0,0 +1,72 @@
use std::sync::Arc;
use axum::{Router, routing::get, middleware::{self, Next}, Extension, http::{Request, StatusCode, self}, response::Response};
use axum_prometheus::PrometheusMetricLayer;
use tower_http::trace::{TraceLayer, self};
use tracing::Level;
use crate::{config::CONFIG, db::get_prisma_client, prisma::PrismaClient};
use self::{authors::get_authors_router, genres::get_genres_router, books::get_books_router, sequences::get_sequences_router};
use self::translators::get_translators_router;
pub mod authors;
pub mod books;
pub mod genres;
pub mod sequences;
pub mod translators;
pub type Database = Extension<Arc<PrismaClient>>;
async fn auth<B>(req: Request<B>, next: Next<B>) -> Result<Response, StatusCode> {
let auth_header = req.headers()
.get(http::header::AUTHORIZATION)
.and_then(|header| header.to_str().ok());
let auth_header = if let Some(auth_header) = auth_header {
auth_header
} else {
return Err(StatusCode::UNAUTHORIZED);
};
if auth_header != CONFIG.api_key {
return Err(StatusCode::UNAUTHORIZED);
}
Ok(next.run(req).await)
}
pub async fn get_router() -> Router {
let client = Arc::new(get_prisma_client().await);
let (prometheus_layer, metric_handle) = PrometheusMetricLayer::pair();
let app_router = Router::new()
.nest("/api/v1/authors", get_authors_router().await)
.nest("/api/v1/translators", get_translators_router().await)
.nest("/api/v1/genres", get_genres_router().await)
.nest("/api/v1/books", get_books_router().await)
.nest("/api/v1/sequences", get_sequences_router().await)
.layer(middleware::from_fn(auth))
.layer(Extension(client))
.layer(prometheus_layer);
let metric_router = Router::new()
.route("/metrics", get(|| async move { metric_handle.render() }));
Router::new()
.nest("/", app_router)
.nest("/", metric_router)
.layer(
TraceLayer::new_for_http()
.make_span_with(trace::DefaultMakeSpan::new()
.level(Level::INFO))
.on_response(trace::DefaultOnResponse::new()
.level(Level::INFO)),
)
}
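
Every route nested under /api/v1 sits behind the auth middleware, which compares the Authorization header directly against CONFIG.api_key (no Bearer prefix), while /metrics stays open. A hedged client-side sketch; reqwest is not a dependency of this crate, any HTTP client works:

use reqwest::Client; // assumed client crate, not part of this commit

async fn fetch_genres(api_key: &str) -> Result<String, reqwest::Error> {
    Client::new()
        .get("http://localhost:8080/api/v1/genres/?page=1&size=50")
        .header("Authorization", api_key)
        .send()
        .await?
        .text()
        .await
}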

253
src/views/sequences.rs Normal file

@@ -0,0 +1,253 @@
use std::collections::HashSet;
use axum::{Router, routing::get, extract::{Path, Query}, http::StatusCode, response::IntoResponse, Json};
use rand::Rng;
use crate::{prisma::{sequence, book_sequence, book, book_author, author, translator}, serializers::{sequence::{Sequence, SequenceBook}, allowed_langs::AllowedLangs, pagination::{PageWithParent, Pagination, Page}}, meilisearch::{get_meili_client, SequenceMeili}};
use super::Database;
async fn get_random_sequence(
db: Database,
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>
) -> impl IntoResponse {
let client = get_meili_client();
let authors_index = client.index("sequences");
let filter = format!(
"langs IN [{}]",
allowed_langs.join(", ")
);
let result = authors_index
.search()
.with_filter(&filter)
.execute::<SequenceMeili>()
.await
.unwrap();
let sequence_id = {
let offset: usize = rand::thread_rng().gen_range(0..result.estimated_total_hits.unwrap().try_into().unwrap());
let result = authors_index
.search()
.with_limit(1)
.with_offset(offset)
.execute::<SequenceMeili>()
.await
.unwrap();
let sequence = &result.hits.get(0).unwrap().result;
sequence.id
};
let sequence = db
.sequence()
.find_unique(
sequence::id::equals(sequence_id)
)
.exec()
.await
.unwrap()
.unwrap();
Json::<Sequence>(sequence.into())
}
async fn search_sequence(
db: Database,
Path(query): Path<String>,
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>,
pagination: Query<Pagination>
) -> impl IntoResponse {
let client = get_meili_client();
let sequence_index = client.index("sequences");
let filter = format!(
"langs IN [{}]",
allowed_langs.join(", ")
);
let result = sequence_index
.search()
.with_query(&query)
.with_filter(&filter)
.with_offset(((pagination.page - 1) * pagination.size).try_into().unwrap())
.with_limit(pagination.size.try_into().unwrap())
.execute::<SequenceMeili>()
.await
.unwrap();
let total = result.estimated_total_hits.unwrap();
let sequence_ids: Vec<i32> = result.hits.iter().map(|a| a.result.id).collect();
let mut sequences = db
.sequence()
.find_many(vec![
sequence::id::in_vec(sequence_ids.clone())
])
.exec()
.await
.unwrap();
sequences.sort_by(|a, b| {
let a_pos = sequence_ids.iter().position(|i| *i == a.id).unwrap();
let b_pos: usize = sequence_ids.iter().position(|i| *i == b.id).unwrap();
a_pos.cmp(&b_pos)
});
let page: Page<Sequence> = Page::new(
sequences.iter().map(|item| item.clone().into()).collect(),
total.try_into().unwrap(),
&pagination
);
Json(page)
}
async fn get_sequence(
db: Database,
Path(sequence_id): Path<i32>
) -> impl IntoResponse {
let sequence = db
.sequence()
.find_unique(
sequence::id::equals(sequence_id)
)
.exec()
.await
.unwrap();
match sequence {
Some(sequence) => Json::<Sequence>(sequence.into()).into_response(),
None => StatusCode::NOT_FOUND.into_response(),
}
}
async fn get_sequence_available_types(
db: Database,
Path(sequence_id): Path<i32>,
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>
) -> impl IntoResponse {
let books = db
.book()
.find_many(vec![
book::book_sequences::some(vec![
book_sequence::sequence_id::equals(sequence_id)
]),
book::lang::in_vec(allowed_langs)
])
.exec()
.await
.unwrap();
let mut file_types: HashSet<String> = HashSet::new();
for book in books {
file_types.insert(book.file_type.clone());
}
if file_types.contains(&"fb2".to_string()) {
file_types.insert("epub".to_string());
file_types.insert("mobi".to_string());
file_types.insert("fb2zip".to_string());
}
Json::<Vec<String>>(file_types.into_iter().collect())
}
async fn get_sequence_books(
db: Database,
Path(sequence_id): Path<i32>,
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>,
pagination: Query<Pagination>
) -> impl IntoResponse {
let sequence = db
.sequence()
.find_unique(
sequence::id::equals(sequence_id)
)
.exec()
.await
.unwrap();
let sequence = match sequence {
Some(v) => v,
None => return StatusCode::NOT_FOUND.into_response(),
};
let books_count = db
.book()
.count(vec![
book::book_sequences::some(vec![
book_sequence::sequence_id::equals(sequence_id)
]),
book::lang::in_vec(allowed_langs.clone())
])
.exec()
.await
.unwrap();
let books = db
.book()
.find_many(vec![
book::book_sequences::some(vec![
book_sequence::sequence_id::equals(sequence_id)
]),
book::lang::in_vec(allowed_langs.clone())
])
.with(
book::source::fetch()
)
.with(
book::book_annotation::fetch()
)
.with(
book::book_authors::fetch(vec![])
.with(
book_author::author::fetch()
.with(
author::author_annotation::fetch()
)
)
)
.with(
book::translations::fetch(vec![])
.with(
translator::author::fetch()
.with(
author::author_annotation::fetch()
)
)
)
.order_by(book::id::order(prisma_client_rust::Direction::Asc))
.skip((pagination.page - 1) * pagination.size)
.take(pagination.size)
.exec()
.await
.unwrap();
let page: PageWithParent<SequenceBook, Sequence> = PageWithParent::new(
sequence.into(),
books.iter().map(|item| item.clone().into()).collect(),
books_count,
&pagination
);
Json(page).into_response()
}
pub async fn get_sequences_router() -> Router {
Router::new()
.route("/random", get(get_random_sequence))
.route("/search/:query", get(search_sequence))
.route("/:sequence_id", get(get_sequence))
.route("/:sequence_id/available_types", get(get_sequence_available_types))
.route("/:sequence_id/books", get(get_sequence_books))
}

188
src/views/translators.rs Normal file

@@ -0,0 +1,188 @@
use std::collections::HashSet;
use axum::{Router, routing::get, extract::{Path, Query}, response::IntoResponse, Json, http::StatusCode};
use crate::{serializers::{pagination::{Pagination, Page, PageWithParent}, author::Author, translator::TranslatorBook, allowed_langs::AllowedLangs}, meilisearch::{get_meili_client, AuthorMeili}, prisma::{author, book::{self}, translator, book_author, book_sequence}};
use super::Database;
async fn get_translated_books(
db: Database,
Path(translator_id): Path<i32>,
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>,
pagination: Query<Pagination>
) -> impl IntoResponse {
let translator = db
.author()
.find_unique(
author::id::equals(translator_id)
)
.with(
author::author_annotation::fetch()
)
.exec()
.await
.unwrap();
let translator = match translator {
Some(translator) => translator,
None => return StatusCode::NOT_FOUND.into_response(),
};
let books_count = db
.book()
.count(vec![
book::translations::some(vec![
translator::author_id::equals(translator_id)
]),
book::lang::in_vec(allowed_langs.clone())
])
.exec()
.await
.unwrap();
let books = db
.book()
.find_many(vec![
book::translations::some(vec![
translator::author_id::equals(translator_id)
]),
book::lang::in_vec(allowed_langs)
])
.with(
book::source::fetch()
)
.with(
book::book_annotation::fetch()
)
.with(
book::book_authors::fetch(vec![])
.with(
book_author::author::fetch()
.with(
author::author_annotation::fetch()
)
)
)
.with(
book::book_sequences::fetch(vec![])
.with(
book_sequence::sequence::fetch()
)
)
.order_by(book::id::order(prisma_client_rust::Direction::Asc))
.skip((pagination.page - 1) * pagination.size)
.take(pagination.size)
.exec()
.await
.unwrap();
let page: PageWithParent<TranslatorBook, Author> = PageWithParent::new(
translator.into(),
books.iter().map(|item| item.clone().into()).collect(),
books_count,
&pagination
);
Json(page).into_response()
}
async fn get_translated_books_available_types(
db: Database,
Path(translator_id): Path<i32>,
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>
) -> impl IntoResponse {
let books = db
.book()
.find_many(vec![
book::translations::some(vec![
translator::author_id::equals(translator_id)
]),
book::lang::in_vec(allowed_langs)
])
.exec()
.await
.unwrap();
let mut file_types: HashSet<String> = HashSet::new();
for book in books {
file_types.insert(book.file_type.clone());
}
if file_types.contains(&"fb2".to_string()) {
file_types.insert("epub".to_string());
file_types.insert("mobi".to_string());
file_types.insert("fb2zip".to_string());
}
Json::<Vec<String>>(file_types.into_iter().collect())
}
async fn search_translators(
db: Database,
Path(query): Path<String>,
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>,
pagination: Query<Pagination>
) -> impl IntoResponse {
let client = get_meili_client();
let authors_index = client.index("authors");
let filter = format!(
"translator_langs IN [{}]",
allowed_langs.join(", ")
);
let result = authors_index
.search()
.with_query(&query)
.with_filter(&filter)
.with_offset(((pagination.page - 1) * pagination.size).try_into().unwrap())
.with_limit(pagination.size.try_into().unwrap())
.execute::<AuthorMeili>()
.await
.unwrap();
let total = result.estimated_total_hits.unwrap();
let translator_ids: Vec<i32> = result.hits.iter().map(|a| a.result.id).collect();
let mut translators = db
.author()
.find_many(vec![
author::id::in_vec(translator_ids.clone())
])
.with(
author::author_annotation::fetch()
)
.order_by(author::id::order(prisma_client_rust::Direction::Asc))
.exec()
.await
.unwrap();
translators.sort_by(|a, b| {
let a_pos = translator_ids.iter().position(|i| *i == a.id).unwrap();
let b_pos = translator_ids.iter().position(|i| *i == b.id).unwrap();
a_pos.cmp(&b_pos)
});
let page: Page<Author> = Page::new(
translators.iter().map(|item| item.clone().into()).collect(),
total.try_into().unwrap(),
&pagination
);
Json(page)
}
pub async fn get_translators_router() -> Router {
Router::new()
.route("/:translator_id/books", get(get_translated_books))
.route("/:translator_id/available_types", get(get_translated_books_available_types))
.route("/search/:query", get(search_translators))
}