Move to sqlx

This commit is contained in:
2024-12-25 23:28:22 +01:00
parent 3ee5e51767
commit 8002a93069
28 changed files with 2508 additions and 22526 deletions

View File

@@ -1,2 +0,0 @@
[alias]
prisma = "run -p prisma-cli --"

3541
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -3,34 +3,28 @@ name = "book_library_server"
version = "0.1.0"
edition = "2021"
[workspace]
members = [
"prisma-cli"
]
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
once_cell = "1.20.1"
once_cell = "1.20.2"
prisma-client-rust = { git = "https://github.com/Brendonovich/prisma-client-rust", tag = "0.6.11", default-features = false, features = ["postgresql"] }
tokio = { version = "1.42.0", features = ["full"] }
tokio = { version = "1.40.0", features = ["full"] }
tracing = "0.1.41"
tracing-subscriber = { version = "0.3.19", features = ["env-filter"]}
sentry-tracing = "0.35.0"
tower-http = { version = "0.6.2", features = ["trace"] }
tracing = "0.1.40"
tracing-subscriber = { version = "0.3.18", features = ["env-filter"]}
sentry-tracing = "0.34.0"
tower-http = { version = "0.6.1", features = ["trace"] }
axum = { version = "0.7.7", features = ["json"] }
axum-extra = { version ="0.9.4", features = ["query"] }
axum = { version = "0.7.9", features = ["json"] }
axum-extra = { version ="0.9.6", features = ["query"] }
axum-prometheus = "0.7.0"
serde = { version = "1.0.210", features = ["derive"] }
serde = { version = "1.0.216", features = ["derive"] }
sentry = { version = "0.34.0", features = ["debug-images"] }
sentry = { version = "0.35.0", features = ["debug-images"] }
meilisearch-sdk = "0.27.1"
rand = "0.8.5"
chrono = "0.4.38"
chrono = { version = "0.4.39", features = ["serde"] }
sqlx = { version = "0.8.2", features = ["runtime-tokio", "postgres", "macros", "chrono"] }

View File

@@ -1,3 +0,0 @@
node_modules
# Keep environment variables out of version control
.env

3195
prisma-cli/Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,9 +0,0 @@
[package]
name = "prisma-cli"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
prisma-client-rust-cli = { git = "https://github.com/Brendonovich/prisma-client-rust", tag = "0.6.11", default-features = false, features = ["postgresql"] }

View File

@@ -1,3 +0,0 @@
// Entry point for the prisma-cli helper crate: delegates straight to the
// Prisma Client Rust CLI (invoked through the `cargo prisma` alias).
fn main() {
prisma_client_rust_cli::run();
}

View File

@@ -1,164 +0,0 @@
// Prisma schema for the book library database (PostgreSQL).
// The generated Rust client is written to ../src/prisma.rs and built
// via the `cargo prisma` alias (prisma-client-rust).
generator client {
provider = "cargo prisma"
output = "../src/prisma.rs"
}
// Connection string is supplied through the DATABASE_URL environment variable.
datasource db {
provider = "postgresql"
url = env("DATABASE_URL")
}
// 1:1 free-text annotation for an author (`author_annotations` table).
model AuthorAnnotation {
id Int @id @default(autoincrement())
author_id Int @unique @map("author")
title String @db.VarChar(256)
text String
file String? @db.VarChar(256)
author Author @relation(fields: [author_id], references: [id], onDelete: NoAction, onUpdate: NoAction, map: "fk_author_annotations_authors_id_author")
@@index([author_id], map: "author_annotation_author_id")
@@map("author_annotations")
}
// Author record; unique per (source, remote_id) pair (`authors` table).
model Author {
id Int @id @default(autoincrement())
source_id Int @map("source") @db.SmallInt
remote_id Int
first_name String @db.VarChar(256)
last_name String @db.VarChar(256)
middle_name String? @db.VarChar(256)
source Source @relation(fields: [source_id], references: [id], onDelete: NoAction, onUpdate: NoAction, map: "fk_authors_sources_id_source")
author_annotation AuthorAnnotation?
book_authors BookAuthor[]
translations Translator[]
@@unique([source_id, remote_id], map: "uc_authors_source_remote_id")
@@map("authors")
}
// 1:1 free-text annotation for a book (`book_annotations` table).
model BookAnnotation {
id Int @id @default(autoincrement())
book_id Int @unique @map("book")
title String @db.VarChar(256)
text String
file String? @db.VarChar(256)
book Book @relation(fields: [book_id], references: [id], onDelete: NoAction, onUpdate: NoAction, map: "fk_book_annotations_books_id_book")
@@index([book_id], map: "book_annotation_book_id")
@@map("book_annotations")
}
// Join table linking books to their authors (`book_authors`).
model BookAuthor {
id Int @id @default(autoincrement())
author_id Int @map("author")
book_id Int @map("book")
author Author @relation(fields: [author_id], references: [id], onDelete: Cascade, map: "fk_book_authors_authors_author_id")
book Book @relation(fields: [book_id], references: [id], onDelete: Cascade, map: "fk_book_authors_books_book_id")
@@unique([book_id, author_id], map: "uc_book_authors_book_author")
@@index([author_id], map: "book_authors_author")
@@index([book_id], map: "book_authors_book")
@@map("book_authors")
}
// Join table linking books to genres (`book_genres`).
model BookGenre {
id Int @id @default(autoincrement())
genre_id Int @map("genre")
book_id Int @map("book")
book Book @relation(fields: [book_id], references: [id], onDelete: Cascade, map: "fk_book_genres_books_book_id")
genre Genre @relation(fields: [genre_id], references: [id], onDelete: Cascade, map: "fk_book_genres_genres_genre_id")
@@unique([book_id, genre_id], map: "uc_book_genres_book_genre")
@@index([book_id], map: "book_genres_book")
@@index([genre_id], map: "book_genres_genre")
@@map("book_genres")
}
// Join table linking books to sequences, with the book's position
// inside the sequence (`book_sequences`).
model BookSequence {
id Int @id @default(autoincrement())
position Int @db.SmallInt
sequence_id Int @map("sequence")
book_id Int @map("book")
book Book @relation(fields: [book_id], references: [id], onDelete: Cascade, map: "fk_book_sequences_books_book_id")
sequence Sequence @relation(fields: [sequence_id], references: [id], onDelete: Cascade, map: "fk_book_sequences_sequences_sequence_id")
@@unique([book_id, sequence_id], map: "uc_book_sequences_book_sequence")
@@index([book_id], map: "book_sequences_book")
@@index([sequence_id], map: "book_sequences_sequence")
@@map("book_sequences")
}
// Main book record; unique per (source, remote_id) pair (`books` table).
model Book {
id Int @id @default(autoincrement())
source_id Int @map("source") @db.SmallInt
remote_id Int
title String @db.VarChar(256)
lang String @db.VarChar(3)
file_type String @db.VarChar(4)
uploaded DateTime @db.Date
is_deleted Boolean @default(false)
pages Int?
year Int @db.SmallInt @default(0)
source Source @relation(fields: [source_id], references: [id], onDelete: NoAction, onUpdate: NoAction, map: "fk_books_sources_id_source")
book_annotation BookAnnotation?
book_authors BookAuthor[]
book_genres BookGenre[]
book_sequences BookSequence[]
translations Translator[]
@@unique([source_id, remote_id], map: "uc_books_source_remote_id")
@@index([file_type], map: "ix_books_file_type")
@@index([uploaded], map: "ix_books_uploaded")
@@map("books")
}
// Genre record; unique per (source, remote_id) pair (`genres` table).
model Genre {
id Int @id @default(autoincrement())
source_id Int @map("source") @db.SmallInt
remote_id Int
code String @db.VarChar(45)
description String @db.VarChar(99)
meta String @db.VarChar(45)
source Source @relation(fields: [source_id], references: [id], onDelete: NoAction, onUpdate: NoAction, map: "fk_genres_sources_id_source")
book_genres BookGenre[]
@@unique([source_id, remote_id], map: "uc_genres_source_remote_id")
@@map("genres")
}
// Book series; unique per (source, remote_id) pair (`sequences` table).
model Sequence {
id Int @id @default(autoincrement())
source_id Int @map("source") @db.SmallInt
remote_id Int
name String @db.VarChar(256)
source Source @relation(fields: [source_id], references: [id], onDelete: NoAction, onUpdate: NoAction, map: "fk_sequences_sources_id_source")
book_sequences BookSequence[]
@@unique([source_id, remote_id], map: "uc_sequences_source_remote_id")
@@map("sequences")
}
// Upstream data source that authors/books/genres/sequences are imported
// from (`sources` table).
model Source {
id Int @id @default(autoincrement()) @db.SmallInt
name String @unique @db.VarChar(32)
authors Author[]
books Book[]
genres Genre[]
sequences Sequence[]
@@map("sources")
}
// Join table linking books to their translators (authors), with an
// ordering position (`translations` table).
model Translator {
id Int @id @default(autoincrement())
position Int @db.SmallInt
author_id Int @map("author")
book_id Int @map("book")
author Author @relation(fields: [author_id], references: [id], onDelete: Cascade, map: "fk_translations_authors_author_id")
book Book @relation(fields: [book_id], references: [id], onDelete: Cascade, map: "fk_translations_books_book_id")
@@unique([book_id, author_id], map: "uc_translations_book_author")
@@index([author_id], map: "translations_author")
@@index([book_id], map: "translations_book")
@@map("translations")
}

View File

@@ -1,8 +1,10 @@
use crate::{config::CONFIG, prisma::PrismaClient};
use crate::config::CONFIG;
pub async fn get_prisma_client() -> PrismaClient {
use sqlx::{postgres::PgPoolOptions, PgPool};
pub async fn get_postgres_pool() -> PgPool {
let database_url: String = format!(
"postgresql://{}:{}@{}:{}/{}?connection_limit=10&pool_timeout=300",
"postgresql://{}:{}@{}:{}/{}",
CONFIG.postgres_user,
CONFIG.postgres_password,
CONFIG.postgres_host,
@@ -10,9 +12,10 @@ pub async fn get_prisma_client() -> PrismaClient {
CONFIG.postgres_db
);
PrismaClient::_builder()
.with_url(database_url)
.build()
PgPoolOptions::new()
.max_connections(10)
.acquire_timeout(std::time::Duration::from_secs(300))
.connect(&database_url)
.await
.unwrap()
}

View File

@@ -1,7 +1,6 @@
pub mod config;
pub mod db;
pub mod meilisearch;
pub mod prisma;
pub mod serializers;
pub mod views;

File diff suppressed because one or more lines are too long

View File

@@ -1,13 +1,10 @@
use chrono::NaiveDate;
use serde::Serialize;
use crate::prisma::{author, book};
use super::date::naive_date_serializer;
use super::sequence::Sequence;
use super::{
sequence::Sequence,
utils::{get_available_types, get_sequences, get_translators},
};
#[derive(Serialize)]
#[derive(sqlx::FromRow, sqlx::Type, Serialize)]
pub struct Author {
pub id: i32,
pub first_name: String,
@@ -16,28 +13,7 @@ pub struct Author {
pub annotation_exists: bool,
}
impl From<author::Data> for Author {
fn from(val: author::Data) -> Self {
let author::Data {
id,
first_name,
last_name,
middle_name,
author_annotation,
..
} = val;
Author {
id,
first_name,
last_name,
middle_name: middle_name.unwrap_or("".to_string()),
annotation_exists: author_annotation.unwrap().is_some(),
}
}
}
#[derive(Serialize)]
#[derive(sqlx::FromRow, Serialize)]
pub struct AuthorBook {
pub id: i32,
pub title: String,
@@ -45,39 +21,9 @@ pub struct AuthorBook {
pub file_type: String,
pub year: i32,
pub available_types: Vec<String>,
pub uploaded: String,
#[serde(serialize_with = "naive_date_serializer::serialize")]
pub uploaded: NaiveDate,
pub translators: Vec<Author>,
pub sequences: Vec<Sequence>,
pub annotation_exists: bool,
}
impl From<book::Data> for AuthorBook {
fn from(val: book::Data) -> Self {
let book::Data {
id,
title,
lang,
file_type,
year,
uploaded,
translations,
book_sequences,
book_annotation,
source,
..
} = val;
AuthorBook {
id,
title,
lang,
file_type: file_type.clone(),
year,
available_types: get_available_types(file_type, source.unwrap().name),
uploaded: uploaded.format("%Y-%m-%d").to_string(),
translators: get_translators(translations),
sequences: get_sequences(book_sequences),
annotation_exists: book_annotation.unwrap().is_some(),
}
}
}

View File

@@ -1,30 +1,9 @@
use serde::Serialize;
use crate::prisma::author_annotation;
#[derive(Serialize)]
#[derive(sqlx::FromRow, Serialize)]
pub struct AuthorAnnotation {
pub id: i32,
pub title: String,
pub text: String,
pub file: Option<String>,
}
impl From<author_annotation::Data> for AuthorAnnotation {
fn from(val: author_annotation::Data) -> Self {
let author_annotation::Data {
id,
title,
text,
file,
..
} = val;
AuthorAnnotation {
id,
title,
text,
file,
}
}
}

View File

@@ -1,15 +1,9 @@
use chrono::{NaiveDate, NaiveDateTime, NaiveTime};
use chrono::NaiveDate;
use serde::{Deserialize, Serialize};
use crate::prisma::book::{self};
use super::date::naive_date_serializer;
use super::{
author::Author,
genre::Genre,
sequence::Sequence,
source::Source,
utils::{get_authors, get_available_types, get_genres, get_sequences, get_translators},
};
use super::{author::Author, genre::Genre, sequence::Sequence, source::Source};
fn default_langs() -> Vec<String> {
vec!["ru".to_string(), "be".to_string(), "uk".to_string()]
@@ -26,49 +20,6 @@ pub struct BookFilter {
pub id_lte: Option<i32>,
}
impl BookFilter {
pub fn get_filter_vec(self) -> Vec<book::WhereParam> {
let mut result = vec![];
result.push(book::lang::in_vec(self.allowed_langs));
match self.is_deleted {
Some(v) => {
result.push(book::is_deleted::equals(v));
}
None => {
result.push(book::is_deleted::equals(false));
}
};
if let Some(uploaded_gte) = self.uploaded_gte {
result.push(book::uploaded::gte(
NaiveDateTime::new(uploaded_gte, NaiveTime::default())
.and_utc()
.into(),
));
};
if let Some(uploaded_lte) = self.uploaded_lte {
result.push(book::uploaded::lte(
NaiveDateTime::new(uploaded_lte, NaiveTime::default())
.and_utc()
.into(),
));
};
if let Some(id_gte) = self.id_gte {
result.push(book::id::gte(id_gte));
};
if let Some(id_lte) = self.id_lte {
result.push(book::id::lte(id_lte));
};
result
}
}
#[derive(Serialize)]
pub struct RemoteBook {
pub id: i32,
@@ -77,7 +28,8 @@ pub struct RemoteBook {
pub file_type: String,
pub year: i32,
pub available_types: Vec<String>,
pub uploaded: String,
#[serde(serialize_with = "naive_date_serializer::serialize")]
pub uploaded: NaiveDate,
pub authors: Vec<Author>,
pub translators: Vec<Author>,
pub sequences: Vec<Sequence>,
@@ -86,64 +38,12 @@ pub struct RemoteBook {
pub remote_id: i32,
}
impl From<book::Data> for RemoteBook {
fn from(value: book::Data) -> Self {
let book::Data {
id,
title,
lang,
file_type,
year,
uploaded,
book_authors,
translations,
book_sequences,
book_annotation,
source,
remote_id,
..
} = value;
Self {
id,
title,
lang,
file_type: file_type.clone(),
year,
available_types: get_available_types(file_type, source.clone().unwrap().name),
uploaded: uploaded.format("%Y-%m-%d").to_string(),
authors: get_authors(book_authors),
translators: get_translators(translations),
sequences: get_sequences(book_sequences),
annotation_exists: book_annotation.unwrap().is_some(),
source: source.unwrap().as_ref().clone().into(),
remote_id,
}
}
}
#[derive(Serialize)]
pub struct BaseBook {
pub id: i32,
pub available_types: Vec<String>,
}
impl From<book::Data> for BaseBook {
fn from(value: book::Data) -> Self {
let book::Data {
id,
file_type,
source,
..
} = value;
Self {
id,
available_types: get_available_types(file_type, source.clone().unwrap().name),
}
}
}
#[derive(Serialize)]
pub struct DetailBook {
pub id: i32,
@@ -152,7 +52,8 @@ pub struct DetailBook {
pub file_type: String,
pub year: i32,
pub available_types: Vec<String>,
pub uploaded: String,
#[serde(serialize_with = "naive_date_serializer::serialize")]
pub uploaded: NaiveDate,
pub authors: Vec<Author>,
pub translators: Vec<Author>,
pub sequences: Vec<Sequence>,
@@ -164,48 +65,6 @@ pub struct DetailBook {
pub pages: Option<i32>,
}
impl From<book::Data> for DetailBook {
fn from(value: book::Data) -> Self {
let book::Data {
id,
title,
lang,
file_type,
year,
uploaded,
book_authors,
translations,
book_sequences,
book_annotation,
source,
remote_id,
book_genres,
is_deleted,
pages,
..
} = value;
Self {
id,
title,
lang,
file_type: file_type.clone(),
year,
available_types: get_available_types(file_type, source.clone().unwrap().name),
uploaded: uploaded.format("%Y-%m-%d").to_string(),
authors: get_authors(book_authors),
translators: get_translators(translations),
sequences: get_sequences(book_sequences),
annotation_exists: book_annotation.unwrap().is_some(),
source: source.unwrap().as_ref().clone().into(),
remote_id,
genres: get_genres(book_genres),
is_deleted,
pages,
}
}
}
#[derive(Deserialize)]
pub struct RandomBookFilter {
pub allowed_langs: Vec<String>,
@@ -220,42 +79,10 @@ pub struct Book {
pub file_type: String,
pub year: i32,
pub available_types: Vec<String>,
pub uploaded: String,
#[serde(serialize_with = "naive_date_serializer::serialize")]
pub uploaded: NaiveDate,
pub authors: Vec<Author>,
pub translators: Vec<Author>,
pub sequences: Vec<Sequence>,
pub annotation_exists: bool,
}
impl From<book::Data> for Book {
fn from(value: book::Data) -> Self {
let book::Data {
id,
title,
lang,
file_type,
year,
uploaded,
book_authors,
translations,
book_sequences,
book_annotation,
source,
..
} = value;
Self {
id,
title,
lang,
file_type: file_type.clone(),
year,
available_types: get_available_types(file_type, source.clone().unwrap().name),
uploaded: uploaded.format("%Y-%m-%d").to_string(),
authors: get_authors(book_authors),
translators: get_translators(translations),
sequences: get_sequences(book_sequences),
annotation_exists: book_annotation.unwrap().is_some(),
}
}
}

View File

@@ -1,30 +1,9 @@
use serde::Serialize;
use crate::prisma::book_annotation;
#[derive(Serialize)]
#[derive(sqlx::FromRow, Serialize)]
pub struct BookAnnotation {
pub id: i32,
pub title: String,
pub text: String,
pub file: Option<String>,
}
impl From<book_annotation::Data> for BookAnnotation {
fn from(value: book_annotation::Data) -> Self {
let book_annotation::Data {
id,
title,
text,
file,
..
} = value;
Self {
id,
title,
text,
file,
}
}
}

16
src/serializers/date.rs Normal file
View File

@@ -0,0 +1,16 @@
use chrono::NaiveDate;
use serde::Serializer;

/// Custom serde serializer that renders `NaiveDate` values as
/// `YYYY-MM-DD` strings; intended for use with
/// `#[serde(serialize_with = "naive_date_serializer::serialize")]`.
pub mod naive_date_serializer {
    use super::{NaiveDate, Serializer};

    /// Date format shared by all serialized responses.
    const FORMAT: &str = "%Y-%m-%d";

    /// Serialize `date` as a plain `%Y-%m-%d` string.
    pub fn serialize<S>(date: &NaiveDate, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.serialize_str(&date.format(FORMAT).to_string())
    }
}

View File

@@ -1,10 +1,8 @@
use serde::{Deserialize, Serialize};
use crate::prisma::genre;
use super::source::Source;
#[derive(Serialize)]
#[derive(sqlx::FromRow, sqlx::Type, Serialize)]
pub struct Genre {
pub id: i32,
pub source: Source,
@@ -14,29 +12,6 @@ pub struct Genre {
pub meta: String,
}
impl From<genre::Data> for Genre {
fn from(val: genre::Data) -> Self {
let genre::Data {
id,
remote_id,
code,
description,
meta,
source,
..
} = val;
Genre {
id,
remote_id,
code,
description,
meta,
source: source.unwrap().as_ref().clone().into(),
}
}
}
#[derive(Deserialize)]
pub struct GenreFilter {
pub meta: Option<String>,

View File

@@ -3,6 +3,7 @@ pub mod author;
pub mod author_annotation;
pub mod book;
pub mod book_annotation;
pub mod date;
pub mod genre;
pub mod pagination;
pub mod sequence;

View File

@@ -1,27 +1,16 @@
use chrono::NaiveDate;
use serde::Serialize;
use crate::prisma::{book, sequence};
use super::author::Author;
use super::date::naive_date_serializer;
use super::{
author::Author,
utils::{get_authors, get_available_types, get_translators},
};
#[derive(Serialize)]
#[derive(sqlx::FromRow, sqlx::Type, Serialize)]
pub struct Sequence {
pub id: i32,
pub name: String,
}
impl From<sequence::Data> for Sequence {
fn from(val: sequence::Data) -> Self {
let sequence::Data { id, name, .. } = val;
Sequence { id, name }
}
}
#[derive(Serialize)]
#[derive(sqlx::FromRow, Serialize)]
pub struct SequenceBook {
pub id: i32,
pub title: String,
@@ -29,42 +18,10 @@ pub struct SequenceBook {
pub file_type: String,
pub year: i32,
pub available_types: Vec<String>,
pub uploaded: String,
#[serde(serialize_with = "naive_date_serializer::serialize")]
pub uploaded: NaiveDate,
pub authors: Vec<Author>,
pub translators: Vec<Author>,
pub annotation_exists: bool,
pub position: i32,
}
impl From<book::Data> for SequenceBook {
fn from(value: book::Data) -> Self {
let book::Data {
id,
title,
lang,
file_type,
year,
uploaded,
book_authors,
translations,
book_annotation,
source,
book_sequences,
..
} = value;
Self {
id,
title,
lang,
file_type: file_type.clone(),
year,
available_types: get_available_types(file_type, source.clone().unwrap().name),
uploaded: uploaded.format("%Y-%m-%d").to_string(),
authors: get_authors(book_authors),
translators: get_translators(translations),
annotation_exists: book_annotation.unwrap().is_some(),
position: book_sequences.unwrap().first().unwrap().position,
}
}
}

View File

@@ -1,17 +1,7 @@
use serde::Serialize;
use crate::prisma::source;
#[derive(Serialize)]
#[derive(sqlx::FromRow, sqlx::Type, Serialize)]
pub struct Source {
pub id: i32,
pub name: String,
}
impl From<source::Data> for Source {
fn from(val: source::Data) -> Self {
let source::Data { id, name, .. } = val;
Source { id, name }
}
}

View File

@@ -1,14 +1,11 @@
use chrono::NaiveDate;
use serde::Serialize;
use crate::prisma::book;
use super::date::naive_date_serializer;
use super::{
author::Author,
sequence::Sequence,
utils::{get_authors, get_available_types, get_sequences},
};
use super::{author::Author, sequence::Sequence};
#[derive(Serialize)]
#[derive(sqlx::FromRow, Serialize)]
pub struct TranslatorBook {
pub id: i32,
pub title: String,
@@ -16,39 +13,9 @@ pub struct TranslatorBook {
pub file_type: String,
pub year: i32,
pub available_types: Vec<String>,
pub uploaded: String,
#[serde(serialize_with = "naive_date_serializer::serialize")]
pub uploaded: NaiveDate,
pub authors: Vec<Author>,
pub sequences: Vec<Sequence>,
pub annotation_exists: bool,
}
impl From<book::Data> for TranslatorBook {
fn from(val: book::Data) -> Self {
let book::Data {
id,
title,
lang,
file_type,
year,
uploaded,
book_authors,
book_sequences,
book_annotation,
source,
..
} = val;
TranslatorBook {
id,
title,
lang,
file_type: file_type.clone(),
year,
available_types: get_available_types(file_type.clone(), source.unwrap().name),
uploaded: uploaded.format("%Y-%m-%d").to_string(),
authors: get_authors(book_authors),
sequences: get_sequences(book_sequences),
annotation_exists: book_annotation.unwrap().is_some(),
}
}
}

View File

@@ -1,7 +1,3 @@
use crate::prisma::{book_author, book_genre, book_sequence, translator};
use super::{author::Author, genre::Genre, sequence::Sequence};
pub fn get_available_types(file_type: String, source_name: String) -> Vec<String> {
if file_type == "fb2" && source_name == "flibusta" {
vec![
@@ -14,35 +10,3 @@ pub fn get_available_types(file_type: String, source_name: String) -> Vec<String
vec![file_type]
}
}
pub fn get_authors(book_authors: Option<Vec<book_author::Data>>) -> Vec<Author> {
book_authors
.unwrap()
.iter()
.map(|item| item.author.clone().unwrap().as_ref().clone().into())
.collect()
}
pub fn get_translators(translations: Option<Vec<translator::Data>>) -> Vec<Author> {
translations
.unwrap()
.iter()
.map(|item| item.author.clone().unwrap().as_ref().clone().into())
.collect()
}
pub fn get_sequences(book_sequences: Option<Vec<book_sequence::Data>>) -> Vec<Sequence> {
book_sequences
.unwrap()
.iter()
.map(|item| item.sequence.clone().unwrap().as_ref().clone().into())
.collect()
}
pub fn get_genres(book_genres: Option<Vec<book_genre::Data>>) -> Vec<Genre> {
book_genres
.unwrap()
.iter()
.map(|item| item.genre.clone().unwrap().as_ref().clone().into())
.collect()
}

View File

@@ -10,40 +10,51 @@ use axum::{
use crate::{
meilisearch::{get_meili_client, AuthorMeili},
prisma::{
author,
author_annotation::{self},
book, book_author, book_sequence, translator,
},
serializers::{
allowed_langs::AllowedLangs,
author::{Author, AuthorBook},
author_annotation::AuthorAnnotation,
book::BaseBook,
pagination::{Page, PageWithParent, Pagination},
sequence::Sequence,
},
};
use super::{common::get_random_item::get_random_item, Database};
async fn get_authors(db: Database, pagination: Query<Pagination>) -> impl IntoResponse {
let authors_count = db.author().count(vec![]).exec().await.unwrap();
let authors_count = sqlx::query_scalar!("SELECT COUNT(*) FROM authors",)
.fetch_one(&db.0)
.await
.unwrap()
.unwrap();
let authors = db
.author()
.find_many(vec![])
.with(author::author_annotation::fetch())
.order_by(author::id::order(prisma_client_rust::Direction::Asc))
.skip((pagination.page - 1) * pagination.size)
.take(pagination.size)
.exec()
let authors = sqlx::query_as!(
Author,
r#"
SELECT
a.id,
a.first_name,
a.last_name,
COALESCE(a.middle_name, '') AS "middle_name!: String",
CASE
WHEN aa.id IS NOT NULL THEN true
ELSE false
END AS "annotation_exists!: bool"
FROM authors a
LEFT JOIN author_annotations aa ON a.id = aa.author
ORDER BY a.id ASC
OFFSET $1
LIMIT $2
"#,
(pagination.page - 1) * pagination.size,
pagination.size
)
.fetch_all(&db.0)
.await
.unwrap();
let page: Page<Author> = Page::new(
authors.iter().map(|item| item.clone().into()).collect(),
authors_count,
&pagination,
);
let page: Page<Author> = Page::new(authors, authors_count, &pagination);
Json(page)
}
@@ -64,43 +75,80 @@ async fn get_random_author(
get_random_item::<AuthorMeili>(authors_index, filter).await
};
let author = db
.author()
.find_unique(author::id::equals(author_id))
.with(author::author_annotation::fetch())
.exec()
let author = sqlx::query_as!(
Author,
r#"
SELECT
a.id,
a.first_name,
a.last_name,
COALESCE(a.middle_name, '') AS "middle_name!: String",
CASE
WHEN aa.id IS NOT NULL THEN true
ELSE false
END AS "annotation_exists!: bool"
FROM authors a
LEFT JOIN author_annotations aa ON a.id = aa.author
WHERE a.id = $1
"#,
author_id
)
.fetch_one(&db.0)
.await
.unwrap()
.unwrap();
Json::<Author>(author.into())
Json::<Author>(author)
}
async fn get_author(db: Database, Path(author_id): Path<i32>) -> impl IntoResponse {
let author = db
.author()
.find_unique(author::id::equals(author_id))
.with(author::author_annotation::fetch())
.exec()
let author = sqlx::query_as!(
Author,
r#"
SELECT
a.id,
a.first_name,
a.last_name,
COALESCE(a.middle_name, '') AS "middle_name!: String",
CASE
WHEN aa.id IS NOT NULL THEN true
ELSE false
END AS "annotation_exists!: bool"
FROM authors a
LEFT JOIN author_annotations aa ON a.id = aa.author
WHERE a.id = $1
"#,
author_id
)
.fetch_optional(&db.0)
.await
.unwrap();
match author {
Some(author) => Json::<Author>(author.into()).into_response(),
Some(author) => Json::<Author>(author).into_response(),
None => StatusCode::NOT_FOUND.into_response(),
}
}
async fn get_author_annotation(db: Database, Path(author_id): Path<i32>) -> impl IntoResponse {
let author_annotation = db
.author_annotation()
.find_unique(author_annotation::author_id::equals(author_id))
.exec()
let author_annotation = sqlx::query_as!(
AuthorAnnotation,
r#"
SELECT
aa.id,
aa.title,
aa.text,
aa.file
FROM author_annotations aa
WHERE aa.author = $1
"#,
author_id
)
.fetch_optional(&db.0)
.await
.unwrap();
match author_annotation {
Some(annotation) => Json::<AuthorAnnotation>(annotation.into()).into_response(),
Some(annotation) => Json::<AuthorAnnotation>(annotation).into_response(),
None => StatusCode::NOT_FOUND.into_response(),
}
}
@@ -113,11 +161,25 @@ async fn get_author_books(
>,
pagination: Query<Pagination>,
) -> impl IntoResponse {
let author = db
.author()
.find_unique(author::id::equals(author_id))
.with(author::author_annotation::fetch())
.exec()
let author = sqlx::query_as!(
Author,
r#"
SELECT
a.id,
a.first_name,
a.last_name,
COALESCE(a.middle_name, '') AS "middle_name!: String",
CASE
WHEN aa.id IS NOT NULL THEN true
ELSE false
END AS "annotation_exists!: bool"
FROM authors a
LEFT JOIN author_annotations aa ON a.id = aa.author
WHERE a.id = $1
"#,
author_id
)
.fetch_optional(&db.0)
.await
.unwrap();
@@ -126,37 +188,82 @@ async fn get_author_books(
None => return StatusCode::NOT_FOUND.into_response(),
};
let books_filter = vec![
book::is_deleted::equals(false),
book::book_authors::some(vec![book_author::author_id::equals(author_id)]),
book::lang::in_vec(allowed_langs.clone()),
];
let books_count = db.book().count(books_filter.clone()).exec().await.unwrap();
let books = db
.book()
.find_many(books_filter)
.with(book::source::fetch())
.with(book::book_annotation::fetch())
.with(
book::translations::fetch(vec![])
.with(translator::author::fetch().with(author::author_annotation::fetch())),
let books_count = sqlx::query_scalar!(
r#"
SELECT COUNT(*)
FROM books b
JOIN book_authors ba ON b.id = ba.book
WHERE b.is_deleted = false AND ba.author = $1 AND b.lang = ANY($2)
"#,
author_id,
&allowed_langs
)
.with(book::book_sequences::fetch(vec![]).with(book_sequence::sequence::fetch()))
.order_by(book::title::order(prisma_client_rust::Direction::Asc))
.skip((pagination.page - 1) * pagination.size)
.take(pagination.size)
.exec()
.fetch_one(&db.0)
.await
.unwrap()
.unwrap();
let books = sqlx::query_as!(
AuthorBook,
r#"
SELECT
b.id,
b.title,
b.lang,
b.file_type,
b.year,
CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip'] ELSE ARRAY[b.file_type] END AS "available_types!: Vec<String>",
b.uploaded,
(
SELECT
JSON_AGG(
JSON_BUILD_OBJECT(
'id', authors.id,
'first_name', authors.first_name,
'last_name', authors.last_name,
'middle_name', authors.middle_name,
'annotation_exists', EXISTS(
SELECT * FROM author_annotations WHERE author = authors.id
)
)
)
FROM translations
JOIN authors ON authors.id = translations.author
WHERE translations.book = b.id
) AS "translators!: Vec<Author>",
(
SELECT
JSON_AGG(
JSON_BUILD_OBJECT(
'id', sequences.id,
'name', sequences.name
)
)
FROM book_sequences
JOIN sequences ON sequences.id = book_sequences.sequence
WHERE book_sequences.book = b.id
) AS "sequences!: Vec<Sequence>",
EXISTS(
SELECT * FROM book_annotations WHERE book = b.id
) AS "annotation_exists!: bool"
FROM books b
JOIN book_authors ba ON b.id = ba.book
WHERE b.is_deleted = false AND ba.author = $1 AND b.lang = ANY($2)
ORDER BY b.title ASC
OFFSET $3
LIMIT $4
"#,
author_id,
&allowed_langs,
(pagination.page - 1) * pagination.size,
pagination.size
)
.fetch_all(&db.0)
.await
.unwrap();
let page: PageWithParent<AuthorBook, Author> = PageWithParent::new(
author.into(),
books.iter().map(|item| item.clone().into()).collect(),
books_count,
&pagination,
);
let page: PageWithParent<AuthorBook, Author> =
PageWithParent::new(author, books, books_count, &pagination);
Json(page).into_response()
}
@@ -168,27 +275,31 @@ async fn get_author_books_available_types(
AllowedLangs,
>,
) -> impl IntoResponse {
let books = db
.book()
.find_many(vec![
book::is_deleted::equals(false),
book::book_authors::some(vec![book_author::author_id::equals(author_id)]),
book::lang::in_vec(allowed_langs),
])
.exec()
// TODO: refactor
let books = sqlx::query_as!(
BaseBook,
r#"
SELECT
b.id,
CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip'] ELSE ARRAY[b.file_type] END AS "available_types!: Vec<String>"
FROM books b
JOIN book_authors ba ON b.id = ba.book
WHERE b.is_deleted = false AND ba.author = $1 AND b.lang = ANY($2)
"#,
author_id,
&allowed_langs
)
.fetch_all(&db.0)
.await
.unwrap();
let mut file_types: HashSet<String> = HashSet::new();
for book in books {
file_types.insert(book.file_type.clone());
for file_type in book.available_types {
file_types.insert(file_type);
}
if file_types.contains("fb2") {
file_types.insert("epub".to_string());
file_types.insert("mobi".to_string());
file_types.insert("fb2zip".to_string());
}
Json::<Vec<String>>(file_types.into_iter().collect())
@@ -225,12 +336,25 @@ async fn search_authors(
let total = result.estimated_total_hits.unwrap();
let author_ids: Vec<i32> = result.hits.iter().map(|a| a.result.id).collect();
let mut authors = db
.author()
.find_many(vec![author::id::in_vec(author_ids.clone())])
.with(author::author_annotation::fetch())
.order_by(author::id::order(prisma_client_rust::Direction::Asc))
.exec()
let mut authors = sqlx::query_as!(
Author,
r#"
SELECT
a.id,
a.first_name,
a.last_name,
COALESCE(a.middle_name, '') AS "middle_name!: String",
CASE
WHEN aa.id IS NOT NULL THEN true
ELSE false
END AS "annotation_exists!: bool"
FROM authors a
LEFT JOIN author_annotations aa ON a.id = aa.author
WHERE a.id = ANY($1)
"#,
&author_ids
)
.fetch_all(&db.0)
.await
.unwrap();
@@ -241,11 +365,7 @@ async fn search_authors(
a_pos.cmp(&b_pos)
});
let page: Page<Author> = Page::new(
authors.iter().map(|item| item.clone().into()).collect(),
total.try_into().unwrap(),
&pagination,
);
let page: Page<Author> = Page::new(authors, total.try_into().unwrap(), &pagination);
Json(page)
}

View File

@@ -5,20 +5,18 @@ use axum::{
routing::get,
Json, Router,
};
use prisma_client_rust::Direction;
use crate::{
meilisearch::{get_meili_client, BookMeili},
prisma::{
author,
book::{self},
book_annotation, book_author, book_genre, book_sequence, genre, translator,
},
serializers::{
allowed_langs::AllowedLangs,
author::Author,
book::{BaseBook, Book, BookFilter, DetailBook, RandomBookFilter, RemoteBook},
book_annotation::BookAnnotation,
genre::Genre,
pagination::{Page, Pagination},
sequence::Sequence,
source::Source,
},
};
@@ -29,36 +27,123 @@ pub async fn get_books(
axum_extra::extract::Query(book_filter): axum_extra::extract::Query<BookFilter>,
pagination: Query<Pagination>,
) -> impl IntoResponse {
let filter = book_filter.get_filter_vec();
let books_count = db.book().count(filter.clone()).exec().await.unwrap();
let books = db
.book()
.find_many(filter)
.with(book::book_annotation::fetch())
.with(book::source::fetch())
.with(
book::book_authors::fetch(vec![])
.with(book_author::author::fetch().with(author::author_annotation::fetch())),
let books_count = sqlx::query_scalar!(
r#"
SELECT COUNT(*) FROM books
WHERE lang = ANY($1) AND
($2::boolean IS NULL OR is_deleted = $2) AND
($3::date IS NULL OR uploaded >= $3) AND
($4::date IS NULL OR uploaded <= $4) AND
($5::integer IS NULL OR id >= $5) AND
($6::integer IS NULL OR id <= $6)
"#,
&book_filter.allowed_langs,
book_filter.is_deleted,
book_filter.uploaded_gte,
book_filter.uploaded_lte,
book_filter.id_gte,
book_filter.id_lte,
)
.with(
book::translations::fetch(vec![])
.with(translator::author::fetch().with(author::author_annotation::fetch())),
.fetch_one(&db.0)
.await
.unwrap()
.unwrap();
let books = sqlx::query_as!(
RemoteBook,
r#"
SELECT
b.id,
b.title,
b.lang,
b.file_type,
b.year,
CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip'] ELSE ARRAY[b.file_type] END AS "available_types!: Vec<String>",
b.uploaded,
(
SELECT
JSON_AGG(
JSON_BUILD_OBJECT(
'id', authors.id,
'first_name', authors.first_name,
'last_name', authors.last_name,
'middle_name', authors.middle_name,
'annotation_exists', EXISTS(
SELECT * FROM author_annotations WHERE author = authors.id
)
.with(book::book_sequences::fetch(vec![]).with(book_sequence::sequence::fetch()))
.order_by(book::id::order(Direction::Asc))
.skip((pagination.page - 1) * pagination.size)
.take(pagination.size)
.exec()
)
)
FROM book_authors
JOIN authors ON authors.id = book_authors.author
WHERE book_authors.book = b.id
) AS "authors!: Vec<Author>",
(
SELECT
JSON_AGG(
JSON_BUILD_OBJECT(
'id', authors.id,
'first_name', authors.first_name,
'last_name', authors.last_name,
'middle_name', authors.middle_name,
'annotation_exists', EXISTS(
SELECT * FROM author_annotations WHERE author = authors.id
)
)
)
FROM translations
JOIN authors ON authors.id = translations.author
WHERE translations.book = b.id
) AS "translators!: Vec<Author>",
(
SELECT
JSON_AGG(
JSON_BUILD_OBJECT(
'id', sequences.id,
'name', sequences.name
)
)
FROM book_sequences
JOIN sequences ON sequences.id = book_sequences.sequence
WHERE book_sequences.book = b.id
) AS "sequences!: Vec<Sequence>",
EXISTS(
SELECT * FROM book_annotations WHERE book = b.id
) AS "annotation_exists!: bool",
(
SELECT
JSON_BUILD_OBJECT(
'id', sources.id,
'name', sources.name
)
FROM sources
WHERE sources.id = b.source
) AS "source!: Source",
b.remote_id
FROM books b
WHERE lang = ANY($1) AND
($2::boolean IS NULL OR is_deleted = $2) AND
($3::date IS NULL OR uploaded >= $3) AND
($4::date IS NULL OR uploaded <= $4) AND
($5::integer IS NULL OR id >= $5) AND
($6::integer IS NULL OR id <= $6)
ORDER BY b.id ASC
OFFSET $7
LIMIT $8
"#,
&book_filter.allowed_langs,
book_filter.is_deleted,
book_filter.uploaded_gte,
book_filter.uploaded_lte,
book_filter.id_gte,
book_filter.id_lte,
(pagination.page - 1) * pagination.size,
pagination.size,
)
.fetch_all(&db.0)
.await
.unwrap();
let page: Page<RemoteBook> = Page::new(
books.iter().map(|item| item.clone().into()).collect(),
books_count,
&pagination,
);
let page: Page<RemoteBook> = Page::new(books, books_count, &pagination);
Json(page)
}
@@ -68,26 +153,59 @@ pub async fn get_base_books(
axum_extra::extract::Query(book_filter): axum_extra::extract::Query<BookFilter>,
pagination: Query<Pagination>,
) -> impl IntoResponse {
let filter = book_filter.get_filter_vec();
let books_count = sqlx::query_scalar!(
r#"
SELECT COUNT(*) FROM books
WHERE lang = ANY($1) AND
($2::boolean IS NULL OR is_deleted = $2) AND
($3::date IS NULL OR uploaded >= $3) AND
($4::date IS NULL OR uploaded <= $4) AND
($5::integer IS NULL OR id >= $5) AND
($6::integer IS NULL OR id <= $6)
"#,
&book_filter.allowed_langs,
book_filter.is_deleted,
book_filter.uploaded_gte,
book_filter.uploaded_lte,
book_filter.id_gte,
book_filter.id_lte,
)
.fetch_one(&db.0)
.await
.unwrap()
.unwrap();
let books_count = db.book().count(filter.clone()).exec().await.unwrap();
let books = db
.book()
.find_many(filter)
.with(book::source::fetch())
.order_by(book::id::order(Direction::Asc))
.skip((pagination.page - 1) * pagination.size)
.take(pagination.size)
.exec()
let books = sqlx::query_as!(
BaseBook,
r#"
SELECT
b.id,
CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip'] ELSE ARRAY[b.file_type] END AS "available_types!: Vec<String>"
FROM books b
WHERE lang = ANY($1) AND
($2::boolean IS NULL OR is_deleted = $2) AND
($3::date IS NULL OR uploaded >= $3) AND
($4::date IS NULL OR uploaded <= $4) AND
($5::integer IS NULL OR id >= $5) AND
($6::integer IS NULL OR id <= $6)
ORDER BY b.id ASC
OFFSET $7
LIMIT $8
"#,
&book_filter.allowed_langs,
book_filter.is_deleted,
book_filter.uploaded_gte,
book_filter.uploaded_lte,
book_filter.id_gte,
book_filter.id_lte,
(pagination.page - 1) * pagination.size,
pagination.size,
)
.fetch_all(&db.0)
.await
.unwrap();
let page: Page<BaseBook> = Page::new(
books.iter().map(|item| item.clone().into()).collect(),
books_count,
&pagination,
);
let page: Page<BaseBook> = Page::new(books, books_count, &pagination);
Json(page)
}
@@ -114,60 +232,217 @@ pub async fn get_random_book(
get_random_item::<BookMeili>(authors_index, filter).await
};
let book = db
.book()
.find_unique(book::id::equals(book_id))
.with(book::book_annotation::fetch())
.with(book::source::fetch())
.with(
book::book_authors::fetch(vec![])
.with(book_author::author::fetch().with(author::author_annotation::fetch())),
let book = sqlx::query_as!(
DetailBook,
r#"
SELECT
b.id,
b.title,
b.lang,
b.file_type,
b.year,
CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip'] ELSE ARRAY[b.file_type] END AS "available_types!: Vec<String>",
b.uploaded,
(
SELECT
JSON_AGG(
JSON_BUILD_OBJECT(
'id', authors.id,
'first_name', authors.first_name,
'last_name', authors.last_name,
'middle_name', authors.middle_name,
'annotation_exists', EXISTS(
SELECT * FROM author_annotations WHERE author = authors.id
)
.with(
book::translations::fetch(vec![])
.with(translator::author::fetch().with(author::author_annotation::fetch())),
)
.with(book::book_sequences::fetch(vec![]).with(book_sequence::sequence::fetch()))
.with(
book::book_genres::fetch(vec![])
.with(book_genre::genre::fetch().with(genre::source::fetch())),
)
.exec()
FROM book_authors
JOIN authors ON authors.id = book_authors.author
WHERE book_authors.book = b.id
) AS "authors!: Vec<Author>",
(
SELECT
JSON_AGG(
JSON_BUILD_OBJECT(
'id', authors.id,
'first_name', authors.first_name,
'last_name', authors.last_name,
'middle_name', authors.middle_name,
'annotation_exists', EXISTS(
SELECT * FROM author_annotations WHERE author = authors.id
)
)
)
FROM translations
JOIN authors ON authors.id = translations.author
WHERE translations.book = b.id
) AS "translators!: Vec<Author>",
(
SELECT
JSON_AGG(
JSON_BUILD_OBJECT(
'id', sequences.id,
'name', sequences.name
)
)
FROM book_sequences
JOIN sequences ON sequences.id = book_sequences.sequence
WHERE book_sequences.book = b.id
) AS "sequences!: Vec<Sequence>",
(
SELECT
JSON_AGG(
JSON_BUILD_OBJECT(
'id', genres.id,
'code', genres.code,
'description', genres.description,
'meta', genres.meta,
'source', JSON_BUILD_OBJECT(
'id', sources.id,
'name', sources.name
)
)
)
FROM book_genres
JOIN genres ON genres.id = book_genres.genre
JOIN sources ON sources.id = genres.source
WHERE book_genres.book = b.id
) AS "genres!: Vec<Genre>",
EXISTS(
SELECT * FROM book_annotations WHERE book = b.id
) AS "annotation_exists!: bool",
(
SELECT
JSON_BUILD_OBJECT(
'id', sources.id,
'name', sources.name
)
FROM sources
WHERE sources.id = b.source
) AS "source!: Source",
b.remote_id,
b.is_deleted,
b.pages
FROM books b
WHERE b.id = $1
"#,
book_id
)
.fetch_optional(&db.0)
.await
.unwrap()
.unwrap();
Json::<DetailBook>(book.into()).into_response()
Json::<DetailBook>(book).into_response()
}
pub async fn get_remote_book(
db: Database,
Path((source_id, remote_id)): Path<(i32, i32)>,
Path((source_id, remote_id)): Path<(i16, i32)>,
) -> impl IntoResponse {
let book = db
.book()
.find_unique(book::source_id_remote_id(source_id, remote_id))
.with(book::book_annotation::fetch())
.with(book::source::fetch())
.with(
book::book_authors::fetch(vec![])
.with(book_author::author::fetch().with(author::author_annotation::fetch())),
let book = sqlx::query_as!(
DetailBook,
r#"
SELECT
b.id,
b.title,
b.lang,
b.file_type,
b.year,
CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip'] ELSE ARRAY[b.file_type] END AS "available_types!: Vec<String>",
b.uploaded,
(
SELECT
JSON_AGG(
JSON_BUILD_OBJECT(
'id', authors.id,
'first_name', authors.first_name,
'last_name', authors.last_name,
'middle_name', authors.middle_name,
'annotation_exists', EXISTS(
SELECT * FROM author_annotations WHERE author = authors.id
)
.with(
book::translations::fetch(vec![])
.with(translator::author::fetch().with(author::author_annotation::fetch())),
)
.with(book::book_sequences::fetch(vec![]).with(book_sequence::sequence::fetch()))
.with(
book::book_genres::fetch(vec![])
.with(book_genre::genre::fetch().with(genre::source::fetch())),
)
.exec()
FROM book_authors
JOIN authors ON authors.id = book_authors.author
WHERE book_authors.book = b.id
) AS "authors!: Vec<Author>",
(
SELECT
JSON_AGG(
JSON_BUILD_OBJECT(
'id', authors.id,
'first_name', authors.first_name,
'last_name', authors.last_name,
'middle_name', authors.middle_name,
'annotation_exists', EXISTS(
SELECT * FROM author_annotations WHERE author = authors.id
)
)
)
FROM translations
JOIN authors ON authors.id = translations.author
WHERE translations.book = b.id
) AS "translators!: Vec<Author>",
(
SELECT
JSON_AGG(
JSON_BUILD_OBJECT(
'id', sequences.id,
'name', sequences.name
)
)
FROM book_sequences
JOIN sequences ON sequences.id = book_sequences.sequence
WHERE book_sequences.book = b.id
) AS "sequences!: Vec<Sequence>",
(
SELECT
JSON_AGG(
JSON_BUILD_OBJECT(
'id', genres.id,
'code', genres.code,
'description', genres.description,
'meta', genres.meta,
'source', JSON_BUILD_OBJECT(
'id', sources.id,
'name', sources.name
)
)
)
FROM book_genres
JOIN genres ON genres.id = book_genres.genre
JOIN sources ON sources.id = genres.source
WHERE book_genres.book = b.id
) AS "genres!: Vec<Genre>",
EXISTS(
SELECT * FROM book_annotations WHERE book = b.id
) AS "annotation_exists!: bool",
(
SELECT
JSON_BUILD_OBJECT(
'id', sources.id,
'name', sources.name
)
FROM sources
WHERE sources.id = b.source
) AS "source!: Source",
b.remote_id,
b.is_deleted,
b.pages
FROM books b
WHERE b.source = $1 AND b.remote_id = $2
"#,
source_id,
remote_id
)
.fetch_optional(&db.0)
.await
.unwrap();
match book {
Some(book) => Json::<DetailBook>(book.into()).into_response(),
Some(book) => Json::<DetailBook>(book).into_response(),
None => StatusCode::NOT_FOUND.into_response(),
}
}
@@ -203,21 +478,72 @@ pub async fn search_books(
let total = result.estimated_total_hits.unwrap();
let book_ids: Vec<i32> = result.hits.iter().map(|a| a.result.id).collect();
let mut books = db
.book()
.find_many(vec![book::id::in_vec(book_ids.clone())])
.with(book::book_annotation::fetch())
.with(book::source::fetch())
.with(
book::book_authors::fetch(vec![])
.with(book_author::author::fetch().with(author::author_annotation::fetch())),
let mut books = sqlx::query_as!(
Book,
r#"
SELECT
b.id,
b.title,
b.lang,
b.file_type,
b.year,
CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip'] ELSE ARRAY[b.file_type] END AS "available_types!: Vec<String>",
b.uploaded,
(
SELECT
JSON_AGG(
JSON_BUILD_OBJECT(
'id', authors.id,
'first_name', authors.first_name,
'last_name', authors.last_name,
'middle_name', authors.middle_name,
'annotation_exists', EXISTS(
SELECT * FROM author_annotations WHERE author = authors.id
)
.with(
book::translations::fetch(vec![])
.with(translator::author::fetch().with(author::author_annotation::fetch())),
)
.with(book::book_sequences::fetch(vec![]).with(book_sequence::sequence::fetch()))
.exec()
)
FROM book_authors
JOIN authors ON authors.id = book_authors.author
WHERE book_authors.book = b.id
) AS "authors!: Vec<Author>",
(
SELECT
JSON_AGG(
JSON_BUILD_OBJECT(
'id', authors.id,
'first_name', authors.first_name,
'last_name', authors.last_name,
'middle_name', authors.middle_name,
'annotation_exists', EXISTS(
SELECT * FROM author_annotations WHERE author = authors.id
)
)
)
FROM translations
JOIN authors ON authors.id = translations.author
WHERE translations.book = b.id
) AS "translators!: Vec<Author>",
(
SELECT
JSON_AGG(
JSON_BUILD_OBJECT(
'id', sequences.id,
'name', sequences.name
)
)
FROM book_sequences
JOIN sequences ON sequences.id = book_sequences.sequence
WHERE book_sequences.book = b.id
) AS "sequences!: Vec<Sequence>",
EXISTS(
SELECT * FROM book_annotations WHERE book = b.id
) AS "annotation_exists!: bool"
FROM books b
WHERE b.id = ANY($1)
"#,
&book_ids
)
.fetch_all(&db.0)
.await
.unwrap();
@@ -228,54 +554,138 @@ pub async fn search_books(
a_pos.cmp(&b_pos)
});
let page: Page<Book> = Page::new(
books.iter().map(|item| item.clone().into()).collect(),
total.try_into().unwrap(),
&pagination,
);
let page: Page<Book> = Page::new(books, total.try_into().unwrap(), &pagination);
Json(page)
}
pub async fn get_book(db: Database, Path(book_id): Path<i32>) -> impl IntoResponse {
let book = db
.book()
.find_unique(book::id::equals(book_id))
.with(book::book_annotation::fetch())
.with(book::source::fetch())
.with(
book::book_authors::fetch(vec![])
.with(book_author::author::fetch().with(author::author_annotation::fetch())),
let book = sqlx::query_as!(
DetailBook,
r#"
SELECT
b.id,
b.title,
b.lang,
b.file_type,
b.year,
CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip'] ELSE ARRAY[b.file_type] END AS "available_types!: Vec<String>",
b.uploaded,
(
SELECT
JSON_AGG(
JSON_BUILD_OBJECT(
'id', authors.id,
'first_name', authors.first_name,
'last_name', authors.last_name,
'middle_name', authors.middle_name,
'annotation_exists', EXISTS(
SELECT * FROM author_annotations WHERE author = authors.id
)
.with(
book::translations::fetch(vec![])
.with(translator::author::fetch().with(author::author_annotation::fetch())),
)
.with(book::book_sequences::fetch(vec![]).with(book_sequence::sequence::fetch()))
.with(
book::book_genres::fetch(vec![])
.with(book_genre::genre::fetch().with(genre::source::fetch())),
)
.exec()
FROM book_authors
JOIN authors ON authors.id = book_authors.author
WHERE book_authors.book = b.id
) AS "authors!: Vec<Author>",
(
SELECT
JSON_AGG(
JSON_BUILD_OBJECT(
'id', authors.id,
'first_name', authors.first_name,
'last_name', authors.last_name,
'middle_name', authors.middle_name,
'annotation_exists', EXISTS(
SELECT * FROM author_annotations WHERE author = authors.id
)
)
)
FROM translations
JOIN authors ON authors.id = translations.author
WHERE translations.book = b.id
) AS "translators!: Vec<Author>",
(
SELECT
JSON_AGG(
JSON_BUILD_OBJECT(
'id', sequences.id,
'name', sequences.name
)
)
FROM book_sequences
JOIN sequences ON sequences.id = book_sequences.sequence
WHERE book_sequences.book = b.id
) AS "sequences!: Vec<Sequence>",
(
SELECT
JSON_AGG(
JSON_BUILD_OBJECT(
'id', genres.id,
'code', genres.code,
'description', genres.description,
'meta', genres.meta,
'source', JSON_BUILD_OBJECT(
'id', sources.id,
'name', sources.name
)
)
)
FROM book_genres
JOIN genres ON genres.id = book_genres.genre
JOIN sources ON sources.id = genres.source
WHERE book_genres.book = b.id
) AS "genres!: Vec<Genre>",
EXISTS(
SELECT * FROM book_annotations WHERE book = b.id
) AS "annotation_exists!: bool",
(
SELECT
JSON_BUILD_OBJECT(
'id', sources.id,
'name', sources.name
)
FROM sources
WHERE sources.id = b.source
) AS "source!: Source",
b.remote_id,
b.is_deleted,
b.pages
FROM books b
WHERE b.id = $1
"#,
book_id
)
.fetch_optional(&db.0)
.await
.unwrap();
match book {
Some(book) => Json::<DetailBook>(book.into()).into_response(),
Some(book) => Json::<DetailBook>(book).into_response(),
None => StatusCode::NOT_FOUND.into_response(),
}
}
pub async fn get_book_annotation(db: Database, Path(book_id): Path<i32>) -> impl IntoResponse {
let book_annotation = db
.book_annotation()
.find_unique(book_annotation::book_id::equals(book_id))
.exec()
let book_annotation = sqlx::query_as!(
BookAnnotation,
r#"
SELECT
id,
title,
text,
file
FROM book_annotations
WHERE book = $1
"#,
book_id
)
.fetch_optional(&db.0)
.await
.unwrap();
match book_annotation {
Some(book_annotation) => Json::<BookAnnotation>(book_annotation.into()).into_response(),
Some(book_annotation) => Json::<BookAnnotation>(book_annotation).into_response(),
None => StatusCode::NOT_FOUND.into_response(),
}
}

View File

@@ -1,16 +1,14 @@
use std::collections::HashSet;
use axum::{extract::Query, response::IntoResponse, routing::get, Json, Router};
use prisma_client_rust::Direction;
use crate::{
prisma::genre,
serializers::{
use crate::serializers::{
genre::{Genre, GenreFilter},
pagination::{Page, Pagination},
},
};
use crate::serializers::source::Source;
use super::Database;
pub async fn get_genres(
@@ -18,41 +16,78 @@ pub async fn get_genres(
pagination: Query<Pagination>,
Query(GenreFilter { meta }): Query<GenreFilter>,
) -> impl IntoResponse {
let filter = {
match meta {
Some(meta) => vec![genre::meta::equals(meta)],
None => vec![],
}
};
let genres_count = sqlx::query_scalar!(
r#"
SELECT COUNT(*) FROM genres
WHERE (meta = $1 OR $1 IS NULL)
"#,
meta
)
.fetch_one(&db.0)
.await
.unwrap()
.unwrap();
let genres_count = db.genre().count(filter.clone()).exec().await.unwrap();
let genres = db
.genre()
.find_many(filter)
.with(genre::source::fetch())
.order_by(genre::id::order(Direction::Asc))
.skip((pagination.page - 1) * pagination.size)
.take(pagination.size)
.exec()
let genres = sqlx::query_as!(
Genre,
r#"
SELECT
genres.id,
genres.remote_id,
genres.code,
genres.description,
genres.meta,
(
SELECT
JSON_BUILD_OBJECT(
'id', sources.id,
'name', sources.name
)
FROM sources
WHERE sources.id = genres.source
) AS "source!: Source"
FROM genres
WHERE (meta = $1 OR $1 IS NULL)
ORDER BY genres.id ASC
LIMIT $2 OFFSET $3
"#,
meta,
pagination.size,
(pagination.page - 1) * pagination.size
)
.fetch_all(&db.0)
.await
.unwrap();
let page: Page<Genre> = Page::new(
genres.iter().map(|item| item.clone().into()).collect(),
genres_count,
&pagination,
);
let page: Page<Genre> = Page::new(genres, genres_count, &pagination);
Json(page)
}
pub async fn get_genre_metas(db: Database) -> impl IntoResponse {
let genres = db
.genre()
.find_many(vec![])
.order_by(genre::id::order(Direction::Asc))
.exec()
let genres = sqlx::query_as!(
Genre,
r#"
SELECT
genres.id,
genres.remote_id,
genres.code,
genres.description,
genres.meta,
(
SELECT
JSON_BUILD_OBJECT(
'id', sources.id,
'name', sources.name
)
FROM sources
WHERE sources.id = genres.source
) AS "source!: Source"
FROM genres
ORDER BY genres.id ASC
"#
)
.fetch_all(&db.0)
.await
.unwrap();

View File

@@ -1,5 +1,3 @@
use std::sync::Arc;
use axum::{
http::{self, Request, StatusCode},
middleware::{self, Next},
@@ -8,10 +6,11 @@ use axum::{
Extension, Router,
};
use axum_prometheus::PrometheusMetricLayer;
use sqlx::PgPool;
use tower_http::trace::{self, TraceLayer};
use tracing::Level;
use crate::{config::CONFIG, db::get_prisma_client, prisma::PrismaClient};
use crate::{config::CONFIG, db::get_postgres_pool};
use self::translators::get_translators_router;
use self::{
@@ -26,7 +25,7 @@ pub mod genres;
pub mod sequences;
pub mod translators;
pub type Database = Extension<Arc<PrismaClient>>;
pub type Database = Extension<PgPool>;
async fn auth(req: Request<axum::body::Body>, next: Next) -> Result<Response, StatusCode> {
let auth_header = req
@@ -48,7 +47,7 @@ async fn auth(req: Request<axum::body::Body>, next: Next) -> Result<Response, St
}
pub async fn get_router() -> Router {
let client = Arc::new(get_prisma_client().await);
let client = get_postgres_pool().await;
let (prometheus_layer, metric_handle) = PrometheusMetricLayer::pair();

View File

@@ -10,9 +10,10 @@ use axum::{
use crate::{
meilisearch::{get_meili_client, SequenceMeili},
prisma::{author, book, book_author, book_sequence, sequence, translator},
serializers::{
allowed_langs::AllowedLangs,
author::Author,
book::BaseBook,
pagination::{Page, PageWithParent, Pagination},
sequence::{Sequence, SequenceBook},
},
@@ -36,15 +37,18 @@ async fn get_random_sequence(
get_random_item::<SequenceMeili>(authors_index, filter).await
};
let sequence = db
.sequence()
.find_unique(sequence::id::equals(sequence_id))
.exec()
let sequence = sqlx::query_as!(
Sequence,
r#"
SELECT id, name FROM sequences WHERE id = $1
"#,
sequence_id
)
.fetch_one(&db.0)
.await
.unwrap()
.unwrap();
Json::<Sequence>(sequence.into())
Json::<Sequence>(sequence)
}
async fn search_sequence(
@@ -78,10 +82,14 @@ async fn search_sequence(
let total = result.estimated_total_hits.unwrap();
let sequence_ids: Vec<i32> = result.hits.iter().map(|a| a.result.id).collect();
let mut sequences = db
.sequence()
.find_many(vec![sequence::id::in_vec(sequence_ids.clone())])
.exec()
let mut sequences = sqlx::query_as!(
Sequence,
r#"
SELECT id, name FROM sequences WHERE id = ANY($1)
"#,
&sequence_ids
)
.fetch_all(&db.0)
.await
.unwrap();
@@ -92,25 +100,25 @@ async fn search_sequence(
a_pos.cmp(&b_pos)
});
let page: Page<Sequence> = Page::new(
sequences.iter().map(|item| item.clone().into()).collect(),
total.try_into().unwrap(),
&pagination,
);
let page: Page<Sequence> = Page::new(sequences, total.try_into().unwrap(), &pagination);
Json(page)
}
async fn get_sequence(db: Database, Path(sequence_id): Path<i32>) -> impl IntoResponse {
let sequence = db
.sequence()
.find_unique(sequence::id::equals(sequence_id))
.exec()
let sequence = sqlx::query_as!(
Sequence,
r#"
SELECT id, name FROM sequences WHERE id = $1
"#,
sequence_id
)
.fetch_optional(&db.0)
.await
.unwrap();
match sequence {
Some(sequence) => Json::<Sequence>(sequence.into()).into_response(),
Some(sequence) => Json::<Sequence>(sequence).into_response(),
None => StatusCode::NOT_FOUND.into_response(),
}
}
@@ -122,27 +130,34 @@ async fn get_sequence_available_types(
AllowedLangs,
>,
) -> impl IntoResponse {
let books = db
.book()
.find_many(vec![
book::is_deleted::equals(false),
book::book_sequences::some(vec![book_sequence::sequence_id::equals(sequence_id)]),
book::lang::in_vec(allowed_langs),
])
.exec()
// TODO: refactor
let books = sqlx::query_as!(
BaseBook,
r#"
SELECT
b.id,
CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip'] ELSE ARRAY[b.file_type] END AS "available_types!: Vec<String>"
FROM books b
JOIN book_sequences bs ON b.id = bs.book
WHERE
b.is_deleted = FALSE AND
bs.sequence = $1 AND
b.lang = ANY($2)
"#,
sequence_id,
&allowed_langs
)
.fetch_all(&db.0)
.await
.unwrap();
let mut file_types: HashSet<String> = HashSet::new();
for book in books {
file_types.insert(book.file_type.clone());
for file_type in book.available_types {
file_types.insert(file_type);
}
if file_types.contains("fb2") {
file_types.insert("epub".to_string());
file_types.insert("mobi".to_string());
file_types.insert("fb2zip".to_string());
}
Json::<Vec<String>>(file_types.into_iter().collect())
@@ -156,10 +171,14 @@ async fn get_sequence_books(
>,
pagination: Query<Pagination>,
) -> impl IntoResponse {
let sequence = db
.sequence()
.find_unique(sequence::id::equals(sequence_id))
.exec()
let sequence = sqlx::query_as!(
Sequence,
r#"
SELECT id, name FROM sequences WHERE id = $1
"#,
sequence_id
)
.fetch_optional(&db.0)
.await
.unwrap();
@@ -168,71 +187,121 @@ async fn get_sequence_books(
None => return StatusCode::NOT_FOUND.into_response(),
};
let books_filter = vec![
book_sequence::book::is(vec![
book::is_deleted::equals(false),
book::lang::in_vec(allowed_langs.clone()),
]),
book_sequence::sequence_id::equals(sequence.id),
];
// let books_filter = vec![
// book_sequence::book::is(vec![
// book::is_deleted::equals(false),
// book::lang::in_vec(allowed_langs.clone()),
// ]),
// book_sequence::sequence_id::equals(sequence.id),
// ];
let books_count = db
.book_sequence()
.count(books_filter.clone())
.exec()
.await
.unwrap();
let book_sequences = db
.book_sequence()
.find_many(vec![
book_sequence::book::is(vec![
book::is_deleted::equals(false),
book::lang::in_vec(allowed_langs.clone()),
]),
book_sequence::sequence_id::equals(sequence.id),
])
.order_by(book_sequence::position::order(
prisma_client_rust::Direction::Asc,
))
.skip((pagination.page - 1) * pagination.size)
.take(pagination.size)
.exec()
.await
.unwrap();
let book_ids: Vec<i32> = book_sequences.iter().map(|a| a.book_id).collect();
let books = db
.book()
.find_many(vec![book::id::in_vec(book_ids)])
.with(book::source::fetch())
.with(book::book_annotation::fetch())
.with(
book::book_authors::fetch(vec![])
.with(book_author::author::fetch().with(author::author_annotation::fetch())),
let books_count = sqlx::query_scalar!(
"SELECT COUNT(*) FROM book_sequences bs
JOIN books b ON b.id = bs.book
WHERE
b.is_deleted = FALSE AND
bs.sequence = $1 AND
b.lang = ANY($2)",
sequence.id,
&allowed_langs
)
.with(
book::translations::fetch(vec![])
.with(translator::author::fetch().with(author::author_annotation::fetch())),
.fetch_one(&db.0)
.await
.unwrap()
.unwrap();
let mut books = sqlx::query_as!(
SequenceBook,
r#"
SELECT
b.id,
b.title,
b.lang,
b.file_type,
b.year,
CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip'] ELSE ARRAY[b.file_type] END AS "available_types!: Vec<String>",
b.uploaded,
(
SELECT
JSON_AGG(
JSON_BUILD_OBJECT(
'id', authors.id,
'first_name', authors.first_name,
'last_name', authors.last_name,
'middle_name', authors.middle_name,
'annotation_exists', EXISTS(
SELECT * FROM author_annotations WHERE author = authors.id
)
.with(book::book_sequences::fetch(vec![
book_sequence::sequence_id::equals(sequence.id),
]))
.order_by(book::id::order(prisma_client_rust::Direction::Asc))
.exec()
)
)
FROM book_authors
JOIN authors ON authors.id = book_authors.author
WHERE book_authors.book = b.id
) AS "authors!: Vec<Author>",
(
SELECT
JSON_AGG(
JSON_BUILD_OBJECT(
'id', authors.id,
'first_name', authors.first_name,
'last_name', authors.last_name,
'middle_name', authors.middle_name,
'annotation_exists', EXISTS(
SELECT * FROM author_annotations WHERE author = authors.id
)
)
)
FROM translations
JOIN authors ON authors.id = translations.author
WHERE translations.book = b.id
) AS "translators!: Vec<Author>",
EXISTS(
SELECT * FROM book_annotations WHERE book = b.id
) AS "annotation_exists!: bool",
bs.position
FROM books b
JOIN book_sequences bs ON b.id = bs.book
WHERE
b.is_deleted = FALSE AND
bs.sequence = $1 AND
b.lang = ANY($2)
ORDER BY bs.position
LIMIT $3 OFFSET $4
"#,
sequence.id,
&allowed_langs,
pagination.size,
(pagination.page - 1) * pagination.size,
)
.fetch_all(&db.0)
.await
.unwrap();
let mut sequence_books = books
.iter()
.map(|item| item.clone().into())
.collect::<Vec<SequenceBook>>();
// let books = db
// .book()
// .find_many(vec![book::id::in_vec(book_ids)])
// .with(book::source::fetch())
// .with(book::book_annotation::fetch())
// .with(
// book::book_authors::fetch(vec![])
// .with(book_author::author::fetch().with(author::author_annotation::fetch())),
// )
// .with(
// book::translations::fetch(vec![])
// .with(translator::author::fetch().with(author::author_annotation::fetch())),
// )
// .with(book::book_sequences::fetch(vec![
// book_sequence::sequence_id::equals(sequence.id),
// ]))
// .order_by(book::id::order(prisma_client_rust::Direction::Asc))
// .exec()
// .await
// .unwrap();
sequence_books.sort_by(|a, b| a.position.cmp(&b.position));
books.sort_by(|a, b| a.position.cmp(&b.position));
let page: PageWithParent<SequenceBook, Sequence> =
PageWithParent::new(sequence.into(), sequence_books, books_count, &pagination);
PageWithParent::new(sequence, books, books_count, &pagination);
Json(page).into_response()
}

View File

@@ -10,15 +10,12 @@ use axum::{
use crate::{
meilisearch::{get_meili_client, AuthorMeili},
prisma::{
author,
book::{self},
book_author, book_sequence, translator,
},
serializers::{
allowed_langs::AllowedLangs,
author::Author,
book::BaseBook,
pagination::{Page, PageWithParent, Pagination},
sequence::Sequence,
translator::TranslatorBook,
},
};
@@ -33,11 +30,25 @@ async fn get_translated_books(
>,
pagination: Query<Pagination>,
) -> impl IntoResponse {
let translator = db
.author()
.find_unique(author::id::equals(translator_id))
.with(author::author_annotation::fetch())
.exec()
let translator = sqlx::query_as!(
Author,
r#"
SELECT
a.id,
a.first_name,
a.last_name,
COALESCE(a.middle_name, '') AS "middle_name!: String",
CASE
WHEN aa.id IS NOT NULL THEN true
ELSE false
END AS "annotation_exists!: bool"
FROM authors a
LEFT JOIN author_annotations aa ON a.id = aa.author
WHERE a.id = $1
"#,
translator_id
)
.fetch_optional(&db.0)
.await
.unwrap();
@@ -46,37 +57,76 @@ async fn get_translated_books(
None => return StatusCode::NOT_FOUND.into_response(),
};
let books_filter = vec![
book::is_deleted::equals(false),
book::translations::some(vec![translator::author_id::equals(translator_id)]),
book::lang::in_vec(allowed_langs.clone()),
];
let books_count = db.book().count(books_filter.clone()).exec().await.unwrap();
let books = db
.book()
.find_many(books_filter)
.with(book::source::fetch())
.with(book::book_annotation::fetch())
.with(
book::book_authors::fetch(vec![])
.with(book_author::author::fetch().with(author::author_annotation::fetch())),
let books_count = sqlx::query_scalar!(
r#"
SELECT COUNT(*)
FROM books b
JOIN book_authors ba ON b.id = ba.book
WHERE
b.is_deleted = false
AND ba.author = $1
AND b.lang = ANY($2)
"#,
translator_id,
&allowed_langs
)
.with(book::book_sequences::fetch(vec![]).with(book_sequence::sequence::fetch()))
.order_by(book::title::order(prisma_client_rust::Direction::Asc))
.skip((pagination.page - 1) * pagination.size)
.take(pagination.size)
.exec()
.fetch_one(&db.0)
.await
.unwrap()
.unwrap();
let books = sqlx::query_as!(
TranslatorBook,
r#"
SELECT
b.id,
b.title,
b.lang,
b.file_type,
b.year,
CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip'] ELSE ARRAY[b.file_type] END AS "available_types!: Vec<String>",
b.uploaded,
(
SELECT
JSON_AGG(
JSON_BUILD_OBJECT(
'id', authors.id,
'first_name', authors.first_name,
'last_name', authors.last_name,
'middle_name', authors.middle_name,
'annotation_exists', EXISTS(
SELECT * FROM author_annotations WHERE author = authors.id
)
)
)
FROM book_authors
JOIN authors ON authors.id = book_authors.author
WHERE book_authors.book = b.id
) AS "authors!: Vec<Author>",
(
SELECT
JSON_AGG(
JSON_BUILD_OBJECT(
'id', sequences.id,
'name', sequences.name
)
)
FROM book_sequences
JOIN sequences ON sequences.id = book_sequences.sequence
WHERE book_sequences.book = b.id
) AS "sequences!: Vec<Sequence>",
EXISTS(
SELECT * FROM book_annotations WHERE book = b.id
) AS "annotation_exists!: bool"
FROM books b
"#,
)
.fetch_all(&db.0)
.await
.unwrap();
let page: PageWithParent<TranslatorBook, Author> = PageWithParent::new(
translator.into(),
books.iter().map(|item| item.clone().into()).collect(),
books_count,
&pagination,
);
let page: PageWithParent<TranslatorBook, Author> =
PageWithParent::new(translator, books, books_count, &pagination);
Json(page).into_response()
}
@@ -88,27 +138,34 @@ async fn get_translated_books_available_types(
AllowedLangs,
>,
) -> impl IntoResponse {
let books = db
.book()
.find_many(vec![
book::is_deleted::equals(false),
book::translations::some(vec![translator::author_id::equals(translator_id)]),
book::lang::in_vec(allowed_langs),
])
.exec()
// TODO: refactor
let books = sqlx::query_as!(
BaseBook,
r#"
SELECT
b.id,
CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip'] ELSE ARRAY[b.file_type] END AS "available_types!: Vec<String>"
FROM books b
JOIN book_authors ba ON b.id = ba.book
WHERE
b.is_deleted = false
AND ba.author = $1
AND b.lang = ANY($2)
"#,
translator_id,
&allowed_langs
)
.fetch_all(&db.0)
.await
.unwrap();
let mut file_types: HashSet<String> = HashSet::new();
for book in books {
file_types.insert(book.file_type.clone());
for file_type in book.available_types {
file_types.insert(file_type);
}
if file_types.contains("fb2") {
file_types.insert("epub".to_string());
file_types.insert("mobi".to_string());
file_types.insert("fb2zip".to_string());
}
Json::<Vec<String>>(file_types.into_iter().collect())
@@ -145,12 +202,25 @@ async fn search_translators(
let total = result.estimated_total_hits.unwrap();
let translator_ids: Vec<i32> = result.hits.iter().map(|a| a.result.id).collect();
let mut translators = db
.author()
.find_many(vec![author::id::in_vec(translator_ids.clone())])
.with(author::author_annotation::fetch())
.order_by(author::id::order(prisma_client_rust::Direction::Asc))
.exec()
let mut translators = sqlx::query_as!(
Author,
r#"
SELECT
a.id,
a.first_name,
a.last_name,
COALESCE(a.middle_name, '') AS "middle_name!: String",
CASE
WHEN aa.id IS NOT NULL THEN true
ELSE false
END AS "annotation_exists!: bool"
FROM authors a
LEFT JOIN author_annotations aa ON a.id = aa.author
WHERE a.id = ANY($1)
"#,
&translator_ids
)
.fetch_all(&db.0)
.await
.unwrap();
@@ -161,11 +231,7 @@ async fn search_translators(
a_pos.cmp(&b_pos)
});
let page: Page<Author> = Page::new(
translators.iter().map(|item| item.clone().into()).collect(),
total.try_into().unwrap(),
&pagination,
);
let page: Page<Author> = Page::new(translators, total.try_into().unwrap(), &pagination);
Json(page)
}