Mirror of https://github.com/flibusta-apps/book_library_server.git (synced 2025-12-06 07:05:36 +01:00)
Rewrite to rust init
.cargo/config.toml (new file: 2 lines)
@@ -0,0 +1,2 @@
[alias]
prisma = "run -p prisma-cli --"
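
Note: with this alias in place, `cargo prisma <args>` expands to `cargo run -p prisma-cli -- <args>`, so e.g. `cargo prisma generate` regenerates the Prisma client through the `prisma-cli` workspace member (the expansion is inferred from the alias definition above).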

.github/workflows/codeql-analysis.yml (vendored, deleted: 35 lines)
@@ -1,35 +0,0 @@
name: "CodeQL"

on:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]
  schedule:
    - cron: '0 12 * * *'

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest
    permissions:
      actions: read
      contents: read
      security-events: write

    strategy:
      fail-fast: false
      matrix:
        language: [ 'python' ]

    steps:
    - name: Checkout repository
      uses: actions/checkout@v3

    - name: Initialize CodeQL
      uses: github/codeql-action/init@v2
      with:
        languages: ${{ matrix.language }}

    - name: Perform CodeQL Analysis
      uses: github/codeql-action/analyze@v2

.github/workflows/linters.yaml (vendored, deleted: 35 lines)
@@ -1,35 +0,0 @@
name: Linters

on:
  push:
    branches:
      - main
  pull_request:
    types: [opened, synchronize, reopened]

jobs:
  Run-Pre-Commit:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 32

      - uses: actions/setup-python@v4
        with:
          python-version: 3.11

      - name: Install pre-commit
        run: pip3 install pre-commit

      - name: Pre-commit (Push)
        env:
          SETUPTOOLS_USE_DISTUTILS: stdlib
        if: ${{ github.event_name == 'push' }}
        run: pre-commit run --source ${{ github.event.before }} --origin ${{ github.event.after }} --show-diff-on-failure

      - name: Pre-commit (Pull-Request)
        env:
          SETUPTOOLS_USE_DISTUTILS: stdlib
        if: ${{ github.event_name == 'pull_request' }}
        run: pre-commit run --source ${{ github.event.pull_request.base.sha }} --origin ${{ github.event.pull_request.head.sha }} --show-diff-on-failure

.gitignore (vendored, 9 changed lines)
@@ -1,7 +1,4 @@
/target

.env
.vscode
.idea
.ruff_cache

__pycache__

venv

@@ -1,18 +0,0 @@
exclude: 'docs|node_modules|migrations|.git|.tox'

repos:
  - repo: https://github.com/ambv/black
    rev: 23.3.0
    hooks:
      - id: black
        language_version: python3.11

  - repo: https://github.com/charliermarsh/ruff-pre-commit
    rev: 'v0.0.265'
    hooks:
      - id: ruff

  - repo: https://github.com/crate-ci/typos
    rev: typos-dict-v0.9.26
    hooks:
      - id: typos

Cargo.lock (generated, new file: 5611 lines)
File diff suppressed because it is too large.

Cargo.toml (new file: 35 lines)
@@ -0,0 +1,35 @@
[package]
name = "book_library_server"
version = "0.1.0"
edition = "2021"

[workspace]
members = [
    "prisma-cli"
]

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
once_cell = "1.18.0"

prisma-client-rust = { git = "https://github.com/Brendonovich/prisma-client-rust", tag = "0.6.8", features = ["postgresql"] }

tokio = { version = "1.28.2", features = ["full"] }

tracing = "0.1.37"
tracing-subscriber = { version = "0.3.17", features = ["env-filter"] }
tower-http = { version = "0.4.3", features = ["trace"] }

axum = { version = "0.6.18", features = ["json"] }
axum-extra = { version = "0.7.7", features = ["query"] }
axum-prometheus = "0.4.0"
serde = { version = "1.0.163", features = ["derive"] }

sentry = { version = "0.31.3", features = ["debug-images"] }

meilisearch-sdk = "0.24.1"

rand = "0.8.5"

chrono = "0.4.26"
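
For orientation, a minimal sketch of how the dependencies declared above (tokio, axum, tracing, axum-prometheus) typically wire together in a service entrypoint; the routes, port, and handler names below are illustrative assumptions, not code from this commit:

```rust
use axum::{routing::get, Router};
use axum_prometheus::PrometheusMetricLayer;

#[tokio::main]
async fn main() {
    // Structured logging via tracing, filtered by RUST_LOG (env-filter feature).
    tracing_subscriber::fmt()
        .with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
        .init();

    // Request-metrics middleware plus a handle for rendering Prometheus output.
    let (prometheus_layer, metric_handle) = PrometheusMetricLayer::pair();

    let app = Router::new()
        .route("/healthcheck", get(|| async { "Ok" })) // assumed route
        .route("/metrics", get(move || async move { metric_handle.render() }))
        .layer(prometheus_layer);

    // axum 0.6-style server startup; the bind address is an assumption.
    axum::Server::bind(&"0.0.0.0:8080".parse().unwrap())
        .serve(app.into_make_service())
        .await
        .unwrap();
}
```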

@@ -1 +0,0 @@
__version__ = "0.1.0"

@@ -1,98 +0,0 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = ./app/alembic

# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator"
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions

# version path separator; As mentioned above, this is the character used to split
# version_locations. Valid values are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os  # default: use os.pathsep

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

@@ -1 +0,0 @@
Generic single-database configuration.

@@ -1,68 +0,0 @@
from logging.config import fileConfig
import os
import sys

from alembic import context
from sqlalchemy.engine import create_engine

from core.db import DATABASE_URL


myPath = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, myPath + "/../../")

config = context.config


from app.models import BaseMeta


target_metadata = BaseMeta.metadata


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    connectable = create_engine(DATABASE_URL)

    with connectable.connect() as connection:
        context.configure(
            connection=connection, target_metadata=target_metadata, compare_type=True
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

@@ -1,24 +0,0 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade():
    ${upgrades if upgrades else "pass"}


def downgrade():
    ${downgrades if downgrades else "pass"}

@@ -1,143 +0,0 @@
"""empty message

Revision ID: 08193b547a80
Revises: b44117a41998
Create Date: 2021-11-19 14:04:16.589304

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = "08193b547a80"
down_revision = "b44117a41998"
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_index(op.f("ix_books_title"), "books", ["title"], unique=False)
    op.create_index(
        op.f("ix_books_is_deleted"),
        "books",
        [sa.text("is_deleted = 'f'")],
        unique=False,
    )
    op.create_index(op.f("ix_books_file_type"), "books", ["file_type"], unique=False)
    op.create_index(op.f("ix_sequences_name"), "sequences", ["name"], unique=False)
    op.create_index(
        op.f("tgrm_books_title"),
        "books",
        ["title"],
        postgresql_using="gin",
        postgresql_ops={"description": "gin_trgm_ops"},
    )
    op.create_index(
        op.f("tgrm_sequences_name"),
        "sequences",
        ["name"],
        postgresql_using="gin",
        postgresql_ops={"description": "gin_trgm_ops"},
    )
    op.create_index(
        op.f("tgrm_authors_lfm"),
        "authors",
        [sa.text("(last_name || ' ' || first_name || ' ' || middle_name)")],
        postgresql_using="gin",
        postgresql_ops={"description": "gin_trgm_ops"},
    )
    op.create_index(
        op.f("tgrm_authors_lf"),
        "authors",
        [sa.text("(last_name || ' ' || first_name)")],
        postgresql_using="gin",
        postgresql_ops={"description": "gin_trgm_ops"},
    )
    op.create_index(
        op.f("tgrm_authors_l"),
        "authors",
        ["last_name"],
        postgresql_using="gin",
        postgresql_ops={"description": "gin_trgm_ops"},
    )
    op.create_index(
        op.f("book_authors_book"),
        "book_authors",
        ["book"],
        unique=False,
        postgresql_using="btree",
    )
    op.create_index(
        op.f("book_authors_author"),
        "book_authors",
        ["author"],
        unique=False,
        postgresql_using="btree",
    )
    op.create_index(
        op.f("book_sequences_book"),
        "book_sequences",
        ["book"],
        unique=False,
        postgresql_using="btree",
    )
    op.create_index(
        op.f("book_sequences_sequence"),
        "book_sequences",
        ["sequence"],
        unique=False,
        postgresql_using="btree",
    )
    op.create_index(
        op.f("translations_book"),
        "translations",
        ["book"],
        unique=False,
        postgresql_using="btree",
    )
    op.create_index(
        op.f("translations_author"),
        "translations",
        ["author"],
        unique=False,
        postgresql_using="btree",
    )
    op.create_index(
        op.f("book_genres_book"),
        "book_genres",
        ["book"],
        unique=False,
        postgresql_using="btree",
    ),
    op.create_index(
        op.f("book_genres_genre"),
        "book_genres",
        ["genre"],
        unique=False,
        postgresql_using="btree",
    )
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f("ix_sequences_name"), table_name="sequences")
    op.drop_index(op.f("ix_books_title"), table_name="books")
    op.drop_index(op.f("ix_books_is_deleted"), table_name="books")
    op.drop_index(op.f("ix_books_file_type"), table_name="books")
    op.drop_index(op.f("tgrm_books_title"), table_name="books")
    op.drop_index(op.f("tgrm_sequences_name"), table_name="books")
    op.drop_index(op.f("tgrm_authors_lfm"), table_name="books")
    op.drop_index(op.f("tgrm_authors_lf"), table_name="books")
    op.drop_index(op.f("tgrm_authors_l"), table_name="books")
    op.drop_index(op.f("book_authors_book"), table_name="book_authors")
    op.drop_index(op.f("book_authors_author"), table_name="book_authors")
    op.drop_index(op.f("book_sequences_book"), table_name="book_sequences")
    op.drop_index(op.f("book_sequences_sequence"), table_name="book_sequences")
    op.drop_index(op.f("translations_book"), table_name="translations")
    op.drop_index(op.f("translations_author"), table_name="translations")
    op.drop_index(op.f("book_genres_book"), table_name="book_genres"),
    op.drop_index(op.f("book_genres_genre"), table_name="book_genres"),
    # ### end Alembic commands ###

@@ -1,221 +0,0 @@
"""empty message

Revision ID: b44117a41998
Revises:
Create Date: 2021-11-18 18:25:06.921287

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql.schema import UniqueConstraint


# revision identifiers, used by Alembic.
revision = "b44117a41998"
down_revision = None
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "sources",
        sa.Column("id", sa.SmallInteger(), nullable=False),
        sa.Column("name", sa.String(length=32), nullable=False),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("name"),
    )
    op.create_table(
        "authors",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("source", sa.SmallInteger(), nullable=False),
        sa.Column("remote_id", sa.Integer(), nullable=False),
        sa.Column("first_name", sa.String(length=256), nullable=False),
        sa.Column("last_name", sa.String(length=256), nullable=False),
        sa.Column("middle_name", sa.String(length=256), nullable=True),
        sa.ForeignKeyConstraint(
            ["source"], ["sources.id"], name="fk_authors_sources_id_source"
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("source", "remote_id", name="uc_authors_source_remote_id"),
    )
    op.create_table(
        "books",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("source", sa.SmallInteger(), nullable=False),
        sa.Column("remote_id", sa.Integer(), nullable=False),
        sa.Column("title", sa.String(length=256), nullable=False),
        sa.Column("lang", sa.String(length=3), nullable=False),
        sa.Column("file_type", sa.String(length=4), nullable=False),
        sa.Column("uploaded", sa.Date(), nullable=False),
        sa.Column(
            "is_deleted", sa.Boolean(), server_default=sa.text("false"), nullable=False
        ),
        sa.ForeignKeyConstraint(
            ["source"], ["sources.id"], name="fk_books_sources_id_source"
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("source", "remote_id", name="uc_books_source_remote_id"),
    )
    op.create_table(
        "genres",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("source", sa.SmallInteger(), nullable=False),
        sa.Column("remote_id", sa.Integer(), nullable=False),
        sa.Column("code", sa.String(length=45), nullable=False),
        sa.Column("description", sa.String(length=99), nullable=False),
        sa.Column("meta", sa.String(length=45), nullable=False),
        sa.ForeignKeyConstraint(
            ["source"], ["sources.id"], name="fk_genres_sources_id_source"
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("source", "remote_id", name="uc_genres_source_remote_id"),
    )
    op.create_table(
        "sequences",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("source", sa.SmallInteger(), nullable=False),
        sa.Column("remote_id", sa.Integer(), nullable=False),
        sa.Column("name", sa.String(length=256), nullable=False),
        sa.ForeignKeyConstraint(
            ["source"], ["sources.id"], name="fk_sequences_sources_id_source"
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint(
            "source", "remote_id", name="uc_sequences_source_remote_id"
        ),
    )
    op.create_table(
        "author_annotations",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("author", sa.Integer(), nullable=False),
        sa.Column("title", sa.String(length=256), nullable=False),
        sa.Column("text", sa.Text(), nullable=False),
        sa.Column("file", sa.String(length=256), nullable=True),
        sa.ForeignKeyConstraint(
            ["author"], ["authors.id"], name="fk_author_annotations_authors_id_author"
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("author"),
    )
    op.create_table(
        "book_annotations",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("book", sa.Integer(), nullable=False),
        sa.Column("title", sa.String(length=256), nullable=False),
        sa.Column("text", sa.Text(), nullable=False),
        sa.Column("file", sa.String(length=256), nullable=True),
        sa.ForeignKeyConstraint(
            ["book"], ["books.id"], name="fk_book_annotations_books_id_book"
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("book"),
    )
    op.create_table(
        "book_authors",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("author", sa.Integer(), nullable=True),
        sa.Column("book", sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(
            ["author"],
            ["authors.id"],
            name="fk_book_authors_authors_author_id",
            onupdate="CASCADE",
            ondelete="CASCADE",
        ),
        sa.ForeignKeyConstraint(
            ["book"],
            ["books.id"],
            name="fk_book_authors_books_book_id",
            onupdate="CASCADE",
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("book", "author", name="uc_book_authors_book_author"),
    )
    op.create_table(
        "book_genres",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("genre", sa.Integer(), nullable=True),
        sa.Column("book", sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(
            ["book"],
            ["books.id"],
            name="fk_book_genres_books_book_id",
            onupdate="CASCADE",
            ondelete="CASCADE",
        ),
        sa.ForeignKeyConstraint(
            ["genre"],
            ["genres.id"],
            name="fk_book_genres_genres_genre_id",
            onupdate="CASCADE",
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("book", "genre", name="uc_book_genres_book_genre"),
    )
    op.create_table(
        "book_sequences",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("position", sa.SmallInteger(), nullable=False),
        sa.Column("sequence", sa.Integer(), nullable=True),
        sa.Column("book", sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(
            ["book"],
            ["books.id"],
            name="fk_book_sequences_books_book_id",
            onupdate="CASCADE",
            ondelete="CASCADE",
        ),
        sa.ForeignKeyConstraint(
            ["sequence"],
            ["sequences.id"],
            name="fk_book_sequences_sequences_sequence_id",
            onupdate="CASCADE",
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("book", "sequence", name="uc_book_sequences_book_sequence"),
    )
    op.create_table(
        "translations",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("position", sa.SmallInteger(), nullable=False),
        sa.Column("author", sa.Integer(), nullable=True),
        sa.Column("book", sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(
            ["author"],
            ["authors.id"],
            name="fk_translations_authors_author_id",
            onupdate="CASCADE",
            ondelete="CASCADE",
        ),
        sa.ForeignKeyConstraint(
            ["book"],
            ["books.id"],
            name="fk_translations_books_book_id",
            onupdate="CASCADE",
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("book", "author", name="uc_translations_book_author"),
    )
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table("translations")
    op.drop_table("book_sequences")
    op.drop_table("book_genres")
    op.drop_table("book_authors")
    op.drop_table("book_annotations")
    op.drop_table("author_annotations")
    op.drop_table("sequences")
    op.drop_table("genres")
    op.drop_table("books")
    op.drop_table("authors")
    op.drop_table("sources")
    # ### end Alembic commands ###

@@ -1,29 +0,0 @@
"""empty message

Revision ID: b44117a41999
Revises: 08193b547a80
Create Date: 2021-11-18 18:25:06.921287

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql.schema import UniqueConstraint


# revision identifiers, used by Alembic.
revision = "b44117a41999"
down_revision = "08193b547a80"
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column("books", sa.Column("pages", sa.Integer(), nullable=True))
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column("books", "pages")
    # ### end Alembic commands ###

@@ -1,22 +0,0 @@
from typing import Optional

from fastapi import HTTPException, Query, Security, status

from core.auth import default_security
from core.config import env_config


def check_token(api_key: str = Security(default_security)):
    if api_key != env_config.API_KEY:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN, detail="Wrong api key!"
        )


def get_allowed_langs(
    allowed_langs: Optional[list[str]] = Query(None),
) -> frozenset[str]:
    if allowed_langs is not None:
        return frozenset(allowed_langs)

    return frozenset(("ru", "be", "uk"))

@@ -1,40 +0,0 @@
from datetime import date
from typing import Optional

from fastapi import Query

from app.depends import get_allowed_langs


def get_book_filter(
    is_deleted: Optional[bool] = None,
    allowed_langs: Optional[list[str]] = Query(None),  # type: ignore
    uploaded_gte: Optional[date] = None,
    uploaded_lte: Optional[date] = None,
    id_gte: Optional[int] = None,
    id_lte: Optional[int] = None,
    no_cache: bool = False,
) -> dict:
    result = {}

    if is_deleted is not None:
        result["is_deleted"] = is_deleted

    if not (allowed_langs and "__all__" in allowed_langs):
        result["lang__in"] = get_allowed_langs(allowed_langs)

    if uploaded_gte:
        result["uploaded__gte"] = uploaded_gte

    if uploaded_lte:
        result["uploaded__lte"] = uploaded_lte

    if id_gte:
        result["id__gte"] = id_gte

    if id_lte:
        result["id__lte"] = id_lte

    result["no_cache"] = no_cache

    return result

@@ -1,10 +0,0 @@
from typing import Optional


def get_genre_filter(meta: Optional[str] = None) -> dict:
    result = {}

    if meta:
        result["meta"] = meta

    return result

@@ -1,189 +0,0 @@
from datetime import date
from typing import Optional

import ormar
from sqlalchemy import text

from core.db import database, metadata


class BaseMeta(ormar.ModelMeta):
    metadata = metadata
    database = database


class Source(ormar.Model):
    class Meta(BaseMeta):
        tablename = "sources"

    id: int = ormar.SmallInteger(primary_key=True, nullable=False)  # type: ignore
    name: str = ormar.String(max_length=32, nullable=False, unique=True)  # type: ignore


class Genre(ormar.Model):
    class Meta(BaseMeta):
        tablename = "genres"
        constraints = [
            ormar.UniqueColumns("source", "remote_id"),
        ]

    id: int = ormar.Integer(primary_key=True, nullable=False)  # type: ignore

    source: Source = ormar.ForeignKey(Source, nullable=False)
    remote_id: int = ormar.Integer(minimum=0, nullable=False)  # type: ignore

    code: str = ormar.String(max_length=45, nullable=False)  # type: ignore
    description: str = ormar.String(max_length=99, nullable=False)  # type: ignore
    meta: str = ormar.String(max_length=45, nullable=False)  # type: ignore


class Author(ormar.Model):
    class Meta(BaseMeta):
        tablename = "authors"
        constraints = [
            ormar.UniqueColumns("source", "remote_id"),
        ]

    id: int = ormar.Integer(primary_key=True, nullable=False)  # type: ignore

    source: Source = ormar.ForeignKey(Source, nullable=False)
    remote_id: int = ormar.Integer(minimum=0, nullable=False)  # type: ignore

    first_name: str = ormar.String(max_length=256, nullable=False)  # type: ignore
    last_name: str = ormar.String(max_length=256, nullable=False)  # type: ignore
    middle_name: str = ormar.String(max_length=256, nullable=True)  # type: ignore

    @ormar.property_field
    def annotation_exists(self) -> bool:
        return len(self.annotations) != 0


class AuthorAnnotation(ormar.Model):
    class Meta(BaseMeta):
        tablename = "author_annotations"

    id = ormar.Integer(primary_key=True, nullable=False)

    author: Author = ormar.ForeignKey(
        Author, nullable=False, unique=True, related_name="annotations"
    )

    title: str = ormar.String(
        max_length=256, nullable=False, default=""
    )  # type: ignore
    text: str = ormar.Text(nullable=False, default="")  # type: ignore
    file: str = ormar.String(max_length=256, nullable=True)  # type: ignore


class Sequence(ormar.Model):
    class Meta(BaseMeta):
        tablename = "sequences"
        constraints = [
            ormar.UniqueColumns("source", "remote_id"),
        ]

    id: int = ormar.Integer(primary_key=True, nullable=False)  # type: ignore

    source: Source = ormar.ForeignKey(Source, nullable=False)
    remote_id: int = ormar.Integer(minimum=0, nullable=False)  # type: ignore

    name: str = ormar.String(max_length=256, nullable=False, index=True)  # type: ignore


class BookAuthors(ormar.Model):
    class Meta(BaseMeta):
        tablename = "book_authors"

    id: int = ormar.Integer(primary_key=True, nullable=False)  # type: ignore


class BookGenres(ormar.Model):
    class Meta(BaseMeta):
        tablename = "book_genres"

    id: int = ormar.Integer(primary_key=True, nullable=False)  # type: ignore


class BookSequences(ormar.Model):
    class Meta(BaseMeta):
        tablename = "book_sequences"
        orders_by = [
            "position",
        ]

    id: int = ormar.Integer(primary_key=True, nullable=False)  # type: ignore

    position: int = ormar.SmallInteger(minimum=0, nullable=False)  # type: ignore


class Translation(ormar.Model):
    class Meta(BaseMeta):
        tablename = "translations"
        orders_by = [
            "position",
        ]

    id: int = ormar.Integer(primary_key=True, nullable=False)  # type: ignore

    position: int = ormar.SmallInteger(nullable=False)  # type: ignore


class Book(ormar.Model):
    class Meta(BaseMeta):
        tablename = "books"
        constraints = [
            ormar.UniqueColumns("source", "remote_id"),
        ]

    id: int = ormar.Integer(primary_key=True, nullable=False)  # type: ignore

    source: Source = ormar.ForeignKey(Source, nullable=False)
    remote_id: int = ormar.Integer(minimum=0, nullable=False)  # type: ignore

    title: str = ormar.String(
        max_length=256, nullable=False, index=True
    )  # type: ignore
    lang: str = ormar.String(max_length=3, nullable=False, index=True)  # type: ignore
    file_type: str = ormar.String(
        max_length=4, nullable=False, index=True
    )  # type: ignore
    uploaded: date = ormar.Date()  # type: ignore
    is_deleted: bool = ormar.Boolean(
        default=False, server_default=text("false"), nullable=False
    )
    pages: Optional[int] = ormar.Integer(minimum=0, nullable=True)  # type: ignore

    authors = ormar.ManyToMany(Author, through=BookAuthors)
    translators = ormar.ManyToMany(
        Author, through=Translation, related_name="translated_books"
    )
    genres = ormar.ManyToMany(Genre, through=BookGenres)
    sequences = ormar.ManyToMany(Sequence, through=BookSequences)

    @ormar.property_field
    def available_types(self) -> list[str]:
        if self.file_type == "fb2" and self.source.name == "flibusta":
            return ["fb2", "fb2zip", "epub", "mobi"]

        return [self.file_type]

    @ormar.property_field
    def annotation_exists(self) -> bool:
        return len(self.annotations) != 0


class BookAnnotation(ormar.Model):
    class Meta(BaseMeta):
        tablename = "book_annotations"

    id = ormar.Integer(primary_key=True, nullable=False)

    book: Book = ormar.ForeignKey(
        Book, nullable=False, unique=True, related_name="annotations"
    )

    title: str = ormar.String(
        max_length=256, nullable=False, default=""
    )  # type: ignore
    text: str = ormar.Text(nullable=False, default="")  # type: ignore
    file: str = ormar.String(max_length=256, nullable=True)  # type: ignore

@@ -1,48 +0,0 @@
from datetime import date
from typing import Optional

from fastapi_pagination import Page
from pydantic import BaseModel

from app.serializers.sequence import Sequence


class Author(BaseModel):
    id: int

    first_name: str
    last_name: str
    middle_name: Optional[str]

    annotation_exists: bool


class AuthorBook(BaseModel):
    id: int
    title: str
    lang: str
    file_type: str
    available_types: list[str]
    uploaded: date
    translators: list[Author]
    sequences: list[Sequence]
    annotation_exists: bool


class TranslatedBook(BaseModel):
    id: int
    title: str
    lang: str
    file_type: str
    available_types: list[str]
    authors: list[Author]
    sequences: list[Sequence]
    annotation_exists: bool


class PageWithAuthorBook(Page[AuthorBook]):
    parent_item: Author | None


class PageWithTranslatedBook(Page[TranslatedBook]):
    parent_item: Author | None

@@ -1,10 +0,0 @@
from typing import Optional

from pydantic import BaseModel


class AuthorAnnotation(BaseModel):
    id: int
    title: str
    text: str
    file: Optional[str]

@@ -1,46 +0,0 @@
from datetime import date
from typing import Optional

from pydantic import BaseModel

from app.serializers.author import Author
from app.serializers.sequence import Sequence


class BookSource(BaseModel):
    id: int
    name: str


class BookGenre(BaseModel):
    id: int
    description: str


class Book(BaseModel):
    id: int
    title: str
    lang: str
    file_type: str
    available_types: list[str]
    uploaded: date
    authors: list[Author]
    translators: list[Author]
    sequences: list[Sequence]
    annotation_exists: bool


class RemoteBook(Book):
    source: BookSource
    remote_id: int


class BookBaseInfo(BaseModel):
    id: int
    available_types: list[str]


class BookDetail(RemoteBook):
    genres: list[BookGenre]
    is_deleted: bool
    pages: Optional[int]

@@ -1,10 +0,0 @@
from typing import Optional

from pydantic import BaseModel


class BookAnnotation(BaseModel):
    id: int
    title: str
    text: str
    file: Optional[str]

@@ -1,15 +0,0 @@
from pydantic import BaseModel


class GenreSource(BaseModel):
    id: int
    name: str


class Genre(BaseModel):
    id: int
    source: GenreSource
    remote_id: int
    code: str
    description: str
    meta: str

@@ -1,36 +0,0 @@
from datetime import date
from typing import Optional

from fastapi_pagination import Page
from pydantic import BaseModel


class Sequence(BaseModel):
    id: int
    name: str


class Author(BaseModel):
    id: int

    first_name: str
    last_name: str
    middle_name: Optional[str]

    annotation_exists: bool


class Book(BaseModel):
    id: int
    title: str
    lang: str
    file_type: str
    available_types: list[str]
    uploaded: date
    authors: list[Author]
    translators: list[Author]
    annotation_exists: bool


class PageWithSequence(Page[Book]):
    parent_item: Sequence | None

@@ -1,6 +0,0 @@
from pydantic import BaseModel


class Source(BaseModel):
    id: int
    name: str

@@ -1,21 +0,0 @@
from pydantic import BaseModel


class TranslationBook(BaseModel):
    id: int
    title: str
    lang: str
    file_type: str


class TranslationTranslator(BaseModel):
    id: int
    first_name: str
    last_name: str
    middle_name: str


class Translation(BaseModel):
    book: TranslationBook
    translator: TranslationTranslator
    position: int

@@ -1,86 +0,0 @@
from typing import TypedDict

from app.models import Author
from app.services.common import GetRandomService, MeiliSearchService, TRGMSearchService


GET_OBJECT_IDS_QUERY = """
SELECT ARRAY(
    WITH filtered_authors AS (
        SELECT
            id,
            GREATEST(
                similarity(
                    (last_name || ' ' || first_name || ' ' || middle_name),
                    :query
                ),
                similarity((last_name || ' ' || first_name), :query),
                similarity((last_name), :query)
            ) as sml,
            (
                SELECT count(*) FROM book_authors
                LEFT JOIN books ON (books.id = book)
                WHERE author = authors.id AND books.is_deleted = 'f'
                AND books.lang = ANY(:langs ::text[])
            ) as books_count
        FROM authors
        WHERE (
            (last_name || ' ' || first_name || ' ' || middle_name) % :query OR
            (last_name || ' ' || first_name) % :query OR
            (last_name) % :query
        ) AND
        EXISTS (
            SELECT * FROM book_authors
            LEFT JOIN books ON (books.id = book)
            WHERE author = authors.id AND books.is_deleted = 'f'
            AND books.lang = ANY(:langs ::text[])
        )
    )
    SELECT fauthors.id FROM filtered_authors as fauthors
    ORDER BY fauthors.sml DESC, fauthors.books_count DESC
    LIMIT 210
);
"""


class AuthorTGRMSearchService(TRGMSearchService):
    MODEL_CLASS = Author
    PREFETCH_RELATED = ["source"]
    SELECT_RELATED = ["annotations"]

    GET_OBJECT_IDS_QUERY = GET_OBJECT_IDS_QUERY


GET_OBJECTS_ID_QUERY = """
WITH filtered_authors AS (
    SELECT id FROM authors
    WHERE EXISTS (
        SELECT * FROM book_authors
        LEFT JOIN books ON (books.id = book)
        WHERE author = authors.id AND books.is_deleted = 'f'
        AND books.lang = ANY(:langs ::text[])
    )
)
SELECT id FROM filtered_authors;
"""


class RandomAuthorServiceQuery(TypedDict):
    allowed_langs: frozenset[str]


class GetRandomAuthorService(GetRandomService[Author, RandomAuthorServiceQuery]):
    MODEL_CLASS = Author  # type: ignore
    PREFETCH_RELATED = ["source"]
    SELECT_RELATED = ["annotations"]

    GET_OBJECTS_ID_QUERY = GET_OBJECTS_ID_QUERY


class AuthorMeiliSearchService(MeiliSearchService):
    MODEL_CLASS = Author
    PREFETCH_RELATED = ["source"]
    SELECT_RELATED = ["annotations"]

    MS_INDEX_NAME = "authors"
    MS_INDEX_LANG_KEY = "author_langs"

@@ -1,98 +0,0 @@
from typing import Optional, TypedDict

from app.models import Book as BookDB
from app.services.common import (
    BaseFilterService,
    GetRandomService,
    MeiliSearchService,
    TRGMSearchService,
)


GET_OBJECT_IDS_QUERY = """
SELECT ARRAY(
    WITH filtered_books AS (
        SELECT id, similarity(title, :query) as sml FROM books
        WHERE books.title % :query AND books.is_deleted = 'f'
        AND books.lang = ANY(:langs ::text[])
    )
    SELECT fbooks.id FROM filtered_books as fbooks
    ORDER BY fbooks.sml DESC, fbooks.id
    LIMIT 210
);
"""


class BookTGRMSearchService(TRGMSearchService):
    MODEL_CLASS = BookDB
    PREFETCH_RELATED = ["source"]
    SELECT_RELATED = ["authors", "translators", "annotations"]

    GET_OBJECT_IDS_QUERY = GET_OBJECT_IDS_QUERY


class BookBaseInfoFilterService(BaseFilterService):
    MODEL_CLASS = BookDB
    PREFETCH_RELATED = []
    SELECT_RELATED = []


class BookFilterService(BaseFilterService):
    MODEL_CLASS = BookDB
    PREFETCH_RELATED = ["source"]
    SELECT_RELATED = ["authors", "translators", "annotations"]


GET_OBJECTS_ID_QUERY = """
WITH filtered_books AS (
    SELECT id FROM books
    WHERE books.is_deleted = 'f' AND books.lang = ANY(:langs ::text[])
)
SELECT id FROM filtered_books;
"""


GET_OBJECTS_ID_BY_GENRE_QUERY = """
WITH filtered_books AS (
    SELECT books.id FROM books
    LEFT JOIN book_genres ON (book_genres.book = books.id)
    WHERE books.is_deleted = 'f' AND book_genres.genre = :genre
    AND books.lang = ANY(:langs ::text[])
)
SELECT id FROM filtered_books;
"""


class RandomBookServiceQuery(TypedDict):
    genre: Optional[int]
    allowed_langs: frozenset[str]


class GetRandomBookService(GetRandomService[BookDB, RandomBookServiceQuery]):
    MODEL_CLASS = BookDB  # type: ignore
    PREFETCH_RELATED = ["source"]
    SELECT_RELATED = ["authors", "translators", "annotations"]

    GET_OBJECTS_ID_QUERY = GET_OBJECTS_ID_QUERY
    GET_OBJECTS_ID_BY_GENRE_QUERY = GET_OBJECTS_ID_BY_GENRE_QUERY

    @classmethod
    async def _get_objects_from_db(cls, query: RandomBookServiceQuery) -> list[int]:
        if query.get("genre") is None:
            ex_query = cls.objects_id_query
            params = {"langs": query["allowed_langs"]}
        else:
            ex_query = cls.GET_OBJECTS_ID_BY_GENRE_QUERY
            params = {"langs": query["allowed_langs"], "genre": query["genre"]}

        objects = await cls.database.fetch_all(ex_query, params)
        return [obj["id"] for obj in objects]


class BookMeiliSearchService(MeiliSearchService):
    MODEL_CLASS = BookDB
    PREFETCH_RELATED = ["source"]
    SELECT_RELATED = ["authors", "translators", "annotations", "sequences"]

    MS_INDEX_NAME = "books"
    MS_INDEX_LANG_KEY = "lang"

@@ -1,347 +0,0 @@
import abc
import asyncio
from concurrent.futures import ThreadPoolExecutor
import hashlib
from random import choice
from typing import Generic, Optional, TypedDict, TypeVar, Union

from databases import Database
from fastapi_pagination import Page
from fastapi_pagination.api import resolve_params
from fastapi_pagination.bases import AbstractParams, RawParams
import meilisearch
import orjson
from ormar import Model, QuerySet
from redis import asyncio as aioredis
from sqlalchemy import Table

from app.utils.orjson_default import default as orjson_default
from core.config import env_config


MODEL = TypeVar("MODEL", bound=Model)
QUERY = TypeVar("QUERY", bound=TypedDict)


class BaseService(Generic[MODEL, QUERY], abc.ABC):
    MODEL_CLASS: Optional[MODEL] = None
    CACHE_PREFIX: str = ""
    CUSTOM_MODEL_CACHE_NAME: Optional[str] = None
    CACHE_TTL = 6 * 60 * 60

    @classmethod
    @property
    def model(cls) -> MODEL:
        assert cls.MODEL_CLASS is not None, f"MODEL in {cls.__name__} don't set!"
        return cls.MODEL_CLASS

    @classmethod
    @property
    def table(cls) -> Table:
        return cls.model.Meta.table

    @classmethod
    @property
    def database(cls) -> Database:
        return cls.model.Meta.database

    @classmethod
    @property
    def cache_prefix(cls) -> str:
        return cls.CUSTOM_MODEL_CACHE_NAME or cls.model.Meta.tablename

    @staticmethod
    def _get_query_hash(query: QUERY) -> str:
        json_value = orjson.dumps(query, orjson_default, option=orjson.OPT_SORT_KEYS)
        return hashlib.md5(json_value).hexdigest()

    @classmethod
    def get_cache_key(cls, query: QUERY) -> str:
        model_class_name = cls.cache_prefix
        query_hash = cls._get_query_hash(query)
        cache_key = f"{model_class_name}_{query_hash}"
        return f"{cls.CACHE_PREFIX}_{cache_key}" if cls.CACHE_PREFIX else cache_key

    @classmethod
    async def cache_object_ids(
        cls,
        query: QUERY,
        object_ids: list[int],
        redis: aioredis.Redis,
    ) -> bool:
        try:
            key = cls.get_cache_key(query)
            active_key = f"{key}_active"

            p = redis.pipeline()

            await p.delete(key)
            await p.set(active_key, 1, ex=cls.CACHE_TTL)
            await p.sadd(key, *object_ids)

            await p.execute()

            return True
        except aioredis.RedisError as e:
            print(e)
            return False


class BaseSearchService(Generic[MODEL, QUERY], BaseService[MODEL, QUERY]):
    SELECT_RELATED: Optional[Union[list[str], str]] = None
    PREFETCH_RELATED: Optional[Union[list[str], str]] = None

    @classmethod
    def get_params(cls) -> AbstractParams:
        return resolve_params()

    @classmethod
    def get_raw_params(cls) -> RawParams:
        return resolve_params().to_raw_params()

    @classmethod
    async def _get_object_ids(cls, query: QUERY) -> list[int]:
        ...

    @classmethod
    async def get_cached_ids(
        cls,
        query: QUERY,
        redis: aioredis.Redis,
        params: RawParams,
    ) -> Optional[tuple[int, list[int]]]:
        try:
            key = cls.get_cache_key(query)
            active_key = f"{key}_active"

            if not await redis.exists(active_key):
                return None

            assert params.offset is not None
            assert params.limit is not None

            objects_count, objects = await asyncio.gather(
                redis.llen(key),
                redis.lrange(key, params.offset, params.offset + params.limit),
            )

            return objects_count, [int(item.decode()) for item in objects]
        except aioredis.RedisError as e:
            print(e)
            return None

    @classmethod
    async def get_object_ids(
        cls, query: QUERY, redis: aioredis.Redis, no_cache: bool
    ) -> tuple[int, list[int]]:
        params = cls.get_raw_params()

        if not no_cache and (
            cached_object_ids := await cls.get_cached_ids(query, redis, params)
        ):
            return cached_object_ids

        assert params.limit is not None
        assert params.offset is not None

        object_ids = await cls._get_object_ids(query)
        limited_object_ids = object_ids[params.offset : params.offset + params.limit]

        if not no_cache and len(object_ids) != 0:
            await cls.cache_object_ids(query, object_ids, redis)

        return len(object_ids), limited_object_ids

    @classmethod
    async def get_limited_objects(
        cls, query: QUERY, redis: aioredis.Redis, no_cache: bool
    ) -> tuple[int, list[MODEL]]:
        count, object_ids = await cls.get_object_ids(query, redis, no_cache)

        queryset: QuerySet[MODEL] = cls.model.objects

        if cls.PREFETCH_RELATED is not None:
            queryset = queryset.prefetch_related(cls.PREFETCH_RELATED)

        if cls.SELECT_RELATED:
            queryset = queryset.select_related(cls.SELECT_RELATED)

        db_objects = await queryset.filter(id__in=object_ids).all()
        return count, sorted(db_objects, key=lambda o: object_ids.index(o.id))

    @classmethod
    async def get(cls, query: QUERY, redis: aioredis.Redis) -> Page[MODEL]:
        no_cache: bool = query.pop("no_cache", False)  # type: ignore

        params = cls.get_params()

        total, objects = await cls.get_limited_objects(query, redis, no_cache)

        return Page.create(items=objects, total=total, params=params)


class SearchQuery(TypedDict):
    query: str
    allowed_langs: frozenset[str]


class TRGMSearchService(Generic[MODEL], BaseSearchService[MODEL, SearchQuery]):
    GET_OBJECT_IDS_QUERY: Optional[str] = None

    @classmethod
    @property
    def object_ids_query(cls) -> str:
        assert (
            cls.GET_OBJECT_IDS_QUERY is not None
        ), f"GET_OBJECT_IDS_QUERY in {cls.__name__} don't set!"
        return cls.GET_OBJECT_IDS_QUERY

    @classmethod
    async def _get_object_ids(cls, query: SearchQuery) -> list[int]:
        row = await cls.database.fetch_one(
            cls.object_ids_query,
            {"query": query["query"], "langs": query["allowed_langs"]},
        )

        if row is None:
            raise ValueError("Something is wrong!")

        return row["array"]


class MeiliSearchService(Generic[MODEL], BaseSearchService[MODEL, SearchQuery]):
    MS_INDEX_NAME: Optional[str] = None
    MS_INDEX_LANG_KEY: Optional[str] = None

    _executor = ThreadPoolExecutor(2)

    @classmethod
    @property
    def lang_key(cls) -> str:
        assert (
            cls.MS_INDEX_LANG_KEY is not None
        ), f"MS_INDEX_LANG_KEY in {cls.__name__} don't set!"
        return cls.MS_INDEX_LANG_KEY

    @classmethod
    @property
    def index_name(cls) -> str:
        assert (
            cls.MS_INDEX_NAME is not None
        ), f"MS_INDEX_NAME in {cls.__name__} don't set!"
        return cls.MS_INDEX_NAME

    @classmethod
    def get_allowed_langs_filter(cls, allowed_langs: frozenset[str]) -> list[str]:
        langs_values = ", ".join(allowed_langs)
        return [f"{cls.lang_key} IN [{langs_values}]"]

    @classmethod
    def make_request(
        cls, query: str, allowed_langs_filter: list[str], offset: int, limit: int
    ) -> tuple[int, list[int]]:
        client = meilisearch.Client(env_config.MEILI_HOST, env_config.MEILI_MASTER_KEY)
        index = client.index(cls.index_name)

        result = index.search(
            query,
            {
                "filter": allowed_langs_filter,
                "offset": offset,
                "limit": limit,
                "attributesToRetrieve": ["id"],
            },
        )

        total: int = result["estimatedTotalHits"]
        ids: list[int] = [r["id"] for r in result["hits"][:total]]

        return total, ids

    @classmethod
    async def _get_object_ids(cls, query: SearchQuery) -> tuple[int, list[int]]:
        params = cls.get_raw_params()
        allowed_langs_filter = cls.get_allowed_langs_filter(query["allowed_langs"])

        return await asyncio.get_event_loop().run_in_executor(
            cls._executor,
            cls.make_request,
            query["query"],
            allowed_langs_filter,
            params.offset,
            params.limit,
        )

    @classmethod
    async def get_object_ids(
        cls, query: SearchQuery, redis: aioredis.Redis, no_cache: bool
    ) -> tuple[int, list[int]]:
        return await cls._get_object_ids(query)


class GetRandomService(Generic[MODEL, QUERY], BaseService[MODEL, QUERY]):
    GET_OBJECTS_ID_QUERY: Optional[str] = None
    CACHE_PREFIX: str = "random"

    @classmethod
    @property
    def objects_id_query(cls) -> str:
        assert (
            cls.GET_OBJECTS_ID_QUERY is not None
        ), f"GET_OBJECT_IDS_QUERY in {cls.__name__} don't set!"
        return cls.GET_OBJECTS_ID_QUERY

    @classmethod
    async def _get_objects_from_db(cls, query: QUERY) -> list[int]:
        objects = await cls.database.fetch_all(
            cls.objects_id_query, {"langs": query["allowed_langs"]}
        )
        return [obj["id"] for obj in objects]

    @classmethod
    async def _get_random_object_from_cache(
        cls, query: QUERY, redis: aioredis.Redis
    ) -> Optional[int]:
        try:
            key = cls.get_cache_key(query)
            active_key = f"{key}_active"

            if not await redis.exists(active_key):
                return None

            data: str = await redis.srandmember(key)  # type: ignore

            return int(data)
        except aioredis.RedisError as e:
            print(e)
            return None

    @classmethod
    async def get_random_id(
        cls,
        query: QUERY,
        redis: aioredis.Redis,
    ) -> int | None:
        cached_object_id = await cls._get_random_object_from_cache(query, redis)

        if cached_object_id is not None:
            return cached_object_id

        object_ids = await cls._get_objects_from_db(query)

        await cls.cache_object_ids(query, object_ids, redis)

        if len(object_ids) == 0:
            return None

        return choice(object_ids)


class BaseFilterService(Generic[MODEL, QUERY], BaseSearchService[MODEL, QUERY]):
    @classmethod
    async def _get_object_ids(cls, query: QUERY) -> list[int]:
        return (
            await cls.model.objects.filter(**query)
            .fields("id")
            .values_list(flatten=True)
        )

@@ -1,10 +0,0 @@
from app.models import Genre
from app.services.common import MeiliSearchService


class GenreMeiliSearchService(MeiliSearchService):
    MODEL_CLASS = Genre
    PREFETCH_RELATED = ["source"]

    MS_INDEX_NAME = "genres"
    MS_INDEX_LANG_KEY = "langs"

@@ -1,74 +0,0 @@
from typing import TypedDict

from app.models import Sequence
from app.services.common import GetRandomService, MeiliSearchService, TRGMSearchService


GET_OBJECT_IDS_QUERY = """
SELECT ARRAY (
    WITH filtered_sequences AS (
        SELECT
            id,
            similarity(name, :query) as sml,
            (
                SELECT count(*) FROM book_sequences
                LEFT JOIN books ON (books.id = book)
                WHERE sequence = sequences.id AND books.is_deleted = 'f'
                AND books.lang = ANY(:langs ::text[])
            ) as books_count
        FROM sequences
        WHERE name % :query AND
        EXISTS (
            SELECT * FROM book_sequences
            LEFT JOIN books ON (books.id = book)
            WHERE sequence = sequences.id AND books.is_deleted = 'f' AND
            books.lang = ANY(:langs ::text[])
        )
    )
    SELECT fsequences.id FROM filtered_sequences as fsequences
    ORDER BY fsequences.sml DESC, fsequences.books_count DESC
    LIMIT 210
);
"""


class SequenceTGRMSearchService(TRGMSearchService):
    MODEL_CLASS = Sequence
    PREFETCH_RELATED = ["source"]

    GET_OBJECT_IDS_QUERY = GET_OBJECT_IDS_QUERY


GET_OBJECTS_ID_QUERY = """
WITH filtered_sequences AS (
    SELECT id FROM sequences
    WHERE EXISTS (
        SELECT * FROM book_sequences
        LEFT JOIN books
            ON (books.id = book AND
                books.is_deleted = 'f' AND
                books.lang = ANY(:langs ::text[]))
        WHERE sequence = sequences.id
    )
)
SELECT id FROM filtered_sequences;
"""


class RandomSequenceServiceQuery(TypedDict):
    allowed_langs: frozenset[str]


class GetRandomSequenceService(GetRandomService[Sequence, RandomSequenceServiceQuery]):
    MODEL_CLASS = Sequence  # type: ignore
    PREFETCH_RELATED = ["source"]

    GET_OBJECTS_ID_QUERY = GET_OBJECTS_ID_QUERY


class SequenceMeiliSearchService(MeiliSearchService):
    MODEL_CLASS = Sequence
    PREFETCH_RELATED = ["source"]

    MS_INDEX_NAME = "sequences"
    MS_INDEX_LANG_KEY = "langs"

@@ -1,60 +0,0 @@
from app.models import Author
from app.services.common import MeiliSearchService, TRGMSearchService


GET_OBJECT_IDS_QUERY = """
SELECT ARRAY(
    WITH filtered_authors AS (
        SELECT
            id,
            GREATEST(
                similarity(
                    (last_name || ' ' || first_name || ' ' || middle_name),
                    :query
                ),
                similarity((last_name || ' ' || first_name), :query),
                similarity((last_name), :query)
            ) as sml,
            (
                SELECT count(*) FROM translations
                LEFT JOIN books ON (books.id = book)
                WHERE author = authors.id AND books.is_deleted = 'f' AND
                books.lang = ANY(:langs ::text[])
            ) as books_count
        FROM authors
        WHERE (
            (last_name || ' ' || first_name || ' ' || middle_name) % :query OR
            (last_name || ' ' || first_name) % :query OR
            (last_name) % :query
        ) AND
        EXISTS (
            SELECT * FROM translations
            LEFT JOIN books ON (books.id = book)
            WHERE author = authors.id AND books.is_deleted = 'f' AND
            books.lang = ANY(:langs ::text[])
        )
    )
    SELECT fauthors.id FROM filtered_authors as fauthors
    ORDER BY fauthors.sml DESC, fauthors.books_count DESC
    LIMIT 210
);
"""


class TranslatorTGRMSearchService(TRGMSearchService):
    MODEL_CLASS = Author
    CUSTOM_CACHE_PREFIX = "translator"
    PREFETCH_RELATED = ["source"]
    SELECT_RELATED = ["annotations"]

    GET_OBJECT_IDS_QUERY = GET_OBJECT_IDS_QUERY


class TranslatorMeiliSearchService(MeiliSearchService):
    MODEL_CLASS = Author
    CUSTOM_CACHE_PREFIX = "translator"
    PREFETCH_RELATED = ["source"]
    SELECT_RELATED = ["annotations"]

    MS_INDEX_NAME = "authors"
    MS_INDEX_LANG_KEY = "translator_langs"
@@ -1,15 +0,0 @@
from typing import Any

import orjson


def default(value: Any):
    if isinstance(value, frozenset):
        list_value = list(value)
        return "-".join(sorted(list_value))

    return value


def orjson_dumps(v, *, default) -> str:
    return orjson.dumps(v, default=default).decode()
@@ -1,10 +0,0 @@
from typing import Any, Sequence, TypeVar

from pydantic import BaseModel


T = TypeVar("T", bound=BaseModel)


async def dict_transformer(items: Sequence[T]) -> Sequence[dict[str, Any]]:
    return [item.dict() for item in items]
@@ -1,23 +0,0 @@
from app.views.author import author_router, translator_router
from app.views.author_annotation import author_annotation_router
from app.views.book import book_router
from app.views.book_annotation import book_annotation_router
from app.views.genre import genre_router
from app.views.healthcheck import healtcheck_router
from app.views.sequence import sequence_router
from app.views.source import source_router
from app.views.translation import translation_router


routers = [
    source_router,
    author_router,
    translator_router,
    author_annotation_router,
    book_router,
    book_annotation_router,
    translation_router,
    sequence_router,
    genre_router,
    healtcheck_router,
]
@@ -1,214 +0,0 @@
from typing import Annotated, cast

from fastapi import APIRouter, Depends, HTTPException, Request, status

from fastapi_pagination import Page, Params
from fastapi_pagination.ext.ormar import paginate

from app.depends import check_token, get_allowed_langs
from app.models import Author as AuthorDB
from app.models import AuthorAnnotation as AuthorAnnotationDB
from app.models import Book as BookDB
from app.serializers.author import (
    Author,
    PageWithAuthorBook,
    PageWithTranslatedBook,
)
from app.serializers.author_annotation import AuthorAnnotation
from app.services.author import AuthorMeiliSearchService, GetRandomAuthorService
from app.services.translator import TranslatorMeiliSearchService
from app.utils.transformer import dict_transformer


author_router = APIRouter(
    prefix="/api/v1/authors",
    tags=["author"],
    dependencies=[Depends(check_token)],
)


PREFETCH_RELATED_FIELDS = ["source"]
SELECT_RELATED_FIELDS = ["annotations"]


@author_router.get("/", response_model=Page[Author], dependencies=[Depends(Params)])
async def get_authors():
    return await paginate(
        AuthorDB.objects.select_related(SELECT_RELATED_FIELDS).prefetch_related(
            PREFETCH_RELATED_FIELDS
        ),
        transformer=dict_transformer,
    )


@author_router.get("/random", response_model=Author)
async def get_random_author(
    request: Request,
    allowed_langs: Annotated[frozenset[str], Depends(get_allowed_langs)],
):
    author_id = await GetRandomAuthorService.get_random_id(
        {"allowed_langs": allowed_langs}, request.app.state.redis
    )

    if author_id is None:
        raise HTTPException(status.HTTP_204_NO_CONTENT)

    return (
        await AuthorDB.objects.select_related(SELECT_RELATED_FIELDS)
        .prefetch_related(PREFETCH_RELATED_FIELDS)
        .get(id=author_id)
    )


@author_router.get("/{id}", response_model=Author)
async def get_author(id: int):
    author = (
        await AuthorDB.objects.select_related(SELECT_RELATED_FIELDS)
        .prefetch_related(PREFETCH_RELATED_FIELDS)
        .get_or_none(id=id)
    )

    if author is None:
        raise HTTPException(status.HTTP_404_NOT_FOUND)

    return author


@author_router.get("/{id}/annotation", response_model=AuthorAnnotation)
async def get_author_annotation(id: int):
    annotation = await AuthorAnnotationDB.objects.get_or_none(author__id=id)

    if annotation is None:
        raise HTTPException(status.HTTP_404_NOT_FOUND)

    return annotation


@author_router.get(
    "/{id}/books", response_model=PageWithAuthorBook, dependencies=[Depends(Params)]
)
async def get_author_books(
    id: int, allowed_langs: Annotated[list[str], Depends(get_allowed_langs)]
):
    page = await paginate(
        BookDB.objects.prefetch_related(["source"])
        .select_related(["annotations", "translators", "sequences"])
        .filter(authors__id=id, lang__in=allowed_langs, is_deleted=False)
        .order_by("title"),
        transformer=dict_transformer,
    )

    author = await AuthorDB.objects.get_or_none(id=id)

    return PageWithAuthorBook(
        items=page.items,
        total=page.total,
        page=page.page,
        size=page.size,
        pages=page.pages,
        parent_item=Author.parse_obj(author.dict()) if author else None,
    )


@author_router.get("/{id}/available_types", response_model=list[str])
async def get_author_books_available_types(
    id: int, allowed_langs: Annotated[list[str], Depends(get_allowed_langs)]
) -> list[str]:
    books = await (
        BookDB.objects.prefetch_related(["source"])
        .filter(authors__id=id, lang__in=allowed_langs, is_deleted=False)
        .all()
    )

    file_types: set[str] = set()

    for book in books:
        for file_type in cast(list[str], book.available_types):
            file_types.add(file_type)

    return sorted(file_types)


@author_router.get(
    "/search/{query}", response_model=Page[Author], dependencies=[Depends(Params)]
)
async def search_authors(
    query: str,
    request: Request,
    allowed_langs: Annotated[frozenset[str], Depends(get_allowed_langs)],
):
    return await AuthorMeiliSearchService.get(
        {"query": query, "allowed_langs": allowed_langs},
        request.app.state.redis,
    )


translator_router = APIRouter(
    prefix="/api/v1/translators",
    tags=["author"],
    dependencies=[Depends(check_token)],
)


@translator_router.get("/{id}/books", response_model=PageWithTranslatedBook)
async def get_translated_books(
    id: int, allowed_langs: Annotated[list[str], Depends(get_allowed_langs)]
):
    page = await paginate(
        BookDB.objects.prefetch_related(["source"])
        .select_related(["annotations", "authors", "sequences"])
        .filter(
            translators__id=id,
            lang__in=allowed_langs,
            is_deleted=False,
        ),
        transformer=dict_transformer,
    )

    translator = await AuthorDB.objects.get(id=id)

    return PageWithTranslatedBook(
        items=page.items,
        total=page.total,
        page=page.page,
        size=page.size,
        pages=page.pages,
        parent_item=Author.parse_obj(translator.dict()) if translator else None,
    )


@translator_router.get("/{id}/available_types", response_model=list[str])
async def get_translator_books_available_types(
    id: int, allowed_langs: Annotated[list[str], Depends(get_allowed_langs)]
) -> list[str]:
    books = await (
        BookDB.objects.prefetch_related(["source"])
        .filter(
            translators__id=id,
            lang__in=allowed_langs,
            is_deleted=False,
        )
        .all()
    )

    file_types: set[str] = set()

    for book in books:
        for file_type in cast(list[str], book.available_types):
            file_types.add(file_type)

    return sorted(file_types)


@translator_router.get(
    "/search/{query}", response_model=Page[Author], dependencies=[Depends(Params)]
)
async def search_translators(
    query: str,
    request: Request,
    allowed_langs: Annotated[frozenset[str], Depends(get_allowed_langs)],
):
    return await TranslatorMeiliSearchService.get(
        {"query": query, "allowed_langs": allowed_langs},
        request.app.state.redis,
    )
@@ -1,33 +0,0 @@
from fastapi import APIRouter, Depends, HTTPException, status

from fastapi_pagination import Page, Params
from fastapi_pagination.ext.ormar import paginate

from app.depends import check_token
from app.models import AuthorAnnotation as AuthorAnnotationDB
from app.serializers.author_annotation import AuthorAnnotation
from app.utils.transformer import dict_transformer


author_annotation_router = APIRouter(
    prefix="/api/v1/author_annotations",
    tags=["author_annotation"],
    dependencies=[Depends(check_token)],
)


@author_annotation_router.get(
    "/", response_model=Page[AuthorAnnotation], dependencies=[Depends(Params)]
)
async def get_author_annotations():
    return await paginate(AuthorAnnotationDB.objects, transformer=dict_transformer)


@author_annotation_router.get("/{id}", response_model=AuthorAnnotation)
async def get_author_annotation(id: int):
    annotation = await AuthorAnnotationDB.objects.get_or_none(id=id)

    if annotation is None:
        raise HTTPException(status.HTTP_404_NOT_FOUND)

    return annotation
@@ -1,131 +0,0 @@
from typing import Annotated, Optional

from fastapi import APIRouter, Depends, HTTPException, Request, status

from fastapi_pagination import Page, Params

from app.depends import check_token, get_allowed_langs
from app.filters.book import get_book_filter
from app.models import Book as BookDB
from app.models import BookAnnotation as BookAnnotationDB
from app.serializers.book import Book, BookBaseInfo, BookDetail, RemoteBook
from app.serializers.book_annotation import BookAnnotation
from app.services.book import (
    BookBaseInfoFilterService,
    BookFilterService,
    BookMeiliSearchService,
    GetRandomBookService,
)


book_router = APIRouter(
    prefix="/api/v1/books",
    tags=["book"],
    dependencies=[Depends(check_token)],
)

PREFETCH_RELATED_FIELDS = ["source"]
SELECT_RELATED_FIELDS = ["authors", "translators", "annotations"]

DETAIL_SELECT_RELATED_FIELDS = ["sequences", "genres"]


@book_router.get("/", response_model=Page[RemoteBook], dependencies=[Depends(Params)])
async def get_books(
    request: Request,
    book_filter: Annotated[dict, Depends(get_book_filter)],
):
    return await BookFilterService.get(book_filter, request.app.state.redis)


@book_router.get(
    "/base/", response_model=Page[BookBaseInfo], dependencies=[Depends(Params)]
)
async def get_base_books_info(
    request: Request, book_filter: Annotated[dict, Depends(get_book_filter)]
):
    return await BookBaseInfoFilterService.get(book_filter, request.app.state.redis)


@book_router.get("/last", response_model=int)
async def get_last_book_id():
    book = await BookDB.objects.order_by("-id").first()
    return book.id


@book_router.get("/random", response_model=BookDetail)
async def get_random_book(
    request: Request,
    allowed_langs: Annotated[frozenset[str], Depends(get_allowed_langs)],
    genre: Optional[int] = None,
):
    book_id = await GetRandomBookService.get_random_id(
        {"allowed_langs": allowed_langs, "genre": genre}, request.app.state.redis
    )

    if book_id is None:
        raise HTTPException(status.HTTP_204_NO_CONTENT)

    book = (
        await BookDB.objects.select_related(
            SELECT_RELATED_FIELDS + DETAIL_SELECT_RELATED_FIELDS
        )
        .prefetch_related(PREFETCH_RELATED_FIELDS)
        .get(id=book_id)
    )

    return book


@book_router.get("/{id}", response_model=BookDetail)
async def get_book(id: int):
    book = (
        await BookDB.objects.select_related(
            SELECT_RELATED_FIELDS + DETAIL_SELECT_RELATED_FIELDS
        )
        .prefetch_related(PREFETCH_RELATED_FIELDS)
        .get_or_none(id=id)
    )

    if book is None:
        raise HTTPException(status.HTTP_404_NOT_FOUND)

    return book


@book_router.get("/remote/{source_id}/{remote_id}", response_model=Book)
async def get_remote_book(source_id: int, remote_id: int):
    book = (
        await BookDB.objects.select_related(SELECT_RELATED_FIELDS)
        .prefetch_related(PREFETCH_RELATED_FIELDS)
        .get_or_none(source=source_id, remote_id=remote_id)
    )

    if book is None:
        raise HTTPException(status.HTTP_404_NOT_FOUND)

    return book


@book_router.get("/{id}/annotation", response_model=BookAnnotation)
async def get_book_annotation(id: int):
    annotation = await BookAnnotationDB.objects.get_or_none(book__id=id)

    if annotation is None:
        raise HTTPException(status.HTTP_404_NOT_FOUND)

    return annotation


@book_router.get(
    "/search/{query}", response_model=Page[Book], dependencies=[Depends(Params)]
)
async def search_books(
    query: str,
    request: Request,
    allowed_langs: Annotated[frozenset[str], Depends(get_allowed_langs)],
):
    return await BookMeiliSearchService.get(
        {"query": query, "allowed_langs": allowed_langs},
        request.app.state.redis,
    )
@@ -1,33 +0,0 @@
from fastapi import APIRouter, Depends, HTTPException, status

from fastapi_pagination import Page, Params
from fastapi_pagination.ext.ormar import paginate

from app.depends import check_token
from app.models import BookAnnotation as BookAnnotationDB
from app.serializers.book_annotation import BookAnnotation
from app.utils.transformer import dict_transformer


book_annotation_router = APIRouter(
    prefix="/api/v1/book_annotations",
    tags=["book_annotation"],
    dependencies=[Depends(check_token)],
)


@book_annotation_router.get(
    "/", response_model=Page[BookAnnotation], dependencies=[Depends(Params)]
)
async def get_book_annotations():
    return await paginate(BookAnnotationDB.objects, transformer=dict_transformer)


@book_annotation_router.get("/{id}", response_model=BookAnnotation)
async def get_book_annotation(id: int):
    annotation = await BookAnnotationDB.objects.get_or_none(id=id)

    if annotation is None:
        raise HTTPException(status.HTTP_404_NOT_FOUND)

    return annotation
@@ -1,64 +0,0 @@
from typing import Annotated

from fastapi import APIRouter, Depends, HTTPException, Request, status

from fastapi_pagination import Page, Params
from fastapi_pagination.ext.ormar import paginate

from app.depends import check_token, get_allowed_langs
from app.filters.genre import get_genre_filter
from app.models import Genre as GenreDB
from app.serializers.genre import Genre
from app.services.genre import GenreMeiliSearchService
from app.utils.transformer import dict_transformer


genre_router = APIRouter(
    prefix="/api/v1/genres", tags=["genres"], dependencies=[Depends(check_token)]
)


PREFETCH_RELATED_FIELDS = ["source"]


@genre_router.get("/", response_model=Page[Genre], dependencies=[Depends(Params)])
async def get_genres(genre_filter: Annotated[dict, Depends(get_genre_filter)]):
    return await paginate(
        GenreDB.objects.prefetch_related(PREFETCH_RELATED_FIELDS)
        .filter(**genre_filter)
        .order_by("description"),
        transformer=dict_transformer,
    )


@genre_router.get("/metas", response_model=list[str])
async def get_genre_metas():
    genres = await GenreDB.objects.fields("meta").values_list(flatten=True)
    genres.sort()
    return list(set(genres))


@genre_router.get("/{id}", response_model=Genre)
async def get_genre(id: int):
    genre = await GenreDB.objects.prefetch_related(PREFETCH_RELATED_FIELDS).get_or_none(
        id=id
    )

    if genre is None:
        raise HTTPException(status.HTTP_404_NOT_FOUND)

    return genre


@genre_router.get(
    "/search/{query}", response_model=Page[Genre], dependencies=[Depends(Params)]
)
async def search_genres(
    query: str,
    request: Request,
    allowed_langs: Annotated[frozenset[str], Depends(get_allowed_langs)],
):
    return await GenreMeiliSearchService.get(
        {"query": query, "allowed_langs": allowed_langs},
        request.app.state.redis,
    )
@@ -1,9 +0,0 @@
from fastapi import APIRouter


healtcheck_router = APIRouter(tags=["healthcheck"])


@healtcheck_router.get("/healthcheck")
async def healthcheck():
    return "Ok!"
@@ -1,113 +0,0 @@
from typing import Annotated, cast

from fastapi import APIRouter, Depends, HTTPException, Request, status

from fastapi_pagination import Page, Params
from fastapi_pagination.ext.ormar import paginate

from app.depends import check_token, get_allowed_langs
from app.models import Book as BookDB
from app.models import Sequence as SequenceDB
from app.serializers.sequence import Book as SequenceBook
from app.serializers.sequence import PageWithSequence, Sequence
from app.services.sequence import GetRandomSequenceService, SequenceMeiliSearchService
from app.utils.transformer import dict_transformer


sequence_router = APIRouter(
    prefix="/api/v1/sequences",
    tags=["sequence"],
    dependencies=[Depends(check_token)],
)


@sequence_router.get("/", response_model=Page[Sequence], dependencies=[Depends(Params)])
async def get_sequences():
    return await paginate(SequenceDB.objects, transformer=dict_transformer)


@sequence_router.get("/random", response_model=Sequence)
async def get_random_sequence(
    request: Request,
    allowed_langs: Annotated[frozenset[str], Depends(get_allowed_langs)],
):
    sequence_id = await GetRandomSequenceService.get_random_id(
        {"allowed_langs": allowed_langs},
        request.app.state.redis,
    )

    if sequence_id is None:
        raise HTTPException(status.HTTP_204_NO_CONTENT)

    return await SequenceDB.objects.get(id=sequence_id)


@sequence_router.get("/{id}", response_model=Sequence)
async def get_sequence(id: int):
    return await SequenceDB.objects.get(id=id)


@sequence_router.get(
    "/{id}/books",
    response_model=PageWithSequence,
    dependencies=[Depends(Params)],
)
async def get_sequence_books(
    id: int, allowed_langs: Annotated[list[str], Depends(get_allowed_langs)]
):
    page: Page[SequenceBook] = await paginate(
        BookDB.objects.prefetch_related(["source"])
        .select_related(["annotations", "authors", "translators"])
        .filter(sequences__id=id, lang__in=allowed_langs, is_deleted=False)
        .order_by("sequences__booksequences__position"),
        transformer=dict_transformer,
    )

    sequence = await SequenceDB.objects.get_or_none(id=id)

    return PageWithSequence(
        items=page.items,
        total=page.total,
        page=page.page,
        size=page.size,
        pages=page.pages,
        parent_item=Sequence.parse_obj(sequence.dict()) if sequence else None,
    )


@sequence_router.get(
    "/{id}/available_types",
    response_model=list[str],
)
async def sequence_available_types(
    id: int, allowed_langs: Annotated[list[str], Depends(get_allowed_langs)]
) -> list[str]:
    books = await (
        BookDB.objects.prefetch_related(["source"])
        .filter(sequences__id=id, lang__in=allowed_langs, is_deleted=False)
        .all()
    )

    file_types: set[str] = set()

    for book in books:
        for file_type in cast(list[str], book.available_types):
            file_types.add(file_type)

    return sorted(file_types)


@sequence_router.get(
    "/search/{query}",
    response_model=Page[Sequence],
    dependencies=[Depends(Params)],
)
async def search_sequences(
    query: str,
    request: Request,
    allowed_langs: Annotated[frozenset[str], Depends(get_allowed_langs)],
):
    return await SequenceMeiliSearchService.get(
        {"query": query, "allowed_langs": allowed_langs},
        request.app.state.redis,
    )
@@ -1,21 +0,0 @@
from fastapi import APIRouter, Depends

from fastapi_pagination import Page, Params
from fastapi_pagination.ext.ormar import paginate

from app.depends import check_token
from app.models import Source as SourceDB
from app.serializers.source import Source
from app.utils.transformer import dict_transformer


source_router = APIRouter(
    prefix="/api/v1/sources",
    tags=["source"],
    dependencies=[Depends(check_token)],
)


@source_router.get("", response_model=Page[Source], dependencies=[Depends(Params)])
async def get_sources():
    return await paginate(SourceDB.objects, transformer=dict_transformer)
@@ -1,26 +0,0 @@
from fastapi import APIRouter, Depends

from fastapi_pagination import Page, Params
from fastapi_pagination.ext.ormar import paginate

from app.depends import check_token
from app.models import Translation as TranslationDB
from app.serializers.translation import Translation
from app.utils.transformer import dict_transformer


translation_router = APIRouter(
    prefix="/api/v1/translation",
    tags=["translation"],
    dependencies=[Depends(check_token)],
)


@translation_router.get(
    "/", response_model=Page[Translation], dependencies=[Depends(Params)]
)
async def get_translations():
    return await paginate(
        TranslationDB.objects.select_related(["book", "author"]),
        transformer=dict_transformer,
    )
@@ -1,58 +0,0 @@
from contextlib import asynccontextmanager

from fastapi import FastAPI
from fastapi.responses import ORJSONResponse

from fastapi_pagination import add_pagination
from prometheus_fastapi_instrumentator import Instrumentator
from redis import asyncio as aioredis
import sentry_sdk

from app.views import routers
from core.config import env_config
from core.db import database


sentry_sdk.init(
    env_config.SENTRY_SDN,
)


@asynccontextmanager
async def lifespan(app: FastAPI):
    database_ = app.state.database
    if not database_.is_connected:
        await database_.connect()

    yield

    database_ = app.state.database
    if database_.is_connected:
        await database_.disconnect()

    await app.state.redis.close()


def start_app() -> FastAPI:
    app = FastAPI(default_response_class=ORJSONResponse, lifespan=lifespan)

    app.state.database = database

    app.state.redis = aioredis.Redis(
        host=env_config.REDIS_HOST,
        port=env_config.REDIS_PORT,
        db=env_config.REDIS_DB,
        password=env_config.REDIS_PASSWORD,
    )

    for router in routers:
        app.include_router(router)

    add_pagination(app)

    Instrumentator(
        should_ignore_untemplated=True,
        excluded_handlers=["/docs", "/metrics", "/healthcheck"],
    ).instrument(app).expose(app, include_in_schema=True)

    return app
@@ -1,4 +0,0 @@
from fastapi.security import APIKeyHeader


default_security = APIKeyHeader(name="Authorization")
@@ -1,35 +0,0 @@
from typing import Optional

from pydantic import BaseSettings


class EnvConfig(BaseSettings):
    API_KEY: str

    POSTGRES_USER: str
    POSTGRES_PASSWORD: str
    POSTGRES_HOST: str
    POSTGRES_PORT: int
    POSTGRES_DB: str

    REDIS_HOST: str
    REDIS_PORT: int
    REDIS_DB: int
    REDIS_PASSWORD: Optional[str]

    MEILI_HOST: str
    MEILI_MASTER_KEY: str

    PUSH_GETAWAY_ENABLED: bool
    PUSH_GETAWAY_HOST: str
    PUSH_GETAWAY_JOB: str
    PUSH_GETAWAY_INTERVAL: int

    SENTRY_SDN: str

    class Config:
        env_file = ".env"
        env_file_encoding = "utf-8"


env_config = EnvConfig()
@@ -1,15 +0,0 @@
from urllib.parse import quote

from databases import Database
from sqlalchemy import MetaData

from core.config import env_config


DATABASE_URL = (
    f"postgresql://{env_config.POSTGRES_USER}:{quote(env_config.POSTGRES_PASSWORD)}@"
    f"{env_config.POSTGRES_HOST}:{env_config.POSTGRES_PORT}/{env_config.POSTGRES_DB}"
)

metadata = MetaData()
database = Database(DATABASE_URL, min_size=2, max_size=10)
@@ -1,4 +0,0 @@
from core.app import start_app


app = start_app()
1481 poetry.lock generated
File diff suppressed because it is too large

3 prisma-cli/.gitignore vendored Normal file
@@ -0,0 +1,3 @@
node_modules
# Keep environment variables out of version control
.env
4622 prisma-cli/Cargo.lock generated Normal file
File diff suppressed because it is too large

9 prisma-cli/Cargo.toml Normal file
@@ -0,0 +1,9 @@
[package]
name = "prisma-cli"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
prisma-client-rust-cli = { git = "https://github.com/Brendonovich/prisma-client-rust", tag = "0.6.8", features = ["postgresql"] }
3 prisma-cli/src/main.rs Normal file
@@ -0,0 +1,3 @@
fn main() {
    prisma_client_rust_cli::run();
}
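Note: prisma-cli above is a thin wrapper that hands its arguments to the prisma-client-rust CLI, so the Rust client module can be regenerated from the workspace root with e.g. `cargo run -p prisma-cli -- generate`. The `generate` subcommand mirrors the upstream Prisma CLI; the exact invocation is illustrative, and subcommands that touch the database additionally need DATABASE_URL set.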
165 prisma/schema.prisma Normal file
@@ -0,0 +1,165 @@
generator client {
  provider = "cargo prisma"
  output   = "../src/prisma.rs"
}

datasource db {
  provider = "postgresql"
  url      = env("DATABASE_URL")
}

model AuthorAnnotation {
  id        Int     @id @default(autoincrement())
  author_id Int     @unique @map("author")
  title     String  @db.VarChar(256)
  text      String
  file      String? @db.VarChar(256)
  author    Author  @relation(fields: [author_id], references: [id], onDelete: NoAction, onUpdate: NoAction, map: "fk_author_annotations_authors_id_author")

  @@map("author_annotations")
}

model Author {
  id                Int               @id @default(autoincrement())
  source_id         Int               @map("source") @db.SmallInt
  remote_id         Int
  first_name        String            @db.VarChar(256)
  last_name         String            @db.VarChar(256)
  middle_name       String?           @db.VarChar(256)
  source            Source            @relation(fields: [source_id], references: [id], onDelete: NoAction, onUpdate: NoAction, map: "fk_authors_sources_id_source")
  author_annotation AuthorAnnotation?
  book_authors      BookAuthor[]
  translations      Translator[]

  @@unique([source_id, remote_id], map: "uc_authors_source_remote_id")
  @@index([last_name(ops: raw("gin_trgm_ops"))], map: "tgrm_authors_l", type: Gin)
  @@map("authors")
}

model BookAnnotation {
  id      Int     @id @default(autoincrement())
  book_id Int     @unique @map("book")
  title   String  @db.VarChar(256)
  text    String
  file    String? @db.VarChar(256)
  book    Book    @relation(fields: [book_id], references: [id], onDelete: NoAction, onUpdate: NoAction, map: "fk_book_annotations_books_id_book")

  @@map("book_annotations")
}

model BookAuthor {
  id        Int    @id @default(autoincrement())
  author_id Int    @map("author")
  book_id   Int    @map("book")
  author    Author @relation(fields: [author_id], references: [id], onDelete: Cascade, map: "fk_book_authors_authors_author_id")
  book      Book   @relation(fields: [book_id], references: [id], onDelete: Cascade, map: "fk_book_authors_books_book_id")

  @@unique([book_id, author_id], map: "uc_book_authors_book_author")
  @@index([author_id], map: "book_authors_author")
  @@index([book_id], map: "book_authors_book")
  @@map("book_authors")
}

model BookGenre {
  id       Int   @id @default(autoincrement())
  genre_id Int   @map("genre")
  book_id  Int   @map("book")
  book     Book  @relation(fields: [book_id], references: [id], onDelete: Cascade, map: "fk_book_genres_books_book_id")
  genre    Genre @relation(fields: [genre_id], references: [id], onDelete: Cascade, map: "fk_book_genres_genres_genre_id")

  @@unique([book_id, genre_id], map: "uc_book_genres_book_genre")
  @@index([book_id], map: "book_genres_book")
  @@index([genre_id], map: "book_genres_genre")
  @@map("book_genres")
}

model BookSequence {
  id          Int      @id @default(autoincrement())
  position    Int      @db.SmallInt
  sequence_id Int      @map("sequence")
  book_id     Int      @map("book")
  book        Book     @relation(fields: [book_id], references: [id], onDelete: Cascade, map: "fk_book_sequences_books_book_id")
  sequence    Sequence @relation(fields: [sequence_id], references: [id], onDelete: Cascade, map: "fk_book_sequences_sequences_sequence_id")

  @@unique([book_id, sequence_id], map: "uc_book_sequences_book_sequence")
  @@index([book_id], map: "book_sequences_book")
  @@index([sequence_id], map: "book_sequences_sequence")
  @@map("book_sequences")
}

model Book {
  id              Int             @id @default(autoincrement())
  source_id       Int             @map("source") @db.SmallInt
  remote_id       Int
  title           String          @db.VarChar(256)
  lang            String          @db.VarChar(3)
  file_type       String          @db.VarChar(4)
  uploaded        DateTime        @db.Date
  is_deleted      Boolean         @default(false)
  pages           Int?
  source          Source          @relation(fields: [source_id], references: [id], onDelete: NoAction, onUpdate: NoAction, map: "fk_books_sources_id_source")
  book_annotation BookAnnotation?
  book_authors    BookAuthor[]
  book_genres     BookGenre[]
  book_sequences  BookSequence[]
  translations    Translator[]

  @@unique([source_id, remote_id], map: "uc_books_source_remote_id")
  @@index([file_type], map: "ix_books_file_type")
  @@index([title], map: "ix_books_title")
  @@index([title(ops: raw("gin_trgm_ops"))], map: "trgm_books_title", type: Gin)
  @@map("books")
}

model Genre {
  id          Int         @id @default(autoincrement())
  source_id   Int         @map("source") @db.SmallInt
  remote_id   Int
  code        String      @db.VarChar(45)
  description String      @db.VarChar(99)
  meta        String      @db.VarChar(45)
  source      Source      @relation(fields: [source_id], references: [id], onDelete: NoAction, onUpdate: NoAction, map: "fk_genres_sources_id_source")
  book_genres BookGenre[]

  @@unique([source_id, remote_id], map: "uc_genres_source_remote_id")
  @@map("genres")
}

model Sequence {
  id             Int            @id @default(autoincrement())
  source_id      Int            @map("source") @db.SmallInt
  remote_id      Int
  name           String         @db.VarChar(256)
  source         Source         @relation(fields: [source_id], references: [id], onDelete: NoAction, onUpdate: NoAction, map: "fk_sequences_sources_id_source")
  book_sequences BookSequence[]

  @@unique([source_id, remote_id], map: "uc_sequences_source_remote_id")
  @@index([name], map: "ix_sequences_name")
  @@index([name(ops: raw("gin_trgm_ops"))], map: "tgrm_sequences_name", type: Gin)
  @@map("sequences")
}

model Source {
  id        Int        @id @default(autoincrement()) @db.SmallInt
  name      String     @unique @db.VarChar(32)
  authors   Author[]
  books     Book[]
  genres    Genre[]
  sequences Sequence[]

  @@map("sources")
}

model Translator {
  id        Int    @id @default(autoincrement())
  position  Int    @db.SmallInt
  author_id Int    @map("author")
  book_id   Int    @map("book")
  author    Author @relation(fields: [author_id], references: [id], onDelete: Cascade, map: "fk_translations_authors_author_id")
  book      Book   @relation(fields: [book_id], references: [id], onDelete: Cascade, map: "fk_translations_books_book_id")

  @@unique([book_id, author_id], map: "uc_translations_book_author")
  @@index([author_id], map: "translations_author")
  @@index([book_id], map: "translations_book")
  @@map("translations")
}
@@ -1,76 +0,0 @@
[tool.poetry]
name = "fastapi_book_server"
version = "0.1.0"
description = ""
authors = ["Kurbanov Bulat <kurbanovbul@gmail.com>"]

[tool.poetry.dependencies]
python = "^3.11"
fastapi = "^0.101.0"
pydantic = "^1.10.4"
uvicorn = {extras = ["standard"], version = "^0.23.2"}
ormar = {extras = ["postgresql"], version = "^0.12.2"}
alembic = "^1.11.2"
fastapi-pagination = {extras = ["ormar"], version = "^0.12.7"}
orjson = "^3.9.4"
httpx = "^0.24.1"
meilisearch = "^0.28.2"
prometheus-fastapi-instrumentator = "^6.1.0"
uvloop = "^0.17.0"
sentry-sdk = "^1.29.2"
redis = {extras = ["hiredis"], version = "^4.6.0"}
gunicorn = "^21.2.0"

[tool.poetry.group.dev.dependencies]
pre-commit = "^2.21.0"

[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"

[tool.black]
include = '\.pyi?$'
exclude = '''
/(
    \.git
  | \.vscode
  | \venv
  | alembic
)/
'''

[tool.ruff]
fix = true
target-version = "py311"
src = ["fastapi_book_server"]
line-length=88
ignore = []
select = ["B", "C", "E", "F", "W", "B9", "I001"]
exclude = [
    # No need to traverse our git directory
    ".git",
    # There's no value in checking cache directories
    "__pycache__",
    # The conf file is mostly autogenerated, ignore it
    "fastapi_book_server/app/alembic",
]

[tool.ruff.flake8-bugbear]
extend-immutable-calls = ["fastapi.File", "fastapi.Form", "fastapi.Security", "fastapi.Query", "fastapi.Depends"]

[tool.ruff.mccabe]
max-complexity = 15

[tool.ruff.isort]
known-first-party = ["core", "app"]
force-sort-within-sections = true
force-wrap-aliases = true
section-order = ["future", "standard-library", "base_framework", "framework_ext", "third-party", "first-party", "local-folder"]
lines-after-imports = 2

[tool.ruff.isort.sections]
base_framework = ["fastapi",]
framework_ext = ["starlette"]

[tool.ruff.pyupgrade]
keep-runtime-typing = true
@@ -1,11 +0,0 @@
import os

import httpx


CHECK_URL = os.environ.get("HEALTHCHECK_URL", "http://localhost:8080/healthcheck")

response = httpx.get(CHECK_URL)

print(f"HEALTHCHECK STATUS: {response.text}")
exit(0 if response.status_code == 200 else 1)
@@ -1,8 +0,0 @@
cd /app

rm -rf prometheus
mkdir prometheus

alembic -c ./app/alembic.ini upgrade head

gunicorn -k uvicorn.workers.UvicornWorker main:app --bind 0.0.0.0:8080 --workers=2 --timeout 30 --max-requests=512000 --max-requests-jitter=3
43 src/config.rs Normal file
@@ -0,0 +1,43 @@
use once_cell::sync::Lazy;

fn get_env(env: &'static str) -> String {
    std::env::var(env).unwrap_or_else(|_| panic!("Cannot get the {} env variable", env))
}

pub struct Config {
    pub api_key: String,

    pub postgres_user: String,
    pub postgres_password: String,
    pub postgres_host: String,
    pub postgres_port: u32,
    pub postgres_db: String,

    pub meili_host: String,
    pub meili_master_key: String,

    pub sentry_dsn: String
}

impl Config {
    pub fn load() -> Config {
        Config {
            api_key: get_env("API_KEY"),

            postgres_user: get_env("POSTGRES_USER"),
            postgres_password: get_env("POSTGRES_PASSWORD"),
            postgres_host: get_env("POSTGRES_HOST"),
            postgres_port: get_env("POSTGRES_PORT").parse().unwrap(),
            postgres_db: get_env("POSTGRES_DB"),

            meili_host: get_env("MEILI_HOST"),
            meili_master_key: get_env("MEILI_MASTER_KEY"),

            sentry_dsn: get_env("SENTRY_DSN")
        }
    }
}

pub static CONFIG: Lazy<Config> = Lazy::new(|| {
    Config::load()
});
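Note: CONFIG above is a once_cell Lazy, so Config::load() (and the panic inside get_env) runs on the first field access rather than at startup. A minimal sketch of that behaviour, assuming the required variables are exported (the helper function is illustrative, not part of this commit):

    use crate::config::CONFIG;

    fn log_postgres_target() {
        // The first dereference of CONFIG triggers Config::load();
        // every later access reuses the already-built value.
        println!(
            "postgres target: {}:{}/{}",
            CONFIG.postgres_host, CONFIG.postgres_port, CONFIG.postgres_db
        );
    }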
19 src/db.rs Normal file
@@ -0,0 +1,19 @@
use crate::{prisma::PrismaClient, config::CONFIG};


pub async fn get_prisma_client() -> PrismaClient {
    let database_url: String = format!(
        "postgresql://{}:{}@{}:{}/{}?connection_limit=4",
        CONFIG.postgres_user,
        CONFIG.postgres_password,
        CONFIG.postgres_host,
        CONFIG.postgres_port,
        CONFIG.postgres_db
    );

    PrismaClient::_builder()
        .with_url(database_url)
        .build()
        .await
        .unwrap()
}
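Note: a minimal call-site sketch for get_prisma_client (the standalone main here is illustrative; in this commit the real entry point is src/main.rs):

    use crate::db::get_prisma_client;

    #[tokio::main]
    async fn main() {
        // Builds a PrismaClient from the URL assembled out of CONFIG;
        // the unwrap inside get_prisma_client panics if the database
        // is unreachable at startup.
        let _client = get_prisma_client().await;
    }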
41 src/main.rs Normal file
@@ -0,0 +1,41 @@
pub mod config;
pub mod views;
pub mod prisma;
pub mod db;
pub mod serializers;
pub mod meilisearch;

use std::{net::SocketAddr, str::FromStr};
use sentry::{ClientOptions, types::Dsn, integrations::debug_images::DebugImagesIntegration};
use tracing::info;

use crate::views::get_router;


#[tokio::main]
async fn main() {
    let options = ClientOptions {
        dsn: Some(Dsn::from_str(&config::CONFIG.sentry_dsn).unwrap()),
        default_integrations: false,
        ..Default::default()
    }
    .add_integration(DebugImagesIntegration::new());

    let _guard = sentry::init(options);

    tracing_subscriber::fmt()
        .with_target(false)
        .compact()
        .init();

    let addr = SocketAddr::from(([0, 0, 0, 0], 8080));

    let app = get_router().await;

    info!("Start webserver...");
    axum::Server::bind(&addr)
        .serve(app.into_make_service())
        .await
        .unwrap();
    info!("Webserver shutdown...")
}
48 src/meilisearch.rs Normal file
@@ -0,0 +1,48 @@
use meilisearch_sdk::Client;
use serde::Deserialize;

use crate::config::CONFIG;


pub fn get_meili_client() -> Client {
    Client::new(
        &CONFIG.meili_host,
        Some(CONFIG.meili_master_key.clone())
    )
}

#[derive(Deserialize)]
pub struct AuthorMeili {
    pub id: i32,
    pub first_name: String,
    pub last_name: String,
    pub middle_name: String,
    pub author_langs: Vec<String>,
    pub translator_langs: Vec<String>,
    pub books_count: i32
}

#[derive(Deserialize)]
pub struct BookMeili {
    pub id: i32,
    pub title: String,
    pub lang: String,
    pub genres: Vec<i32>
}

#[derive(Deserialize)]
pub struct GenreMeili {
    pub id: i32,
    pub description: String,
    pub meta: String,
    pub langs: Vec<String>,
    pub books_count: i32
}

#[derive(Deserialize)]
pub struct SequenceMeili {
    pub id: i32,
    pub name: String,
    pub langs: Vec<String>,
    pub books_count: i32
}
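Note: the *Meili structs above describe the documents stored in the Meilisearch indexes that the removed Python services populated ("authors", "genres", "sequences"). A minimal search sketch using the meilisearch-sdk query builder (the index name "authors" is taken from the removed Python code; error handling is elided):

    use crate::meilisearch::{get_meili_client, AuthorMeili};

    pub async fn search_authors(query: &str) -> Vec<AuthorMeili> {
        let client = get_meili_client();
        let results = client
            .index("authors")
            .search()
            .with_query(query)
            .execute::<AuthorMeili>()
            .await
            .unwrap();
        // Each hit wraps the deserialized document in its `result` field.
        results.hits.into_iter().map(|hit| hit.result).collect()
    }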
16099 src/prisma.rs Normal file
File diff suppressed because one or more lines are too long

6 src/serializers/allowed_langs.rs Normal file
@@ -0,0 +1,6 @@
use serde::Deserialize;

#[derive(Deserialize)]
pub struct AllowedLangs {
    pub allowed_langs: Vec<String>
}
78 src/serializers/author.rs Normal file
@@ -0,0 +1,78 @@
use serde::Serialize;

use crate::prisma::{author, book};

use super::{sequence::Sequence, utils::{get_available_types, get_translators, get_sequences}};

#[derive(Serialize)]
pub struct Author {
    pub id: i32,
    pub first_name: String,
    pub last_name: String,
    pub middle_name: String,
    pub annotation_exists: bool,
}

impl From<author::Data> for Author {
    fn from(val: author::Data) -> Self {
        let author::Data {
            id,
            first_name,
            last_name,
            middle_name,
            author_annotation,
            ..
        } = val;

        Author {
            id,
            first_name,
            last_name,
            middle_name: middle_name.unwrap_or("".to_string()),
            annotation_exists: author_annotation.unwrap().is_some(),
        }
    }
}


#[derive(Serialize)]
pub struct AuthorBook {
    pub id: i32,
    pub title: String,
    pub lang: String,
    pub file_type: String,
    pub available_types: Vec<String>,
    pub uploaded: String,
    pub translators: Vec<Author>,
    pub sequences: Vec<Sequence>,
    pub annotation_exists: bool,
}

impl From<book::Data> for AuthorBook {
    fn from(val: book::Data) -> Self {
        let book::Data {
            id,
            title,
            lang,
            file_type,
            uploaded,
            translations,
            book_sequences,
            book_annotation,
            source,
            ..
        } = val;

        AuthorBook {
            id,
            title,
            lang,
            file_type: file_type.clone(),
            available_types: get_available_types(file_type, source.unwrap().name),
            uploaded: uploaded.format("%Y-%m-%d").to_string(),
            translators: get_translators(translations),
            sequences: get_sequences(book_sequences),
            annotation_exists: book_annotation.unwrap().is_some(),
        }
    }
}
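Note: the From impls above unwrap relation fields (author_annotation, translations, book_sequences), which prisma-client-rust only populates when the query explicitly fetched them. A minimal query sketch, assuming the fetch accessors generated from prisma/schema.prisma (without the with() call, the unwrap panics at conversion time):

    use crate::prisma::{author, PrismaClient};
    use crate::serializers::author::Author;

    pub async fn list_authors(client: &PrismaClient) -> Vec<Author> {
        client
            .author()
            .find_many(vec![])
            // Load the one-to-one relation so From<author::Data> can unwrap it.
            .with(author::author_annotation::fetch())
            .exec()
            .await
            .unwrap()
            .into_iter()
            .map(Author::from)
            .collect()
    }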
24 src/serializers/author_annotation.rs Normal file
@@ -0,0 +1,24 @@
use serde::Serialize;

use crate::prisma::author_annotation;

#[derive(Serialize)]
pub struct AuthorAnnotation {
    pub id: i32,
    pub title: String,
    pub text: String,
    pub file: Option<String>
}

impl From<author_annotation::Data> for AuthorAnnotation {
    fn from(val: author_annotation::Data) -> Self {
        let author_annotation::Data { id, title, text, file, .. } = val;

        AuthorAnnotation {
            id,
            title,
            text,
            file
        }
    }
}
248 src/serializers/book.rs Normal file
@@ -0,0 +1,248 @@
use chrono::{DateTime, Utc};
use serde::{Serialize, Deserialize};

use crate::prisma::book::{self};

use super::{source::Source, utils::{get_available_types, get_translators, get_sequences, get_authors, get_genres}, author::Author, sequence::Sequence, genre::Genre};


#[derive(Deserialize)]
pub struct BookFilter {
    pub allowed_langs: Vec<String>,
    pub is_deleted: Option<bool>,
    pub uploaded_gte: Option<DateTime<Utc>>,
    pub uploaded_lte: Option<DateTime<Utc>>,
    pub id_gte: Option<i32>,
    pub id_lte: Option<i32>,
}

impl BookFilter {
    pub fn get_filter_vec(self) -> Vec<book::WhereParam> {
        let mut result = vec![];

        result.push(
            book::lang::in_vec(self.allowed_langs)
        );

        match self.is_deleted {
            Some(v) => {
                result.push(
                    book::is_deleted::equals(v)
                );
            },
            None => {
                result.push(
                    book::is_deleted::equals(false)
                );
            },
        };

        if let Some(uploaded_gte) = self.uploaded_gte {
            result.push(
                book::uploaded::gte(uploaded_gte.into())
            );
        };

        if let Some(uploaded_lte) = self.uploaded_lte {
            result.push(
                book::uploaded::lte(uploaded_lte.into())
            );
        };

        if let Some(id_gte) = self.id_gte {
            result.push(
                book::id::gte(id_gte)
            );
        };

        if let Some(id_lte) = self.id_lte {
            result.push(
                book::id::lte(id_lte)
            );
        };

        result
    }
}

#[derive(Serialize)]
pub struct RemoteBook {
    pub id: i32,
    pub title: String,
    pub lang: String,
    pub file_type: String,
    pub available_types: Vec<String>,
    pub uploaded: String,
    pub authors: Vec<Author>,
    pub translators: Vec<Author>,
    pub sequences: Vec<Sequence>,
    pub annotation_exists: bool,
    pub source: Source,
    pub remote_id: i32,
}

impl From<book::Data> for RemoteBook {
    fn from(value: book::Data) -> Self {
        let book::Data {
            id,
            title,
            lang,
            file_type,
            uploaded,
            book_authors,
            translations,
            book_sequences,
            book_annotation,
            source,
            remote_id,
            ..
        } = value;

        Self {
            id,
            title,
            lang,
            file_type: file_type.clone(),
            available_types: get_available_types(file_type, source.clone().unwrap().name),
            uploaded: uploaded.format("%Y-%m-%d").to_string(),
            authors: get_authors(book_authors),
            translators: get_translators(translations),
            sequences: get_sequences(book_sequences),
            annotation_exists: book_annotation.unwrap().is_some(),
            source: source.unwrap().as_ref().clone().into(),
            remote_id
        }
    }
}

#[derive(Serialize)]
pub struct BaseBook {
    pub id: i32,
    pub available_types: Vec<String>,
}

impl From<book::Data> for BaseBook {
    fn from(value: book::Data) -> Self {
        let book::Data {
            id,
            file_type,
            source,
            ..
        } = value;

        Self {
            id,
            available_types: get_available_types(file_type, source.clone().unwrap().name),
        }
    }
}

#[derive(Serialize)]
pub struct DetailBook {
    pub id: i32,
    pub title: String,
    pub lang: String,
    pub file_type: String,
    pub available_types: Vec<String>,
    pub uploaded: String,
    pub authors: Vec<Author>,
    pub translators: Vec<Author>,
    pub sequences: Vec<Sequence>,
    pub annotation_exists: bool,
    pub source: Source,
    pub remote_id: i32,
    pub genres: Vec<Genre>,
    pub is_deleted: bool,
    pub pages: Option<i32>
}

impl From<book::Data> for DetailBook {
    fn from(value: book::Data) -> Self {
        let book::Data {
            id,
            title,
            lang,
            file_type,
            uploaded,
            book_authors,
            translations,
            book_sequences,
            book_annotation,
            source,
            remote_id,
            book_genres,
            is_deleted,
            pages,
            ..
        } = value;

        Self {
            id,
            title,
            lang,
            file_type: file_type.clone(),
            available_types: get_available_types(file_type, source.clone().unwrap().name),
            uploaded: uploaded.format("%Y-%m-%d").to_string(),
            authors: get_authors(book_authors),
            translators: get_translators(translations),
            sequences: get_sequences(book_sequences),
            annotation_exists: book_annotation.unwrap().is_some(),
            source: source.unwrap().as_ref().clone().into(),
            remote_id,
            genres: get_genres(book_genres),
            is_deleted,
            pages,
        }
    }
}

#[derive(Deserialize)]
pub struct RandomBookFilter {
    pub allowed_langs: Vec<String>,
    pub genre: Option<i32>
}

#[derive(Serialize)]
pub struct Book {
    pub id: i32,
    pub title: String,
    pub lang: String,
    pub file_type: String,
    pub available_types: Vec<String>,
    pub uploaded: String,
    pub authors: Vec<Author>,
    pub translators: Vec<Author>,
    pub sequences: Vec<Sequence>,
    pub annotation_exists: bool,
}

impl From<book::Data> for Book {
    fn from(value: book::Data) -> Self {
        let book::Data {
            id,
            title,
            lang,
            file_type,
            uploaded,
            book_authors,
            translations,
            book_sequences,
            book_annotation,
            source,
            ..
        } = value;

        Self {
            id,
            title,
            lang,
            file_type: file_type.clone(),
            available_types: get_available_types(file_type, source.clone().unwrap().name),
            uploaded: uploaded.format("%Y-%m-%d").to_string(),
            authors: get_authors(book_authors),
            translators: get_translators(translations),
            sequences: get_sequences(book_sequences),
            annotation_exists: book_annotation.unwrap().is_some(),
        }
    }
}
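Note: get_filter_vec above translates BookFilter into prisma WhereParams, defaulting is_deleted to false when the parameter is omitted. A minimal sketch of feeding it to the generated client (the helper itself is illustrative; count/exec come from prisma-client-rust):

    use crate::prisma::PrismaClient;
    use crate::serializers::book::BookFilter;

    pub async fn count_books(client: &PrismaClient, filter: BookFilter) -> i64 {
        // Every WhereParam in the vec is ANDed together by find/count queries.
        client
            .book()
            .count(filter.get_filter_vec())
            .exec()
            .await
            .unwrap()
    }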
31 src/serializers/book_annotation.rs Normal file
@@ -0,0 +1,31 @@
use serde::Serialize;

use crate::prisma::book_annotation;


#[derive(Serialize)]
pub struct BookAnnotation {
    pub id: i32,
    pub title: String,
    pub text: String,
    pub file: Option<String>
}

impl From<book_annotation::Data> for BookAnnotation {
    fn from(value: book_annotation::Data) -> Self {
        let book_annotation::Data {
            id,
            title,
            text,
            file,
            ..
        } = value;

        Self {
            id,
            title,
            text,
            file
        }
    }
}
44 src/serializers/genre.rs Normal file
@@ -0,0 +1,44 @@
use serde::{Serialize, Deserialize};

use crate::prisma::genre;

use super::source::Source;

#[derive(Serialize)]
pub struct Genre {
    pub id: i32,
    pub source: Source,
    pub remote_id: i32,
    pub code: String,
    pub description: String,
    pub meta: String
}

impl From<genre::Data> for Genre {
    fn from(val: genre::Data) -> Self {
        let genre::Data {
            id,
            remote_id,
            code,
            description,
            meta,
            source,
            ..
        } = val;

        Genre {
            id,
            remote_id,
            code,
            description,
            meta,
            source: source.unwrap().as_ref().clone().into()
        }
    }
}


#[derive(Deserialize)]
pub struct GenreFilter {
    pub meta: Option<String>,
}
11 src/serializers/mod.rs Normal file
@@ -0,0 +1,11 @@
pub mod pagination;
pub mod author;
pub mod author_annotation;
pub mod genre;
pub mod source;
pub mod book;
pub mod sequence;
pub mod utils;
pub mod translator;
pub mod allowed_langs;
pub mod book_annotation;
63 src/serializers/pagination.rs Normal file
@@ -0,0 +1,63 @@
use serde::{Deserialize, Serialize};


fn default_page() -> i64 {
    1
}

fn default_size() -> i64 {
    50
}

#[derive(Deserialize)]
pub struct Pagination {
    #[serde(default = "default_page")]
    pub page: i64,
    #[serde(default = "default_size")]
    pub size: i64
}


#[derive(Serialize)]
pub struct Page<T> {
    pub items: Vec<T>,
    pub total: i64,
    pub page: i64,
    pub size: i64,
    pub pages: i64
}

#[derive(Serialize)]
pub struct PageWithParent<T, P> {
    pub items: Vec<T>,
    pub total: i64,
    pub page: i64,
    pub size: i64,
    pub pages: i64,
    pub parent_item: P
}

impl<T> Page<T> {
    pub fn new(items: Vec<T>, total: i64, pagination: &Pagination) -> Self {
        Self {
            items,
            total,
            page: pagination.page,
            size: pagination.size,
            pages: (total + pagination.size - 1) / pagination.size
        }
    }
}

impl<T, P> PageWithParent<T, P> {
    pub fn new(parent_item: P, items: Vec<T>, total: i64, pagination: &Pagination) -> Self {
        Self {
            items,
            total,
            page: pagination.page,
            size: pagination.size,
            pages: (total + pagination.size - 1) / pagination.size,
            parent_item
        }
    }
}
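Note: `pages` is computed with `(total + size - 1) / size`, i.e. integer ceiling division, so a partial trailing page is counted. A small illustrative check:

    let pagination = Pagination { page: 1, size: 50 };
    let page = Page::new(vec![1, 2, 3], 101, &pagination);
    assert_eq!(page.pages, 3); // 101 items at 50 per page -> 3 pages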
62 src/serializers/sequence.rs Normal file
@@ -0,0 +1,62 @@
use serde::Serialize;

use crate::prisma::{sequence, book};

use super::{author::Author, utils::{get_available_types, get_authors, get_translators}};


#[derive(Serialize)]
pub struct Sequence {
    pub id: i32,
    pub name: String,
}

impl From<sequence::Data> for Sequence {
    fn from(val: sequence::Data) -> Self {
        let sequence::Data { id, name, .. } = val;

        Sequence { id, name }
    }
}

#[derive(Serialize)]
pub struct SequenceBook {
    pub id: i32,
    pub title: String,
    pub lang: String,
    pub file_type: String,
    pub available_types: Vec<String>,
    pub uploaded: String,
    pub authors: Vec<Author>,
    pub translators: Vec<Author>,
    pub annotation_exists: bool,
}

impl From<book::Data> for SequenceBook {
    fn from(value: book::Data) -> Self {
        let book::Data {
            id,
            title,
            lang,
            file_type,
            uploaded,
            book_authors,
            translations,
            book_annotation,
            source,
            ..
        } = value;

        Self {
            id,
            title,
            lang,
            file_type: file_type.clone(),
            available_types: get_available_types(file_type, source.clone().unwrap().name),
            uploaded: uploaded.format("%Y-%m-%d").to_string(),
            authors: get_authors(book_authors),
            translators: get_translators(translations),
            annotation_exists: book_annotation.unwrap().is_some(),
        }
    }
}
25
src/serializers/source.rs
Normal file
@@ -0,0 +1,25 @@
use serde::Serialize;

use crate::prisma::source;

#[derive(Serialize)]
pub struct Source {
    pub id: i32,
    pub name: String
}

impl From<source::Data> for Source {
    fn from(val: source::Data) -> Self {
        let source::Data {
            id,
            name,
            ..
        } = val;

        Source {
            id,
            name
        }
    }
}
47
src/serializers/translator.rs
Normal file
@@ -0,0 +1,47 @@
use serde::Serialize;

use crate::prisma::book;

use super::{author::Author, sequence::Sequence, utils::{get_available_types, get_authors, get_sequences}};

#[derive(Serialize)]
pub struct TranslatorBook {
    pub id: i32,
    pub title: String,
    pub lang: String,
    pub file_type: String,
    pub available_types: Vec<String>,
    pub uploaded: String,
    pub authors: Vec<Author>,
    pub sequences: Vec<Sequence>,
    pub annotation_exists: bool,
}

impl From<book::Data> for TranslatorBook {
    fn from(val: book::Data) -> Self {
        let book::Data {
            id,
            title,
            lang,
            file_type,
            uploaded,
            book_authors,
            book_sequences,
            book_annotation,
            source,
            ..
        } = val;

        TranslatorBook {
            id,
            title,
            lang,
            file_type: file_type.clone(),
            available_types: get_available_types(file_type.clone(), source.unwrap().name),
            uploaded: uploaded.format("%Y-%m-%d").to_string(),
            authors: get_authors(book_authors),
            sequences: get_sequences(book_sequences),
            annotation_exists: book_annotation.unwrap().is_some(),
        }
    }
}
56
src/serializers/utils.rs
Normal file
@@ -0,0 +1,56 @@
use crate::prisma::{translator, book_sequence, book_author, book_genre};

use super::{author::Author, sequence::Sequence, genre::Genre};

pub fn get_available_types(file_type: String, source_name: String) -> Vec<String> {
    // "fb2" books from the "flibusta" source can be served in derived
    // formats as well, so all of them are advertised as available.
    if file_type == "fb2" && source_name == "flibusta" {
        vec![
            "fb2".to_string(),
            "fb2zip".to_string(),
            "epub".to_string(),
            "mobi".to_string(),
        ]
    } else {
        vec![file_type]
    }
}

// The helpers below assume the corresponding relation was eagerly loaded
// with `.with(...)`; the `unwrap()` calls panic otherwise.
pub fn get_authors(
    book_authors: Option<Vec<book_author::Data>>
) -> Vec<Author> {
    book_authors
        .unwrap()
        .iter()
        .map(|item| item.author.clone().unwrap().as_ref().clone().into())
        .collect()
}

pub fn get_translators(
    translations: Option<Vec<translator::Data>>
) -> Vec<Author> {
    translations
        .unwrap()
        .iter()
        .map(|item| item.author.clone().unwrap().as_ref().clone().into())
        .collect()
}

pub fn get_sequences(
    book_sequences: Option<Vec<book_sequence::Data>>
) -> Vec<Sequence> {
    book_sequences
        .unwrap()
        .iter()
        .map(|item| item.sequence.clone().unwrap().as_ref().clone().into())
        .collect()
}

pub fn get_genres(
    book_genres: Option<Vec<book_genre::Data>>
) -> Vec<Genre> {
    book_genres
        .unwrap()
        .iter()
        .map(|item| item.genre.clone().unwrap().as_ref().clone().into())
        .collect()
}
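A defensive alternative to the `unwrap()`-heavy helpers above would return an empty list instead of panicking when a relation was not fetched. A hypothetical sketch (`get_authors_safe` is an illustrative name, not part of this commit):

// Returns an empty Vec when the relation is missing instead of panicking,
// at the cost of silently masking a forgotten `.with(...)`.
pub fn get_authors_safe(
    book_authors: Option<Vec<book_author::Data>>
) -> Vec<Author> {
    book_authors
        .unwrap_or_default()
        .iter()
        .filter_map(|item| item.author.clone())
        .map(|author| author.as_ref().clone().into())
        .collect()
}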
324
src/views/authors.rs
Normal file
@@ -0,0 +1,324 @@
use std::collections::HashSet;

use axum::{Router, extract::{Query, Path}, Json, response::IntoResponse, routing::get, http::StatusCode};

use rand::Rng;

use crate::{prisma::{author, author_annotation::{self}, book, book_author, translator, book_sequence}, serializers::{pagination::{Pagination, Page, PageWithParent}, author::{Author, AuthorBook}, author_annotation::AuthorAnnotation, allowed_langs::AllowedLangs}, meilisearch::{get_meili_client, AuthorMeili}};

use super::Database;


async fn get_authors(
    db: Database,
    pagination: Query<Pagination>
) -> impl IntoResponse {
    let authors_count = db
        .author()
        .count(vec![])
        .exec()
        .await
        .unwrap();

    let authors = db
        .author()
        .find_many(vec![])
        .with(
            author::author_annotation::fetch()
        )
        .order_by(author::id::order(prisma_client_rust::Direction::Asc))
        .skip((pagination.page - 1) * pagination.size)
        .take(pagination.size)
        .exec()
        .await
        .unwrap();

    let page: Page<Author> = Page::new(
        authors.iter().map(|item| item.clone().into()).collect(),
        authors_count,
        &pagination
    );

    Json(page)
}


async fn get_random_author(
    db: Database,
    axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>
) -> impl IntoResponse {
    let client = get_meili_client();

    let authors_index = client.index("authors");

    let filter = format!(
        "author_langs IN [{}]",
        allowed_langs.join(", ")
    );

    let result = authors_index
        .search()
        .with_filter(&filter)
        .execute::<AuthorMeili>()
        .await
        .unwrap();

    let author_id = {
        let offset: usize = rand::thread_rng().gen_range(0..result.estimated_total_hits.unwrap().try_into().unwrap());

        // The offset is drawn from the filtered hit count, so the same
        // filter must apply to the second query as well.
        let result = authors_index
            .search()
            .with_filter(&filter)
            .with_limit(1)
            .with_offset(offset)
            .execute::<AuthorMeili>()
            .await
            .unwrap();

        let author = &result.hits.get(0).unwrap().result;

        author.id
    };

    let author = db
        .author()
        .find_unique(
            author::id::equals(author_id)
        )
        .with(
            author::author_annotation::fetch()
        )
        .exec()
        .await
        .unwrap()
        .unwrap();

    Json::<Author>(author.into())
}


async fn get_author(
    db: Database,
    Path(author_id): Path<i32>
) -> impl IntoResponse {
    let author = db
        .author()
        .find_unique(
            author::id::equals(author_id)
        )
        .with(
            author::author_annotation::fetch()
        )
        .exec()
        .await
        .unwrap();

    match author {
        Some(author) => Json::<Author>(author.into()).into_response(),
        None => StatusCode::NOT_FOUND.into_response(),
    }
}


async fn get_author_annotation(
    db: Database,
    Path(author_id): Path<i32>,
) -> impl IntoResponse {
    let author_annotation = db
        .author_annotation()
        .find_unique(
            author_annotation::author_id::equals(author_id)
        )
        .exec()
        .await
        .unwrap();

    match author_annotation {
        Some(annotation) => Json::<AuthorAnnotation>(annotation.into()).into_response(),
        None => StatusCode::NOT_FOUND.into_response(),
    }
}


async fn get_author_books(
    db: Database,
    Path(author_id): Path<i32>,
    axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>,
    pagination: Query<Pagination>
) -> impl IntoResponse {
    let author = db
        .author()
        .find_unique(
            author::id::equals(author_id)
        )
        .with(
            author::author_annotation::fetch()
        )
        .exec()
        .await
        .unwrap();

    let author = match author {
        Some(author) => author,
        None => return StatusCode::NOT_FOUND.into_response(),
    };

    let books_count = db
        .book()
        .count(vec![
            book::book_authors::some(vec![
                book_author::author_id::equals(author_id)
            ]),
            book::lang::in_vec(allowed_langs.clone())
        ])
        .exec()
        .await
        .unwrap();

    let books = db
        .book()
        .find_many(vec![
            book::book_authors::some(vec![
                book_author::author_id::equals(author_id)
            ]),
            book::lang::in_vec(allowed_langs)
        ])
        .with(
            book::source::fetch()
        )
        .with(
            book::book_annotation::fetch()
        )
        .with(
            book::translations::fetch(vec![])
                .with(
                    translator::author::fetch()
                        .with(
                            author::author_annotation::fetch()
                        )
                )
        )
        .with(
            book::book_sequences::fetch(vec![])
                .with(
                    book_sequence::sequence::fetch()
                )
        )
        .order_by(book::id::order(prisma_client_rust::Direction::Asc))
        .skip((pagination.page - 1) * pagination.size)
        .take(pagination.size)
        .exec()
        .await
        .unwrap();

    let page: PageWithParent<AuthorBook, Author> = PageWithParent::new(
        author.into(),
        books.iter().map(|item| item.clone().into()).collect(),
        books_count,
        &pagination
    );

    Json(page).into_response()
}


async fn get_author_books_available_types(
    db: Database,
    Path(author_id): Path<i32>,
    axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>
) -> impl IntoResponse {
    let books = db
        .book()
        .find_many(vec![
            book::book_authors::some(vec![
                book_author::author_id::equals(author_id)
            ]),
            book::lang::in_vec(allowed_langs)
        ])
        .exec()
        .await
        .unwrap();

    let mut file_types: HashSet<String> = HashSet::new();

    for book in books {
        file_types.insert(book.file_type.clone());
    }

    if file_types.contains(&"fb2".to_string()) {
        file_types.insert("epub".to_string());
        file_types.insert("mobi".to_string());
        file_types.insert("fb2zip".to_string());
    }

    Json::<Vec<String>>(file_types.into_iter().collect())
}


async fn search_authors(
    db: Database,
    Path(query): Path<String>,
    axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>,
    pagination: Query<Pagination>
) -> impl IntoResponse {
    let client = get_meili_client();

    let authors_index = client.index("authors");

    let filter = format!(
        "author_langs IN [{}]",
        allowed_langs.join(", ")
    );

    let result = authors_index
        .search()
        .with_query(&query)
        .with_filter(&filter)
        .with_offset(((pagination.page - 1) * pagination.size).try_into().unwrap())
        .with_limit(pagination.size.try_into().unwrap())
        .execute::<AuthorMeili>()
        .await
        .unwrap();

    let total = result.estimated_total_hits.unwrap();
    let author_ids: Vec<i32> = result.hits.iter().map(|a| a.result.id).collect();

    let mut authors = db
        .author()
        .find_many(vec![
            author::id::in_vec(author_ids.clone())
        ])
        .with(
            author::author_annotation::fetch()
        )
        .order_by(author::id::order(prisma_client_rust::Direction::Asc))
        .exec()
        .await
        .unwrap();

    // Restore Meilisearch's relevance order, which the database query
    // does not preserve.
    authors.sort_by(|a, b| {
        let a_pos = author_ids.iter().position(|i| *i == a.id).unwrap();
        let b_pos = author_ids.iter().position(|i| *i == b.id).unwrap();

        a_pos.cmp(&b_pos)
    });

    let page: Page<Author> = Page::new(
        authors.iter().map(|item| item.clone().into()).collect(),
        total.try_into().unwrap(),
        &pagination
    );

    Json(page)
}


pub async fn get_authors_router() -> Router {
    Router::new()
        .route("/", get(get_authors))
        .route("/random", get(get_random_author))
        .route("/:author_id", get(get_author))
        .route("/:author_id/annotation", get(get_author_annotation))
        .route("/:author_id/books", get(get_author_books))
        .route("/:author_id/available_types", get(get_author_books_available_types))
        .route("/search/:query", get(search_authors))
}
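The `sort_by`/`position` re-ranking in search_authors rescans `author_ids` inside every comparison, which is quadratic in the page size; harmless at the default size of 50. A hypothetical precomputed-rank variant, sketched for illustration only:

use std::collections::HashMap;

// Build each id's rank once, then sort by it: one HashMap lookup per
// comparison key instead of a linear scan per comparison.
fn sort_by_search_rank<T>(items: &mut [T], ids: &[i32], id_of: impl Fn(&T) -> i32) {
    let rank: HashMap<i32, usize> = ids.iter().enumerate().map(|(i, id)| (*id, i)).collect();
    items.sort_by_key(|item| rank[&id_of(item)]);
}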
424
src/views/books.rs
Normal file
@@ -0,0 +1,424 @@
use axum::{Router, routing::get, extract::{Query, Path}, Json, response::IntoResponse, http::StatusCode};
use prisma_client_rust::Direction;
use rand::Rng;

use crate::{serializers::{book::{BookFilter, RemoteBook, BaseBook, DetailBook, RandomBookFilter, Book}, pagination::{Pagination, Page}, book_annotation::BookAnnotation, allowed_langs::AllowedLangs}, prisma::{book::{self}, book_author, author, translator, book_sequence, book_genre, book_annotation, genre}, meilisearch::{get_meili_client, BookMeili}};

use super::Database;


pub async fn get_books(
    db: Database,
    axum_extra::extract::Query(book_filter): axum_extra::extract::Query<BookFilter>,
    pagination: Query<Pagination>
) -> impl IntoResponse {
    let filter = book_filter.get_filter_vec();

    let books_count = db
        .book()
        .count(filter.clone())
        .exec()
        .await
        .unwrap();

    let books = db
        .book()
        .find_many(filter)
        .with(
            book::book_annotation::fetch()
        )
        .with(
            book::source::fetch()
        )
        .with(
            book::book_authors::fetch(vec![])
                .with(
                    book_author::author::fetch()
                        .with(
                            author::author_annotation::fetch()
                        )
                )
        )
        .with(
            book::translations::fetch(vec![])
                .with(
                    translator::author::fetch()
                        .with(
                            author::author_annotation::fetch()
                        )
                )
        )
        .with(
            book::book_sequences::fetch(vec![])
                .with(
                    book_sequence::sequence::fetch()
                )
        )
        .order_by(book::id::order(Direction::Asc))
        .skip((pagination.page - 1) * pagination.size)
        .take(pagination.size)
        .exec()
        .await
        .unwrap();

    let page: Page<RemoteBook> = Page::new(
        books.iter().map(|item| item.clone().into()).collect(),
        books_count,
        &pagination
    );

    Json(page)
}

pub async fn get_base_books(
    db: Database,
    axum_extra::extract::Query(book_filter): axum_extra::extract::Query<BookFilter>,
    pagination: Query<Pagination>
) -> impl IntoResponse {
    let filter = book_filter.get_filter_vec();

    let books_count = db
        .book()
        .count(filter.clone())
        .exec()
        .await
        .unwrap();

    let books = db
        .book()
        .find_many(filter)
        .with(
            book::source::fetch()
        )
        .order_by(book::id::order(Direction::Asc))
        .skip((pagination.page - 1) * pagination.size)
        .take(pagination.size)
        .exec()
        .await
        .unwrap();

    let page: Page<BaseBook> = Page::new(
        books.iter().map(|item| item.clone().into()).collect(),
        books_count,
        &pagination
    );

    Json(page)
}

pub async fn get_random_book(
    db: Database,
    axum_extra::extract::Query(book_filter): axum_extra::extract::Query<RandomBookFilter>,
) -> impl IntoResponse {
    let client = get_meili_client();

    let books_index = client.index("books");

    let filter = {
        let langs_filter = format!(
            "lang IN [{}]",
            book_filter.allowed_langs.join(", ")
        );
        let genre_filter = match book_filter.genre {
            Some(v) => format!(" AND genres = {v}"),
            None => "".to_string(),
        };

        format!("{langs_filter}{genre_filter}")
    };

    let result = books_index
        .search()
        .with_filter(&filter)
        .execute::<BookMeili>()
        .await
        .unwrap();

    let book_id = {
        let offset: usize = rand::thread_rng().gen_range(0..result.estimated_total_hits.unwrap().try_into().unwrap());

        // The offset is drawn from the filtered hit count, so the same
        // filter must apply to the second query as well.
        let result = books_index
            .search()
            .with_filter(&filter)
            .with_limit(1)
            .with_offset(offset)
            .execute::<BookMeili>()
            .await
            .unwrap();

        let book = &result.hits.get(0).unwrap().result;

        book.id
    };

    let book = db
        .book()
        .find_unique(book::id::equals(book_id))
        .with(
            book::book_annotation::fetch()
        )
        .with(
            book::source::fetch()
        )
        .with(
            book::book_authors::fetch(vec![])
                .with(
                    book_author::author::fetch()
                        .with(
                            author::author_annotation::fetch()
                        )
                )
        )
        .with(
            book::translations::fetch(vec![])
                .with(
                    translator::author::fetch()
                        .with(
                            author::author_annotation::fetch()
                        )
                )
        )
        .with(
            book::book_sequences::fetch(vec![])
                .with(
                    book_sequence::sequence::fetch()
                )
        )
        .with(
            book::book_genres::fetch(vec![])
                .with(
                    book_genre::genre::fetch()
                        .with(
                            genre::source::fetch()
                        )
                )
        )
        .exec()
        .await
        .unwrap()
        .unwrap();

    Json::<DetailBook>(book.into()).into_response()
}

pub async fn get_remote_book(
    db: Database,
    Path((source_id, remote_id)): Path<(i32, i32)>,
) -> impl IntoResponse {
    let book = db
        .book()
        .find_unique(book::source_id_remote_id(source_id, remote_id))
        .with(
            book::book_annotation::fetch()
        )
        .with(
            book::source::fetch()
        )
        .with(
            book::book_authors::fetch(vec![])
                .with(
                    book_author::author::fetch()
                        .with(
                            author::author_annotation::fetch()
                        )
                )
        )
        .with(
            book::translations::fetch(vec![])
                .with(
                    translator::author::fetch()
                        .with(
                            author::author_annotation::fetch()
                        )
                )
        )
        .with(
            book::book_sequences::fetch(vec![])
                .with(
                    book_sequence::sequence::fetch()
                )
        )
        .with(
            book::book_genres::fetch(vec![])
                .with(
                    book_genre::genre::fetch()
                        .with(
                            genre::source::fetch()
                        )
                )
        )
        .exec()
        .await
        .unwrap();

    match book {
        Some(book) => Json::<DetailBook>(book.into()).into_response(),
        None => StatusCode::NOT_FOUND.into_response(),
    }
}

pub async fn search_books(
    db: Database,
    Path(query): Path<String>,
    axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>,
    pagination: Query<Pagination>
) -> impl IntoResponse {
    let client = get_meili_client();

    let book_index = client.index("books");

    let filter = format!(
        "lang IN [{}]",
        allowed_langs.join(", ")
    );

    let result = book_index
        .search()
        .with_query(&query)
        .with_filter(&filter)
        .with_offset(((pagination.page - 1) * pagination.size).try_into().unwrap())
        .with_limit(pagination.size.try_into().unwrap())
        .execute::<BookMeili>()
        .await
        .unwrap();

    let total = result.estimated_total_hits.unwrap();
    let book_ids: Vec<i32> = result.hits.iter().map(|a| a.result.id).collect();

    let mut books = db
        .book()
        .find_many(vec![book::id::in_vec(book_ids.clone())])
        .with(
            book::book_annotation::fetch()
        )
        .with(
            book::source::fetch()
        )
        .with(
            book::book_authors::fetch(vec![])
                .with(
                    book_author::author::fetch()
                        .with(
                            author::author_annotation::fetch()
                        )
                )
        )
        .with(
            book::translations::fetch(vec![])
                .with(
                    translator::author::fetch()
                        .with(
                            author::author_annotation::fetch()
                        )
                )
        )
        .with(
            book::book_sequences::fetch(vec![])
                .with(
                    book_sequence::sequence::fetch()
                )
        )
        .exec()
        .await
        .unwrap();

    // Restore Meilisearch's relevance order.
    books.sort_by(|a, b| {
        let a_pos = book_ids.iter().position(|i| *i == a.id).unwrap();
        let b_pos = book_ids.iter().position(|i| *i == b.id).unwrap();

        a_pos.cmp(&b_pos)
    });

    let page: Page<Book> = Page::new(
        books.iter().map(|item| item.clone().into()).collect(),
        total.try_into().unwrap(),
        &pagination
    );

    Json(page)
}

pub async fn get_book(
    db: Database,
    Path(book_id): Path<i32>,
) -> impl IntoResponse {
    let book = db
        .book()
        .find_unique(book::id::equals(book_id))
        .with(
            book::book_annotation::fetch()
        )
        .with(
            book::source::fetch()
        )
        .with(
            book::book_authors::fetch(vec![])
                .with(
                    book_author::author::fetch()
                        .with(
                            author::author_annotation::fetch()
                        )
                )
        )
        .with(
            book::translations::fetch(vec![])
                .with(
                    translator::author::fetch()
                        .with(
                            author::author_annotation::fetch()
                        )
                )
        )
        .with(
            book::book_sequences::fetch(vec![])
                .with(
                    book_sequence::sequence::fetch()
                )
        )
        .with(
            book::book_genres::fetch(vec![])
                .with(
                    book_genre::genre::fetch()
                        .with(
                            genre::source::fetch()
                        )
                )
        )
        .exec()
        .await
        .unwrap();

    match book {
        Some(book) => Json::<DetailBook>(book.into()).into_response(),
        None => StatusCode::NOT_FOUND.into_response(),
    }
}

pub async fn get_book_annotation(
    db: Database,
    Path(book_id): Path<i32>,
) -> impl IntoResponse {
    let book_annotation = db
        .book_annotation()
        .find_unique(
            book_annotation::book_id::equals(book_id)
        )
        .exec()
        .await
        .unwrap();

    match book_annotation {
        Some(book_annotation) => Json::<BookAnnotation>(book_annotation.into()).into_response(),
        None => StatusCode::NOT_FOUND.into_response(),
    }
}

pub async fn get_books_router() -> Router {
    Router::new()
        .route("/", get(get_books))
        .route("/base/", get(get_base_books))
        .route("/random", get(get_random_book))
        .route("/remote/:source_id/:remote_id", get(get_remote_book))
        .route("/search/:query", get(search_books))
        .route("/:book_id", get(get_book))
        .route("/:book_id/annotation", get(get_book_annotation))
}
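The Meilisearch filter in get_random_book is plain string concatenation; for example allowed_langs ["ru", "en"] and genre Some(42) yield "lang IN [ru, en] AND genres = 42". A standalone reconstruction of that assembly (illustrative only):

fn build_filter(allowed_langs: &[String], genre: Option<i32>) -> String {
    let langs_filter = format!("lang IN [{}]", allowed_langs.join(", "));
    match genre {
        Some(v) => format!("{langs_filter} AND genres = {v}"),
        None => langs_filter,
    }
}

fn main() {
    let filter = build_filter(&["ru".into(), "en".into()], Some(42));
    assert_eq!(filter, "lang IN [ru, en] AND genres = 42");
}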
80
src/views/genres.rs
Normal file
@@ -0,0 +1,80 @@
use std::collections::HashSet;

use axum::{Router, routing::get, extract::Query, Json, response::IntoResponse};
use prisma_client_rust::Direction;

use crate::{serializers::{pagination::{Pagination, Page}, genre::{Genre, GenreFilter}}, prisma::genre};

use super::Database;


pub async fn get_genres(
    db: Database,
    pagination: Query<Pagination>,
    Query(GenreFilter { meta }): Query<GenreFilter>
) -> impl IntoResponse {
    let filter = {
        match meta {
            Some(meta) => vec![
                genre::meta::equals(meta)
            ],
            None => vec![],
        }
    };

    let genres_count = db
        .genre()
        .count(filter.clone())
        .exec()
        .await
        .unwrap();

    let genres = db
        .genre()
        .find_many(filter)
        .with(
            genre::source::fetch()
        )
        .order_by(genre::id::order(Direction::Asc))
        .skip((pagination.page - 1) * pagination.size)
        .take(pagination.size)
        .exec()
        .await
        .unwrap();

    let page: Page<Genre> = Page::new(
        genres.iter().map(|item| item.clone().into()).collect(),
        genres_count,
        &pagination
    );

    Json(page)
}


pub async fn get_genre_metas(
    db: Database
) -> impl IntoResponse {
    let genres = db
        .genre()
        .find_many(vec![])
        .order_by(genre::id::order(Direction::Asc))
        .exec()
        .await
        .unwrap();

    let mut metas: HashSet<String> = HashSet::new();

    for genre in genres {
        metas.insert(genre.meta.clone());
    }

    Json::<Vec<String>>(metas.into_iter().collect())
}


pub async fn get_genres_router() -> Router {
    Router::new()
        .route("/", get(get_genres))
        .route("/metas", get(get_genre_metas))
}
72
src/views/mod.rs
Normal file
@@ -0,0 +1,72 @@
use std::sync::Arc;

use axum::{Router, routing::get, middleware::{self, Next}, Extension, http::{Request, StatusCode, self}, response::Response};
use axum_prometheus::PrometheusMetricLayer;
use tower_http::trace::{TraceLayer, self};
use tracing::Level;

use crate::{config::CONFIG, db::get_prisma_client, prisma::PrismaClient};

use self::{authors::get_authors_router, genres::get_genres_router, books::get_books_router, sequences::get_sequences_router};
use self::translators::get_translators_router;

pub mod authors;
pub mod books;
pub mod genres;
pub mod sequences;
pub mod translators;


pub type Database = Extension<Arc<PrismaClient>>;


async fn auth<B>(req: Request<B>, next: Next<B>) -> Result<Response, StatusCode> {
    let auth_header = req.headers()
        .get(http::header::AUTHORIZATION)
        .and_then(|header| header.to_str().ok());

    let auth_header = if let Some(auth_header) = auth_header {
        auth_header
    } else {
        return Err(StatusCode::UNAUTHORIZED);
    };

    if auth_header != CONFIG.api_key {
        return Err(StatusCode::UNAUTHORIZED);
    }

    Ok(next.run(req).await)
}


pub async fn get_router() -> Router {
    let client = Arc::new(get_prisma_client().await);

    let (prometheus_layer, metric_handle) = PrometheusMetricLayer::pair();

    let app_router = Router::new()
        .nest("/api/v1/authors", get_authors_router().await)
        .nest("/api/v1/translators", get_translators_router().await)
        .nest("/api/v1/genres", get_genres_router().await)
        .nest("/api/v1/books", get_books_router().await)
        .nest("/api/v1/sequences", get_sequences_router().await)
        .layer(middleware::from_fn(auth))
        .layer(Extension(client))
        .layer(prometheus_layer);

    let metric_router = Router::new()
        .route("/metrics", get(|| async move { metric_handle.render() }));

    Router::new()
        .nest("/", app_router)
        .nest("/", metric_router)
        .layer(
            TraceLayer::new_for_http()
                .make_span_with(trace::DefaultMakeSpan::new()
                    .level(Level::INFO))
                .on_response(trace::DefaultOnResponse::new()
                    .level(Level::INFO)),
        )
}
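Every route nested under /api/v1 passes through the `auth` middleware, which compares the raw `Authorization` header value to `CONFIG.api_key` verbatim (no "Bearer" prefix). A hypothetical client call, assuming a local instance, a placeholder port and key, and the `reqwest` crate:

// The URL, port and key below are placeholders, not part of this commit.
#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    let response = reqwest::Client::new()
        .get("http://localhost:8080/api/v1/books/?page=1&size=50")
        .header("Authorization", "my-api-key") // must equal CONFIG.api_key
        .send()
        .await?;

    println!("{}", response.status()); // 401 UNAUTHORIZED without a valid key
    Ok(())
}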
253
src/views/sequences.rs
Normal file
@@ -0,0 +1,253 @@
use std::collections::HashSet;

use axum::{Router, routing::get, extract::{Path, Query}, http::StatusCode, response::IntoResponse, Json};
use rand::Rng;

use crate::{prisma::{sequence, book_sequence, book, book_author, author, translator}, serializers::{sequence::{Sequence, SequenceBook}, allowed_langs::AllowedLangs, pagination::{PageWithParent, Pagination, Page}}, meilisearch::{get_meili_client, SequenceMeili}};

use super::Database;


async fn get_random_sequence(
    db: Database,
    axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>
) -> impl IntoResponse {
    let client = get_meili_client();

    let sequences_index = client.index("sequences");

    let filter = format!(
        "langs IN [{}]",
        allowed_langs.join(", ")
    );

    let result = sequences_index
        .search()
        .with_filter(&filter)
        .execute::<SequenceMeili>()
        .await
        .unwrap();

    let sequence_id = {
        let offset: usize = rand::thread_rng().gen_range(0..result.estimated_total_hits.unwrap().try_into().unwrap());

        // The offset is drawn from the filtered hit count, so the same
        // filter must apply to the second query as well.
        let result = sequences_index
            .search()
            .with_filter(&filter)
            .with_limit(1)
            .with_offset(offset)
            .execute::<SequenceMeili>()
            .await
            .unwrap();

        let sequence = &result.hits.get(0).unwrap().result;

        sequence.id
    };

    let sequence = db
        .sequence()
        .find_unique(
            sequence::id::equals(sequence_id)
        )
        .exec()
        .await
        .unwrap()
        .unwrap();

    Json::<Sequence>(sequence.into())
}

async fn search_sequence(
    db: Database,
    Path(query): Path<String>,
    axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>,
    pagination: Query<Pagination>
) -> impl IntoResponse {
    let client = get_meili_client();

    let sequence_index = client.index("sequences");

    let filter = format!(
        "langs IN [{}]",
        allowed_langs.join(", ")
    );

    let result = sequence_index
        .search()
        .with_query(&query)
        .with_filter(&filter)
        .with_offset(((pagination.page - 1) * pagination.size).try_into().unwrap())
        .with_limit(pagination.size.try_into().unwrap())
        .execute::<SequenceMeili>()
        .await
        .unwrap();

    let total = result.estimated_total_hits.unwrap();
    let sequence_ids: Vec<i32> = result.hits.iter().map(|a| a.result.id).collect();

    let mut sequences = db
        .sequence()
        .find_many(vec![
            sequence::id::in_vec(sequence_ids.clone())
        ])
        .exec()
        .await
        .unwrap();

    // Restore Meilisearch's relevance order.
    sequences.sort_by(|a, b| {
        let a_pos = sequence_ids.iter().position(|i| *i == a.id).unwrap();
        let b_pos = sequence_ids.iter().position(|i| *i == b.id).unwrap();

        a_pos.cmp(&b_pos)
    });

    let page: Page<Sequence> = Page::new(
        sequences.iter().map(|item| item.clone().into()).collect(),
        total.try_into().unwrap(),
        &pagination
    );

    Json(page)
}

async fn get_sequence(
    db: Database,
    Path(sequence_id): Path<i32>
) -> impl IntoResponse {
    let sequence = db
        .sequence()
        .find_unique(
            sequence::id::equals(sequence_id)
        )
        .exec()
        .await
        .unwrap();

    match sequence {
        Some(sequence) => Json::<Sequence>(sequence.into()).into_response(),
        None => StatusCode::NOT_FOUND.into_response(),
    }
}

async fn get_sequence_available_types(
    db: Database,
    Path(sequence_id): Path<i32>,
    axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>
) -> impl IntoResponse {
    let books = db
        .book()
        .find_many(vec![
            book::book_sequences::some(vec![
                book_sequence::sequence_id::equals(sequence_id)
            ]),
            book::lang::in_vec(allowed_langs)
        ])
        .exec()
        .await
        .unwrap();

    let mut file_types: HashSet<String> = HashSet::new();

    for book in books {
        file_types.insert(book.file_type.clone());
    }

    if file_types.contains(&"fb2".to_string()) {
        file_types.insert("epub".to_string());
        file_types.insert("mobi".to_string());
        file_types.insert("fb2zip".to_string());
    }

    Json::<Vec<String>>(file_types.into_iter().collect())
}

async fn get_sequence_books(
    db: Database,
    Path(sequence_id): Path<i32>,
    axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>,
    pagination: Query<Pagination>
) -> impl IntoResponse {
    let sequence = db
        .sequence()
        .find_unique(
            sequence::id::equals(sequence_id)
        )
        .exec()
        .await
        .unwrap();

    let sequence = match sequence {
        Some(v) => v,
        None => return StatusCode::NOT_FOUND.into_response(),
    };

    let books_count = db
        .book()
        .count(vec![
            book::book_sequences::some(vec![
                book_sequence::sequence_id::equals(sequence_id)
            ]),
            book::lang::in_vec(allowed_langs.clone())
        ])
        .exec()
        .await
        .unwrap();

    let books = db
        .book()
        .find_many(vec![
            book::book_sequences::some(vec![
                book_sequence::sequence_id::equals(sequence_id)
            ]),
            book::lang::in_vec(allowed_langs.clone())
        ])
        .with(
            book::source::fetch()
        )
        .with(
            book::book_annotation::fetch()
        )
        .with(
            book::book_authors::fetch(vec![])
                .with(
                    book_author::author::fetch()
                        .with(
                            author::author_annotation::fetch()
                        )
                )
        )
        .with(
            book::translations::fetch(vec![])
                .with(
                    translator::author::fetch()
                        .with(
                            author::author_annotation::fetch()
                        )
                )
        )
        .order_by(book::id::order(prisma_client_rust::Direction::Asc))
        .skip((pagination.page - 1) * pagination.size)
        .take(pagination.size)
        .exec()
        .await
        .unwrap();

    let page: PageWithParent<SequenceBook, Sequence> = PageWithParent::new(
        sequence.into(),
        books.iter().map(|item| item.clone().into()).collect(),
        books_count,
        &pagination
    );

    Json(page).into_response()
}


pub async fn get_sequences_router() -> Router {
    Router::new()
        .route("/random", get(get_random_sequence))
        .route("/search/:query", get(search_sequence))
        .route("/:sequence_id", get(get_sequence))
        .route("/:sequence_id/available_types", get(get_sequence_available_types))
        .route("/:sequence_id/books", get(get_sequence_books))
}
188
src/views/translators.rs
Normal file
@@ -0,0 +1,188 @@
use std::collections::HashSet;

use axum::{Router, routing::get, extract::{Path, Query}, response::IntoResponse, Json, http::StatusCode};

use crate::{serializers::{pagination::{Pagination, Page, PageWithParent}, author::Author, translator::TranslatorBook, allowed_langs::AllowedLangs}, meilisearch::{get_meili_client, AuthorMeili}, prisma::{author, book::{self}, translator, book_author, book_sequence}};

use super::Database;


async fn get_translated_books(
    db: Database,
    Path(translator_id): Path<i32>,
    axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>,
    pagination: Query<Pagination>
) -> impl IntoResponse {
    let translator = db
        .author()
        .find_unique(
            author::id::equals(translator_id)
        )
        .with(
            author::author_annotation::fetch()
        )
        .exec()
        .await
        .unwrap();

    let translator = match translator {
        Some(translator) => translator,
        None => return StatusCode::NOT_FOUND.into_response(),
    };

    let books_count = db
        .book()
        .count(vec![
            book::translations::some(vec![
                translator::author_id::equals(translator_id)
            ]),
            book::lang::in_vec(allowed_langs.clone())
        ])
        .exec()
        .await
        .unwrap();

    let books = db
        .book()
        .find_many(vec![
            book::translations::some(vec![
                translator::author_id::equals(translator_id)
            ]),
            book::lang::in_vec(allowed_langs)
        ])
        .with(
            book::source::fetch()
        )
        .with(
            book::book_annotation::fetch()
        )
        .with(
            book::book_authors::fetch(vec![])
                .with(
                    book_author::author::fetch()
                        .with(
                            author::author_annotation::fetch()
                        )
                )
        )
        .with(
            book::book_sequences::fetch(vec![])
                .with(
                    book_sequence::sequence::fetch()
                )
        )
        .order_by(book::id::order(prisma_client_rust::Direction::Asc))
        .skip((pagination.page - 1) * pagination.size)
        .take(pagination.size)
        .exec()
        .await
        .unwrap();

    let page: PageWithParent<TranslatorBook, Author> = PageWithParent::new(
        translator.into(),
        books.iter().map(|item| item.clone().into()).collect(),
        books_count,
        &pagination
    );

    Json(page).into_response()
}


async fn get_translated_books_available_types(
    db: Database,
    Path(translator_id): Path<i32>,
    axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>
) -> impl IntoResponse {
    let books = db
        .book()
        .find_many(vec![
            book::translations::some(vec![
                translator::author_id::equals(translator_id)
            ]),
            book::lang::in_vec(allowed_langs)
        ])
        .exec()
        .await
        .unwrap();

    let mut file_types: HashSet<String> = HashSet::new();

    for book in books {
        file_types.insert(book.file_type.clone());
    }

    if file_types.contains(&"fb2".to_string()) {
        file_types.insert("epub".to_string());
        file_types.insert("mobi".to_string());
        file_types.insert("fb2zip".to_string());
    }

    Json::<Vec<String>>(file_types.into_iter().collect())
}


async fn search_translators(
    db: Database,
    Path(query): Path<String>,
    axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>,
    pagination: Query<Pagination>
) -> impl IntoResponse {
    let client = get_meili_client();

    let authors_index = client.index("authors");

    let filter = format!(
        "translator_langs IN [{}]",
        allowed_langs.join(", ")
    );

    let result = authors_index
        .search()
        .with_query(&query)
        .with_filter(&filter)
        .with_offset(((pagination.page - 1) * pagination.size).try_into().unwrap())
        .with_limit(pagination.size.try_into().unwrap())
        .execute::<AuthorMeili>()
        .await
        .unwrap();

    let total = result.estimated_total_hits.unwrap();
    let translator_ids: Vec<i32> = result.hits.iter().map(|a| a.result.id).collect();

    let mut translators = db
        .author()
        .find_many(vec![
            author::id::in_vec(translator_ids.clone())
        ])
        .with(
            author::author_annotation::fetch()
        )
        .order_by(author::id::order(prisma_client_rust::Direction::Asc))
        .exec()
        .await
        .unwrap();

    translators.sort_by(|a, b| {
        let a_pos = translator_ids.iter().position(|i| *i == a.id).unwrap();
        let b_pos = translator_ids.iter().position(|i| *i == b.id).unwrap();

        a_pos.cmp(&b_pos)
    });

    let page: Page<Author> = Page::new(
        translators.iter().map(|item| item.clone().into()).collect(),
        total.try_into().unwrap(),
        &pagination
    );

    Json(page)
}


pub async fn get_translators_router() -> Router {
    Router::new()
        .route("/:translator_id/books", get(get_translated_books))
        .route("/:translator_id/available_types", get(get_translated_books_available_types))
        .route("/search/:query", get(search_translators))
}
@@ -1,5 +0,0 @@
from fastapi_book_server import __version__


def test_version():
    assert __version__ == "0.1.0"