Mirror of https://github.com/flibusta-apps/book_library_server.git (synced 2025-12-06 07:05:36 +01:00)

Commit: Add search result cache
@@ -1,15 +1,17 @@
from typing import Optional, Generic, TypeVar, Union
from itertools import permutations
from databases import Database
import json

from fastapi_pagination.api import resolve_params
from fastapi_pagination.bases import AbstractParams, RawParams
from app.utils.pagination import Page, CustomPage
import aioredis
import orjson

from ormar import Model, QuerySet
from sqlalchemy import text, func, select, or_, Table, Column, cast, Text
from sqlalchemy.orm import Session
from databases import Database


def join_fields(fields):
@@ -30,6 +32,7 @@ class TRGMSearchService(Generic[T]):
    SELECT_RELATED: Optional[Union[list[str], str]] = None
    PREFETCH_RELATED: Optional[Union[list[str], str]] = None
    FILTERS = []
    CACHE_TTL = 5 * 60

    @classmethod
    def get_params(cls) -> AbstractParams:
@@ -78,15 +81,13 @@ class TRGMSearchService(Generic[T]):
        )

    @classmethod
    async def get_objects(cls, query_data: str) -> tuple[int, list[T]]:
    async def _get_object_ids(cls, query_data: str) -> list[int]:
        similarity = cls.get_similarity_subquery(query_data)
        similarity_filter = cls.get_similarity_filter_subquery(query_data)

        params = cls.get_raw_params()

        session = Session(cls.database.connection())

        q1 = session.query(
        filtered_objects_query = session.query(
            cls.table.c.id, similarity
        ).order_by(
            text('sml DESC')
@@ -95,23 +96,57 @@ class TRGMSearchService(Generic[T]):
            *cls.FILTERS
        ).cte('objs')

        sq = session.query(q1.c.id).limit(params.limit).offset(params.offset).subquery()

        q2 = session.query(
            func.json_build_object(
                text("'total'"), func.count(q1.c.id),
                text("'items'"), select(func.array_to_json(func.array_agg(sq.c.id)))
            )
        object_ids_query = session.query(
            func.array_agg(filtered_objects_query.c.id)
        ).cte()

        print(str(q2))

        row = await cls.database.fetch_one(q2)
        row = await cls.database.fetch_one(object_ids_query)

        if row is None:
            raise ValueError('Something is wrong!')

        result = json.loads(row['json_build_object_1'])
        return row['array_agg_1']

    @classmethod
    def get_cache_key(cls, query_data: str) -> str:
        model_class_name = cls.model.__class__.__name__
        return f"{model_class_name}_{query_data}"

    @classmethod
    async def get_cached_ids(cls, query_data: str, redis: aioredis.Redis) -> Optional[list[int]]:
        try:
            key = cls.get_cache_key(query_data)
            data = await redis.get(key)

            if data is None:
                return data

            return orjson.loads(data)
        except aioredis.RedisError as e:
            print(e)
            return None

    @classmethod
    async def cache_object_ids(cls, query_data: str, object_ids: list[int], redis: aioredis.Redis):
        try:
            key = cls.get_cache_key(query_data)
            await redis.set(key, orjson.dumps(object_ids), ex=cls.CACHE_TTL)
        except aioredis.RedisError as e:
            print(e)

    @classmethod
    async def get_objects(cls, query_data: str, redis: aioredis.Redis) -> tuple[int, list[T]]:
        params = cls.get_raw_params()

        cached_object_ids = await cls.get_cached_ids(query_data, redis)

        if cached_object_ids is None:
            object_ids = await cls._get_object_ids(query_data)
            await cls.cache_object_ids(query_data, object_ids, redis)
        else:
            object_ids = cached_object_ids

        limited_object_ids = object_ids[params.offset:params.offset + params.limit]

        queryset: QuerySet[T] = cls.model.objects

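Note: the new helpers implement a plain cache-aside pattern: the full list of matching IDs is serialized with orjson and stored under a per-model key for CACHE_TTL seconds, and the similarity query runs only on a cache miss. A minimal standalone sketch of that round-trip, assuming a reachable local Redis (the host, key, and IDs below are illustrative, not taken from the commit):

import asyncio
import aioredis
import orjson

async def demo() -> None:
    redis = aioredis.Redis(host="localhost", port=6379, db=0)

    key = "Author_pushkin"      # hypothetical "<model>_<query>" key, as get_cache_key builds
    object_ids = [17, 42, 99]   # IDs as the similarity query would return them

    # store the whole ID list for five minutes (mirrors CACHE_TTL = 5 * 60)
    await redis.set(key, orjson.dumps(object_ids), ex=5 * 60)

    # later requests read the list back instead of re-running the search
    cached = await redis.get(key)
    if cached is not None:
        object_ids = orjson.loads(cached)

    await redis.close()

asyncio.run(demo())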
@@ -121,14 +156,13 @@ class TRGMSearchService(Generic[T]):
        if cls.SELECT_RELATED:
            queryset = queryset.select_related(cls.SELECT_RELATED)

        return result['total'], await queryset.filter(id__in=result['items']).all()

        return len(object_ids), await queryset.filter(id__in=limited_object_ids).all()

    @classmethod
    async def get(cls, query: str) -> Page[T]:
    async def get(cls, query: str, redis: aioredis.Redis) -> Page[T]:
        params = cls.get_params()

        total, objects = await cls.get_objects(query)
        total, objects = await cls.get_objects(query, redis)

        return CustomPage.create(
            items=objects,

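Because the cache now holds the complete, similarity-ordered ID list, pagination moves from SQL into Python: the reported total is simply len(object_ids) and the page is a slice of that list. A tiny worked example with made-up values:

object_ids = [42, 7, 99, 13, 58, 61]  # all matching IDs, best match first (hypothetical)
offset, limit = 3, 3                  # e.g. the second page of size 3

total = len(object_ids)                                  # 6
limited_object_ids = object_ids[offset:offset + limit]   # [13, 58, 61]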
@@ -1,4 +1,4 @@
from fastapi import APIRouter, Depends, HTTPException, status
from fastapi import APIRouter, Depends, Request, HTTPException, status

from fastapi_pagination import Params
from fastapi_pagination.ext.ormar import paginate
@@ -81,5 +81,5 @@ async def get_translated_books(id: int):


@author_router.get("/search/{query}", response_model=CustomPage[Author], dependencies=[Depends(Params)])
async def search_authors(query: str):
    return await AuthorTGRMSearchService.get(query)
async def search_authors(query: str, request: Request):
    return await AuthorTGRMSearchService.get(query, request.app.state.redis)

@@ -1,6 +1,6 @@
from typing import Union

from fastapi import APIRouter, Depends, HTTPException, status
from fastapi import APIRouter, Depends, Request, HTTPException, status

from fastapi_pagination import Params
from fastapi_pagination.ext.ormar import paginate
@@ -92,5 +92,5 @@ async def get_book_annotation(id: int):


@book_router.get("/search/{query}", response_model=CustomPage[Book], dependencies=[Depends(Params)])
async def search_books(query: str):
    return await BookTGRMSearchService.get(query)
async def search_books(query: str, request: Request):
    return await BookTGRMSearchService.get(query, request.app.state.redis)

@@ -1,4 +1,4 @@
from fastapi import APIRouter, Depends
from fastapi import APIRouter, Depends, Request

from fastapi_pagination import Params
from fastapi_pagination.ext.ormar import paginate
@@ -37,5 +37,5 @@ async def create_sequence(data: CreateSequence):


@sequence_router.get("/search/{query}", response_model=CustomPage[Sequence], dependencies=[Depends(Params)])
async def search_sequences(query: str):
    return await SequenceTGRMSearchService.get(query)
async def search_sequences(query: str, request: Request):
    return await SequenceTGRMSearchService.get(query, request.app.state.redis)

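All three search endpoints above get the same treatment: the handler accepts the raw Request and passes the shared client from app.state.redis down to the search service. A condensed, self-contained sketch of that pattern (route path, key, and client settings are placeholders; the real routers and response models are in the files shown in this diff):

import aioredis
from fastapi import FastAPI, Request

app = FastAPI()
app.state.redis = aioredis.Redis(host="localhost", port=6379)  # normally built from env_config

@app.get("/search/{query}")
async def search(query: str, request: Request):
    # handlers reach the process-wide client through the application state
    redis: aioredis.Redis = request.app.state.redis
    cached = await redis.get(f"Example_{query}")  # hypothetical key, mirroring get_cache_key
    return {"query": query, "cache_hit": cached is not None}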
@@ -1,8 +1,10 @@
from operator import add
from fastapi import FastAPI
from fastapi_pagination import add_pagination
import aioredis

from core.db import database
from core.config import env_config

from app.views import routers


@@ -11,6 +13,13 @@ def start_app() -> FastAPI:

    app.state.database = database

    app.state.redis = aioredis.Redis(
        host=env_config.REDIS_HOST,
        port=env_config.REDIS_PORT,
        db=env_config.REDIS_DB,
        password=env_config.REDIS_PASSWORD,
    )

    for router in routers:
        app.include_router(router)

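Worth noting: aioredis 2.x does not open a connection when Redis(...) is constructed; the first command does. A possible pair of lifecycle hooks (not part of this commit) to fail fast on startup and release the pool on shutdown, sketched with placeholder connection settings:

import aioredis
from fastapi import FastAPI

def start_app() -> FastAPI:
    app = FastAPI()
    app.state.redis = aioredis.Redis(host="localhost", port=6379)  # placeholder; the commit uses env_config

    @app.on_event("startup")
    async def check_redis() -> None:
        await app.state.redis.ping()  # raises if Redis is unreachable

    @app.on_event("shutdown")
    async def close_redis() -> None:
        await app.state.redis.close()  # release the client's connections

    return app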
@@ -1,3 +1,5 @@
from typing import Optional

from pydantic import BaseSettings


@@ -10,6 +12,11 @@ class EnvConfig(BaseSettings):
    POSTGRES_PORT: int
    POSTGRES_DB: str

    REDIS_HOST: str
    REDIS_PORT: int
    REDIS_DB: int
    REDIS_PASSWORD: Optional[str]

    class Config:
        env_file = '.env'
        env_file_encoding = 'utf-8'

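Since EnvConfig reads from .env, a deployment also needs the new Redis settings there; the values below are placeholders, and REDIS_PASSWORD may be left out because it is Optional:

REDIS_HOST=localhost
REDIS_PORT=6379
REDIS_DB=0
# REDIS_PASSWORD=change-me  (only if the Redis server requires auth)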
poetry.lock (generated, 46 lines changed)
@@ -9,6 +9,21 @@ python-versions = ">=3.6"
[package.extras]
aiofiles = ["aiofiles (==0.4.0)"]

[[package]]
name = "aioredis"
version = "2.0.0"
description = "asyncio (PEP 3156) Redis support"
category = "main"
optional = false
python-versions = ">=3.6"

[package.dependencies]
async-timeout = "*"
typing-extensions = "*"

[package.extras]
hiredis = ["hiredis (>=1.0)"]

[[package]]
name = "aiosqlite"
version = "0.17.0"
@@ -63,6 +78,17 @@ python-versions = ">=3.6"
[package.extras]
tests = ["pytest", "pytest-asyncio", "mypy (>=0.800)"]

[[package]]
name = "async-timeout"
version = "4.0.1"
description = "Timeout context manager for asyncio programs"
category = "main"
optional = false
python-versions = ">=3.6"

[package.dependencies]
typing-extensions = ">=3.6.5"

[[package]]
name = "asyncpg"
version = "0.24.0"
@@ -167,13 +193,13 @@ pydantic = ">=1.7.2"

[package.extras]
gino = ["gino[starlette] (>=1.0.1)", "SQLAlchemy (>=1.3.20)"]
all = ["gino[starlette] (>=1.0.1)", "SQLAlchemy (>=1.3.20)", "databases[sqlite,mysql,postgresql] (>=0.4.0)", "orm (>=0.1.5)", "tortoise-orm[aiosqlite,aiomysql,asyncpg] (>=0.16.18,<0.18.0)", "asyncpg (>=0.24.0)", "ormar (>=0.10.5)", "Django (<3.3.0)", "piccolo (>=0.29,<0.35)", "motor (>=2.5.1,<3.0.0)"]
all = ["gino[starlette] (>=1.0.1)", "SQLAlchemy (>=1.3.20)", "databases[mysql,sqlite,postgresql] (>=0.4.0)", "orm (>=0.1.5)", "tortoise-orm[aiomysql,asyncpg,aiosqlite] (>=0.16.18,<0.18.0)", "asyncpg (>=0.24.0)", "ormar (>=0.10.5)", "Django (<3.3.0)", "piccolo (>=0.29,<0.35)", "motor (>=2.5.1,<3.0.0)"]
sqlalchemy = ["SQLAlchemy (>=1.3.20)"]
asyncpg = ["SQLAlchemy (>=1.3.20)", "asyncpg (>=0.24.0)"]
databases = ["databases[sqlite,mysql,postgresql] (>=0.4.0)"]
orm = ["databases[sqlite,mysql,postgresql] (>=0.4.0)", "orm (>=0.1.5)", "typesystem (>=0.2.0,<0.3.0)"]
django = ["databases[sqlite,mysql,postgresql] (>=0.4.0)", "Django (<3.3.0)"]
tortoise = ["tortoise-orm[aiosqlite,aiomysql,asyncpg] (>=0.16.18,<0.18.0)"]
databases = ["databases[mysql,sqlite,postgresql] (>=0.4.0)"]
orm = ["databases[mysql,sqlite,postgresql] (>=0.4.0)", "orm (>=0.1.5)", "typesystem (>=0.2.0,<0.3.0)"]
django = ["databases[mysql,sqlite,postgresql] (>=0.4.0)", "Django (<3.3.0)"]
tortoise = ["tortoise-orm[aiomysql,asyncpg,aiosqlite] (>=0.16.18,<0.18.0)"]
ormar = ["ormar (>=0.10.5)"]
piccolo = ["piccolo (>=0.29,<0.35)"]
motor = ["motor (>=2.5.1,<3.0.0)"]
@@ -450,12 +476,16 @@ python-versions = "*"
[metadata]
lock-version = "1.1"
python-versions = "^3.9"
content-hash = "3b7e5ca291ce8f5727c82271194da5aebbca90c6fc982c01c96a601c1b15266b"
content-hash = "5b4ac04ebb04c19722d632a991d225da3daa519bdc045d7b46cf11f7ca11cad0"

[metadata.files]
aiologger = [
    {file = "aiologger-0.6.1.tar.gz", hash = "sha256:1b6b8f00d74a588339b657ff60ffa9f64c53873887a008934c66e1a673ea68cd"},
]
aioredis = [
    {file = "aioredis-2.0.0-py3-none-any.whl", hash = "sha256:9921d68a3df5c5cdb0d5b49ad4fc88a4cfdd60c108325df4f0066e8410c55ffb"},
    {file = "aioredis-2.0.0.tar.gz", hash = "sha256:3a2de4b614e6a5f8e104238924294dc4e811aefbe17ddf52c04a93cbf06e67db"},
]
aiosqlite = [
    {file = "aiosqlite-0.17.0-py3-none-any.whl", hash = "sha256:6c49dc6d3405929b1d08eeccc72306d3677503cc5e5e43771efc1e00232e8231"},
    {file = "aiosqlite-0.17.0.tar.gz", hash = "sha256:f0e6acc24bc4864149267ac82fb46dfb3be4455f99fe21df82609cc6e6baee51"},
@@ -472,6 +502,10 @@ asgiref = [
    {file = "asgiref-3.4.1-py3-none-any.whl", hash = "sha256:ffc141aa908e6f175673e7b1b3b7af4fdb0ecb738fc5c8b88f69f055c2415214"},
    {file = "asgiref-3.4.1.tar.gz", hash = "sha256:4ef1ab46b484e3c706329cedeff284a5d40824200638503f5768edb6de7d58e9"},
]
async-timeout = [
    {file = "async-timeout-4.0.1.tar.gz", hash = "sha256:b930cb161a39042f9222f6efb7301399c87eeab394727ec5437924a36d6eef51"},
    {file = "async_timeout-4.0.1-py3-none-any.whl", hash = "sha256:a22c0b311af23337eb05fcf05a8b51c3ea53729d46fb5460af62bee033cec690"},
]
asyncpg = [
    {file = "asyncpg-0.24.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c4fc0205fe4ddd5aeb3dfdc0f7bafd43411181e1f5650189608e5971cceacff1"},
    {file = "asyncpg-0.24.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a7095890c96ba36f9f668eb552bb020dddb44f8e73e932f8573efc613ee83843"},

@@ -16,6 +16,7 @@ psycopg2 = "^2.9.1"
fastapi-pagination = {extras = ["ormar"], version = "^0.9.0"}
aiologger = "^0.6.1"
orjson = "^3.6.4"
aioredis = "^2.0.0"

[tool.poetry.dev-dependencies]
pytest = "^5.2"