Use taskiq

2023-05-20 20:16:00 +02:00
parent 67478536e2
commit 7f6837b8f5
11 changed files with 241 additions and 211 deletions

src/main.py

@@ -1,38 +1,46 @@
+from contextlib import asynccontextmanager
+
 from fastapi import FastAPI
 from fastapi.responses import ORJSONResponse
 from prometheus_fastapi_instrumentator import Instrumentator
+from redis.asyncio import ConnectionPool
 
 from app.views import healthcheck_router, router
-from core.arq_pool import get_arq_pool
+from core.config import REDIS_URL
 from core.db import database
-from core.redis_client import get_client
 import core.sentry  # noqa: F401
+from core.taskiq_worker import broker
+
+
+@asynccontextmanager
+async def lifespan(app: FastAPI):
+    database = app.state.database
+    if not database.is_connected:
+        await database.connect()
+
+    if not broker.is_worker_process:
+        await broker.startup()
+
+    yield
+
+    if database.is_connected:
+        await database.disconnect()
+
+    if not broker.is_worker_process:
+        await broker.shutdown()
+
+    await app.state.redis_pool.disconnect()
 
 
 def start_app() -> FastAPI:
-    app = FastAPI(default_response_class=ORJSONResponse)
+    app = FastAPI(default_response_class=ORJSONResponse, lifespan=lifespan)
 
     app.state.database = database
+    app.state.redis_pool = ConnectionPool.from_url(REDIS_URL)
 
     app.include_router(router)
     app.include_router(healthcheck_router)
 
-    @app.on_event("startup")
-    async def startup() -> None:
-        database_ = app.state.database
-        if not database_.is_connected:
-            await database_.connect()
-
-        app.state.arq_pool = await get_arq_pool()
-        app.state.redis_client = get_client()
-
-    @app.on_event("shutdown")
-    async def shutdown() -> None:
-        database_ = app.state.database
-        if database_.is_connected:
-            await database_.disconnect()
-
     Instrumentator(
         should_ignore_untemplated=True,
         excluded_handlers=["/docs", "/metrics", "/healthcheck"],
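Note: the lifespan hooks replace the deprecated on_event handlers, and the broker.is_worker_process guard follows the taskiq-fastapi pattern: inside a taskiq worker process the worker manages the broker itself, so the API process only starts and stops the broker when it is not running as that worker.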

src/core/arq_pool.py (deleted)

@@ -1,48 +0,0 @@
-import asyncio
-from typing import Any
-
-from arq.connections import ArqRedis, RedisSettings, create_pool
-from arq.worker import JobExecutionFailed
-import msgpack
-
-from core.config import env_config
-
-
-def default(obj: Any):
-    if isinstance(obj, asyncio.TimeoutError):
-        return msgpack.ExtType(0, "")
-    elif isinstance(obj, JobExecutionFailed):
-        return msgpack.ExtType(1, obj.args[0].encode())
-    raise TypeError("Unknown type: %r" % (obj,))
-
-
-def ext_hook(code: int, data: bytes):
-    if code == 0:
-        return asyncio.TimeoutError()
-    elif code == 1:
-        return JobExecutionFailed(data.decode())
-    return msgpack.ExtType(code, data)
-
-
-def job_serializer(d):
-    return msgpack.packb(d, default=default, use_bin_type=True)  # noqa: E731
-
-
-def job_deserializer(b):
-    return msgpack.unpackb(b, ext_hook=ext_hook, raw=False)  # noqa: E731
-
-
-def get_redis_settings() -> RedisSettings:
-    return RedisSettings(
-        host=env_config.REDIS_HOST,
-        port=env_config.REDIS_PORT,
-        database=env_config.REDIS_DB,
-    )
-
-
-async def get_arq_pool() -> ArqRedis:
-    return await create_pool(
-        get_redis_settings(),
-        job_serializer=job_serializer,  # type: ignore
-        job_deserializer=job_deserializer,  # noqa: E731
-    )
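This whole module goes away: the custom msgpack serializer existed only to round-trip asyncio.TimeoutError and JobExecutionFailed values through Redis, and taskiq's RedisAsyncResultBackend serializes task results, including raised exceptions, without custom hooks.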

src/core/config.py

@@ -26,4 +26,8 @@ class EnvConfig(BaseSettings):
     SENTRY_DSN: str
 
 
-env_config = EnvConfig()
+env_config = EnvConfig()  # type: ignore
+
+REDIS_URL = (
+    f"redis://{env_config.REDIS_HOST}:{env_config.REDIS_PORT}/{env_config.REDIS_DB}"
+)
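With, for example, REDIS_HOST=redis, REDIS_PORT=6379 and REDIS_DB=0, REDIS_URL evaluates to redis://redis:6379/0; this single connection string is now shared by the app's ConnectionPool, the ListQueueBroker and the RedisAsyncResultBackend.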

src/core/redis_client.py (deleted)

@@ -1,9 +0,0 @@
-from redis import asyncio as aioredis
-
-from core.config import env_config
-
-
-def get_client() -> aioredis.Redis:
-    return aioredis.Redis(
-        host=env_config.REDIS_HOST, port=env_config.REDIS_PORT, db=env_config.REDIS_DB
-    )

(arq worker settings module, deleted)

@@ -1,40 +0,0 @@
-from app.services.cache_updater import (
-    cache_file_by_book_id,
-    check_books,
-    check_books_page,
-)
-from core.arq_pool import (
-    get_arq_pool,
-    get_redis_settings,
-    job_deserializer,
-    job_serializer,
-)
-from core.db import database
-from core.redis_client import get_client
-import core.sentry  # noqa: F401
-
-
-async def startup(ctx):
-    if not database.is_connected:
-        await database.connect()
-
-    ctx["arq_pool"] = await get_arq_pool()
-    ctx["redis"] = get_client()
-
-
-async def shutdown(ctx):
-    if database.is_connected:
-        await database.disconnect()
-
-
-class WorkerSettings:
-    functions = [check_books, check_books_page, cache_file_by_book_id]
-    on_startup = startup
-    on_shutdown = shutdown
-    redis_settings = get_redis_settings()
-    max_jobs = 2
-    max_tries = 2
-    job_timeout = 10 * 60
-    expires_extra_ms = 7 * 24 * 60 * 1000
-    job_serializer = job_serializer
-    job_deserializer = job_deserializer
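arq's WorkerSettings class disappears with this file; the replacement worker is launched against the broker instance, presumably via taskiq's CLI, e.g. taskiq worker core.taskiq_worker:broker (run from src/). The old max_jobs, max_tries and job_timeout knobs have no direct counterpart in the new module; retries are handled by SimpleRetryMiddleware instead.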

src/core/taskiq_worker.py (new file)

@@ -0,0 +1,17 @@
+from taskiq import SimpleRetryMiddleware
+import taskiq_fastapi
+from taskiq_redis import ListQueueBroker, RedisAsyncResultBackend
+
+from core.config import REDIS_URL
+
+
+broker = (
+    ListQueueBroker(url=REDIS_URL)
+    .with_result_backend(
+        RedisAsyncResultBackend(redis_url=REDIS_URL, result_ex_time=5 * 60)
+    )
+    .with_middlewares(SimpleRetryMiddleware())
+)
+
+taskiq_fastapi.init(broker, "main:app")
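For context, a minimal sketch of how a former arq job could be declared and enqueued against this broker. The task name and body are hypothetical and not part of this commit; only broker, the task decorator, kiq() and the result API come from taskiq itself:

# Hypothetical example, not part of this commit.
from core.taskiq_worker import broker


@broker.task(retry_on_error=True)  # opt in to SimpleRetryMiddleware retries
async def refresh_cache(book_id: int) -> None:
    ...  # real work would live in app.services


# In a FastAPI handler:
#     task = await refresh_cache.kiq(book_id=42)   # push onto the Redis list queue
#     result = await task.wait_result(timeout=60)  # poll the result backend
#     if result.is_err:
#         raise result.error  # exception restored by the result backend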