diff --git a/.github/workflows/build_docker_image.yml b/.github/workflows/build_docker_image.yml index fd94750..1eeafb8 100644 --- a/.github/workflows/build_docker_image.yml +++ b/.github/workflows/build_docker_image.yml @@ -47,3 +47,9 @@ jobs: uses: joelwmale/webhook-action@master with: url: ${{ secrets.WEBHOOK_URL }} + + - + name: Invoke deployment hook + uses: joelwmale/webhook-action@master + with: + url: ${{ secrets.WEBHOOK_URL2 }} diff --git a/poetry.lock b/poetry.lock index 00a3a32..5a7bc71 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,3 +1,15 @@ +[[package]] +name = "aioredis" +version = "1.3.1" +description = "asyncio (PEP 3156) Redis support" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +async-timeout = "*" +hiredis = "*" + [[package]] name = "aiosqlite" version = "0.17.0" @@ -41,6 +53,22 @@ doc = ["sphinx-rtd-theme", "sphinx-autodoc-typehints (>=1.2.0)"] test = ["coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "pytest (>=6.0)", "pytest-mock (>=3.6.1)", "trustme", "contextlib2", "uvloop (<0.15)", "mock (>=4)", "uvloop (>=0.15)"] trio = ["trio (>=0.16)"] +[[package]] +name = "arq" +version = "0.22" +description = "Job queues in python with asyncio and redis" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +aioredis = ">=1.1.0,<2.0.0" +click = ">=6.7" +pydantic = ">=1" + +[package.extras] +watch = ["watchgod (>=0.4)"] + [[package]] name = "asgiref" version = "3.4.1" @@ -52,6 +80,14 @@ python-versions = ">=3.6" [package.extras] tests = ["pytest", "pytest-asyncio", "mypy (>=0.800)"] +[[package]] +name = "async-timeout" +version = "4.0.2" +description = "Timeout context manager for asyncio programs" +category = "main" +optional = false +python-versions = ">=3.6" + [[package]] name = "asyncpg" version = "0.25.0" @@ -105,7 +141,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "databases" -version = "0.5.3" +version = "0.5.4" 
description = "Async database support for Python." category = "main" optional = false @@ -116,6 +152,7 @@ sqlalchemy = ">=1.4,<1.5" [package.extras] mysql = ["aiomysql"] +mysql_asyncmy = ["asyncmy"] postgresql = ["asyncpg"] postgresql_aiopg = ["aiopg"] sqlite = ["aiosqlite"] @@ -157,6 +194,14 @@ category = "main" optional = false python-versions = ">=3.6" +[[package]] +name = "hiredis" +version = "2.0.0" +description = "Python wrapper for hiredis" +category = "main" +optional = false +python-versions = ">=3.6" + [[package]] name = "httpcore" version = "0.14.3" @@ -227,7 +272,7 @@ python-versions = ">=3.6" [[package]] name = "ormar" -version = "0.10.23" +version = "0.10.24" description = "A simple async ORM with fastapi in mind and pydantic validation." category = "main" optional = false @@ -236,10 +281,10 @@ python-versions = ">=3.6.2,<4.0.0" [package.dependencies] aiosqlite = ">=0.17.0,<0.18.0" asyncpg = {version = ">=0.24,<0.26", optional = true, markers = "extra == \"postgresql\" or extra == \"postgres\" or extra == \"dev\""} -databases = ">=0.3.2,<0.5.4" +databases = ">=0.3.2,<0.5.0 || >0.5.0,<0.5.1 || >0.5.1,<0.5.2 || >0.5.2,<0.5.3 || >0.5.3,<0.5.5" psycopg2-binary = {version = ">=2.9.1,<3.0.0", optional = true, markers = "extra == \"postgresql\" or extra == \"postgres\" or extra == \"dev\""} -pydantic = ">=1.6.1,<1.7 || >1.7,<1.7.1 || >1.7.1,<1.7.2 || >1.7.2,<1.7.3 || >1.7.3,<1.8 || >1.8,<1.8.1 || >1.8.1,<=1.8.2" -SQLAlchemy = ">=1.3.18,<1.4.29" +pydantic = ">=1.6.1,<1.7 || >1.7,<1.7.1 || >1.7.1,<1.7.2 || >1.7.2,<1.7.3 || >1.7.3,<1.8 || >1.8,<1.8.1 || >1.8.1,<=1.9.1" +SQLAlchemy = ">=1.3.18,<=1.4.29" [package.extras] postgresql = ["asyncpg (>=0.24,<0.26)", "psycopg2-binary (>=2.9.1,<3.0.0)"] @@ -367,9 +412,13 @@ standard = ["httptools (>=0.2.0,<0.4.0)", "watchgod (>=0.6)", "python-dotenv (>= [metadata] lock-version = "1.1" python-versions = "^3.9" -content-hash = "5e8a793c8e9fef35a5cc2c0853a23589de7e632b675057158ef2c0ea17965efc" +content-hash = 
"c9e085cfec882f455cef07ef2438830f486b583bdec75519f5e33926d209090b" [metadata.files] +aioredis = [ + {file = "aioredis-1.3.1-py3-none-any.whl", hash = "sha256:b61808d7e97b7cd5a92ed574937a079c9387fdadd22bfbfa7ad2fd319ecc26e3"}, + {file = "aioredis-1.3.1.tar.gz", hash = "sha256:15f8af30b044c771aee6787e5ec24694c048184c7b9e54c3b60c750a4b93273a"}, +] aiosqlite = [ {file = "aiosqlite-0.17.0-py3-none-any.whl", hash = "sha256:6c49dc6d3405929b1d08eeccc72306d3677503cc5e5e43771efc1e00232e8231"}, {file = "aiosqlite-0.17.0.tar.gz", hash = "sha256:f0e6acc24bc4864149267ac82fb46dfb3be4455f99fe21df82609cc6e6baee51"}, @@ -382,10 +431,18 @@ anyio = [ {file = "anyio-3.4.0-py3-none-any.whl", hash = "sha256:2855a9423524abcdd652d942f8932fda1735210f77a6b392eafd9ff34d3fe020"}, {file = "anyio-3.4.0.tar.gz", hash = "sha256:24adc69309fb5779bc1e06158e143e0b6d2c56b302a3ac3de3083c705a6ed39d"}, ] +arq = [ + {file = "arq-0.22-py3-none-any.whl", hash = "sha256:55a0f933636c804b82c366a0e3710e9e5ed26a716251fa6742777d0b039f7f30"}, + {file = "arq-0.22.tar.gz", hash = "sha256:c7bd98151cc83cec941ce5f660ede4bee888effd9a4692258ec8a9a0aff2f9f9"}, +] asgiref = [ {file = "asgiref-3.4.1-py3-none-any.whl", hash = "sha256:ffc141aa908e6f175673e7b1b3b7af4fdb0ecb738fc5c8b88f69f055c2415214"}, {file = "asgiref-3.4.1.tar.gz", hash = "sha256:4ef1ab46b484e3c706329cedeff284a5d40824200638503f5768edb6de7d58e9"}, ] +async-timeout = [ + {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, + {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, +] asyncpg = [ {file = "asyncpg-0.25.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bf5e3408a14a17d480f36ebaf0401a12ff6ae5457fdf45e4e2775c51cc9517d3"}, {file = "asyncpg-0.25.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:2bc197fc4aca2fd24f60241057998124012469d2e414aed3f992579db0c88e3a"}, @@ -431,8 +488,8 @@ colorama = [ {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, ] databases = [ - {file = "databases-0.5.3-py3-none-any.whl", hash = "sha256:23862bd96241d8fcbf97eea82995ccb3baa8415c3cb106832b7509f296322f86"}, - {file = "databases-0.5.3.tar.gz", hash = "sha256:b69d74ee0b47fa30bb6e76db0c58da998e973393259d29215d8fb29352162bd6"}, + {file = "databases-0.5.4-py3-none-any.whl", hash = "sha256:85a6b0dd92e4bc95205c08141baf1e192c8aedb2159ce03bee39bb4117cfed83"}, + {file = "databases-0.5.4.tar.gz", hash = "sha256:04a3294d053bd8d9f4162fc4975ab11a3e9ad01ae37992adce84440725957fec"}, ] fastapi = [ {file = "fastapi-0.70.1-py3-none-any.whl", hash = "sha256:5367226c7bcd7bfb2e17edaf225fd9a983095b1372281e9a3eb661336fb93748"}, @@ -494,6 +551,49 @@ h11 = [ {file = "h11-0.12.0-py3-none-any.whl", hash = "sha256:36a3cb8c0a032f56e2da7084577878a035d3b61d104230d4bd49c0c6b555a9c6"}, {file = "h11-0.12.0.tar.gz", hash = "sha256:47222cb6067e4a307d535814917cd98fd0a57b6788ce715755fa2b6c28b56042"}, ] +hiredis = [ + {file = "hiredis-2.0.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b4c8b0bc5841e578d5fb32a16e0c305359b987b850a06964bd5a62739d688048"}, + {file = "hiredis-2.0.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0adea425b764a08270820531ec2218d0508f8ae15a448568109ffcae050fee26"}, + {file = "hiredis-2.0.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:3d55e36715ff06cdc0ab62f9591607c4324297b6b6ce5b58cb9928b3defe30ea"}, + {file = "hiredis-2.0.0-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:5d2a48c80cf5a338d58aae3c16872f4d452345e18350143b3bf7216d33ba7b99"}, + {file = "hiredis-2.0.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:240ce6dc19835971f38caf94b5738092cb1e641f8150a9ef9251b7825506cb05"}, + {file = "hiredis-2.0.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = 
"sha256:5dc7a94bb11096bc4bffd41a3c4f2b958257085c01522aa81140c68b8bf1630a"}, + {file = "hiredis-2.0.0-cp36-cp36m-win32.whl", hash = "sha256:139705ce59d94eef2ceae9fd2ad58710b02aee91e7fa0ccb485665ca0ecbec63"}, + {file = "hiredis-2.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:c39c46d9e44447181cd502a35aad2bb178dbf1b1f86cf4db639d7b9614f837c6"}, + {file = "hiredis-2.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:adf4dd19d8875ac147bf926c727215a0faf21490b22c053db464e0bf0deb0485"}, + {file = "hiredis-2.0.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:0f41827028901814c709e744060843c77e78a3aca1e0d6875d2562372fcb405a"}, + {file = "hiredis-2.0.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:508999bec4422e646b05c95c598b64bdbef1edf0d2b715450a078ba21b385bcc"}, + {file = "hiredis-2.0.0-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:0d5109337e1db373a892fdcf78eb145ffb6bbd66bb51989ec36117b9f7f9b579"}, + {file = "hiredis-2.0.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:04026461eae67fdefa1949b7332e488224eac9e8f2b5c58c98b54d29af22093e"}, + {file = "hiredis-2.0.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:a00514362df15af041cc06e97aebabf2895e0a7c42c83c21894be12b84402d79"}, + {file = "hiredis-2.0.0-cp37-cp37m-win32.whl", hash = "sha256:09004096e953d7ebd508cded79f6b21e05dff5d7361771f59269425108e703bc"}, + {file = "hiredis-2.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:f8196f739092a78e4f6b1b2172679ed3343c39c61a3e9d722ce6fcf1dac2824a"}, + {file = "hiredis-2.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:294a6697dfa41a8cba4c365dd3715abc54d29a86a40ec6405d677ca853307cfb"}, + {file = "hiredis-2.0.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:3dddf681284fe16d047d3ad37415b2e9ccdc6c8986c8062dbe51ab9a358b50a5"}, + {file = "hiredis-2.0.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:dcef843f8de4e2ff5e35e96ec2a4abbdf403bd0f732ead127bd27e51f38ac298"}, + {file = "hiredis-2.0.0-cp38-cp38-manylinux2010_i686.whl", hash = 
"sha256:87c7c10d186f1743a8fd6a971ab6525d60abd5d5d200f31e073cd5e94d7e7a9d"}, + {file = "hiredis-2.0.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:7f0055f1809b911ab347a25d786deff5e10e9cf083c3c3fd2dd04e8612e8d9db"}, + {file = "hiredis-2.0.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:11d119507bb54e81f375e638225a2c057dda748f2b1deef05c2b1a5d42686048"}, + {file = "hiredis-2.0.0-cp38-cp38-win32.whl", hash = "sha256:7492af15f71f75ee93d2a618ca53fea8be85e7b625e323315169977fae752426"}, + {file = "hiredis-2.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:65d653df249a2f95673976e4e9dd7ce10de61cfc6e64fa7eeaa6891a9559c581"}, + {file = "hiredis-2.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ae8427a5e9062ba66fc2c62fb19a72276cf12c780e8db2b0956ea909c48acff5"}, + {file = "hiredis-2.0.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:3f5f7e3a4ab824e3de1e1700f05ad76ee465f5f11f5db61c4b297ec29e692b2e"}, + {file = "hiredis-2.0.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:e3447d9e074abf0e3cd85aef8131e01ab93f9f0e86654db7ac8a3f73c63706ce"}, + {file = "hiredis-2.0.0-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:8b42c0dc927b8d7c0eb59f97e6e34408e53bc489f9f90e66e568f329bff3e443"}, + {file = "hiredis-2.0.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:b84f29971f0ad4adaee391c6364e6f780d5aae7e9226d41964b26b49376071d0"}, + {file = "hiredis-2.0.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:0b39ec237459922c6544d071cdcf92cbb5bc6685a30e7c6d985d8a3e3a75326e"}, + {file = "hiredis-2.0.0-cp39-cp39-win32.whl", hash = "sha256:a7928283143a401e72a4fad43ecc85b35c27ae699cf5d54d39e1e72d97460e1d"}, + {file = "hiredis-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:a4ee8000454ad4486fb9f28b0cab7fa1cd796fc36d639882d0b34109b5b3aec9"}, + {file = "hiredis-2.0.0-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1f03d4dadd595f7a69a75709bc81902673fa31964c75f93af74feac2f134cc54"}, + {file = "hiredis-2.0.0-pp36-pypy36_pp73-manylinux1_x86_64.whl", hash = 
"sha256:04927a4c651a0e9ec11c68e4427d917e44ff101f761cd3b5bc76f86aaa431d27"}, + {file = "hiredis-2.0.0-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:a39efc3ade8c1fb27c097fd112baf09d7fd70b8cb10ef1de4da6efbe066d381d"}, + {file = "hiredis-2.0.0-pp36-pypy36_pp73-win32.whl", hash = "sha256:07bbf9bdcb82239f319b1f09e8ef4bdfaec50ed7d7ea51a56438f39193271163"}, + {file = "hiredis-2.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:807b3096205c7cec861c8803a6738e33ed86c9aae76cac0e19454245a6bbbc0a"}, + {file = "hiredis-2.0.0-pp37-pypy37_pp73-manylinux1_x86_64.whl", hash = "sha256:1233e303645f468e399ec906b6b48ab7cd8391aae2d08daadbb5cad6ace4bd87"}, + {file = "hiredis-2.0.0-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:cb2126603091902767d96bcb74093bd8b14982f41809f85c9b96e519c7e1dc41"}, + {file = "hiredis-2.0.0-pp37-pypy37_pp73-win32.whl", hash = "sha256:f52010e0a44e3d8530437e7da38d11fb822acfb0d5b12e9cd5ba655509937ca0"}, + {file = "hiredis-2.0.0.tar.gz", hash = "sha256:81d6d8e39695f2c37954d1011c0480ef7cf444d4e3ae24bc5e89ee5de360139a"}, +] httpcore = [ {file = "httpcore-0.14.3-py3-none-any.whl", hash = "sha256:9a98d2416b78976fc5396ff1f6b26ae9885efbb3105d24eed490f20ab4c95ec1"}, {file = "httpcore-0.14.3.tar.gz", hash = "sha256:d10162a63265a0228d5807964bd964478cbdb5178f9a2eedfebb2faba27eef5d"}, @@ -567,8 +667,8 @@ markupsafe = [ {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, ] ormar = [ - {file = "ormar-0.10.23-py3-none-any.whl", hash = "sha256:168541fd1e59e5a2f193ec97a97b6af3125eb2a1468e9ecb93c3e1e1c1bb0f43"}, - {file = "ormar-0.10.23.tar.gz", hash = "sha256:bd8f86389db87f3844b25d7e1e47e18fa52b113877dcbad180f67d5a9597c9d3"}, + {file = "ormar-0.10.24-py3-none-any.whl", hash = "sha256:0ac7765bc14237cb4ed828c823cae3a4a9f5dea6daa402e0999c80b36662c410"}, + {file = "ormar-0.10.24.tar.gz", hash = "sha256:908eba2cb7350c5ef0c8e7d9653d061f357e2c7706b78298bd446e0848000762"}, ] 
psycopg2-binary = [ {file = "psycopg2-binary-2.9.3.tar.gz", hash = "sha256:761df5313dc15da1502b21453642d7599d26be88bff659382f8f9747c7ebea4e"}, diff --git a/pyproject.toml b/pyproject.toml index 044d9fd..1ef54f2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -11,6 +11,7 @@ httpx = "^0.21.1" alembic = "^1.7.5" ormar = {extras = ["postgresql"], version = "^0.10.23"} uvicorn = {extras = ["standart"], version = "^0.16.0"} +arq = "^0.22" [tool.poetry.dev-dependencies] diff --git a/src/app/services/cache_updater.py b/src/app/services/cache_updater.py index 7764441..897383c 100644 --- a/src/app/services/cache_updater.py +++ b/src/app/services/cache_updater.py @@ -1,7 +1,8 @@ -import asyncio import logging from typing import Optional +from arq.connections import ArqRedis + from app.models import CachedFile from app.services.caption_getter import get_caption from app.services.downloader import download @@ -20,79 +21,57 @@ class FileTypeNotAllowed(Exception): super().__init__(message) -class CacheUpdater: - def __init__(self): - self.queue = asyncio.Queue(maxsize=10) - self.all_books_checked = False +async def check_book(book: Book, arq_pool: ArqRedis) -> None: + for file_type in book.available_types: + exists = await CachedFile.objects.filter( + object_id=book.id, object_type=file_type + ).exists() - async def _check_book(self, book: Book): - for file_type in book.available_types: - exists = await CachedFile.objects.filter( - object_id=book.id, object_type=file_type - ).exists() + if not exists: + await arq_pool.enqueue_job("cache_file_by_book_id", book.id, file_type) - if not exists: - await self.queue.put((book, file_type)) - async def _start_producer(self): - books_page = await get_books(1, PAGE_SIZE) +async def check_books_page(ctx, page_number: int) -> None: + arq_pool: ArqRedis = ctx["arc_pool"] - for page_number in range(1, books_page.total_pages + 1): - page = await get_books(page_number, PAGE_SIZE) + page = await get_books(page_number, PAGE_SIZE) - for book in 
page.items: - await self._check_book(book) + for book in page.items: + await check_book(book, arq_pool) - self.all_books_checked = True - @classmethod - async def _cache_file(cls, book: Book, file_type) -> Optional[CachedFile]: - logger.info(f"Cache {book.id} {file_type}...") - data = await download(book.source.id, book.remote_id, file_type) +async def check_books(ctx) -> None: + arq_pool: ArqRedis = ctx["arc_pool"] + books_page = await get_books(1, PAGE_SIZE) - if data is None: - return None + for page_number in range(1, books_page.total_pages + 1): + await arq_pool.enqueue_job("check_books_page", page_number) - content, filename = data - caption = get_caption(book) +async def cache_file(book: Book, file_type) -> Optional[CachedFile]: + logger.info(f"Cache {book.id} {file_type}...") + data = await download(book.source.id, book.remote_id, file_type) - upload_data = await upload_file(content, filename, caption) + if data is None: + return None - return await CachedFile.objects.create( - object_id=book.id, object_type=file_type, data=upload_data.data - ) + content, filename = data - async def _start_worker(self): - while not self.all_books_checked or not self.queue.empty(): - try: - task = self.queue.get_nowait() - book: Book = task[0] - file_type: str = task[1] - except asyncio.QueueEmpty: - await asyncio.sleep(0.1) - continue + caption = get_caption(book) - await self._cache_file(book, file_type) + upload_data = await upload_file(content, filename, caption) - async def _update(self): - logger.info("Start update...") - await asyncio.gather( - self._start_producer(), - *[self._start_worker() for _ in range(4)], - ) - logger.info("Update complete!") + return await CachedFile.objects.create( + object_id=book.id, object_type=file_type, data=upload_data.data + ) - @classmethod - async def update(cls): - updater = cls() - return await updater._update() - @classmethod - async def cache_file(cls, book_id: int, file_type: str) -> Optional[CachedFile]: - book = await 
get_book(book_id) +async def cache_file_by_book_id( + ctx, book_id: int, file_type: str +) -> Optional[CachedFile]: + book = await get_book(book_id) - if file_type not in book.available_types: - raise FileTypeNotAllowed(f"{file_type} not in {book.available_types}!") + if file_type not in book.available_types: + raise FileTypeNotAllowed(f"{file_type} not in {book.available_types}!") - return await cls._cache_file(book, file_type) + return await cache_file(book, file_type) diff --git a/src/app/services/library_client.py b/src/app/services/library_client.py index d8d2cf6..2b840a9 100644 --- a/src/app/services/library_client.py +++ b/src/app/services/library_client.py @@ -58,7 +58,7 @@ async def get_book(book_id: int) -> BookDetail: async def get_books(page: int, page_size: int) -> Page[Book]: - async with httpx.AsyncClient(timeout=60) as client: + async with httpx.AsyncClient(timeout=5 * 60) as client: response = await client.get( ( f"{env_config.LIBRARY_URL}/api/v1/books/" diff --git a/src/app/views.py b/src/app/views.py index 140984e..4ac4489 100644 --- a/src/app/views.py +++ b/src/app/views.py @@ -1,15 +1,16 @@ import base64 -from fastapi import APIRouter, Depends, HTTPException, BackgroundTasks, status +from fastapi import APIRouter, Depends, HTTPException, status, Request from starlette.responses import Response +from arq.connections import ArqRedis from asyncpg import exceptions from app.depends import check_token from app.models import CachedFile as CachedFileDB from app.serializers import CachedFile, CreateCachedFile -from app.services.cache_updater import CacheUpdater +from app.services.cache_updater import cache_file_by_book_id from app.services.caption_getter import get_caption from app.services.downloader import get_filename from app.services.files_client import download_file as download_file_from_cache @@ -28,7 +29,7 @@ async def get_cached_file(object_id: int, object_type: str): ) if not cached_file: - cached_file = await 
CacheUpdater.cache_file(object_id, object_type) + cached_file = await cache_file_by_book_id(None, object_id, object_type) if not cached_file: raise HTTPException(status_code=status.HTTP_404_NOT_FOUND) @@ -43,7 +44,7 @@ async def download_cached_file(object_id: int, object_type: str): ) if not cached_file: - cached_file = await CacheUpdater.cache_file(object_id, object_type) + cached_file = await cache_file_by_book_id(None, object_id, object_type) if not cached_file: raise HTTPException(status_code=status.HTTP_404_NOT_FOUND) @@ -102,7 +103,8 @@ async def create_or_update_cached_file(data: CreateCachedFile): @router.post("/update_cache") -async def update_cache(background_tasks: BackgroundTasks): - background_tasks.add_task(CacheUpdater.update) +async def update_cache(request: Request): + arq_pool: ArqRedis = request.app.state.arq_pool + await arq_pool.enqueue_job("check_books") return "Ok!" diff --git a/src/core/app.py b/src/core/app.py index 6c8565c..d84ffd8 100644 --- a/src/core/app.py +++ b/src/core/app.py @@ -1,6 +1,7 @@ from fastapi import FastAPI from app.views import router +from core.arq_pool import get_arq_pool from core.db import database @@ -17,6 +18,8 @@ def start_app() -> FastAPI: if not database_.is_connected: await database_.connect() + app.state.arq_pool = await get_arq_pool() + @app.on_event("shutdown") async def shutdown() -> None: database_ = app.state.database diff --git a/src/core/arq_pool.py b/src/core/arq_pool.py new file mode 100644 index 0000000..e5de84d --- /dev/null +++ b/src/core/arq_pool.py @@ -0,0 +1,15 @@ +from arq.connections import create_pool, RedisSettings, ArqRedis + +from core.config import env_config + + +def get_redis_settings() -> RedisSettings: + return RedisSettings( + host=env_config.REDIS_HOST, + port=env_config.REDIS_PORT, + database=env_config.REDIS_DB, + ) + + +async def get_arq_pool() -> ArqRedis: + return await create_pool(get_redis_settings()) diff --git a/src/core/config.py b/src/core/config.py index 8ae15f0..9042cf5 100644 --- 
a/src/core/config.py +++ b/src/core/config.py @@ -19,5 +19,9 @@ class EnvConfig(BaseSettings): FILES_SERVER_API_KEY: str FILES_SERVER_URL: str + REDIS_HOST: str + REDIS_PORT: int + REDIS_DB: int + env_config = EnvConfig() diff --git a/src/core/setup_arq.py b/src/core/setup_arq.py new file mode 100644 index 0000000..a8388f3 --- /dev/null +++ b/src/core/setup_arq.py @@ -0,0 +1,27 @@ +from app.services.cache_updater import ( + check_books, + cache_file_by_book_id, + check_books_page, +) +from core.arq_pool import get_redis_settings, get_arq_pool +from core.db import database + + +async def startup(ctx): + if not database.is_connected: + await database.connect() + + ctx["arc_pool"] = await get_arq_pool()  # NOTE(review): key is spelled "arc_pool" (sic); the worker functions in cache_updater read this same misspelled key — rename everywhere at once if fixing + + +async def shutdown(ctx): + if database.is_connected: + await database.disconnect() + + +class WorkerSettings: + functions = [check_books, cache_file_by_book_id, check_books_page] + on_startup = startup + on_shutdown = shutdown + redis_settings = get_redis_settings() + max_jobs = 4