Prevent duplicate tasks

This commit is contained in:
2022-12-27 20:03:05 +01:00
parent 83dd6dc7ed
commit 5e011fd05e
2 changed files with 12 additions and 3 deletions

View File

@@ -48,7 +48,11 @@ async def check_books_page(ctx, page_number: int) -> None:
         for file_type in book.available_types:
             if file_type not in cached_files_map[book.id]:
                 await arq_pool.enqueue_job(
-                    "cache_file_by_book_id", book.id, file_type, by_request=False
+                    "cache_file_by_book_id",
+                    book.id,
+                    file_type,
+                    by_request=False,
+                    _job_id=f"cache_file_by_book_id_{book.id}_{file_type}",
                 )
@@ -60,7 +64,12 @@ async def check_books(ctx: dict, *args, **kwargs) -> None:  # NOSONAR
         raise Retry(defer=15)
     for i, page_number in enumerate(range(books_page.total_pages, 0, -1)):
-        await arq_pool.enqueue_job("check_books_page", page_number, _defer_by=2 * i)
+        await arq_pool.enqueue_job(
+            "check_books_page",
+            page_number,
+            _defer_by=2 * i,
+            _job_id=f"check_books_page_{page_number}",
+        )
 async def cache_file(book: Book, file_type: str) -> Optional[CachedFile]:

View File

@@ -128,7 +128,7 @@ async def create_or_update_cached_file(data: CreateCachedFile):
 @router.post("/update_cache")
 async def update_cache(request: Request):
     arq_pool: ArqRedis = request.app.state.arq_pool
-    await arq_pool.enqueue_job("check_books")
+    await arq_pool.enqueue_job("check_books", _job_id="check_books")
     return "Ok!"