mirror of
https://github.com/flibusta-apps/telegram_files_cache_server.git
synced 2025-12-06 14:45:36 +01:00
Add reverse order caching
@@ -1,9 +1,10 @@
 import logging
-from typing import Optional, cast
 from tempfile import SpooledTemporaryFile
+from typing import Optional, cast
 
+from fastapi import UploadFile
+
 from arq.connections import ArqRedis
-from fastapi import UploadFile
 
 from app.models import CachedFile
 from app.services.caption_getter import get_caption
@@ -46,12 +47,11 @@ async def check_books(ctx: dict, *args, **kwargs) -> None:
     arq_pool: ArqRedis = ctx["arc_pool"]
     books_page = await get_books(1, PAGE_SIZE)
 
-    for page_number in range(1, books_page.total_pages + 1):
+    for page_number in range(books_page.total_pages, 0, -1):
         await arq_pool.enqueue_job("check_books_page", page_number)
 
 
 async def cache_file(book: Book, file_type) -> Optional[CachedFile]:
-    logger.info(f"Cache {book.id} {file_type}...")
     data = await download(book.source.id, book.remote_id, file_type)
 
     if data is None:
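This hunk is the change the commit title describes: pages are now enqueued from the last page down to the first. A minimal standalone sketch of the reversed loop, with a stub standing in for the real ArqRedis pool and a made-up page count:

import asyncio


class FakePool:
    # Stand-in for ArqRedis; it only mimics the enqueue_job call used above.
    async def enqueue_job(self, name: str, *args: object) -> None:
        print("enqueued", name, args)


async def main() -> None:
    total_pages = 5  # stand-in for books_page.total_pages
    pool = FakePool()

    # range(total_pages, 0, -1) walks the pages backwards: 5, 4, 3, 2, 1
    assert list(range(total_pages, 0, -1)) == [5, 4, 3, 2, 1]

    for page_number in range(total_pages, 0, -1):
        await pool.enqueue_job("check_books_page", page_number)


asyncio.run(main())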
@@ -68,7 +68,9 @@ async def cache_file(book: Book, file_type) -> Optional[CachedFile]:
     await response.aclose()
     await client.aclose()
 
-    upload_data = await upload_file(cast(SpooledTemporaryFile, temp_file.file), filename, caption)
+    upload_data = await upload_file(
+        cast(SpooledTemporaryFile, temp_file.file), filename, caption
+    )
 
     if upload_data is None:
         return None
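For readers unfamiliar with FastAPI, some background on the cast in this hunk: UploadFile.file is a SpooledTemporaryFile at runtime but is typed more loosely, so cast() only narrows the static type. A minimal sketch (the helper name raw_file is ours, not the repo's):

from tempfile import SpooledTemporaryFile
from typing import cast

from fastapi import UploadFile


def raw_file(upload: UploadFile) -> SpooledTemporaryFile:
    # cast() has no runtime effect; it just tells the type checker which
    # concrete file object FastAPI put behind UploadFile.file.
    return cast(SpooledTemporaryFile, upload.file)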
@@ -79,7 +81,7 @@ async def cache_file(book: Book, file_type) -> Optional[CachedFile]:
 
 
 async def cache_file_by_book_id(
-    ctx, book_id: int, file_type: str
+    ctx: dict, book_id: int, file_type: str
 ) -> Optional[CachedFile]:
     book = await get_book(book_id)
 
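The ctx: dict annotation added here matches arq's task contract: the worker passes its context dict as the first positional argument of every task, and on_startup hooks can stash shared objects in it. A hedged sketch of that contract; whether this repo's startup hook populates "arc_pool" exactly this way is an assumption, though the key name appears in check_books above:

from arq.connections import ArqRedis


async def startup(ctx: dict) -> None:
    # arq exposes its own pool as ctx["redis"]; re-store it under the
    # "arc_pool" key that the tasks read from.
    ctx["arc_pool"] = ctx["redis"]


async def cache_file_by_book_id(ctx: dict, book_id: int, file_type: str) -> None:
    # Every arq task receives the same ctx dict as its first argument.
    arq_pool: ArqRedis = ctx["arc_pool"]
    ...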
@@ -22,12 +22,10 @@ async def shutdown(ctx):
 
 
 class WorkerSettings:
-    functions = [check_books, cache_file_by_book_id, check_books_page]
+    functions = [check_books, check_books_page, cache_file_by_book_id]
     on_startup = startup
     on_shutdown = shutdown
     redis_settings = get_redis_settings()
     max_jobs = 1
     job_timeout = 3 * 60
-    cron_jobs = [
-        cron(check_books, hour={7}, minute=0)
-    ]
+    cron_jobs = [cron(check_books, hour={7}, minute=0)]
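For context on what these settings do at runtime, a standalone sketch with a stub task; RedisSettings() and the module layout are assumptions (the real repo builds its settings via get_redis_settings()):

from arq import cron
from arq.connections import RedisSettings


async def check_books(ctx: dict) -> None:
    ...  # stub standing in for the real task


class WorkerSettings:
    functions = [check_books]
    redis_settings = RedisSettings()  # defaults to localhost:6379
    max_jobs = 1                      # at most one job runs at a time
    job_timeout = 3 * 60              # seconds before a running job is aborted
    # hour/minute accept ints or sets of ints; {7} with minute=0 fires the
    # job once a day at 07:00 on the worker's clock.
    cron_jobs = [cron(check_books, hour={7}, minute=0)]

A worker like this is started with arq's CLI, e.g. arq worker.WorkerSettings (the module path is hypothetical).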