Mirror of https://github.com/flibusta-apps/telegram_files_cache_server.git

Commit: Fix downloading
[file path not shown in this diff view; these hunks touch the downloader client]

@@ -2,6 +2,7 @@ from base64 import b64decode
 from typing import Optional

 import httpx
+from sentry_sdk import capture_exception

 from core.config import env_config

@@ -37,6 +38,7 @@ async def download(
 async def get_filename(book_id: int, file_type: str) -> Optional[str]:
     headers = {"Authorization": env_config.DOWNLOADER_API_KEY}

+    try:
         async with httpx.AsyncClient() as client:
             response = await client.get(
                 f"{env_config.DOWNLOADER_URL}/filename/{book_id}/{file_type}",
@@ -48,3 +50,6 @@ async def get_filename(book_id: int, file_type: str) -> Optional[str]:
             return None

         return response.text
+    except httpx.HTTPError as e:
+        capture_exception(e)
+        return None
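Read together, these hunks wrap the downloader request in a try/except and report transport failures to Sentry instead of letting them bubble up. Below is a sketch of how get_filename plausibly reads after the commit. The diff appears to suppress whitespace-only re-indentation and omits the middle of the function, so the indentation, the headers= argument, and the status-code check are assumptions rather than lines taken from the diff:

from typing import Optional

import httpx
from sentry_sdk import capture_exception

from core.config import env_config


async def get_filename(book_id: int, file_type: str) -> Optional[str]:
    headers = {"Authorization": env_config.DOWNLOADER_API_KEY}

    try:
        async with httpx.AsyncClient() as client:
            response = await client.get(
                f"{env_config.DOWNLOADER_URL}/filename/{book_id}/{file_type}",
                headers=headers,  # assumed: the diff elides the rest of this call
            )

            # Assumed status handling; the hunk only shows the surrounding lines.
            if response.status_code != 200:
                return None

            return response.text
    except httpx.HTTPError as e:
        # New in this commit: report the failure to Sentry and return None.
        capture_exception(e)
        return None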
[file path not shown in this diff view; these hunks touch the library client]

@@ -3,6 +3,7 @@ from typing import Generic, Optional, TypeVar

 import httpx
 from pydantic import BaseModel
+from sentry_sdk import capture_exception

 from core.config import env_config

@@ -47,8 +48,13 @@ class BookDetail(Book):
 AUTH_HEADERS = {"Authorization": env_config.LIBRARY_API_KEY}


-async def get_book(book_id: int, retry: int = 3) -> Optional[BookDetail]:
+async def get_book(
+    book_id: int, retry: int = 3, last_exp: Exception | None = None
+) -> Optional[BookDetail]:
     if retry == 0:
+        if last_exp:
+            capture_exception(last_exp)
+
         return None

     try:
@@ -61,8 +67,8 @@ async def get_book(book_id: int, retry: int = 3) -> Optional[BookDetail]:
             return None

         return BookDetail.parse_obj(response.json())
-    except (httpx.ConnectError, httpx.ReadError, httpx.ReadTimeout):
-        return await get_book(book_id, retry=retry - 1)
+    except httpx.HTTPError as e:
+        return await get_book(book_id, retry=retry - 1, last_exp=e)


 async def get_books(page: int, page_size: int) -> Page[Book]:
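The get_book change keeps retrying on any httpx.HTTPError but now threads the most recent exception through the recursion, so Sentry receives exactly one report, and only after the final attempt fails. A minimal self-contained sketch of that pattern; fetch() is a hypothetical stand-in for the real HTTP call, whose body the diff does not show:

import asyncio
from typing import Optional

from sentry_sdk import capture_exception


async def fetch(book_id: int) -> dict:
    # Hypothetical stand-in for the HTTP request that get_book() makes.
    raise ConnectionError("backend unavailable")


async def get_with_retry(
    book_id: int, retry: int = 3, last_exp: Exception | None = None
) -> Optional[dict]:
    if retry == 0:
        # Report only the last exception, and only once all retries are spent.
        if last_exp:
            capture_exception(last_exp)

        return None

    try:
        return await fetch(book_id)
    except Exception as e:
        # Pass the failure along so the final attempt can report it.
        return await get_with_retry(book_id, retry=retry - 1, last_exp=e)


if __name__ == "__main__":
    print(asyncio.run(get_with_retry(1)))  # prints None after three failed attempts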
src/app/utils.py (new file, 22 lines)

@@ -0,0 +1,22 @@
+from fastapi import HTTPException, Request, status
+
+from app.models import CachedFile as CachedFileDB
+from app.services.cache_updater import cache_file_by_book_id
+
+
+async def get_cached_file_or_cache(
+    request: Request, object_id: int, object_type: str
+) -> CachedFileDB:
+    cached_file = await CachedFileDB.objects.get_or_none(
+        object_id=object_id, object_type=object_type
+    )
+
+    if not cached_file:
+        cached_file = await cache_file_by_book_id(
+            {"redis": request.app.state.redis_client}, object_id, object_type
+        )
+
+    if not cached_file:
+        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
+
+    return cached_file
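The new helper centralizes the look-up-then-populate logic that the download route previously inlined, raising HTTP 404 when the file can be neither found nor cached. A hedged usage sketch, assuming the repository's app package is importable and its Redis client is attached to app.state as in the real service; the /example route below is purely illustrative:

from fastapi import APIRouter, Request

from app.utils import get_cached_file_or_cache

router = APIRouter()


# Hypothetical route for illustration; the real consumers live in the files router.
@router.get("/example/{object_id}/{object_type}")
async def example_endpoint(request: Request, object_id: int, object_type: str):
    # Returns the cached row, caching it on demand; raises 404 if that fails too.
    cached_file = await get_cached_file_or_cache(request, object_id, object_type)
    return {"object_id": object_id, "object_type": object_type}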
[file path not shown in this diff view; these hunks touch the files router]

@@ -1,4 +1,3 @@
-import asyncio
 from base64 import b64encode

 from arq.connections import ArqRedis
@@ -14,6 +13,7 @@ from app.services.caption_getter import get_caption
 from app.services.downloader import get_filename
 from app.services.files_client import download_file as download_file_from_cache
 from app.services.library_client import get_book
+from app.utils import get_cached_file_or_cache

 router = APIRouter(
     prefix="/api/v1", tags=["files"], dependencies=[Depends(check_token)]
@@ -39,34 +39,29 @@ async def get_cached_file(request: Request, object_id: int, object_type: str):

 @router.get("/download/{object_id}/{object_type}")
 async def download_cached_file(request: Request, object_id: int, object_type: str):
-    cached_file = await CachedFileDB.objects.get_or_none(
-        object_id=object_id, object_type=object_type
-    )
-
-    if not cached_file:
-        cached_file = await cache_file_by_book_id(
-            {"redis": request.app.state.redis_client}, object_id, object_type
-        )
-
-    if not cached_file:
-        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
+    cached_file = await get_cached_file_or_cache(request, object_id, object_type)

     cache_data: dict = cached_file.data  # type: ignore

-    data, filename, book = await asyncio.gather(
-        download_file_from_cache(cache_data["chat_id"], cache_data["message_id"]),
-        get_filename(object_id, object_type),
-        get_book(object_id),
+    data = await download_file_from_cache(
+        cache_data["chat_id"], cache_data["message_id"]
     )

     if data is None:
         await CachedFileDB.objects.filter(id=cached_file.id).delete()

-        arq_pool: ArqRedis = request.app.state.arq_pool
-        await arq_pool.enqueue_job(
-            "cache_file_by_book_id", object_id, object_type, by_request=False
+        cached_file = await get_cached_file_or_cache(request, object_id, object_type)
+        cache_data: dict = cached_file.data  # type: ignore
+        data = await download_file_from_cache(
+            cache_data["chat_id"], cache_data["message_id"]
         )
-
+        if data is None:
+            raise HTTPException(status_code=status.HTTP_204_NO_CONTENT)
+
+    if (filename := await get_filename(object_id, object_type)) is None:
+        raise HTTPException(status_code=status.HTTP_204_NO_CONTENT)
+
+    if (book := await get_book(object_id)) is None:
         raise HTTPException(status_code=status.HTTP_204_NO_CONTENT)

     response, client = data
@@ -75,9 +70,6 @@ async def download_cached_file(request: Request, object_id: int, object_type: str):
     await response.aclose()
     await client.aclose()

-    assert book
-    assert filename
-
     filename_ascii = filename.encode("ascii", "ignore").decode("ascii")

     return StreamingResponse(
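Assembled from the hunks above, the download handler after this commit replaces the asyncio.gather of data, filename and book with sequential awaits, retries the Telegram download once after re-caching, and turns each remaining failure into its own early 204 response, which also makes the old assert book / assert filename lines unnecessary. A readable reconstruction follows; the tail that builds the StreamingResponse is not shown in the diff and is only summarized in a comment:

@router.get("/download/{object_id}/{object_type}")
async def download_cached_file(request: Request, object_id: int, object_type: str):
    cached_file = await get_cached_file_or_cache(request, object_id, object_type)

    cache_data: dict = cached_file.data  # type: ignore

    data = await download_file_from_cache(
        cache_data["chat_id"], cache_data["message_id"]
    )

    if data is None:
        # Stale cache entry: drop it, re-cache the book, and retry the download once.
        await CachedFileDB.objects.filter(id=cached_file.id).delete()

        cached_file = await get_cached_file_or_cache(request, object_id, object_type)
        cache_data: dict = cached_file.data  # type: ignore
        data = await download_file_from_cache(
            cache_data["chat_id"], cache_data["message_id"]
        )
        if data is None:
            raise HTTPException(status_code=status.HTTP_204_NO_CONTENT)

    if (filename := await get_filename(object_id, object_type)) is None:
        raise HTTPException(status_code=status.HTTP_204_NO_CONTENT)

    if (book := await get_book(object_id)) is None:
        raise HTTPException(status_code=status.HTTP_204_NO_CONTENT)

    response, client = data

    # ... unchanged remainder (not fully shown in the diff): close response and
    # client, build filename_ascii, and return a StreamingResponse.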