Remove the `data` JSON field from CachedFile; store `message_id` and `chat_id` as dedicated columns instead

This commit is contained in:
2022-12-30 23:46:51 +01:00
parent 339ae64363
commit 8ce7cce01a
5 changed files with 61 additions and 38 deletions

View File

@@ -0,0 +1,32 @@
"""empty message
Revision ID: 62d57916ec53
Revises: f77b0b14f9eb
Create Date: 2022-12-30 23:30:50.867163
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "62d57916ec53"
down_revision = "f77b0b14f9eb"
branch_labels = None
depends_on = None
def upgrade():
op.drop_column("cached_files", "data")
op.create_unique_constraint(
"uc_cached_files_message_id_chat_id", "cached_files", ["message_id", "chat_id"]
)
op.create_index(
op.f("ix_cached_files_message_id"), "cached_files", ["message_id"], unique=True
)
def downgrade():
op.add_column("cached_files", sa.Column("data", sa.JSON(), nullable=False))
op.drop_constraint("uc_cached_files_message_id_chat_id", "cached_files")
op.drop_index("ix_cached_files_message_id", "cached_files")

View File

@@ -11,13 +11,20 @@ class BaseMeta(ormar.ModelMeta):
class CachedFile(ormar.Model):
    """DB record of a file already uploaded to Telegram.

    The former JSON ``data`` column is replaced by the concrete
    ``message_id`` / ``chat_id`` columns; ``data`` remains available as a
    read-only property for backward compatibility with existing callers.
    """

    class Meta(BaseMeta):
        tablename = "cached_files"
        constraints = [
            ormar.UniqueColumns("object_id", "object_type"),
            ormar.UniqueColumns("message_id", "chat_id"),
        ]

    id: int = ormar.Integer(primary_key=True)  # type: ignore
    object_id: int = ormar.Integer(index=True)  # type: ignore
    # NOTE(review): unique=True makes object_type globally unique, i.e. at most
    # one cached file per file type — this contradicts the composite
    # UniqueColumns("object_id", "object_type") above, and the accompanying
    # migration creates no unique constraint on object_type. Confirm intent.
    object_type: str = ormar.String(
        max_length=8, index=True, unique=True
    )  # type: ignore
    message_id: int = ormar.BigInteger(index=True)  # type: ignore
    chat_id: int = ormar.BigInteger()  # type: ignore

    @property
    def data(self) -> dict:
        """Backward-compatible view of the removed JSON column."""
        return {"message_id": self.message_id, "chat_id": self.chat_id}

View File

@@ -107,7 +107,10 @@ async def cache_file(book: Book, file_type: str) -> Optional[CachedFile]:
return None
return await CachedFile.objects.create(
object_id=book.id, object_type=file_type, data=upload_data.data
object_id=book.id,
object_type=file_type,
message_id=upload_data.data["message_id"],
chat_id=upload_data.data["chat_id"],
)

View File

@@ -1,3 +0,0 @@
class DummyWriter:
    """Minimal file-like object whose ``write`` echoes its argument back.

    Handing an instance to ``csv.writer`` makes each ``writerow`` call
    return the fully formatted CSV line instead of writing it anywhere,
    which is convenient for streaming rows one at a time.
    """

    def write(self, line):
        # No buffering, no I/O — hand the formatted line straight back.
        return line

View File

@@ -1,7 +1,5 @@
import asyncio
import base64
import csv
from typing import AsyncIterator
from fastapi import APIRouter, Depends, HTTPException, status, Request
from fastapi.responses import StreamingResponse
@@ -9,7 +7,6 @@ from fastapi.responses import StreamingResponse
from starlette.background import BackgroundTask
from arq.connections import ArqRedis
from asyncpg import exceptions
from app.depends import check_token
from app.models import CachedFile as CachedFileDB
@@ -19,7 +16,6 @@ from app.services.caption_getter import get_caption
from app.services.downloader import get_filename
from app.services.files_client import download_file as download_file_from_cache
from app.services.library_client import get_book
from app.utils import DummyWriter
router = APIRouter(
@@ -112,18 +108,22 @@ async def delete_cached_file(object_id: int, object_type: str):
@router.post("/", response_model=CachedFile)
async def create_or_update_cached_file(data: CreateCachedFile):
try:
return await CachedFileDB.objects.create(**data.dict())
except exceptions.UniqueViolationError:
data_dict = data.dict()
object_id = data_dict.pop("object_id")
object_type = data_dict.pop("object_type")
cached_file = await CachedFileDB.objects.get(
object_id=object_id, object_type=object_type
cached_file = await CachedFileDB.objects.get_or_none(
object_id=data.data["object_id"], object_type=data.data["object_type"]
)
cached_file.update_from_dict(data_dict)
if cached_file is not None:
cached_file.message_id = data.data["message_id"]
cached_file.chat_id = data.data["chat_id"]
return await cached_file.update()
return await CachedFileDB.objects.create(
object_id=data.object_id,
object_type=data.object_type,
message_id=data.data["message_id"],
chat_id=data.data["chat_id"],
)
@router.post("/update_cache")
async def update_cache(request: Request):
@@ -133,22 +133,6 @@ async def update_cache(request: Request):
return "Ok!"
@router.get("/download_dump")
async def download_dump():
async def get_data() -> AsyncIterator[str]:
writer = csv.writer(DummyWriter())
async for c_file in CachedFileDB.objects.iterate():
yield writer.writerow([c_file.object_id, c_file.object_type, c_file.data])
return StreamingResponse(
get_data(),
headers={
"Content-Disposition": "attachment; filename=dump.csv",
},
)
# Separate router so health-check endpoints are grouped apart from the file API.
healthcheck_router = APIRouter(
    tags=["healthcheck"],
)