This commit is contained in:
2023-08-06 14:05:34 +02:00
parent 217adb85ed
commit 2f1fdecd5f
21 changed files with 273 additions and 1053 deletions

View File

@@ -1,98 +0,0 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = ./app/alembic
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =
# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator"
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
# version path separator; As mentioned above, this is the character used to split
# version_locations. Valid values are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os # default: use os.pathsep
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

View File

@@ -1 +0,0 @@
Generic single-database configuration.

View File

@@ -1,63 +0,0 @@
import os
import sys
from alembic import context
from sqlalchemy.engine import create_engine
from core.db import DATABASE_URL
# Make the project root importable so `core` and `app` resolve when Alembic
# runs this script from its own directory.
myPath = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, myPath + "/../../")
# Alembic Config object, giving access to values in alembic.ini.
config = context.config
# Imported after the sys.path tweak above — hence the noqa for E402.
from app.models import BaseMeta  # noqa: E402

# Autogenerate compares the live DB against this ormar/SQLAlchemy metadata.
target_metadata = BaseMeta.metadata
def run_migrations_offline():
    """Emit migration SQL as a script, without touching a database.

    The context is configured with just a URL (no Engine), so no DBAPI
    needs to be installed; calls to context.execute() write the given
    statements to the script output instead of executing them.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )
    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online():
    """Run migrations against a live database connection.

    Builds an Engine from DATABASE_URL and associates one of its
    connections with the Alembic migration context.
    """
    engine = create_engine(DATABASE_URL)
    with engine.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata)
        with context.begin_transaction():
            context.run_migrations()
# Entry point: Alembic selects offline vs. online mode based on how it was
# invoked (e.g. `alembic upgrade --sql` runs offline).
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

View File

@@ -1,24 +0,0 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
def upgrade():
${upgrades if upgrades else "pass"}
def downgrade():
${downgrades if downgrades else "pass"}

View File

@@ -1,39 +0,0 @@
"""empty message
Revision ID: 3bbf7cb4eaa2
Revises: 5a32159504fd
Create Date: 2021-09-27 17:18:27.635063
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "3bbf7cb4eaa2"
down_revision = "5a32159504fd"
branch_labels = None
depends_on = None
def upgrade():
    """Relax uploaded_files.upload_time to allow NULL values."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column(
        "uploaded_files",
        "upload_time",
        existing_type=postgresql.TIMESTAMP(timezone=True),
        nullable=True,
    )
    # ### end Alembic commands ###
def downgrade():
    """Restore the NOT NULL constraint on uploaded_files.upload_time.

    NOTE(review): this will fail at downgrade time if any rows hold a
    NULL upload_time — those must be backfilled first.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column(
        "uploaded_files",
        "upload_time",
        existing_type=postgresql.TIMESTAMP(timezone=True),
        nullable=False,
    )
    # ### end Alembic commands ###

View File

@@ -1,35 +0,0 @@
"""empty message
Revision ID: 5a32159504fd
Revises:
Create Date: 2021-09-27 16:23:39.987261
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "5a32159504fd"
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
    """Create the uploaded_files table (initial schema)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "uploaded_files",
        # NOTE(review): id is the primary key yet declared nullable=True —
        # looks unintended (the ormar model declares it nullable=False); confirm.
        sa.Column("id", sa.BigInteger(), nullable=True),
        sa.Column("backend", sa.String(length=16), nullable=False),
        sa.Column("data", sa.JSON(), nullable=False),
        sa.Column("upload_time", sa.DateTime(timezone=True), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    # ### end Alembic commands ###
def downgrade():
    """Drop the uploaded_files table, reversing the initial migration."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table("uploaded_files")
    # ### end Alembic commands ###

View File

@@ -1,26 +0,0 @@
from datetime import datetime
from enum import Enum
import ormar
from core.db import database, metadata
class BaseMeta(ormar.ModelMeta):
    """Shared ormar Meta binding all models to the app database/metadata."""

    metadata = metadata
    database = database
class UploadBackends(str, Enum):
    """Which Telegram account kind performed the upload."""

    bot = "bot"
    user = "user"
class UploadedFile(ormar.Model):
    """DB record describing a file stored via a Telegram backend."""

    class Meta(BaseMeta):
        tablename = "uploaded_files"

    id = ormar.BigInteger(primary_key=True, nullable=False)
    # Restricted to the UploadBackends values ("bot"/"user").
    backend = ormar.String(max_length=16, choices=list(UploadBackends))
    data = ormar.JSON()
    # NOTE(review): default=datetime.now yields a naive local timestamp even
    # though the column is timezone-aware — confirm an aware UTC default
    # wasn't intended.
    upload_time = ormar.DateTime(timezone=True, default=datetime.now)

View File

@@ -1,16 +1,19 @@
from datetime import datetime
import enum
from typing import TypedDict
from pydantic import BaseModel, constr
from pydantic import BaseModel
class CreateUploadedFile(BaseModel):
backend: constr(max_length=16) # type: ignore
data: dict
upload_time: datetime
class UploadBackend(enum.StrEnum):
bot = "bot"
user = "user"
class Data(TypedDict):
    """Location of an uploaded file inside Telegram: chat plus message id."""

    chat_id: str | int
    message_id: int
class UploadedFile(BaseModel):
id: int
backend: str
data: dict
upload_time: datetime
backend: UploadBackend
data: Data

View File

@@ -1,4 +1,4 @@
from app.models import UploadBackends
from app.serializers import UploadBackend
from app.services.storages import BotStorage, StoragesContainer, UserStorage
@@ -39,8 +39,8 @@ class FileDownloader:
return user_storages[cls._user_storage_index]
@classmethod
async def _download_via(cls, message_id: int, storage_type: UploadBackends):
if storage_type == UploadBackends.bot:
async def _download_via(cls, message_id: int, storage_type: UploadBackend):
if storage_type == UploadBackend.bot:
storage = cls.get_bot_storage()
else:
storage = cls.get_user_storage()
@@ -52,9 +52,7 @@ class FileDownloader:
if not cls.bot_storages and not cls.user_storages:
raise ValueError("Files storage not exist!")
if (
data := await cls._download_via(message_id, UploadBackends.bot)
) is not None:
if (data := await cls._download_via(message_id, UploadBackend.bot)) is not None:
return data
return await cls._download_via(message_id, UploadBackends.user)
return await cls._download_via(message_id, UploadBackend.user)

View File

@@ -1,12 +1,12 @@
from typing import Any, Optional
from typing import Any, BinaryIO, Optional
from fastapi import UploadFile
from app.models import UploadBackends, UploadedFile
from app.serializers import Data, UploadBackend, UploadedFile
from app.services.storages import BotStorage, StoragesContainer, UserStorage
class Wrapper:
class Wrapper(BinaryIO):
    def __init__(self, wrapped: Any, filename: str) -> None:
        """Wrap a raw file object, remembering the filename to upload under."""
        self.wrapped = wrapped
        self.filename = filename
@@ -39,24 +39,26 @@ class FileUploader:
self.file = file
self.caption = caption
self.upload_data: Optional[dict] = None
self.upload_backend: Optional[UploadBackends] = None
self.upload_data: Optional[Data] = None
self.upload_backend: Optional[UploadBackend] = None
async def _upload(self) -> bool:
if not self.bot_storages and not self.user_storages:
raise ValueError("Files storage not exist!")
if await self._upload_via(UploadBackends.bot):
if await self._upload_via(UploadBackend.bot):
return True
return await self._upload_via(UploadBackends.user)
return await self._upload_via(UploadBackend.user)
async def _upload_via(self, storage_type: UploadBackends) -> bool:
if storage_type == UploadBackends.bot:
async def _upload_via(self, storage_type: UploadBackend) -> bool:
if storage_type == UploadBackend.bot:
storage = self.get_bot_storage()
else:
storage = self.get_user_storage()
assert self.file.filename
wrapped = Wrapper(self.file.file, self.file.filename)
data = await storage.upload(wrapped, caption=self.caption)
@@ -69,11 +71,11 @@ class FileUploader:
return True
    async def _save_to_db(self) -> UploadedFile:
        """Persist the completed upload (backend + location data) to the DB."""
        return await UploadedFile.objects.create(
            backend=self.upload_backend,
            data=self.upload_data,
        )
    def get_result(self) -> UploadedFile:
        """Build the response serializer from a completed upload.

        Asserts both backend and data were set by a prior successful upload.
        NOTE(review): constructs UploadedFile with only backend/data —
        confirm any other serializer fields are optional.
        """
        assert self.upload_backend is not None
        assert self.upload_data is not None
        return UploadedFile(backend=self.upload_backend, data=self.upload_data)
@classmethod
def get_bot_storage(cls) -> BotStorage:
@@ -107,4 +109,4 @@ class FileUploader:
if not upload_result:
return None
return await uploader._save_to_db()
return uploader.get_result()

View File

@@ -25,9 +25,12 @@ class BaseStorage:
self, file: telethon.hints.FileLike, caption: Optional[str] = None
) -> Optional[tuple[Union[str, int], int]]:
try:
message = await self.client.send_file(
entity=self.channel_id, file=file, caption=caption
)
if caption:
message = await self.client.send_file(
entity=self.channel_id, file=file, caption=caption
)
else:
message = await self.client.send_file(entity=self.channel_id, file=file)
except telethon.errors.FilePartInvalidError:
return None
except telethon.errors.PhotoInvalidError:

View File

@@ -4,8 +4,7 @@ from fastapi import APIRouter, Depends, File, Form, HTTPException, UploadFile, s
from fastapi.responses import StreamingResponse
from app.depends import check_token
from app.models import UploadedFile as UploadedFileDB
from app.serializers import CreateUploadedFile, UploadedFile
from app.serializers import UploadedFile
from app.services.file_downloader import FileDownloader
from app.services.file_uploader import FileUploader
@@ -15,32 +14,6 @@ router = APIRouter(
)
@router.get("/", response_model=list[UploadedFile])
async def get_files():
    """Return every uploaded-file record."""
    return await UploadedFileDB.objects.all()
@router.get(
    "/{file_id}",
    response_model=UploadedFile,
    responses={
        404: {},
    },
)
async def get_file(file_id: int):
    """Fetch one uploaded-file record, or respond 404 if it does not exist."""
    if uploaded_file := await UploadedFileDB.objects.get_or_none(id=file_id):
        return uploaded_file
    raise HTTPException(status.HTTP_404_NOT_FOUND)
@router.post("/", response_model=UploadedFile)
async def create_file(data: CreateUploadedFile):
    """Insert an uploaded-file record directly from client-supplied fields."""
    return await UploadedFileDB.objects.create(**data.dict())
@router.post("/upload/", response_model=UploadedFile)
async def upload_file(
    # `File({})` used a dict as the default, which neither marks the part
    # required nor is a valid UploadFile — use the required marker instead.
    file: UploadFile = File(...),
    # `Form({})` likewise defaulted an Optional[str] to a dict; None is the
    # correct "no caption supplied" default.
    caption: Optional[str] = Form(None),
):
    """Upload a file to Telegram storage (optional caption) and return its record."""
    return await FileUploader.upload(file, caption=caption)
@@ -54,22 +27,3 @@ async def download_by_message(chat_id: str, message_id: int):
raise HTTPException(status.HTTP_400_BAD_REQUEST)
return StreamingResponse(data)
@router.delete("/{file_id}", response_model=UploadedFile, responses={400: {}})
async def delete_file(file_id: int):
    """Delete an uploaded-file record; respond 400 when the id is unknown."""
    if not (uploaded_file := await UploadedFileDB.objects.get_or_none(id=file_id)):
        raise HTTPException(status.HTTP_400_BAD_REQUEST)
    await uploaded_file.delete()
    return uploaded_file
# Separate router so the probe endpoint is grouped apart from file routes.
healthcheck_router = APIRouter(tags=["healthcheck"])


@healthcheck_router.get("/healthcheck")
async def healthcheck():
    """Liveness probe; always responds with the string "Ok"."""
    return "Ok"