This commit is contained in:
2021-11-10 13:13:33 +03:00
commit 4c7af00ac3
22 changed files with 1663 additions and 0 deletions

View File

@@ -0,0 +1,98 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = ./app/alembic
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =
# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator"
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
# version path separator; As mentioned above, this is the character used to split
# version_locations. Valid values are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os # default: use os.pathsep
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

View File

@@ -0,0 +1 @@
Generic single-database configuration.

View File

@@ -0,0 +1,66 @@
from logging.config import fileConfig
from alembic import context
import sys, os
from sqlalchemy.engine import create_engine
from core.db import DATABASE_URL
# Make the project root importable so `core` and `app` resolve when
# alembic is invoked from its own directory.
myPath = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, myPath + '/../../')
# Alembic Config object; provides access to values in alembic.ini.
config = context.config
# Imported only after the sys.path tweak above so `app` can be found.
from app.models import BaseMeta
# Metadata that `alembic revision --autogenerate` diffs against.
target_metadata = BaseMeta.metadata
def run_migrations_offline():
    """Emit migration SQL as a script without touching a database.

    Configures the Alembic context with only a URL — no Engine and
    therefore no DBAPI is required. Calls to context.execute() are
    written to the script output instead of being executed.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )
    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online():
    """Run migrations against a live database connection.

    Builds an Engine from DATABASE_URL, runs the migrations inside a
    transaction, and disposes the Engine afterwards so its connection
    pool is not leaked (the original never disposed it).
    """
    connectable = create_engine(DATABASE_URL)
    try:
        with connectable.connect() as connection:
            context.configure(
                connection=connection, target_metadata=target_metadata
            )
            with context.begin_transaction():
                context.run_migrations()
    finally:
        # Close pooled connections even if the migration raised.
        connectable.dispose()
# Entry point: alembic imports this module and we pick the mode based on
# how the command was invoked (--sql => offline script generation).
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

View File

@@ -0,0 +1,24 @@
## Mako template rendered by `alembic revision` to produce each new
## migration file. `##` lines are Mako comments and are stripped from
## the generated output.
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
def upgrade():
    ${upgrades if upgrades else "pass"}
def downgrade():
    ${downgrades if downgrades else "pass"}

View File

@@ -0,0 +1,32 @@
"""empty message
Revision ID: 3bbf7cb4eaa2
Revises: 5a32159504fd
Create Date: 2021-09-27 17:18:27.635063
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '3bbf7cb4eaa2'
down_revision = '5a32159504fd'
branch_labels = None
depends_on = None
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Relax the NOT NULL constraint on upload_time (the column keeps its
    # timezone-aware TIMESTAMP type).
    op.alter_column('uploaded_files', 'upload_time',
        existing_type=postgresql.TIMESTAMP(timezone=True),
        nullable=True)
    # ### end Alembic commands ###
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Reinstate the NOT NULL constraint removed by upgrade(). Will fail
    # if NULL upload_time rows were inserted in the meantime.
    op.alter_column('uploaded_files', 'upload_time',
        existing_type=postgresql.TIMESTAMP(timezone=True),
        nullable=False)
    # ### end Alembic commands ###

View File

@@ -0,0 +1,34 @@
"""empty message
Revision ID: 5a32159504fd
Revises:
Create Date: 2021-09-27 16:23:39.987261
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '5a32159504fd'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Initial schema: one table mapping an uploaded file to its Telegram
    # storage backend and backend-specific locator data.
    op.create_table('uploaded_files',
        sa.Column('id', sa.BigInteger(), nullable=True),
        sa.Column('backend', sa.String(length=16), nullable=False),
        sa.Column('data', sa.JSON(), nullable=False),
        sa.Column('upload_time', sa.DateTime(timezone=True), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Drops the table and all stored file records with it.
    op.drop_table('uploaded_files')
    # ### end Alembic commands ###

View File

@@ -0,0 +1,26 @@
from enum import Enum
from datetime import datetime
import ormar
from core.db import metadata, database
class BaseMeta(ormar.ModelMeta):
    # Shared ormar Meta base: binds every model in this app to the single
    # database/metadata pair exported by core.db.
    metadata = metadata
    database = database
class UploadBackends(str, Enum):
    """Telegram client library used to perform an upload.

    Inherits ``str`` so members compare equal to their plain string
    values and serialize cleanly when stored in the database.
    """
    aiogram = 'aiogram'
    telethon = 'telethon'
class UploadedFile(ormar.Model):
    # DB record describing one file stored in Telegram.
    class Meta(BaseMeta):
        tablename = "uploaded_files"
    # Surrogate primary key.
    id = ormar.BigInteger(primary_key=True, nullable=False)
    # Which client library performed the upload; values come from
    # UploadBackends.
    backend = ormar.String(max_length=16, choices=list(UploadBackends))
    # Backend-specific locator dict (e.g. a file_id, or chat/message ids).
    data = ormar.JSON()
    # NOTE(review): column is timezone-aware but the default is naive
    # local-time datetime.now — confirm UTC (datetime.utcnow or an aware
    # now) was not intended.
    upload_time = ormar.DateTime(timezone=True, default=datetime.now)

View File

@@ -0,0 +1,5 @@
from app.services.file_uploader import FileUploader
async def on_start():
    # Application startup hook: build and initialize the Telegram file
    # storages before the first request can reach FileUploader.upload().
    await FileUploader.prepare()

View File

@@ -0,0 +1,10 @@
from datetime import datetime
from pydantic import BaseModel
class UploadedFile(BaseModel):
    """API representation of a stored file; mirrors models.UploadedFile."""
    id: int
    backend: str
    data: dict
    upload_time: datetime

View File

@@ -0,0 +1,128 @@
from typing import Optional
from io import BytesIO
from fastapi import UploadFile
from telegram_files_storage import AiogramFilesStorage, TelethonFilesStorage
from core.config import env_config
from app.models import UploadedFile, UploadBackends
class FileUploader:
    """Uploads files to Telegram through aiogram (Bot API) and/or telethon
    storages.

    Storages live at class level and are shared; ``prepare()`` must be
    awaited once at startup before ``upload()`` is used. An upload tries
    the aiogram path first and falls back to telethon (e.g. for files
    above the Bot API size limit). Requests are spread over the storage
    lists round-robin.
    """

    AIOGRAM_STORAGES: list[AiogramFilesStorage] = []
    TELETHON_STORAGES: list[TelethonFilesStorage] = []

    # Round-robin cursors over the storage lists above.
    _aiogram_storage_index = 0
    _telethon_storage_index = 0

    # Payloads above this size skip the aiogram path entirely.
    _AIOGRAM_SIZE_LIMIT = 50 * 1000 * 1000

    def __init__(self, file: UploadFile) -> None:
        self.file = file
        # Filled in by a successful _upload_via_* call.
        self.upload_data: Optional[dict] = None
        self.upload_backend: Optional[UploadBackends] = None

    async def _read_payload(self) -> BytesIO:
        """Return the whole upload as a named BytesIO.

        Rewinds the stream first. This fixes a bug in the original code:
        when the aiogram attempt had already read the file and rejected
        it for size, the telethon fallback read again from EOF and
        uploaded zero bytes.
        """
        await self.file.seek(0)
        data = await self.file.read()
        if isinstance(data, str):
            data = data.encode()
        bytes_io = BytesIO(data)
        # Telegram clients use the stream's name as the filename.
        bytes_io.name = self.file.filename
        return bytes_io

    async def _upload(self) -> bool:
        """Try each backend in order; True once one accepted the file.

        Raises ValueError when no storage of either kind is configured.
        """
        if not self.AIOGRAM_STORAGES and not self.TELETHON_STORAGES:
            raise ValueError("Files storage not exist!")
        if await self._upload_via_aiogram():
            return True
        return await self._upload_via_telethon()

    async def _upload_via_aiogram(self) -> bool:
        """Upload via a Bot API storage; False if unavailable or too big."""
        if not self.AIOGRAM_STORAGES:
            return False
        bytes_io = await self._read_payload()
        if bytes_io.getbuffer().nbytes > self._AIOGRAM_SIZE_LIMIT:
            return False
        storage = self.get_aiogram_storage()
        upload_result = await storage.upload(bytes_io)
        self.upload_data = {
            'file_id': upload_result
        }
        self.upload_backend = UploadBackends.aiogram
        return True

    async def _upload_via_telethon(self) -> bool:
        """Upload via a telethon storage; False if none is configured."""
        if not self.TELETHON_STORAGES:
            return False
        bytes_io = await self._read_payload()
        storage = self.get_telethon_storage()
        upload_result = await storage.upload(bytes_io)
        self.upload_data = {
            'chat_id': storage.chat_id,
            'message_id': upload_result
        }
        self.upload_backend = UploadBackends.telethon
        return True

    async def _save_to_db(self) -> UploadedFile:
        """Persist the locator of a completed upload as an UploadedFile row."""
        return await UploadedFile.objects.create(
            backend=self.upload_backend,
            data=self.upload_data,
        )

    @classmethod
    async def prepare(cls):
        """Build storages from env configuration and initialize them.

        Call once at application startup; storages are class-level state.
        """
        if env_config.BOT_TOKENS:
            cls.AIOGRAM_STORAGES = [
                AiogramFilesStorage(env_config.TELEGRAM_CHAT_ID, token)
                for token in env_config.BOT_TOKENS
            ]
        if env_config.TELETHON_APP_CONFIG and env_config.TELETHON_SESSIONS:
            cls.TELETHON_STORAGES = [
                TelethonFilesStorage(
                    env_config.TELEGRAM_CHAT_ID,
                    env_config.TELETHON_APP_CONFIG.APP_ID,
                    env_config.TELETHON_APP_CONFIG.API_HASH,
                    session,
                )
                for session in env_config.TELETHON_SESSIONS
            ]
        for storage in [*cls.AIOGRAM_STORAGES, *cls.TELETHON_STORAGES]:
            await storage.prepare()

    @classmethod
    def get_aiogram_storage(cls) -> AiogramFilesStorage:
        """Return the next aiogram storage, round-robin."""
        if not cls.AIOGRAM_STORAGES:
            raise ValueError("Aiogram storage not exist!")
        cls._aiogram_storage_index = (cls._aiogram_storage_index + 1) % len(cls.AIOGRAM_STORAGES)
        return cls.AIOGRAM_STORAGES[cls._aiogram_storage_index]

    @classmethod
    def get_telethon_storage(cls) -> TelethonFilesStorage:
        """Return the next telethon storage, round-robin."""
        if not cls.TELETHON_STORAGES:
            raise ValueError("Telethon storage not exists!")
        cls._telethon_storage_index = (cls._telethon_storage_index + 1) % len(cls.TELETHON_STORAGES)
        return cls.TELETHON_STORAGES[cls._telethon_storage_index]

    @classmethod
    async def upload(cls, file: UploadFile) -> Optional[UploadedFile]:
        """Upload `file` and store its record; None if no backend took it."""
        uploader = cls(file)
        if not await uploader._upload():
            return None
        return await uploader._save_to_db()

View File

@@ -0,0 +1,47 @@
from fastapi import File, UploadFile
from starlette import status
from fastapi import APIRouter, HTTPException
from app.serializers import UploadedFile
from app import models
from app.services.file_uploader import FileUploader
# All endpoints in this module are served under the /api/v1 prefix.
router = APIRouter(
    prefix="/api/v1",
)
@router.get("/files", response_model=list[UploadedFile])
async def get_files():
    """List every uploaded file record."""
    records = await models.UploadedFile.objects.all()
    return records
@router.get("/files/{file_id}", response_model=UploadedFile, responses={
    404: {},
})
async def get_file(file_id: int):
    """Return a single uploaded file record, or 404 when unknown."""
    record = await models.UploadedFile.objects.get_or_none(id=file_id)
    if record:
        return record
    raise HTTPException(status.HTTP_404_NOT_FOUND)
@router.post("/files", response_model=UploadedFile)
async def upload_file(file: UploadFile = File(...)):
    """Upload a file to Telegram and persist its locator.

    ``File(...)`` marks the form field as required; the original
    ``File({})`` used a dict as the default, letting requests omit the
    file and then crash when it was read.
    """
    uploaded = await FileUploader.upload(file)
    if uploaded is None:
        # Every configured backend declined the upload (e.g. too large
        # for the Bot API with no telethon storage to fall back to).
        raise HTTPException(status.HTTP_500_INTERNAL_SERVER_ERROR)
    return uploaded
@router.delete("/files/{file_id}", response_model=UploadedFile, responses={
    400: {}
})
async def delete_file(file_id: int):
    """Delete a stored file record and return it.

    Responds 400 when the id is unknown, matching the declared
    ``responses`` contract of this endpoint.
    """
    record = await models.UploadedFile.objects.get_or_none(id=file_id)
    if not record:
        raise HTTPException(status.HTTP_400_BAD_REQUEST)
    await record.delete()
    return record