mirror of
https://github.com/flibusta-apps/telegram_files_server.git
synced 2025-12-06 04:25:38 +01:00
Refactor
@@ -1,98 +0,0 @@
-# A generic, single database configuration.
-
-[alembic]
-# path to migration scripts
-script_location = ./app/alembic
-
-# template used to generate migration files
-# file_template = %%(rev)s_%%(slug)s
-
-# sys.path path, will be prepended to sys.path if present.
-# defaults to the current working directory.
-prepend_sys_path = .
-
-# timezone to use when rendering the date within the migration file
-# as well as the filename.
-# If specified, requires the python-dateutil library that can be
-# installed by adding `alembic[tz]` to the pip requirements
-# string value is passed to dateutil.tz.gettz()
-# leave blank for localtime
-# timezone =
-
-# max length of characters to apply to the
-# "slug" field
-# truncate_slug_length = 40
-
-# set to 'true' to run the environment during
-# the 'revision' command, regardless of autogenerate
-# revision_environment = false
-
-# set to 'true' to allow .pyc and .pyo files without
-# a source .py file to be detected as revisions in the
-# versions/ directory
-# sourceless = false
-
-# version location specification; This defaults
-# to alembic/versions. When using multiple version
-# directories, initial revisions must be specified with --version-path.
-# The path separator used here should be the separator specified by "version_path_separator"
-# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
-
-# version path separator; As mentioned above, this is the character used to split
-# version_locations. Valid values are:
-#
-# version_path_separator = :
-# version_path_separator = ;
-# version_path_separator = space
-version_path_separator = os # default: use os.pathsep
-
-# the output encoding used when revision files
-# are written from script.py.mako
-# output_encoding = utf-8
-
-
-[post_write_hooks]
-# post_write_hooks defines scripts or Python functions that are run
-# on newly generated revision scripts. See the documentation for further
-# detail and examples
-
-# format using "black" - use the console_scripts runner, against the "black" entrypoint
-# hooks = black
-# black.type = console_scripts
-# black.entrypoint = black
-# black.options = -l 79 REVISION_SCRIPT_FILENAME
-
-# Logging configuration
-[loggers]
-keys = root,sqlalchemy,alembic
-
-[handlers]
-keys = console
-
-[formatters]
-keys = generic
-
-[logger_root]
-level = WARN
-handlers = console
-qualname =
-
-[logger_sqlalchemy]
-level = WARN
-handlers =
-qualname = sqlalchemy.engine
-
-[logger_alembic]
-level = INFO
-handlers =
-qualname = alembic
-
-[handler_console]
-class = StreamHandler
-args = (sys.stderr,)
-level = NOTSET
-formatter = generic
-
-[formatter_generic]
-format = %(levelname)-5.5s [%(name)s] %(message)s
-datefmt = %H:%M:%S
@@ -1 +0,0 @@
-Generic single-database configuration.
@@ -1,63 +0,0 @@
-import os
-import sys
-
-from alembic import context
-from sqlalchemy.engine import create_engine
-
-from core.db import DATABASE_URL
-
-myPath = os.path.dirname(os.path.abspath(__file__))
-sys.path.insert(0, myPath + "/../../")
-
-config = context.config
-
-
-from app.models import BaseMeta # noqa: E402
-
-target_metadata = BaseMeta.metadata
-
-
-def run_migrations_offline():
-    """Run migrations in 'offline' mode.
-
-    This configures the context with just a URL
-    and not an Engine, though an Engine is acceptable
-    here as well. By skipping the Engine creation
-    we don't even need a DBAPI to be available.
-
-    Calls to context.execute() here emit the given string to the
-    script output.
-
-    """
-    url = config.get_main_option("sqlalchemy.url")
-    context.configure(
-        url=url,
-        target_metadata=target_metadata,
-        literal_binds=True,
-        dialect_opts={"paramstyle": "named"},
-    )
-
-    with context.begin_transaction():
-        context.run_migrations()
-
-
-def run_migrations_online():
-    """Run migrations in 'online' mode.
-
-    In this scenario we need to create an Engine
-    and associate a connection with the context.
-
-    """
-    connectable = create_engine(DATABASE_URL)
-
-    with connectable.connect() as connection:
-        context.configure(connection=connection, target_metadata=target_metadata)
-
-        with context.begin_transaction():
-            context.run_migrations()
-
-
-if context.is_offline_mode():
-    run_migrations_offline()
-else:
-    run_migrations_online()
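For reference only (not part of the commit), the removed Alembic environment above was normally driven through Alembic's command API or CLI; a minimal sketch, assuming it is run from the project root so that core.db and app.models are importable:

# Reference sketch, not from the repository: drives the removed env.py above.
from alembic import command
from alembic.config import Config

cfg = Config("./app/alembic.ini")  # same file the start script pointed at

# "online" mode: builds an Engine from core.db.DATABASE_URL and runs migrations
command.upgrade(cfg, "head")

# "offline" mode: emits the migration SQL instead of executing it
command.upgrade(cfg, "head", sql=True)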
@@ -1,24 +0,0 @@
-"""${message}
-
-Revision ID: ${up_revision}
-Revises: ${down_revision | comma,n}
-Create Date: ${create_date}
-
-"""
-from alembic import op
-import sqlalchemy as sa
-${imports if imports else ""}
-
-# revision identifiers, used by Alembic.
-revision = ${repr(up_revision)}
-down_revision = ${repr(down_revision)}
-branch_labels = ${repr(branch_labels)}
-depends_on = ${repr(depends_on)}
-
-
-def upgrade():
-    ${upgrades if upgrades else "pass"}
-
-
-def downgrade():
-    ${downgrades if downgrades else "pass"}
@@ -1,39 +0,0 @@
-"""empty message
-
-Revision ID: 3bbf7cb4eaa2
-Revises: 5a32159504fd
-Create Date: 2021-09-27 17:18:27.635063
-
-"""
-from alembic import op
-import sqlalchemy as sa
-from sqlalchemy.dialects import postgresql
-
-
-# revision identifiers, used by Alembic.
-revision = "3bbf7cb4eaa2"
-down_revision = "5a32159504fd"
-branch_labels = None
-depends_on = None
-
-
-def upgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.alter_column(
-        "uploaded_files",
-        "upload_time",
-        existing_type=postgresql.TIMESTAMP(timezone=True),
-        nullable=True,
-    )
-    # ### end Alembic commands ###
-
-
-def downgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.alter_column(
-        "uploaded_files",
-        "upload_time",
-        existing_type=postgresql.TIMESTAMP(timezone=True),
-        nullable=False,
-    )
-    # ### end Alembic commands ###
@@ -1,35 +0,0 @@
-"""empty message
-
-Revision ID: 5a32159504fd
-Revises:
-Create Date: 2021-09-27 16:23:39.987261
-
-"""
-from alembic import op
-import sqlalchemy as sa
-
-
-# revision identifiers, used by Alembic.
-revision = "5a32159504fd"
-down_revision = None
-branch_labels = None
-depends_on = None
-
-
-def upgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.create_table(
-        "uploaded_files",
-        sa.Column("id", sa.BigInteger(), nullable=True),
-        sa.Column("backend", sa.String(length=16), nullable=False),
-        sa.Column("data", sa.JSON(), nullable=False),
-        sa.Column("upload_time", sa.DateTime(timezone=True), nullable=False),
-        sa.PrimaryKeyConstraint("id"),
-    )
-    # ### end Alembic commands ###
-
-
-def downgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_table("uploaded_files")
-    # ### end Alembic commands ###
@@ -1,26 +0,0 @@
-from datetime import datetime
-from enum import Enum
-
-import ormar
-
-from core.db import database, metadata
-
-
-class BaseMeta(ormar.ModelMeta):
-    metadata = metadata
-    database = database
-
-
-class UploadBackends(str, Enum):
-    bot = "bot"
-    user = "user"
-
-
-class UploadedFile(ormar.Model):
-    class Meta(BaseMeta):
-        tablename = "uploaded_files"
-
-    id = ormar.BigInteger(primary_key=True, nullable=False)
-    backend = ormar.String(max_length=16, choices=list(UploadBackends))
-    data = ormar.JSON()
-    upload_time = ormar.DateTime(timezone=True, default=datetime.now)
@@ -1,16 +1,19 @@
-from datetime import datetime
+import enum
+from typing import TypedDict

-from pydantic import BaseModel, constr
+from pydantic import BaseModel


-class CreateUploadedFile(BaseModel):
-    backend: constr(max_length=16)  # type: ignore
-    data: dict
-    upload_time: datetime
+class UploadBackend(enum.StrEnum):
+    bot = "bot"
+    user = "user"
+
+
+class Data(TypedDict):
+    chat_id: str | int
+    message_id: int


 class UploadedFile(BaseModel):
-    id: int
-    backend: str
-    data: dict
-    upload_time: datetime
+    backend: UploadBackend
+    data: Data
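For orientation, a minimal usage sketch of the new serializer layer (not part of the commit), assuming Python 3.11+ for enum.StrEnum and pydantic v2 as pinned further down in pyproject.toml:

# Hypothetical usage sketch: UploadedFile is now a plain in-memory value,
# no longer an ormar model persisted to Postgres.
import enum
from typing import TypedDict

from pydantic import BaseModel


class UploadBackend(enum.StrEnum):
    bot = "bot"
    user = "user"


class Data(TypedDict):
    chat_id: str | int
    message_id: int


class UploadedFile(BaseModel):
    backend: UploadBackend
    data: Data


uploaded = UploadedFile(backend=UploadBackend.bot, data={"chat_id": -1001234, "message_id": 42})
print(uploaded.model_dump(mode="json"))  # {'backend': 'bot', 'data': {'chat_id': -1001234, 'message_id': 42}}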
@@ -1,4 +1,4 @@
-from app.models import UploadBackends
+from app.serializers import UploadBackend
 from app.services.storages import BotStorage, StoragesContainer, UserStorage


@@ -39,8 +39,8 @@ class FileDownloader:
         return user_storages[cls._user_storage_index]

     @classmethod
-    async def _download_via(cls, message_id: int, storage_type: UploadBackends):
-        if storage_type == UploadBackends.bot:
+    async def _download_via(cls, message_id: int, storage_type: UploadBackend):
+        if storage_type == UploadBackend.bot:
             storage = cls.get_bot_storage()
         else:
             storage = cls.get_user_storage()
@@ -52,9 +52,7 @@ class FileDownloader:
         if not cls.bot_storages and not cls.user_storages:
             raise ValueError("Files storage not exist!")

-        if (
-            data := await cls._download_via(message_id, UploadBackends.bot)
-        ) is not None:
+        if (data := await cls._download_via(message_id, UploadBackend.bot)) is not None:
             return data

-        return await cls._download_via(message_id, UploadBackends.user)
+        return await cls._download_via(message_id, UploadBackend.user)
@@ -1,12 +1,12 @@
-from typing import Any, Optional
+from typing import Any, BinaryIO, Optional

 from fastapi import UploadFile

-from app.models import UploadBackends, UploadedFile
+from app.serializers import Data, UploadBackend, UploadedFile
 from app.services.storages import BotStorage, StoragesContainer, UserStorage


-class Wrapper:
+class Wrapper(BinaryIO):
     def __init__(self, wrapped: Any, filename: str) -> None:
         self.wrapped = wrapped
         self.filename = filename
@@ -39,24 +39,26 @@ class FileUploader:
         self.file = file
         self.caption = caption

-        self.upload_data: Optional[dict] = None
-        self.upload_backend: Optional[UploadBackends] = None
+        self.upload_data: Optional[Data] = None
+        self.upload_backend: Optional[UploadBackend] = None

     async def _upload(self) -> bool:
         if not self.bot_storages and not self.user_storages:
             raise ValueError("Files storage not exist!")

-        if await self._upload_via(UploadBackends.bot):
+        if await self._upload_via(UploadBackend.bot):
             return True

-        return await self._upload_via(UploadBackends.user)
+        return await self._upload_via(UploadBackend.user)

-    async def _upload_via(self, storage_type: UploadBackends) -> bool:
-        if storage_type == UploadBackends.bot:
+    async def _upload_via(self, storage_type: UploadBackend) -> bool:
+        if storage_type == UploadBackend.bot:
             storage = self.get_bot_storage()
         else:
             storage = self.get_user_storage()

+        assert self.file.filename
+
         wrapped = Wrapper(self.file.file, self.file.filename)

         data = await storage.upload(wrapped, caption=self.caption)
@@ -69,11 +71,11 @@ class FileUploader:

         return True

-    async def _save_to_db(self) -> UploadedFile:
-        return await UploadedFile.objects.create(
-            backend=self.upload_backend,
-            data=self.upload_data,
-        )
+    def get_result(self) -> UploadedFile:
+        assert self.upload_backend is not None
+        assert self.upload_data is not None
+
+        return UploadedFile(backend=self.upload_backend, data=self.upload_data)

     @classmethod
     def get_bot_storage(cls) -> BotStorage:
@@ -107,4 +109,4 @@ class FileUploader:
         if not upload_result:
             return None

-        return await uploader._save_to_db()
+        return uploader.get_result()
@@ -25,9 +25,12 @@ class BaseStorage:
         self, file: telethon.hints.FileLike, caption: Optional[str] = None
     ) -> Optional[tuple[Union[str, int], int]]:
         try:
-            message = await self.client.send_file(
-                entity=self.channel_id, file=file, caption=caption
-            )
+            if caption:
+                message = await self.client.send_file(
+                    entity=self.channel_id, file=file, caption=caption
+                )
+            else:
+                message = await self.client.send_file(entity=self.channel_id, file=file)
         except telethon.errors.FilePartInvalidError:
             return None
         except telethon.errors.PhotoInvalidError:
@@ -4,8 +4,7 @@ from fastapi import APIRouter, Depends, File, Form, HTTPException, UploadFile, s
 from fastapi.responses import StreamingResponse

 from app.depends import check_token
-from app.models import UploadedFile as UploadedFileDB
-from app.serializers import CreateUploadedFile, UploadedFile
+from app.serializers import UploadedFile
 from app.services.file_downloader import FileDownloader
 from app.services.file_uploader import FileUploader

@@ -15,32 +14,6 @@ router = APIRouter(
 )


-@router.get("/", response_model=list[UploadedFile])
-async def get_files():
-    return await UploadedFileDB.objects.all()
-
-
-@router.get(
-    "/{file_id}",
-    response_model=UploadedFile,
-    responses={
-        404: {},
-    },
-)
-async def get_file(file_id: int):
-    uploaded_file = await UploadedFileDB.objects.get_or_none(id=file_id)
-
-    if not uploaded_file:
-        raise HTTPException(status.HTTP_404_NOT_FOUND)
-
-    return uploaded_file
-
-
-@router.post("/", response_model=UploadedFile)
-async def create_file(data: CreateUploadedFile):
-    return await UploadedFileDB.objects.create(**data.dict())
-
-
 @router.post("/upload/", response_model=UploadedFile)
 async def upload_file(file: UploadFile = File({}), caption: Optional[str] = Form({})):
     return await FileUploader.upload(file, caption=caption)
@@ -54,22 +27,3 @@ async def download_by_message(chat_id: str, message_id: int):
         raise HTTPException(status.HTTP_400_BAD_REQUEST)

     return StreamingResponse(data)
-
-
-@router.delete("/{file_id}", response_model=UploadedFile, responses={400: {}})
-async def delete_file(file_id: int):
-    uploaded_file = await UploadedFileDB.objects.get_or_none(id=file_id)
-
-    if not uploaded_file:
-        raise HTTPException(status.HTTP_400_BAD_REQUEST)
-
-    await uploaded_file.delete()
-    return uploaded_file
-
-
-healthcheck_router = APIRouter(tags=["healthcheck"])
-
-
-@healthcheck_router.get("/healthcheck")
-async def healthcheck():
-    return "Ok"
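A hypothetical client-side sketch of the upload endpoint this commit keeps; the base URL, router prefix, and auth header are assumptions (the real prefix and the check_token dependency are defined elsewhere in the app):

# Hypothetical client sketch, not from the repository.
import httpx

with open("book.fb2", "rb") as f:  # file name is illustrative
    response = httpx.post(
        "http://localhost:8080/upload/",       # router prefix is an assumption
        headers={"Authorization": "api-key"},  # auth scheme is an assumption
        files={"file": ("book.fb2", f)},
        data={"caption": "example caption"},
    )

# The response body follows the UploadedFile serializer, e.g.
# {"backend": "bot", "data": {"chat_id": ..., "message_id": ...}}
print(response.json())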
@@ -5,9 +5,8 @@ from prometheus_fastapi_instrumentator import Instrumentator
 import sentry_sdk

 from app.on_start import on_start
-from app.views import healthcheck_router, router
+from app.views import router
 from core.config import env_config
-from core.db import database


 sentry_sdk.init(
@@ -18,25 +17,12 @@ sentry_sdk.init(
 def start_app() -> FastAPI:
     app = FastAPI(default_response_class=ORJSONResponse)

-    app.state.database = database
-
     app.include_router(router)
-    app.include_router(healthcheck_router)

     @app.on_event("startup")
     async def startup() -> None:
-        database_ = app.state.database
-        if not database_.is_connected:
-            await database_.connect()
-
         await on_start()

-    @app.on_event("shutdown")
-    async def shutdown() -> None:
-        database_ = app.state.database
-        if database_.is_connected:
-            await database_.disconnect()
-
     Instrumentator(
         should_ignore_untemplated=True,
         excluded_handlers=["/docs", "/metrics", "/healthcheck"],
@@ -1,6 +1,7 @@
 from typing import Optional

-from pydantic import BaseModel, BaseSettings
+from pydantic import BaseModel
+from pydantic_settings import BaseSettings


 BotToken = str
@@ -15,12 +16,6 @@ class TelethonConfig(BaseModel):
 class EnvConfig(BaseSettings):
     API_KEY: str

-    POSTGRES_USER: str
-    POSTGRES_PASSWORD: str
-    POSTGRES_HOST: str
-    POSTGRES_PORT: int
-    POSTGRES_DB: str
-
     TELEGRAM_CHAT_ID: int

     BOT_TOKENS: Optional[list[BotToken]]
@@ -30,9 +25,5 @@ class EnvConfig(BaseSettings):

     SENTRY_DSN: str

-    class Config:
-        env_file = ".env"
-        env_file_encoding = "utf-8"
-

 env_config = EnvConfig()
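The removed inner class Config meant settings were also read from a .env file; the commit drops that along with the Postgres variables. If .env support were still wanted under pydantic-settings v2, the equivalent would be model_config; a minimal sketch, not part of the commit:

# Hypothetical sketch: pydantic-settings v2 replacement for the removed class Config.
from pydantic_settings import BaseSettings, SettingsConfigDict


class EnvConfig(BaseSettings):
    model_config = SettingsConfigDict(env_file=".env", env_file_encoding="utf-8")

    API_KEY: str
    TELEGRAM_CHAT_ID: int


env_config = EnvConfig()  # reads process env vars, falling back to .env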
@@ -1,15 +0,0 @@
-from urllib.parse import quote
-
-from databases import Database
-from sqlalchemy import MetaData
-
-from core.config import env_config
-
-
-DATABASE_URL = (
-    f"postgresql://{env_config.POSTGRES_USER}:{quote(env_config.POSTGRES_PASSWORD)}@"
-    f"{env_config.POSTGRES_HOST}:{env_config.POSTGRES_PORT}/{env_config.POSTGRES_DB}"
-)
-
-metadata = MetaData()
-database = Database(DATABASE_URL)
poetry.lock (generated, 840): file diff suppressed because it is too large.
@@ -3,22 +3,20 @@ name = "fastapi_file_server"
 version = "0.1.0"
 description = ""
 authors = ["Kurbanov Bulat <kurbanovbul@gmail.com>"]
-license = "Apache 2.0"
+license = "MIT"

 [tool.poetry.dependencies]
 python = "^3.11"
-fastapi = "^0.95.1"
-uvicorn = {extras = ["standard"], version = "^0.22.0"}
-ormar = {extras = ["postgresql"], version = "^0.12.1"}
-alembic = "^1.10.4"
-pydantic = {extras = ["dotenv"], version = "^1.10.4"}
+fastapi = "^0.101.0"
+uvicorn = {extras = ["standard"], version = "^0.23.2"}
+pydantic = "^2.1.1"
 python-multipart = "^0.0.6"
-httpx = "^0.24.0"
-telethon = "^1.28.5"
-prometheus-fastapi-instrumentator = "^6.0.0"
+telethon = "^1.29.2"
+prometheus-fastapi-instrumentator = "^6.1.0"
 uvloop = "^0.17.0"
-orjson = "^3.8.12"
-sentry-sdk = "^1.22.2"
+orjson = "^3.9.2"
+sentry-sdk = "^1.29.2"
+pydantic-settings = "^2.0.2"

 [tool.poetry.group.dev.dependencies]
 pytest = "^7.2.0"
@@ -1,6 +0,0 @@
-import httpx
-
-
-response = httpx.get("http://localhost:8080/healthcheck")
-print(f"HEALTHCHECK STATUS: {response.status_code}")
-exit(0 if response.status_code == 200 else 1)
|
|||||||
rm -rf prometheus
|
rm -rf prometheus
|
||||||
mkdir prometheus
|
mkdir prometheus
|
||||||
|
|
||||||
alembic -c ./app/alembic.ini upgrade head
|
|
||||||
uvicorn main:app --host 0.0.0.0 --port 8080 --loop uvloop
|
uvicorn main:app --host 0.0.0.0 --port 8080 --loop uvloop
|
||||||
|
|||||||
@@ -1,5 +0,0 @@
|
|||||||
from fastapi_file_server import __version__
|
|
||||||
|
|
||||||
|
|
||||||
def test_version():
|
|
||||||
assert __version__ == "0.1.0"
|
|
||||||