Mirror of https://github.com/flibusta-apps/telegram_files_server.git (synced 2025-12-06 04:25:38 +01:00)

Commit: Init

.gitignore (vendored, new file)
@@ -0,0 +1,10 @@
.mypy_cache
.pytest_cache
.vscode

__pycache__

.env
*.session

venv

README.rst (new file, empty)

fastapi_file_server/__init__.py (new file)
@@ -0,0 +1 @@
__version__ = '0.1.0'

fastapi_file_server/app/alembic.ini (new file)
@@ -0,0 +1,98 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = ./app/alembic

# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator"
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions

# version path separator; As mentioned above, this is the character used to split
# version_locations. Valid values are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os  # default: use os.pathsep

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

fastapi_file_server/app/alembic/README (new file)
@@ -0,0 +1 @@
Generic single-database configuration.

fastapi_file_server/app/alembic/env.py (new file)
@@ -0,0 +1,66 @@
from logging.config import fileConfig

from alembic import context
import sys, os

from sqlalchemy.engine import create_engine

from core.db import DATABASE_URL


myPath = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, myPath + '/../../')

config = context.config


from app.models import BaseMeta
target_metadata = BaseMeta.metadata


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    connectable = create_engine(DATABASE_URL)

    with connectable.connect() as connection:
        context.configure(
            connection=connection, target_metadata=target_metadata
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

fastapi_file_server/app/alembic/script.py.mako (new file)
@@ -0,0 +1,24 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade():
    ${upgrades if upgrades else "pass"}


def downgrade():
    ${downgrades if downgrades else "pass"}

fastapi_file_server/app/alembic/versions/3bbf7cb4eaa2_.py (new file)
@@ -0,0 +1,32 @@
"""empty message

Revision ID: 3bbf7cb4eaa2
Revises: 5a32159504fd
Create Date: 2021-09-27 17:18:27.635063

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision = '3bbf7cb4eaa2'
down_revision = '5a32159504fd'
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column('uploaded_files', 'upload_time',
                    existing_type=postgresql.TIMESTAMP(timezone=True),
                    nullable=True)
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column('uploaded_files', 'upload_time',
                    existing_type=postgresql.TIMESTAMP(timezone=True),
                    nullable=False)
    # ### end Alembic commands ###

fastapi_file_server/app/alembic/versions/5a32159504fd_.py (new file)
@@ -0,0 +1,34 @@
"""empty message

Revision ID: 5a32159504fd
Revises:
Create Date: 2021-09-27 16:23:39.987261

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '5a32159504fd'
down_revision = None
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('uploaded_files',
        sa.Column('id', sa.BigInteger(), nullable=True),
        sa.Column('backend', sa.String(length=16), nullable=False),
        sa.Column('data', sa.JSON(), nullable=False),
        sa.Column('upload_time', sa.DateTime(timezone=True), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('uploaded_files')
    # ### end Alembic commands ###

fastapi_file_server/app/models.py (new file)
@@ -0,0 +1,26 @@
from enum import Enum
from datetime import datetime

import ormar

from core.db import metadata, database


class BaseMeta(ormar.ModelMeta):
    metadata = metadata
    database = database


class UploadBackends(str, Enum):
    aiogram = 'aiogram'
    telethon = 'telethon'


class UploadedFile(ormar.Model):
    class Meta(BaseMeta):
        tablename = "uploaded_files"

    id = ormar.BigInteger(primary_key=True, nullable=False)
    backend = ormar.String(max_length=16, choices=list(UploadBackends))
    data = ormar.JSON()
    upload_time = ormar.DateTime(timezone=True, default=datetime.now)

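For orientation, a minimal sketch of how this ormar model can be used directly. The values are hypothetical, and it assumes the environment variables are set, a Postgres instance is reachable, and the fastapi_file_server directory is on sys.path; none of that setup is part of the commit itself.

import asyncio

from core.db import database
from app.models import UploadedFile, UploadBackends


async def demo() -> None:
    await database.connect()
    # Hypothetical record; `id` and `upload_time` are filled in automatically.
    row = await UploadedFile.objects.create(
        backend=UploadBackends.aiogram.value,
        data={"file_id": "ABC123"},
    )
    print(row.id, row.upload_time)
    await database.disconnect()


asyncio.run(demo())
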
fastapi_file_server/app/on_start.py (new file)
@@ -0,0 +1,5 @@
from app.services.file_uploader import FileUploader


async def on_start():
    await FileUploader.prepare()

fastapi_file_server/app/serializers.py (new file)
@@ -0,0 +1,10 @@
from datetime import datetime

from pydantic import BaseModel


class UploadedFile(BaseModel):
    id: int
    backend: str
    data: dict
    upload_time: datetime

fastapi_file_server/app/services/file_uploader.py (new file)
@@ -0,0 +1,128 @@
from typing import Optional
from io import BytesIO

from fastapi import UploadFile
from telegram_files_storage import AiogramFilesStorage, TelethonFilesStorage

from core.config import env_config
from app.models import UploadedFile, UploadBackends


class FileUploader:
    AIOGRAM_STORAGES: list[AiogramFilesStorage] = []
    TELETHON_STORAGES: list[TelethonFilesStorage] = []

    _aiogram_storage_index = 0
    _telethon_storage_index = 0

    def __init__(self, file: UploadFile) -> None:
        self.file = file
        self.upload_data: Optional[dict] = None
        self.upload_backend: Optional[UploadBackends] = None

    async def _upload(self) -> bool:
        if not self.AIOGRAM_STORAGES and not self.TELETHON_STORAGES:
            raise ValueError("No file storages exist!")

        if await self._upload_via_aiogram():
            return True

        return await self._upload_via_telethon()

    async def _upload_via_aiogram(self) -> bool:
        if not self.AIOGRAM_STORAGES:
            return False

        data = await self.file.read()

        if isinstance(data, str):
            data = data.encode()

        if len(data) > 50 * 1000 * 1000:
            return False

        bytes_io = BytesIO(data)
        bytes_io.name = self.file.filename

        storage = self.get_aiogram_storage()
        upload_result = await storage.upload(bytes_io)

        self.upload_data = {
            'file_id': upload_result
        }
        self.upload_backend = UploadBackends.aiogram

        return True

    async def _upload_via_telethon(self) -> bool:
        if not self.TELETHON_STORAGES:
            return False

        data = await self.file.read()

        if isinstance(data, str):
            data = data.encode()

        bytes_io = BytesIO(data)
        bytes_io.name = self.file.filename

        storage = self.get_telethon_storage()
        upload_result = await storage.upload(bytes_io)

        self.upload_data = {
            'chat_id': storage.chat_id,
            'message_id': upload_result
        }
        self.upload_backend = UploadBackends.telethon

        return True

    async def _save_to_db(self) -> UploadedFile:
        return await UploadedFile.objects.create(
            backend=self.upload_backend,
            data=self.upload_data,
        )

    @classmethod
    async def prepare(cls):
        if env_config.BOT_TOKENS:
            cls.AIOGRAM_STORAGES: list[AiogramFilesStorage] = [
                AiogramFilesStorage(env_config.TELEGRAM_CHAT_ID, token) for token in env_config.BOT_TOKENS
            ]

        if env_config.TELETHON_APP_CONFIG and env_config.TELETHON_SESSIONS:
            cls.TELETHON_STORAGES: list[TelethonFilesStorage] = [
                TelethonFilesStorage(env_config.TELEGRAM_CHAT_ID, env_config.TELETHON_APP_CONFIG.APP_ID, env_config.TELETHON_APP_CONFIG.API_HASH, session)
                for session in env_config.TELETHON_SESSIONS
            ]

        for storage in [*cls.AIOGRAM_STORAGES, *cls.TELETHON_STORAGES]:
            await storage.prepare()

    @classmethod
    def get_aiogram_storage(cls) -> AiogramFilesStorage:
        if not cls.AIOGRAM_STORAGES:
            raise ValueError("No aiogram storages exist!")

        cls._aiogram_storage_index = (cls._aiogram_storage_index + 1) % len(cls.AIOGRAM_STORAGES)

        return cls.AIOGRAM_STORAGES[cls._aiogram_storage_index]

    @classmethod
    def get_telethon_storage(cls) -> TelethonFilesStorage:
        if not cls.TELETHON_STORAGES:
            raise ValueError("No telethon storages exist!")

        cls._telethon_storage_index = (cls._telethon_storage_index + 1) % len(cls.TELETHON_STORAGES)

        return cls.TELETHON_STORAGES[cls._telethon_storage_index]

    @classmethod
    async def upload(cls, file: UploadFile) -> Optional[UploadedFile]:
        uploader = cls(file)
        upload_result = await uploader._upload()

        if not upload_result:
            return None

        return await uploader._save_to_db()

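FileUploader tries the aiogram (Bot API) storages first and falls back to Telethon when no bot storage is configured or the payload exceeds the 50 MB check above, which matches the Bot API upload limit. Both get_*_storage helpers rotate through the configured storages. A standalone sketch of that round-robin arithmetic, with plain strings standing in for the storage objects (names here are illustrative, not from the commit):

storages = ["bot_a", "bot_b", "bot_c"]
index = 0


def next_storage() -> str:
    # Advance the index before use, exactly as get_aiogram_storage does.
    global index
    index = (index + 1) % len(storages)
    return storages[index]


print([next_storage() for _ in range(5)])  # ['bot_b', 'bot_c', 'bot_a', 'bot_b', 'bot_c']
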
fastapi_file_server/app/views.py (new file)
@@ -0,0 +1,47 @@
from fastapi import File, UploadFile
from starlette import status
from fastapi import APIRouter, HTTPException

from app.serializers import UploadedFile
from app import models
from app.services.file_uploader import FileUploader


router = APIRouter(
    prefix="/api/v1",
)


@router.get("/files", response_model=list[UploadedFile])
async def get_files():
    return await models.UploadedFile.objects.all()


@router.get("/files/{file_id}", response_model=UploadedFile, responses={
    404: {},
})
async def get_file(file_id: int):
    uploaded_file = await models.UploadedFile.objects.get_or_none(id=file_id)

    if not uploaded_file:
        raise HTTPException(status.HTTP_404_NOT_FOUND)

    return uploaded_file


@router.post("/files", response_model=UploadedFile)
async def upload_file(file: UploadFile = File(...)):
    return await FileUploader.upload(file)


@router.delete("/files/{file_id}", response_model=UploadedFile, responses={
    400: {}
})
async def delete_file(file_id: int):
    uploaded_file = await models.UploadedFile.objects.get_or_none(id=file_id)

    if not uploaded_file:
        raise HTTPException(status.HTTP_400_BAD_REQUEST)

    await uploaded_file.delete()
    return uploaded_file

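The router exposes a small CRUD-style API under /api/v1. A client-side sketch, assuming the service is reachable at localhost:8000 and using httpx; the base URL, file name, and client library are assumptions, not part of this commit:

import httpx

BASE_URL = "http://localhost:8000/api/v1"  # hypothetical host and port

# Upload a file; the multipart field name must be "file".
with open("example.fb2", "rb") as fp:
    created = httpx.post(f"{BASE_URL}/files", files={"file": fp}).json()

# Read it back and list everything.
one = httpx.get(f"{BASE_URL}/files/{created['id']}").json()
all_files = httpx.get(f"{BASE_URL}/files").json()

# Delete it again.
httpx.delete(f"{BASE_URL}/files/{created['id']}")
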
fastapi_file_server/core/app.py (new file)
@@ -0,0 +1,29 @@
from fastapi import FastAPI

from core.db import database
from app.on_start import on_start
from app.views import router


def start_app() -> FastAPI:
    app = FastAPI()

    app.state.database = database

    app.include_router(router)

    @app.on_event('startup')
    async def startup() -> None:
        database_ = app.state.database
        if not database_.is_connected:
            await database_.connect()

        await on_start()

    @app.on_event('shutdown')
    async def shutdown() -> None:
        database_ = app.state.database
        if database_.is_connected:
            await database_.disconnect()

    return app

fastapi_file_server/core/config.py (new file)
@@ -0,0 +1,34 @@
from typing import Optional
from pydantic import BaseModel, BaseSettings


BotToken = str
TelethonSessionName = str


class TelethonConfig(BaseModel):
    APP_ID: int
    API_HASH: str


class EnvConfig(BaseSettings):
    API_KEY: str

    POSTGRES_USER: str
    POSTGRES_PASSWORD: str
    POSTGRES_HOST: str
    POSTGRES_PORT: int
    POSTGRES_DB: str

    TELEGRAM_CHAT_ID: int

    BOT_TOKENS: Optional[list[BotToken]]

    TELETHON_APP_CONFIG: Optional[TelethonConfig]
    TELETHON_SESSIONS: Optional[list[TelethonSessionName]]

    class Config:
        env_file = '.env'
        env_file_encoding = 'utf-8'


env_config = EnvConfig()

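EnvConfig is a pydantic BaseSettings class, so every field can come from the process environment or from the .env file named in Config. A sketch with hypothetical values, assuming the fastapi_file_server directory is on sys.path; note that list- and model-typed fields such as BOT_TOKENS and TELETHON_APP_CONFIG are parsed from JSON strings:

import os

os.environ.update({
    "API_KEY": "secret",                 # hypothetical values throughout
    "POSTGRES_USER": "postgres",
    "POSTGRES_PASSWORD": "postgres",
    "POSTGRES_HOST": "localhost",
    "POSTGRES_PORT": "5432",
    "POSTGRES_DB": "files",
    "TELEGRAM_CHAT_ID": "-1001234567890",
    "BOT_TOKENS": '["123456:ABC-DEF"]',  # JSON string for the list field
})

from core.config import EnvConfig

config = EnvConfig()
assert config.POSTGRES_PORT == 5432
assert config.BOT_TOKENS == ["123456:ABC-DEF"]
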
fastapi_file_server/core/db.py (new file)
@@ -0,0 +1,15 @@
from urllib.parse import quote
from databases import Database

from sqlalchemy import MetaData

from core.config import env_config


DATABASE_URL = (
    f"postgresql://{env_config.POSTGRES_USER}:{quote(env_config.POSTGRES_PASSWORD)}@"
    f"{env_config.POSTGRES_HOST}:{env_config.POSTGRES_PORT}/{env_config.POSTGRES_DB}"
)

metadata = MetaData()
database = Database(DATABASE_URL)

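quote() percent-encodes the password so that characters such as '@' or ':' cannot break the connection URL that both SQLAlchemy and databases parse. A quick illustration with a made-up password:

from urllib.parse import quote

password = "p@ss:w0rd!"   # hypothetical value
print(quote(password))    # p%40ss%3Aw0rd%21
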
fastapi_file_server/main.py (new file)
@@ -0,0 +1,4 @@
from core.app import start_app


app = start_app()

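main.py only builds the application object; a common way to serve it locally is uvicorn, which is already a project dependency. The host, port, and entrypoint script below are assumptions, and it is expected to be run from the fastapi_file_server directory:

import uvicorn

if __name__ == "__main__":
    # Serve the FastAPI app defined in main.py.
    uvicorn.run("main:app", host="0.0.0.0", port=8000)
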
poetry.lock (generated, new file, 1068 lines)
Diff suppressed because the file is too large.

pyproject.toml (new file)
@@ -0,0 +1,26 @@
[tool.poetry]
name = "fastapi_file_server"
version = "0.1.0"
description = ""
authors = ["Kurbanov Bulat <kurbanovbul@gmail.com>"]
license = "Apache 2.0"

[tool.poetry.dependencies]
python = "^3.9"
fastapi = "^0.68.1"
uvicorn = "^0.15.0"
ormar = "^0.10.19"
alembic = "^1.7.3"
pydantic = {extras = ["dotenv"], version = "^1.8.2"}
asyncpg = "^0.24.0"
psycopg2 = "^2.9.1"
python-multipart = "^0.0.5"
telegram-files-storage = "^1.0.1"

[tool.poetry.dev-dependencies]
pytest = "^5.2"
mypy = "^0.910"

[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"

tests/__init__.py (new file, empty)

tests/test_fastapi_file_server.py (new file)
@@ -0,0 +1,5 @@
from fastapi_file_server import __version__


def test_version():
    assert __version__ == '0.1.0'