2021-12-04 00:52:23 +03:00
commit c42316246f
19 changed files with 475 additions and 0 deletions

98
src/app/alembic.ini Normal file

@@ -0,0 +1,98 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = ./app/alembic
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =
# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator"
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
# version path separator; As mentioned above, this is the character used to split
# version_locations. Valid values are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os # default: use os.pathsep
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
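
One way to apply these migrations programmatically is Alembic's command API; a minimal sketch, assuming it is run from the src/ directory (so the relative script_location and the app/core packages resolve) and that the Postgres settings are present in the environment. The revision message is a placeholder:

from alembic import command
from alembic.config import Config

# Load the configuration shown above; relative paths resolve against the
# working directory, so run this from src/.
cfg = Config("app/alembic.ini")

# Generate an autogenerate revision (placeholder message) and apply everything up to head.
command.revision(cfg, message="init services table", autogenerate=True)
command.upgrade(cfg, "head")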

1
src/app/alembic/README Normal file

@@ -0,0 +1 @@
Generic single-database configuration.

65
src/app/alembic/env.py Normal file

@@ -0,0 +1,65 @@
import os
import sys

from alembic import context
from sqlalchemy.engine import create_engine

# Make the src/ directory importable before pulling in project modules.
sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", ".."))

from app.models import BaseMeta
from core.db import DATABASE_URL

config = context.config
target_metadata = BaseMeta.metadata


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.
    """
    # alembic.ini defines no sqlalchemy.url, so reuse the URL assembled in core.db.
    context.configure(
        url=DATABASE_URL,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.
    """
    connectable = create_engine(DATABASE_URL)

    with connectable.connect() as connection:
        context.configure(
            connection=connection, target_metadata=target_metadata, compare_type=True
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()


@@ -0,0 +1,24 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
def upgrade():
    ${upgrades if upgrades else "pass"}


def downgrade():
    ${downgrades if downgrades else "pass"}


@@ -0,0 +1,36 @@
"""empty message
Revision ID: 7a76c257df70
Revises:
Create Date: 2021-12-04 00:46:17.770026
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '7a76c257df70'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('services',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('token', sa.String(length=128), nullable=False),
    sa.Column('user', sa.BigInteger(), nullable=False),
    sa.Column('status', sa.String(length=12), nullable=True),
    sa.Column('created_time', sa.DateTime(timezone=True), nullable=True),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('token')
    )
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('services')
    # ### end Alembic commands ###

9
src/app/depends.py Normal file

@@ -0,0 +1,9 @@
from fastapi import Security, HTTPException, status
from core.auth import default_security
from core.config import env_config
async def check_token(api_key: str = Security(default_security)):
    if api_key != env_config.API_KEY:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Wrong api key!")
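
check_token compares the header value with a plain inequality check. A minimal alternative sketch using the standard library's secrets.compare_digest, which avoids leaking information through comparison timing; the function name here is hypothetical:

import secrets

from fastapi import HTTPException, Security, status

from core.auth import default_security
from core.config import env_config


async def check_token_constant_time(api_key: str = Security(default_security)):
    # compare_digest runs in time independent of how many leading characters match
    if not secrets.compare_digest(api_key, env_config.API_KEY):
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Wrong api key!")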

28
src/app/models.py Normal file

@@ -0,0 +1,28 @@
from enum import Enum
from datetime import datetime
import ormar
from core.db import metadata, database
class BaseMeta(ormar.ModelMeta):
    metadata = metadata
    database = database


class Statuses(str, Enum):
    pending = "pending"
    approved = "approved"
    blocked = "blocked"


class Service(ormar.Model):
    class Meta(BaseMeta):
        tablename = "services"

    id: int = ormar.Integer(primary_key=True)  # type: ignore
    token: str = ormar.String(max_length=128, unique=True)  # type: ignore
    user: int = ormar.BigInteger()  # type: ignore
    status: str = ormar.String(max_length=12, choices=list(Statuses), default=Statuses.pending)  # type: ignore
    created_time = ormar.DateTime(timezone=True, default=datetime.now)
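
A short sketch of how the Service model is queried through ormar's queryset API; it assumes an async context with the database already connected, and all values are placeholders:

# Hypothetical usage inside an async function:
service = await Service.objects.create(
    token="example-token",        # placeholder value
    user=123456789,
    status=Statuses.approved,
)
existing = await Service.objects.get_or_none(token="example-token")
pending = await Service.objects.filter(status=Statuses.pending).all()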

19
src/app/serializers.py Normal file

@@ -0,0 +1,19 @@
from datetime import datetime
from pydantic import BaseModel, constr
from app.models import Statuses
class ServiceCreate(BaseModel):
    token: constr(max_length=128)  # type: ignore
    user: int
    status: Statuses


class ServiceDetail(BaseModel):
    id: int
    token: str
    user: int
    status: str
    created_time: datetime
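
A quick illustration of the validation these schemas perform before anything reaches the database; payload values are placeholders:

from pydantic import ValidationError

from app.serializers import ServiceCreate

# Placeholder payload; types are coerced and checked by pydantic.
service_in = ServiceCreate(token="abc123", user=42, status="approved")

try:
    ServiceCreate(token="x" * 200, user=42, status="approved")  # exceeds max_length=128
except ValidationError as exc:
    print(exc)  # reports the token length violation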

33
src/app/views.py Normal file

@@ -0,0 +1,33 @@
from fastapi import APIRouter, HTTPException, status, Depends
from app.depends import check_token
from app.serializers import ServiceCreate, ServiceDetail
from app.models import Service
# TODO: add redis cache
router = APIRouter(
    dependencies=[Depends(check_token)]
)


@router.get("/", response_model=list[ServiceDetail])
async def get_services():
    return await Service.objects.all()


@router.get("/{id}", response_model=ServiceDetail)
async def get_service(id: int):
    service = await Service.objects.get_or_none(id=id)
    if service is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
    return service


@router.post("/", response_model=ServiceDetail)
async def register_service(data: ServiceCreate):
    return await Service.objects.create(**data.dict())
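
A sketch of calling these endpoints from a client, assuming the app is already running on localhost:8000 (placeholder address) and that the Authorization header value matches env_config.API_KEY:

import httpx

headers = {"Authorization": "my-api-key"}   # must equal env_config.API_KEY
base = "http://localhost:8000"              # assumed host/port

resp = httpx.post(
    f"{base}/",
    json={"token": "abc123", "user": 42, "status": "pending"},
    headers=headers,
)
service = resp.json()
detail = httpx.get(f"{base}/{service['id']}", headers=headers)
print(detail.status_code, detail.json())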

26
src/core/app.py Normal file

@@ -0,0 +1,26 @@
from fastapi import FastAPI
from core.db import database
from app.views import router
def start_app() -> FastAPI:
    app = FastAPI()

    app.include_router(router)
    app.state.database = database

    @app.on_event('startup')
    async def startup() -> None:
        database_ = app.state.database
        if not database_.is_connected:
            await database_.connect()

    @app.on_event('shutdown')
    async def shutdown() -> None:
        database_ = app.state.database
        if database_.is_connected:
            await database_.disconnect()

    return app
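
The startup/shutdown hooks fire on ASGI lifespan events, so the database connects only once the server (or a test client used as a context manager) actually starts; a sketch, assuming src/ as the working directory and a reachable Postgres instance:

from fastapi.testclient import TestClient

from core.app import start_app

app = start_app()

# Entering the context manager runs the 'startup' handler (database.connect);
# leaving it runs 'shutdown' (database.disconnect).
with TestClient(app):
    assert app.state.database.is_connected
assert not app.state.database.is_connected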

4
src/core/auth.py Normal file

@@ -0,0 +1,4 @@
from fastapi.security import APIKeyHeader
default_security = APIKeyHeader(name="Authorization")

14
src/core/config.py Normal file

@@ -0,0 +1,14 @@
from pydantic import BaseSettings
class EnvConfig(BaseSettings):
    API_KEY: str

    POSTGRES_USER: str
    POSTGRES_PASSWORD: str
    POSTGRES_HOST: str
    POSTGRES_PORT: int
    POSTGRES_DB: str


env_config = EnvConfig()
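
EnvConfig is a pydantic BaseSettings, so each field is read from the identically named environment variable when core.config is first imported; a sketch with placeholder values:

import os

# Placeholder values; in practice these come from the container/host environment,
# and they must be set before core.config is imported anywhere.
os.environ.update({
    "API_KEY": "my-api-key",
    "POSTGRES_USER": "app",
    "POSTGRES_PASSWORD": "secret",
    "POSTGRES_HOST": "localhost",
    "POSTGRES_PORT": "5432",
    "POSTGRES_DB": "services",
})

from core.config import env_config   # EnvConfig() is instantiated on import
print(env_config.POSTGRES_PORT)      # 5432, coerced to int by pydantic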

15
src/core/db.py Normal file

@@ -0,0 +1,15 @@
from urllib.parse import quote
from databases import Database
from sqlalchemy import MetaData
from core.config import env_config
DATABASE_URL = (
    f"postgresql://{env_config.POSTGRES_USER}:{quote(env_config.POSTGRES_PASSWORD, safe='')}@"
    f"{env_config.POSTGRES_HOST}:{env_config.POSTGRES_PORT}/{env_config.POSTGRES_DB}"
)
metadata = MetaData()
database = Database(DATABASE_URL)
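
The password is percent-encoded so characters such as '@' or '/' cannot break the URL; a small illustration with a made-up password and placeholder connection details:

from urllib.parse import quote

password = "p@ss/word"                    # made-up value
print(quote(password, safe=""))           # p%40ss%2Fword
print(f"postgresql://app:{quote(password, safe='')}@db:5432/services")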

4
src/main.py Normal file

@@ -0,0 +1,4 @@
from core.app import start_app
app = start_app()
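
main.py only exposes the application object; a sketch of serving it with uvicorn, assuming src/ as the working directory (host, port and reload are placeholders):

# Hypothetical entry point; values are placeholders.
import uvicorn

if __name__ == "__main__":
    uvicorn.run("main:app", host="0.0.0.0", port=8000, reload=True)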