commit 27f35df97a93ef7f425aadd97f669a93442b869b
Author: Kurbanov Bulat
Date:   Fri Dec 3 23:21:37 2021 +0300

    Init

diff --git a/.github/workflows/build_docker_image.yaml b/.github/workflows/build_docker_image.yaml
new file mode 100644
index 0000000..41be1da
--- /dev/null
+++ b/.github/workflows/build_docker_image.yaml
@@ -0,0 +1,49 @@
+name: Build docker image
+
+on:
+  push:
+    branches:
+      - 'main'
+
+jobs:
+  Build-Docker-Image:
+    runs-on: ubuntu-latest
+    steps:
+      -
+        name: Checkout
+        uses: actions/checkout@v2
+
+      -
+        name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v1
+
+      - id: repository_name
+        uses: ASzc/change-string-case-action@v1
+        with:
+          string: ${{ github.repository }}
+
+      -
+        name: Login to ghcr.io
+        uses: docker/login-action@v1
+        with:
+          registry: ghcr.io
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+
+      -
+        name: Build and push
+        id: docker_build
+        uses: docker/build-push-action@v2
+        env:
+          IMAGE: ${{ steps.repository_name.outputs.lowercase }}
+        with:
+          push: true
+          tags: ghcr.io/${{ env.IMAGE }}:latest
+          context: .
+          file: ./docker/build.dockerfile
+
+      -
+        name: Invoke deployment hook
+        uses: joelwmale/webhook-action@master
+        with:
+          url: ${{ secrets.WEBHOOK_URL }}
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..7f67744
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,5 @@
+venv
+
+.vscode
+
+__pycache__
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..8ca5e16
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,8 @@
+fastapi
+fastapi-pagination
+pydantic
+alembic
+ormar
+uvicorn
+asyncpg
+psycopg2
diff --git a/src/app/alembic.ini b/src/app/alembic.ini
new file mode 100644
index 0000000..01b575f
--- /dev/null
+++ b/src/app/alembic.ini
@@ -0,0 +1,98 @@
+# A generic, single database configuration.
+
+[alembic]
+# path to migration scripts
+script_location = ./app/alembic
+
+# template used to generate migration files
+# file_template = %%(rev)s_%%(slug)s
+
+# sys.path path, will be prepended to sys.path if present.
+# defaults to the current working directory.
+prepend_sys_path = .
+
+# timezone to use when rendering the date within the migration file
+# as well as the filename.
+# If specified, requires the python-dateutil library that can be
+# installed by adding `alembic[tz]` to the pip requirements
+# string value is passed to dateutil.tz.gettz()
+# leave blank for localtime
+# timezone =
+
+# max length of characters to apply to the
+# "slug" field
+# truncate_slug_length = 40
+
+# set to 'true' to run the environment during
+# the 'revision' command, regardless of autogenerate
+# revision_environment = false
+
+# set to 'true' to allow .pyc and .pyo files without
+# a source .py file to be detected as revisions in the
+# versions/ directory
+# sourceless = false
+
+# version location specification; This defaults
+# to alembic/versions. When using multiple version
+# directories, initial revisions must be specified with --version-path.
+# The path separator used here should be the separator specified by "version_path_separator"
+# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
+
+# version path separator; As mentioned above, this is the character used to split
+# version_locations. Valid values are:
+#
+# version_path_separator = :
+# version_path_separator = ;
+# version_path_separator = space
+version_path_separator = os  # default: use os.pathsep
+
+# the output encoding used when revision files
+# are written from script.py.mako
+# output_encoding = utf-8
+
+
+[post_write_hooks]
+# post_write_hooks defines scripts or Python functions that are run
+# on newly generated revision scripts. See the documentation for further
+# detail and examples
+
+# format using "black" - use the console_scripts runner, against the "black" entrypoint
+# hooks = black
+# black.type = console_scripts
+# black.entrypoint = black
+# black.options = -l 79 REVISION_SCRIPT_FILENAME
+
+# Logging configuration
+[loggers]
+keys = root,sqlalchemy,alembic
+
+[handlers]
+keys = console
+
+[formatters]
+keys = generic
+
+[logger_root]
+level = WARN
+handlers = console
+qualname =
+
+[logger_sqlalchemy]
+level = WARN
+handlers =
+qualname = sqlalchemy.engine
+
+[logger_alembic]
+level = INFO
+handlers =
+qualname = alembic
+
+[handler_console]
+class = StreamHandler
+args = (sys.stderr,)
+level = NOTSET
+formatter = generic
+
+[formatter_generic]
+format = %(levelname)-5.5s [%(name)s] %(message)s
+datefmt = %H:%M:%S
diff --git a/src/app/alembic/README b/src/app/alembic/README
new file mode 100644
index 0000000..98e4f9c
--- /dev/null
+++ b/src/app/alembic/README
@@ -0,0 +1 @@
+Generic single-database configuration.
\ No newline at end of file
diff --git a/src/app/alembic/env.py b/src/app/alembic/env.py
new file mode 100644
index 0000000..a791adb
--- /dev/null
+++ b/src/app/alembic/env.py
@@ -0,0 +1,64 @@
+import os
+import sys
+
+from alembic import context
+from sqlalchemy.engine import create_engine
+
+# make the src/ directory importable before pulling in project modules
+sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", ".."))
+
+from core.db import DATABASE_URL
+from app.models import BaseMeta
+
+config = context.config
+
+# model MetaData used for 'autogenerate' support
+target_metadata = BaseMeta.metadata
+
+
+def run_migrations_offline():
+    """Run migrations in 'offline' mode.
+
+    This configures the context with just a URL
+    and not an Engine, though an Engine is acceptable
+    here as well. By skipping the Engine creation
+    we don't even need a DBAPI to be available.
+
+    Calls to context.execute() here emit the given string to the
+    script output.
+
+    """
+    url = DATABASE_URL
+    context.configure(
+        url=url,
+        target_metadata=target_metadata,
+        literal_binds=True,
+        dialect_opts={"paramstyle": "named"},
+    )
+
+    with context.begin_transaction():
+        context.run_migrations()
+
+
+def run_migrations_online():
+    """Run migrations in 'online' mode.
+
+    In this scenario we need to create an Engine
+    and associate a connection with the context.
+
+    """
+    connectable = create_engine(DATABASE_URL)
+
+    with connectable.connect() as connection:
+        context.configure(
+            connection=connection, target_metadata=target_metadata, compare_type=True
+        )
+
+        with context.begin_transaction():
+            context.run_migrations()
+
+
+if context.is_offline_mode():
+    run_migrations_offline()
+else:
+    run_migrations_online()
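
Alembic here resolves script_location and prepend_sys_path against the working directory, so migrations are meant to be run from src/ (e.g. `alembic -c app/alembic.ini upgrade head`). Below is a minimal programmatic sketch of the same upgrade, assuming the API_KEY and POSTGRES_* variables required by core/config.py are set; the file name is illustrative:

# migrate.py -- hypothetical helper, run from the src/ directory;
# equivalent to: alembic -c app/alembic.ini upgrade head
from alembic import command
from alembic.config import Config

# picks up script_location=./app/alembic from the ini above
alembic_cfg = Config("app/alembic.ini")
command.upgrade(alembic_cfg, "head")
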
diff --git a/src/app/alembic/script.py.mako b/src/app/alembic/script.py.mako
new file mode 100644
index 0000000..2c01563
--- /dev/null
+++ b/src/app/alembic/script.py.mako
@@ -0,0 +1,24 @@
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+
+"""
+from alembic import op
+import sqlalchemy as sa
+${imports if imports else ""}
+
+# revision identifiers, used by Alembic.
+revision = ${repr(up_revision)}
+down_revision = ${repr(down_revision)}
+branch_labels = ${repr(branch_labels)}
+depends_on = ${repr(depends_on)}
+
+
+def upgrade():
+    ${upgrades if upgrades else "pass"}
+
+
+def downgrade():
+    ${downgrades if downgrades else "pass"}
diff --git a/src/app/alembic/versions/7e45f53febe1_.py b/src/app/alembic/versions/7e45f53febe1_.py
new file mode 100644
index 0000000..7bac4b7
--- /dev/null
+++ b/src/app/alembic/versions/7e45f53febe1_.py
@@ -0,0 +1,54 @@
+"""empty message
+
+Revision ID: 7e45f53febe1
+Revises:
+Create Date: 2021-12-03 22:42:22.514771
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '7e45f53febe1'
+down_revision = None
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table('languages',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('label', sa.String(length=16), nullable=False),
+    sa.Column('code', sa.String(length=4), nullable=False),
+    sa.PrimaryKeyConstraint('id'),
+    sa.UniqueConstraint('code')
+    )
+    op.create_table('user_settings',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('user_id', sa.BigInteger(), nullable=False),
+    sa.Column('last_name', sa.String(length=64), nullable=False),
+    sa.Column('first_name', sa.String(length=64), nullable=False),
+    sa.Column('username', sa.String(length=32), nullable=False),
+    sa.Column('source', sa.String(length=32), nullable=False),
+    sa.PrimaryKeyConstraint('id'),
+    sa.UniqueConstraint('user_id')
+    )
+    op.create_table('users_languages',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('language', sa.Integer(), nullable=True),
+    sa.Column('user', sa.Integer(), nullable=True),
+    sa.ForeignKeyConstraint(['language'], ['languages.id'], name='fk_users_languages_languages_language_id', onupdate='CASCADE', ondelete='CASCADE'),
+    sa.ForeignKeyConstraint(['user'], ['user_settings.id'], name='fk_users_languages_user_settings_user_id', onupdate='CASCADE', ondelete='CASCADE'),
+    sa.PrimaryKeyConstraint('id')
+    )
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_table('users_languages')
+    op.drop_table('user_settings')
+    op.drop_table('languages')
+    # ### end Alembic commands ###
diff --git a/src/app/depends.py b/src/app/depends.py
new file mode 100644
index 0000000..b99768e
--- /dev/null
+++ b/src/app/depends.py
@@ -0,0 +1,9 @@
+from fastapi import Security, HTTPException, status
+
+from core.auth import default_security
+from core.config import env_config
+
+
+async def check_token(api_key: str = Security(default_security)):
+    if api_key != env_config.API_KEY:
+        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Wrong api key!")
diff --git a/src/app/models.py b/src/app/models.py
new file mode 100644
index 0000000..1b8bc66
--- /dev/null
+++ b/src/app/models.py
@@ -0,0 +1,32 @@
+import ormar
+
+from core.db import metadata, database
+
+
+class BaseMeta(ormar.ModelMeta):
+    metadata = metadata
+    database = database
+
+
+class Language(ormar.Model):
+    class Meta(BaseMeta):
+        tablename = "languages"
+
+    id: int = ormar.Integer(primary_key=True)  # type: ignore
+    label: str = ormar.String(max_length=16)  # type: ignore
+    code: str = ormar.String(max_length=4, unique=True)  # type: ignore
+
+
+class User(ormar.Model):
+    class Meta(BaseMeta):
+        tablename = "user_settings"
+
+    id: int = ormar.Integer(primary_key=True)  # type: ignore
+
+    user_id: int = ormar.BigInteger(unique=True)  # type: ignore
+    last_name: str = ormar.String(max_length=64)  # type: ignore
+    first_name: str = ormar.String(max_length=64)  # type: ignore
+    username: str = ormar.String(max_length=32)  # type: ignore
+    source: str = ormar.String(max_length=32)  # type: ignore
+
+    allowed_langs = ormar.ManyToMany(Language)
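
The migration and app/models.py describe the same three tables: languages, user_settings, and the users_languages join table behind the allowed_langs relation. A small usage sketch of the ormar models, mirroring the access pattern the API uses; the concrete values are illustrative and a migrated, reachable Postgres (per core/db.py) is assumed:

# models_demo.py -- hypothetical, run from src/ with the service env vars set
import asyncio

from app.models import Language, User
from core.db import database


async def demo() -> None:
    await database.connect()

    # seed one language and one user (field values are made up)
    ru = await Language.objects.create(label="Russian", code="ru")
    user = await User.objects.create(
        user_id=42, last_name="Doe", first_name="John", username="jdoe", source="bot",
    )

    # link them through the users_languages join table
    await user.allowed_langs.add(ru)

    # same access pattern the /users endpoints rely on
    loaded = await User.objects.select_related("allowed_langs").get(user_id=42)
    print([lang.code for lang in loaded.allowed_langs])

    await database.disconnect()


asyncio.run(demo())
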
diff --git a/src/app/serializers.py b/src/app/serializers.py
new file mode 100644
index 0000000..b0a918a
--- /dev/null
+++ b/src/app/serializers.py
@@ -0,0 +1,23 @@
+from pydantic import BaseModel, constr
+
+
+class LanguageDetail(BaseModel):
+    id: int
+    label: constr(max_length=16)  # type: ignore
+    code: constr(max_length=4)  # type: ignore
+
+
+class UserBase(BaseModel):
+    user_id: int
+    last_name: constr(max_length=64)  # type: ignore
+    first_name: constr(max_length=64)  # type: ignore
+    username: constr(max_length=32)  # type: ignore
+    source: constr(max_length=32)  # type: ignore
+
+
+class UserCreateOrUpdate(UserBase):
+    allowed_langs: list[str]
+
+
+class UserDetail(UserBase):
+    allowed_langs: list[LanguageDetail]
diff --git a/src/app/views.py b/src/app/views.py
new file mode 100644
index 0000000..d716b05
--- /dev/null
+++ b/src/app/views.py
@@ -0,0 +1,72 @@
+from fastapi import APIRouter, HTTPException, status
+
+from app.serializers import UserCreateOrUpdate, UserDetail, LanguageDetail
+from app.models import User, Language
+
+
+# TODO: add redis cache
+
+
+users_router = APIRouter(
+    prefix="/users",
+    tags=["users"]
+)
+
+
+@users_router.get("/", response_model=list[UserDetail])
+async def get_users():
+    return await User.objects.select_related("allowed_langs").all()
+
+
+@users_router.get("/{user_id}", response_model=UserDetail)
+async def get_user(user_id: int):
+    user_data = await User.objects.select_related("allowed_langs").get_or_none(user_id=user_id)
+
+    if user_data is None:
+        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
+
+    return user_data
+
+
+@users_router.post("/{user_id}", response_model=UserDetail)
+async def create_or_update_user(user_id: int, data: UserCreateOrUpdate):
+    data_dict = data.dict()
+    data_dict["user_id"] = user_id
+    allowed_langs = data_dict.pop("allowed_langs")
+
+    user_data = await User.objects.select_related("allowed_langs").get_or_none(user_id=user_id)
+
+    if user_data is None:
+        user_data = await User.objects.create(**data_dict)
+    else:
+        await user_data.update(**data_dict)
+        await user_data.allowed_langs.clear()
+
+    # replace the stored allowed languages with the requested set
+    langs = await Language.objects.filter(code__in=allowed_langs).all()
+
+    for lang in langs:
+        await user_data.allowed_langs.add(lang)
+
+    return user_data
+
+
+languages_router = APIRouter(
+    prefix="/languages",
+    tags=["languages"]
+)
+
+
+@languages_router.get("/", response_model=list[LanguageDetail])
+async def get_languages():
+    return await Language.objects.all()
+
+
+@languages_router.get("/{code}", response_model=LanguageDetail)
+async def get_language(code: str):
+    language = await Language.objects.get_or_none(code=code)
+
+    if language is None:
+        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
+
+    return language
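
For reference, the request body for POST /users/{user_id} can be previewed with the same pydantic serializer the endpoint validates against; every field value below is made up, and allowed_langs carries language codes matching Language.code:

# payload_example.py -- hypothetical, illustrative values only
from app.serializers import UserCreateOrUpdate

payload = UserCreateOrUpdate(
    user_id=42,
    last_name="Doe",
    first_name="John",
    username="jdoe",
    source="bot",
    allowed_langs=["ru", "en"],
)
print(payload.json())
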
diff --git a/src/core/app.py b/src/core/app.py
new file mode 100644
index 0000000..ecf0318
--- /dev/null
+++ b/src/core/app.py
@@ -0,0 +1,29 @@
+from fastapi import Depends, FastAPI
+
+from core.db import database
+from app.depends import check_token
+from app.views import users_router, languages_router
+
+
+def start_app() -> FastAPI:
+    app = FastAPI()
+
+    # every route requires the API key checked by app.depends.check_token
+    app.include_router(users_router, dependencies=[Depends(check_token)])
+    app.include_router(languages_router, dependencies=[Depends(check_token)])
+
+    app.state.database = database
+
+    @app.on_event('startup')
+    async def startup() -> None:
+        database_ = app.state.database
+        if not database_.is_connected:
+            await database_.connect()
+
+    @app.on_event('shutdown')
+    async def shutdown() -> None:
+        database_ = app.state.database
+        if database_.is_connected:
+            await database_.disconnect()
+
+    return app
diff --git a/src/core/auth.py b/src/core/auth.py
new file mode 100644
index 0000000..7cc07b5
--- /dev/null
+++ b/src/core/auth.py
@@ -0,0 +1,4 @@
+from fastapi.security import APIKeyHeader
+
+
+default_security = APIKeyHeader(name="Authorization")
diff --git a/src/core/config.py b/src/core/config.py
new file mode 100644
index 0000000..56ae79a
--- /dev/null
+++ b/src/core/config.py
@@ -0,0 +1,14 @@
+from pydantic import BaseSettings
+
+
+class EnvConfig(BaseSettings):
+    API_KEY: str
+
+    POSTGRES_USER: str
+    POSTGRES_PASSWORD: str
+    POSTGRES_HOST: str
+    POSTGRES_PORT: int
+    POSTGRES_DB: str
+
+
+env_config = EnvConfig()
diff --git a/src/core/db.py b/src/core/db.py
new file mode 100644
index 0000000..6037ba3
--- /dev/null
+++ b/src/core/db.py
@@ -0,0 +1,15 @@
+from urllib.parse import quote
+from databases import Database
+
+from sqlalchemy import MetaData
+
+from core.config import env_config
+
+
+DATABASE_URL = (
+    f"postgresql://{env_config.POSTGRES_USER}:{quote(env_config.POSTGRES_PASSWORD)}@"
+    f"{env_config.POSTGRES_HOST}:{env_config.POSTGRES_PORT}/{env_config.POSTGRES_DB}"
+)
+
+metadata = MetaData()
+database = Database(DATABASE_URL)
diff --git a/src/main.py b/src/main.py
new file mode 100644
index 0000000..0a4385b
--- /dev/null
+++ b/src/main.py
@@ -0,0 +1,4 @@
+from core.app import start_app
+
+
+app = start_app()
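
The workflow at the top builds ./docker/build.dockerfile, which is not part of this commit, so a local run has to be wired up by hand. A minimal sketch, assuming the working directory is src/, the API_KEY/POSTGRES_* variables from core/config.py are exported, and the database has been migrated; the script name, host, and port are illustrative:

# serve.py -- hypothetical local entry point, not part of this commit
import uvicorn

if __name__ == "__main__":
    uvicorn.run("main:app", host="0.0.0.0", port=8080)
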