mirror of https://github.com/flibusta-apps/users_settings_server.git
synced 2025-12-06 06:35:39 +01:00

Rewrite to rust
2 .cargo/config.toml Normal file
@@ -0,0 +1,2 @@
[alias]
prisma = "run -p prisma-cli --"
7 .gitignore vendored
@@ -1,5 +1,4 @@
venv
.env
.vscode
target
prisma-cli/target
__pycache__
@@ -1,18 +0,0 @@
exclude: 'docs|node_modules|migrations|.git|.tox'

repos:
  - repo: https://github.com/ambv/black
    rev: 23.3.0
    hooks:
      - id: black
        language_version: python3.11

  - repo: https://github.com/charliermarsh/ruff-pre-commit
    rev: 'v0.0.265'
    hooks:
      - id: ruff

  - repo: https://github.com/crate-ci/typos
    rev: typos-dict-v0.9.26
    hooks:
      - id: typos
5279 Cargo.lock generated Normal file
File diff suppressed because it is too large
25 Cargo.toml Normal file
@@ -0,0 +1,25 @@
[package]
name = "users_settings_server"
version = "0.1.0"
edition = "2021"

[workspace]

members = [
    "prisma-cli"
]

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
once_cell = "1.18.0"

prisma-client-rust = { git = "https://github.com/Brendonovich/prisma-client-rust", tag = "0.6.8", features = ["postgresql"] }
serde = { version = "1.0.163", features = ["derive"] }

tokio = { version = "1.28.2", features = ["full"] }
axum = { version = "0.6.18", features = ["json"] }
log = "0.4.18"
env_logger = "0.10.0"
chrono = "0.4.26"
sentry = "0.31.5"
@@ -1,26 +1,21 @@
FROM ghcr.io/flibusta-apps/base_docker_images:3.11-postgres-asyncpg-poetry-buildtime AS build-image
FROM rust:bullseye AS builder

WORKDIR /root/poetry

COPY pyproject.toml poetry.lock /root/poetry/

RUN poetry export --without-hashes > requirements.txt \
    && . /opt/venv/bin/activate \
    && pip install -r requirements.txt --no-cache-dir


FROM ghcr.io/flibusta-apps/base_docker_images:3.11-postgres-runtime AS runtime-image

WORKDIR /app
WORKDIR /app

COPY ./src/ /app/
COPY . .
COPY ./scripts/* /root/

ENV VENV_PATH=/opt/venv
RUN cargo build --release --bin users_settings_server
ENV PATH="$VENV_PATH/bin:$PATH"

COPY --from=build-image $VENV_PATH $VENV_PATH

EXPOSE 8080
FROM debian:bullseye-slim

CMD bash /root/start_production.sh
RUN apt-get update \
    && apt-get install -y openssl ca-certificates \
    && rm -rf /var/lib/apt/lists/*

RUN update-ca-certificates

WORKDIR /app

COPY --from=builder /app/target/release/users_settings_server /usr/local/bin

ENTRYPOINT ["/usr/local/bin/users_settings_server"]
1469 poetry.lock generated
File diff suppressed because it is too large
3 prisma-cli/.gitignore vendored Normal file
@@ -0,0 +1,3 @@
node_modules
# Keep environment variables out of version control
.env
4622 prisma-cli/Cargo.lock generated Normal file
File diff suppressed because it is too large
9 prisma-cli/Cargo.toml Normal file
@@ -0,0 +1,9 @@
[package]
name = "prisma-cli"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
prisma-client-rust-cli = { git = "https://github.com/Brendonovich/prisma-client-rust", tag = "0.6.8", features = ["postgresql"] }
3 prisma-cli/src/main.rs Normal file
@@ -0,0 +1,3 @@
fn main() {
    prisma_client_rust_cli::run();
}
58 prisma/schema.prisma Normal file
@@ -0,0 +1,58 @@
generator client {
  provider = "cargo prisma"
  output   = "../src/prisma.rs"
}

datasource db {
  provider = "postgresql"
  url      = env("DATABASE_URL")
}

model UserSettings {
  id            Int              @id @default(autoincrement())
  user_id       BigInt           @unique
  last_name     String           @db.VarChar(64)
  first_name    String           @db.VarChar(64)
  username      String           @db.VarChar(32)
  source        String           @db.VarChar(32)
  user_activity UserActivity?
  languages     LanguageToUser[]

  @@map("user_settings")
}

model ChatDonateNotifications {
  id      BigInt   @id @default(autoincrement())
  chat_id BigInt   @unique
  sended  DateTime @db.Timestamp(6)

  @@map("chat_donate_notifications")
}

model Language {
  id    Int              @id @default(autoincrement())
  label String           @db.VarChar(16)
  code  String           @unique @db.VarChar(4)
  users LanguageToUser[]

  @@map("languages")
}

model UserActivity {
  id      Int          @id @default(autoincrement())
  user_id Int          @unique @map("user")
  updated DateTime     @db.Timestamp(6)
  user    UserSettings @relation(fields: [user_id], references: [id], onDelete: NoAction, onUpdate: NoAction, map: "fk_user_activity_user_settings_id_user")

  @@map("user_activity")
}

model LanguageToUser {
  id          Int          @id @default(autoincrement())
  language_id Int          @map("language")
  user_id     Int          @map("user")
  language    Language     @relation(fields: [language_id], references: [id], onDelete: Cascade, map: "fk_users_languages_languages_language_id")
  user        UserSettings @relation(fields: [user_id], references: [id], onDelete: Cascade, map: "fk_users_languages_user_settings_user_id")

  @@map("users_languages")
}
@@ -1,76 +0,0 @@
[tool.poetry]
name = "users_settings_server"
version = "0.1.0"
description = ""
authors = ["Kurbanov Bulat <kurbanovbul@gmail.com>"]

[tool.poetry.dependencies]
python = "^3.11"
fastapi = "^0.95.1"
fastapi-pagination = {extras = ["ormar"], version = "^0.12.3"}
alembic = "^1.10.4"
ormar = {extras = ["postgresql"], version = "^0.12.1"}
uvicorn = {extras = ["standard"], version = "^0.22.0"}
httpx = "^0.24.0"
orjson = "^3.8.12"
prometheus-fastapi-instrumentator = "^6.0.0"
uvloop = "^0.17.0"
sentry-sdk = "^1.22.2"
redis = {extras = ["hiredis"], version = "^4.5.5"}
ormsgpack = "^1.2.6"

[tool.poetry.dev-dependencies]

[tool.poetry.group.dev.dependencies]
pre-commit = "^3.3.1"

[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"

[tool.black]
include = '\.pyi?$'
exclude = '''
/(
    \.git
  | \.vscode
  | \venv
  | alembic
)/
'''

[tool.ruff]
fix = true
target-version = "py311"
src = ["app"]
line-length=88
ignore = []
select = ["B", "C", "E", "F", "W", "B9", "I001"]
exclude = [
    # No need to traverse our git directory
    ".git",
    # There's no value in checking cache directories
    "__pycache__",
    # The conf file is mostly autogenerated, ignore it
    "src/app/alembic",
]

[tool.ruff.flake8-bugbear]
extend-immutable-calls = ["fastapi.File", "fastapi.Form", "fastapi.Security"]

[tool.ruff.mccabe]
max-complexity = 15

[tool.ruff.isort]
known-first-party = ["core", "app"]
force-sort-within-sections = true
force-wrap-aliases = true
section-order = ["future", "standard-library", "base_framework", "framework_ext", "third-party", "first-party", "local-folder"]
lines-after-imports = 2

[tool.ruff.isort.sections]
base_framework = ["fastapi",]
framework_ext = ["starlette"]

[tool.ruff.pyupgrade]
keep-runtime-typing = true
@@ -1,6 +0,0 @@
import httpx


response = httpx.get("http://localhost:8080/healthcheck")
print(f"HEALTHCHECK STATUS: {response.status_code}")
exit(0 if response.status_code == 200 else 1)
@@ -1,7 +0,0 @@
cd /app

rm -rf prometheus
mkdir prometheus

alembic -c ./app/alembic.ini upgrade head
uvicorn main:app --host 0.0.0.0 --port 8080 --loop uvloop
@@ -1,98 +0,0 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = ./app/alembic

# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator"
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions

# version path separator; As mentioned above, this is the character used to split
# version_locations. Valid values are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os # default: use os.pathsep

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
@@ -1 +0,0 @@
Generic single-database configuration.
@@ -1,64 +0,0 @@
import os
import sys

from alembic import context
from core.db import DATABASE_URL
from sqlalchemy.engine import create_engine

myPath = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, myPath + "/../../")

config = context.config


from app.models import BaseMeta

target_metadata = BaseMeta.metadata


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    connectable = create_engine(DATABASE_URL)

    with connectable.connect() as connection:
        context.configure(
            connection=connection, target_metadata=target_metadata, compare_type=True
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
@@ -1,24 +0,0 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade():
    ${upgrades if upgrades else "pass"}


def downgrade():
    ${downgrades if downgrades else "pass"}
@@ -1,40 +0,0 @@
"""empty message

Revision ID: 64fe2045bf28
Revises: 750640043cd4
Create Date: 2023-01-05 18:28:05.296720

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = "64fe2045bf28"
down_revision = "750640043cd4"
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "user_activity",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("user", sa.Integer(), nullable=False),
        sa.Column("updated", sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(
            ["user"],
            ["user_settings.id"],
            name="fk_user_activity_user_settings_id_user",
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("user"),
    )
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table("user_activity")
    # ### end Alembic commands ###
@@ -1,30 +0,0 @@
"""empty message

Revision ID: 750640043cd4
Revises: 7e45f53febe1
Create Date: 2021-12-28 22:47:43.270886

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = "750640043cd4"
down_revision = "7e45f53febe1"
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column(
        "user_settings", sa.Column("source", sa.String(length=32), nullable=False)
    )
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column("user_settings", "source")
    # ### end Alembic commands ###
@@ -1,68 +0,0 @@
"""empty message

Revision ID: 7e45f53febe1
Revises:
Create Date: 2021-12-03 22:42:22.514771

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = "7e45f53febe1"
down_revision = None
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "languages",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("label", sa.String(length=16), nullable=False),
        sa.Column("code", sa.String(length=4), nullable=False),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("code"),
    )
    op.create_table(
        "user_settings",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("user_id", sa.BigInteger(), nullable=False),
        sa.Column("last_name", sa.String(length=64), nullable=False),
        sa.Column("first_name", sa.String(length=64), nullable=False),
        sa.Column("username", sa.String(length=32), nullable=False),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("user_id"),
    )
    op.create_table(
        "users_languages",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("language", sa.Integer(), nullable=True),
        sa.Column("user", sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(
            ["language"],
            ["languages.id"],
            name="fk_users_languages_languages_language_id",
            onupdate="CASCADE",
            ondelete="CASCADE",
        ),
        sa.ForeignKeyConstraint(
            ["user"],
            ["user_settings.id"],
            name="fk_users_languages_user_settings_user_id",
            onupdate="CASCADE",
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table("users_languages")
    op.drop_table("user_settings")
    op.drop_table("languages")
    # ### end Alembic commands ###
@@ -1,35 +0,0 @@
"""empty message

Revision ID: c6ab48565c49
Revises: 64fe2045bf28
Create Date: 2023-05-13 22:50:52.525440

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = "c6ab48565c49"
down_revision = "64fe2045bf28"
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "chat_donate_notifications",
        sa.Column("id", sa.BigInteger(), nullable=False),
        sa.Column("chat_id", sa.BigInteger(), nullable=False),
        sa.Column("sended", sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("chat_id"),
    )
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table("chat_donate_notifications")
    # ### end Alembic commands ###
@@ -1,11 +0,0 @@
from fastapi import HTTPException, Security, status

from core.auth import default_security
from core.config import env_config


async def check_token(api_key: str = Security(default_security)):
    if api_key != env_config.API_KEY:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN, detail="Wrong api key!"
        )
@@ -1,56 +0,0 @@
from datetime import datetime
from typing import cast

import ormar

from core.db import database, metadata


class BaseMeta(ormar.ModelMeta):
    metadata = metadata
    database = database


class Language(ormar.Model):
    class Meta(BaseMeta):
        tablename = "languages"

    id: int = cast(int, ormar.Integer(primary_key=True))
    label: str = cast(str, ormar.String(max_length=16))
    code: str = cast(str, ormar.String(max_length=4, unique=True))


class User(ormar.Model):
    class Meta(BaseMeta):
        tablename = "user_settings"

    id: int = cast(int, ormar.Integer(primary_key=True))

    user_id: int = cast(int, ormar.BigInteger(unique=True))
    last_name: str = cast(str, ormar.String(max_length=64))
    first_name: str = cast(str, ormar.String(max_length=64))
    username: str = cast(str, ormar.String(max_length=32))
    source: str = cast(str, ormar.String(max_length=32))

    allowed_langs = ormar.ManyToMany(Language)


class UserActivity(ormar.Model):
    class Meta(BaseMeta):
        tablename = "user_activity"

    id: int = cast(int, ormar.Integer(primary_key=True))

    user: User = ormar.ForeignKey(
        User, nullable=False, unique=True, related_name="last_activity"
    )
    updated: datetime = cast(datetime, ormar.DateTime(timezone=False))


class ChatDonateNotification(ormar.Model):
    class Meta(BaseMeta):
        tablename = "chat_donate_notifications"

    id: int = cast(int, ormar.BigInteger(primary_key=True))
    chat_id: int = cast(int, ormar.BigInteger(unique=True))
    sended: datetime = cast(datetime, ormar.DateTime(timezone=False))
@@ -1,37 +0,0 @@
from typing import Optional

from pydantic import BaseModel, constr


class CreateLanguage(BaseModel):
    label: constr(max_length=16)  # type: ignore
    code: constr(max_length=4)  # type: ignore


class LanguageDetail(CreateLanguage):
    id: int


class UserBase(BaseModel):
    user_id: int
    last_name: constr(max_length=64)  # type: ignore
    first_name: constr(max_length=64)  # type: ignore
    username: constr(max_length=32)  # type: ignore
    source: constr(max_length=32)  # type: ignore


class UserCreateOrUpdate(UserBase):
    allowed_langs: Optional[list[str]] = None


class UserUpdate(BaseModel):
    last_name: Optional[constr(max_length=64)] = None  # type: ignore
    first_name: Optional[constr(max_length=64)] = None  # type: ignore
    username: Optional[constr(max_length=32)] = None  # type: ignore
    source: Optional[constr(max_length=32)] = None  # type: ignore
    allowed_langs: Optional[list[str]] = None


class UserDetail(UserBase):
    id: int
    allowed_langs: list[LanguageDetail]
@@ -1,30 +0,0 @@
from typing import cast

from app.models import Language, User


async def update_user_allowed_langs(user: User, new_allowed_langs: list[str]) -> bool:
    user_allowed_langs = cast(list[Language], user.allowed_langs)

    exists_langs = {lang.code for lang in user_allowed_langs}
    new_langs = set(new_allowed_langs)

    to_delete = exists_langs - new_langs
    to_add = new_langs - exists_langs

    all_process_langs = list(to_delete) + list(to_add)

    langs = await Language.objects.filter(code__in=all_process_langs).all()

    updated = False

    for lang in langs:
        if lang.code in to_delete:
            await user.allowed_langs.remove(lang)
            updated = True

        if lang.code in to_add:
            await user.allowed_langs.add(lang)
            updated = True

    return updated
@@ -1,159 +0,0 @@
from typing import Optional, Union

from fastapi import HTTPException, status

import ormsgpack
from redis import asyncio as aioredis

from app.models import User
from app.serializers import UserCreateOrUpdate, UserDetail, UserUpdate
from app.services.allowed_langs_updater import update_user_allowed_langs


class UsersDataManager:
    @classmethod
    async def _get_user_from_db(cls, user_id: int) -> Optional[User]:
        return await User.objects.select_related("allowed_langs").get_or_none(
            user_id=user_id
        )

    @classmethod
    def _get_cache_key(cls, user_id: int) -> str:
        return f"user_v2_{user_id}"

    @classmethod
    async def _get_user_from_cache(
        cls, user_id: int, redis: aioredis.Redis
    ) -> Optional[UserDetail]:
        try:
            key = cls._get_cache_key(user_id)
            data = await redis.get(key)

            if data is None:
                return None

            return UserDetail.parse_obj(ormsgpack.unpackb(data))

        except aioredis.RedisError:
            return None

    @classmethod
    async def _cache_user(cls, user: User, redis: aioredis.Redis) -> bool:
        try:
            key = cls._get_cache_key(user.id)
            data = ormsgpack.packb(user.dict())
            await redis.set(key, data)
            return True
        except aioredis.RedisError:
            return False

    @classmethod
    async def get_user(
        cls, user_id: int, redis: aioredis.Redis
    ) -> Optional[UserDetail | User]:
        if cached_user := await cls._get_user_from_cache(user_id, redis):
            return cached_user

        user = await cls._get_user_from_db(user_id)

        if not user:
            return None

        await cls._cache_user(user, redis)
        return user

    @classmethod
    def _is_has_data_to_update(cls, new_user: UserUpdate) -> bool:
        data_dict = new_user.dict()

        update_data = {}
        for key in data_dict:
            if data_dict[key] is not None:
                update_data[key] = data_dict[key]

        return bool(update_data)

    @classmethod
    async def _create(cls, data: UserCreateOrUpdate):
        data_dict = data.dict()
        allowed_langs = data_dict.pop("allowed_langs", None) or ["ru", "be", "uk"]

        user_obj = await User.objects.select_related("allowed_langs").create(
            **data_dict
        )
        await update_user_allowed_langs(user_obj, allowed_langs)

        return user_obj

    @classmethod
    async def _update(
        cls, user_id: int, update_data: dict, redis: aioredis.Redis
    ) -> User:
        user_obj = await cls._get_user_from_db(user_id)
        assert user_obj is not None

        if allowed_langs := update_data.pop("allowed_langs", None):
            await update_user_allowed_langs(user_obj, allowed_langs)

        if update_data:
            user_obj.update_from_dict(update_data)
            await user_obj.update()

        await cls._cache_user(user_obj, redis)

        return user_obj

    @classmethod
    async def create_or_update_user(
        cls, data: UserCreateOrUpdate, redis: aioredis.Redis
    ) -> User | UserDetail:
        user = await cls.get_user(data.user_id, redis)

        if user is None:
            new_user = await cls._create(data)
            await cls._cache_user(new_user, redis)
            return new_user

        if not cls._is_need_update(user, data):
            return user

        return await cls._update(user.user_id, data.dict(), redis)

    @classmethod
    def _is_need_update(
        cls,
        old_user: UserDetail | User,
        new_user: Union[UserUpdate, UserCreateOrUpdate],
    ) -> bool:
        old_data = old_user.dict()
        new_data = new_user.dict()

        allowed_langs = new_data.pop("allowed_lang", None)

        for key in new_data:
            if new_data[key] != old_data[key]:
                return True

        if allowed_langs and set(allowed_langs) != {
            lang.code for lang in old_user.allowed_langs
        }:
            return True

        return False

    @classmethod
    async def update_user(
        cls, user_id: int, user_data: UserUpdate, redis: aioredis.Redis
    ) -> Union[UserDetail, User]:
        user = await cls.get_user(user_id, redis)

        if user is None:
            raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST)

        if not cls._is_has_data_to_update(user_data):
            raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST)

        if not cls._is_need_update(user, user_data):
            return user

        return await cls._update(user.user_id, user_data.dict(), redis)
@@ -1,17 +0,0 @@
from .donate_notifications import donation_notifications_router
from .healthcheck import healthcheck_router
from .languages import languages_router
from .users import users_router


routers = [
    donation_notifications_router,
    healthcheck_router,
    languages_router,
    users_router,
]


__all__ = [
    "routers",
]
@@ -1,41 +0,0 @@
from datetime import datetime

from fastapi import APIRouter, Depends

from app.depends import check_token
from app.models import ChatDonateNotification


NOTIFICATION_DELTA_DAYS = 60


donation_notifications_router = APIRouter(
    prefix="/donate_notifications",
    tags=["donate_notifications"],
    dependencies=[Depends(check_token)],
)


@donation_notifications_router.get("/{chat_id}/is_need_send")
async def is_need_send(chat_id: int) -> bool:
    # add redis cache
    notification = await ChatDonateNotification.objects.get_or_none(chat_id=chat_id)

    if notification is None:
        return True

    delta = datetime.now() - notification.sended
    return delta.days >= NOTIFICATION_DELTA_DAYS


@donation_notifications_router.post("/{chat_id}")
async def mark_sended(chat_id: int):
    notification, created = await ChatDonateNotification.objects.get_or_create(
        {"sended": datetime.now()}, chat_id=chat_id
    )

    if created:
        return

    notification.sended = datetime.now()
    await notification.save()
@@ -1,9 +0,0 @@
from fastapi import APIRouter


healthcheck_router = APIRouter(tags=["healthcheck"])


@healthcheck_router.get("/healthcheck")
async def healthcheck():
    return "Ok!"
@@ -1,33 +0,0 @@
from fastapi import APIRouter, Depends, HTTPException, status

from app.depends import check_token
from app.models import Language
from app.serializers import (
    CreateLanguage,
    LanguageDetail,
)


languages_router = APIRouter(
    prefix="/languages", tags=["languages"], dependencies=[Depends(check_token)]
)


@languages_router.get("/", response_model=list[LanguageDetail])
async def get_languages():
    return await Language.objects.all()


@languages_router.get("/{code}", response_model=LanguageDetail)
async def get_language(code: str):
    language = await Language.objects.get_or_none(code=code)

    if language is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)

    return language


@languages_router.post("/", response_model=LanguageDetail)
async def create_language(data: CreateLanguage):
    return await Language.objects.create(**data.dict())
@@ -1,70 +0,0 @@
from datetime import datetime

from fastapi import APIRouter, Depends, HTTPException, Request, status

from asyncpg.exceptions import UniqueViolationError
from fastapi_pagination import Page, Params
from fastapi_pagination.ext.ormar import paginate
from redis import asyncio as aioredis

from app.depends import check_token
from app.models import User, UserActivity
from app.serializers import (
    UserCreateOrUpdate,
    UserDetail,
    UserUpdate,
)
from app.services.users_data_manager import UsersDataManager


users_router = APIRouter(
    prefix="/users", tags=["users"], dependencies=[Depends(check_token)]
)


@users_router.get("/", dependencies=[Depends(Params)], response_model=Page[UserDetail])
async def get_users():
    return await paginate(User.objects.select_related("allowed_langs"))


@users_router.get("/{user_id}", response_model=UserDetail)
async def get_user(request: Request, user_id: int):
    redis: aioredis.Redis = request.app.state.redis
    user_data = await UsersDataManager.get_user(user_id, redis)

    if user_data is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)

    return user_data


@users_router.post("/", response_model=UserDetail)
async def create_or_update_user(request: Request, data: UserCreateOrUpdate):
    redis: aioredis.Redis = request.app.state.redis
    return await UsersDataManager.create_or_update_user(data, redis)


@users_router.patch("/{user_id}", response_model=UserDetail)
async def update_user(request: Request, user_id: int, data: UserUpdate):
    redis: aioredis.Redis = request.app.state.redis
    return await UsersDataManager.update_user(user_id, data, redis)


@users_router.post("/{user_id}/update_activity")
async def update_activity(user_id: int) -> None:
    user = await User.objects.get_or_none(user_id=user_id)

    if user is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)

    activity = await UserActivity.objects.get_or_none(user__user_id=user_id)

    if activity is None:
        try:
            await UserActivity.objects.create(user=user.id, updated=datetime.now())
        except UniqueViolationError:
            pass
        return

    activity.updated = datetime.now()
    await activity.update()
40 src/config.rs Normal file
@@ -0,0 +1,40 @@
use once_cell::sync::Lazy;

pub struct Config {
    pub api_key: String,

    pub postgres_user: String,
    pub postgres_password: String,
    pub postgres_host: String,
    pub postgres_port: u32,
    pub postgres_db: String,

    pub sentry_dsn: String
}


fn get_env(env: &'static str) -> String {
    std::env::var(env).unwrap_or_else(|_| panic!("Cannot get the {} env variable", env))
}


impl Config {
    pub fn load() -> Config {
        Config {
            api_key: get_env("API_KEY"),

            postgres_user: get_env("POSGRES_USER"),
            postgres_password: get_env("POSTGRES_PASSWORD"),
            postgres_host: get_env("POSTGRES_HOST"),
            postgres_port: get_env("POSTGRES_PORT").parse().unwrap(),
            postgres_db: get_env("POSTGRES_DB"),

            sentry_dsn: get_env("SENTRY_DSN")
        }
    }
}


pub static CONFIG: Lazy<Config> = Lazy::new(|| {
    Config::load()
});
@@ -1,47 +0,0 @@
from fastapi import FastAPI
from fastapi.responses import ORJSONResponse

from fastapi_pagination import add_pagination
from prometheus_fastapi_instrumentator import Instrumentator
from redis import asyncio as aioredis

from app.views import routers
from core.config import env_config
from core.db import database


def start_app() -> FastAPI:
    app = FastAPI(default_response_class=ORJSONResponse)

    for router in routers:
        app.include_router(router)

    app.state.database = database

    app.state.redis = aioredis.Redis(
        host=env_config.REDIS_HOST,
        port=env_config.REDIS_PORT,
        db=env_config.REDIS_DB,
        password=env_config.REDIS_PASSWORD,
    )

    add_pagination(app)

    @app.on_event("startup")
    async def startup() -> None:
        database_ = app.state.database
        if not database_.is_connected:
            await database_.connect()

    @app.on_event("shutdown")
    async def shutdown() -> None:
        database_ = app.state.database
        if database_.is_connected:
            await database_.disconnect()

    Instrumentator(
        should_ignore_untemplated=True,
        excluded_handlers=["/docs", "/metrics", "/healthcheck"],
    ).instrument(app).expose(app, include_in_schema=True)

    return app
@@ -1,4 +0,0 @@
from fastapi.security import APIKeyHeader


default_security = APIKeyHeader(name="Authorization")
@@ -1,23 +0,0 @@
from typing import Optional

from pydantic import BaseSettings


class EnvConfig(BaseSettings):
    API_KEY: str

    POSTGRES_USER: str
    POSTGRES_PASSWORD: str
    POSTGRES_HOST: str
    POSTGRES_PORT: int
    POSTGRES_DB: str

    REDIS_HOST: str
    REDIS_PORT: int
    REDIS_DB: int
    REDIS_PASSWORD: Optional[str]

    SENTRY_DSN: str


env_config = EnvConfig()
@@ -1,15 +0,0 @@
from urllib.parse import quote

from databases import Database
from sqlalchemy import MetaData

from core.config import env_config


DATABASE_URL = (
    f"postgresql://{env_config.POSTGRES_USER}:{quote(env_config.POSTGRES_PASSWORD)}@"
    f"{env_config.POSTGRES_HOST}:{env_config.POSTGRES_PORT}/{env_config.POSTGRES_DB}"
)

metadata = MetaData()
database = Database(DATABASE_URL, min_size=1, max_size=10)
19 src/db.rs Normal file
@@ -0,0 +1,19 @@
use crate::{prisma::PrismaClient, config::CONFIG};


pub async fn get_prisma_client() -> PrismaClient {
    let database_url: String = format!(
        "postgresql://{}:{}@{}:{}/{}",
        CONFIG.postgres_user,
        CONFIG.postgres_password,
        CONFIG.postgres_host,
        CONFIG.postgres_port,
        CONFIG.postgres_db
    );

    PrismaClient::_builder()
        .with_url(database_url)
        .build()
        .await
        .unwrap()
}
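Editor's note on this helper: get_prisma_client() builds a fresh PrismaClient, and the handlers below await it once per request. A minimal sketch of sharing one client per process instead, assuming tokio::sync::OnceCell (not used in this commit) and the same config/prisma modules:

// Sketch only — not part of this commit. Caches a single PrismaClient for the
// whole process instead of rebuilding one per request; OnceCell is an assumption.
use tokio::sync::OnceCell;

use crate::{db::get_prisma_client, prisma::PrismaClient};

static CLIENT: OnceCell<PrismaClient> = OnceCell::const_new();

pub async fn shared_client() -> &'static PrismaClient {
    CLIENT
        .get_or_init(|| async { get_prisma_client().await })
        .await
}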
10 src/main.py
@@ -1,10 +0,0 @@
import sentry_sdk
from sentry_sdk.integrations.asgi import SentryAsgiMiddleware

from core.app import start_app
from core.config import env_config


sentry_sdk.init(dsn=env_config.SENTRY_DSN)

app = SentryAsgiMiddleware(start_app())
28 src/main.rs Normal file
@@ -0,0 +1,28 @@
pub mod config;
pub mod prisma;
pub mod views;
pub mod db;

use std::net::SocketAddr;


async fn start_app() {
    let app = views::get_router();

    let addr = SocketAddr::from(([0, 0, 0, 0], 8080));

    log::info!("Start webserver...");
    axum::Server::bind(&addr)
        .serve(app.into_make_service())
        .await
        .unwrap();
    log::info!("Webserver shutdown...")
}


#[tokio::main]
async fn main() {
    let _guard = sentry::init(config::CONFIG.sentry_dsn.clone());
    env_logger::init();

    start_app().await;
}
5260 src/prisma.rs Normal file
File diff suppressed because one or more lines are too long
61 src/views/donate_notifications.rs Normal file
@@ -0,0 +1,61 @@
use axum::{Router, response::IntoResponse, routing::{get, post}, extract::Path, Json, http::StatusCode};
use chrono::Duration;

use crate::{prisma::chat_donate_notifications, db::get_prisma_client};


async fn is_need_send(
    Path(chat_id): Path<i64>
) -> impl IntoResponse {
    const NOTIFICATION_DELTA_DAYS: i64 = 60;

    let client = get_prisma_client().await;

    let notification = client.chat_donate_notifications()
        .find_unique(chat_donate_notifications::chat_id::equals(chat_id))
        .exec()
        .await
        .unwrap();

    match notification {
        Some(notification) => {
            let now = chrono::offset::Local::now().naive_local();
            let check_date = now - Duration::days(NOTIFICATION_DELTA_DAYS);
            let result = notification.sended.naive_local() < check_date;

            Json(result).into_response()
        },
        None => Json(true).into_response(),
    }
}


async fn mark_sended(
    Path(chat_id): Path<i64>
) -> impl IntoResponse {
    let client = get_prisma_client().await;

    let _ = client.chat_donate_notifications()
        .upsert(
            chat_donate_notifications::chat_id::equals(chat_id),
            chat_donate_notifications::create(
                chat_id,
                chrono::offset::Local::now().into(),
                vec![]
            ),
            vec![
                chat_donate_notifications::sended::set(
                    chrono::offset::Local::now().into()
                )
            ]
        );

    StatusCode::OK
}


pub fn get_router() -> Router {
    Router::new()
        .route("/:chat_id/is_need_send", get(is_need_send))
        .route("/:chat_id", post(mark_sended))
}
66 src/views/languages.rs Normal file
@@ -0,0 +1,66 @@
use axum::{Router, response::IntoResponse, routing::get, Json, extract::Path, http::StatusCode};
use serde::Serialize;

use crate::{prisma::language, db::get_prisma_client};


#[derive(Serialize)]
pub struct LanguageDetail {
    pub id: i32,
    pub label: String,
    pub code: String,
}


impl From<language::Data> for LanguageDetail {
    fn from(value: language::Data) -> Self {
        let language::Data { id, label, code, .. } = value;

        Self {
            id,
            label,
            code
        }
    }
}


async fn get_languages() -> impl IntoResponse {
    let client = get_prisma_client().await;

    let languages: Vec<LanguageDetail> = client.language()
        .find_many(vec![])
        .exec()
        .await
        .unwrap()
        .into_iter()
        .map(|item| item.into())
        .collect();

    Json(languages).into_response()
}


async fn get_language_by_code(
    Path(code): Path<String>
) -> impl IntoResponse {
    let client = get_prisma_client().await;

    let language = client.language()
        .find_unique(language::code::equals(code))
        .exec()
        .await
        .unwrap();

    match language {
        Some(v) => Json::<LanguageDetail>(v.into()).into_response(),
        None => StatusCode::NOT_FOUND.into_response(),
    }
}


pub fn get_router() -> Router {
    Router::new()
        .route("/", get(get_languages))
        .route("/:code", get(get_language_by_code))
}
36 src/views/mod.rs Normal file
@@ -0,0 +1,36 @@
use axum::{Router, response::Response, http::{StatusCode, self, Request}, middleware::{Next, self}};

use crate::config::CONFIG;

pub mod users;
pub mod pagination;
pub mod languages;
pub mod donate_notifications;


async fn auth<B>(req: Request<B>, next: Next<B>) -> Result<Response, StatusCode> {
    let auth_header = req.headers()
        .get(http::header::AUTHORIZATION)
        .and_then(|header| header.to_str().ok());

    let auth_header = if let Some(auth_header) = auth_header {
        auth_header
    } else {
        return Err(StatusCode::UNAUTHORIZED);
    };

    if auth_header != CONFIG.api_key {
        return Err(StatusCode::UNAUTHORIZED);
    }

    Ok(next.run(req).await)
}


pub fn get_router() -> Router {
    Router::new()
        .nest("/users/", users::get_router())
        .nest("/languages/", languages::get_router())
        .nest("/donate_notifications/", donate_notifications::get_router())
        .layer(middleware::from_fn(auth))
}
49 src/views/pagination.rs Normal file
@@ -0,0 +1,49 @@
use serde::{Deserialize, Serialize};


#[derive(Deserialize)]
pub struct Pagination {
    #[serde(default = "default_page")]
    pub page: usize,
    #[serde(default = "default_size")]
    pub size: usize
}

fn default_page() -> usize { 1 }
fn default_size() -> usize { 50 }

impl Pagination {
    pub fn skip(&self) -> i64 {
        ((self.page - 1) * self.size).try_into().unwrap()
    }

    pub fn take(&self) -> i64 {
        self.size.try_into().unwrap()
    }
}


#[derive(Serialize)]
pub struct Page<T> where T: Serialize {
    pub items: Vec<T>,
    pub total: usize,
    pub page: usize,
    pub size: usize,
    pub pages: usize
}

impl<T> Page<T> where T: Serialize {
    pub fn create(
        items: Vec<T>,
        items_count: i64,
        pagination: Pagination
    ) -> Self {
        Self {
            items,
            total: items_count.try_into().unwrap(),
            page: pagination.page,
            size: pagination.size,
            pages: (items_count as f64 / pagination.size as f64).ceil() as usize
        }
    }
}
170 src/views/users/mod.rs Normal file
@@ -0,0 +1,170 @@
pub mod serializers;
pub mod utils;

use axum::{Router, response::IntoResponse, routing::{get, post}, extract::{Query, Path, self}, Json, http::StatusCode};
use crate::{prisma::{user_settings, language_to_user, user_activity}, db::get_prisma_client};

use self::{serializers::{UserDetail, CreateOrUpdateUserData}, utils::update_languages};

use super::pagination::{Pagination, Page};


async fn get_users(
    pagination: Query<Pagination>
) -> impl IntoResponse {
    let pagination: Pagination = pagination.0;

    let client = get_prisma_client().await;

    let users_count = client.user_settings()
        .count(vec![])
        .exec()
        .await
        .unwrap();

    let users: Vec<UserDetail> = client.user_settings()
        .find_many(vec![])
        .with(
            user_settings::languages::fetch(vec![])
                .with(
                    language_to_user::language::fetch()
                )
        )
        .order_by(user_settings::id::order(prisma_client_rust::Direction::Asc))
        .skip(pagination.skip())
        .take(pagination.take())
        .exec()
        .await
        .unwrap()
        .into_iter()
        .map(|item| item.into())
        .collect();

    Json(Page::create(
        users,
        users_count,
        pagination
    )).into_response()
}


async fn get_user(
    Path(user_id): Path<i64>
) -> impl IntoResponse {
    let client = get_prisma_client().await;

    let user = client.user_settings()
        .find_unique(user_settings::user_id::equals(user_id))
        .with(
            user_settings::languages::fetch(vec![])
                .with(
                    language_to_user::language::fetch()
                )
        )
        .exec()
        .await
        .unwrap();

    if user.is_none() {
        return StatusCode::NOT_FOUND.into_response();
    }

    Json::<UserDetail>(user.unwrap().into()).into_response()
}


async fn create_or_update_user(
    extract::Json(data): extract::Json<CreateOrUpdateUserData>
) -> impl IntoResponse {
    let client = get_prisma_client().await;

    let user = client.user_settings()
        .upsert(
            user_settings::user_id::equals(data.user_id),
            user_settings::create(
                data.user_id,
                data.last_name.clone(),
                data.first_name.clone(),
                data.username.clone(),
                data.source.clone(),
                vec![]
            ),
            vec![
                user_settings::last_name::set(data.last_name),
                user_settings::first_name::set(data.first_name),
                user_settings::username::set(data.username),
                user_settings::source::set(data.source)
            ]
        )
        .with(
            user_settings::languages::fetch(vec![])
                .with(
                    language_to_user::language::fetch()
                )
        )
        .exec()
        .await
        .unwrap();

    let user_id = user.id;
    update_languages(user, data.allowed_langs).await;

    let user = client.user_settings()
        .find_unique(user_settings::id::equals(user_id))
        .with(
            user_settings::languages::fetch(vec![])
                .with(
                    language_to_user::language::fetch()
                )
        )
        .exec()
        .await
        .unwrap()
        .unwrap();

    Json::<UserDetail>(user.into()).into_response()
}


async fn update_activity(
    Path(user_id): Path<i64>,
) -> impl IntoResponse {
    let client = get_prisma_client().await;

    let user = client.user_settings()
        .find_unique(user_settings::user_id::equals(user_id))
        .exec()
        .await
        .unwrap();

    let user = match user {
        Some(v) => v,
        None => return StatusCode::NOT_FOUND.into_response(),
    };

    let _ = client.user_activity()
        .upsert(
            user_activity::user_id::equals(user.id),
            user_activity::create(
                chrono::offset::Local::now().into(),
                user_settings::id::equals(user.id),
                vec![]
            ),
            vec![
                user_activity::updated::set(chrono::offset::Local::now().into())
            ]
        )
        .exec()
        .await;

    StatusCode::OK.into_response()
}


pub fn get_router() -> Router {
    Router::new()
        .route("/", get(get_users))
        .route("/:user_id", get(get_user))
        .route("/", post(create_or_update_user))
        .route("/:user_id/update_activity", post(update_activity))
}
66 src/views/users/serializers.rs Normal file
@@ -0,0 +1,66 @@
use serde::{Serialize, Deserialize};

use crate::prisma::{user_settings, language};


#[derive(Serialize)]
pub struct UserLanguage {
    pub id: i32,
    pub label: String,
    pub code: String,
}


impl From<language::Data> for UserLanguage {
    fn from(value: language::Data) -> Self {
        Self {
            id: value.id,
            label: value.label,
            code: value.code
        }
    }
}


#[derive(Serialize)]
pub struct UserDetail {
    pub id: i32,
    pub user_id: i64,
    pub last_name: String,
    pub first_name: String,
    pub username: String,
    pub source: String,
    pub allowed_langs: Vec<UserLanguage>
}


impl From<user_settings::Data> for UserDetail {
    fn from(value: user_settings::Data) -> Self {
        let allowed_langs: Vec<UserLanguage> = value
            .languages.unwrap()
            .into_iter()
            .map(|item| *item.language.unwrap())
            .map(|item| item.into())
            .collect();

        Self {
            id: value.id,
            user_id: value.user_id,
            last_name: value.last_name,
            first_name: value.first_name,
            username: value.username,
            source: value.source,
            allowed_langs
        }
    }
}

#[derive(Deserialize)]
pub struct CreateOrUpdateUserData {
    pub user_id: i64,
    pub last_name: String,
    pub first_name: String,
    pub username: String,
    pub source: String,
    pub allowed_langs: Vec<String>
}
68 src/views/users/utils.rs Normal file
@@ -0,0 +1,68 @@
use std::collections::HashMap;

use crate::{prisma::{user_settings, language, language_to_user}, db::get_prisma_client};


pub async fn update_languages(
    user: user_settings::Data,
    new_langs: Vec<String>
) {
    let client = get_prisma_client().await;

    // Delete
    {
        let need_delete: Vec<_> = user.languages().unwrap()
            .iter()
            .map(|item| {
                let language::Data{ id, code, .. } = *item.clone().language.unwrap();
                (id, code)
            })
            .filter(|(_, code)| !new_langs.contains(code))
            .map(|(id, _)| id)
            .collect();

        let _ = client.language_to_user()
            .delete_many(
                vec![language_to_user::id::in_vec(need_delete)]
            )
            .exec()
            .await;
    }

    // Create
    {
        let languages: HashMap<_, _> = client.language()
            .find_many(vec![])
            .exec()
            .await
            .unwrap()
            .into_iter()
            .map(|l| (l.code, l.id))
            .collect();

        let current_langs: Vec<_> = user.languages().unwrap()
            .iter()
            .map(|item| item.clone().language.unwrap().code)
            .collect();

        let need_create: Vec<i32> = new_langs
            .into_iter()
            .filter(|code| !current_langs.contains(code))
            .map(|code| *languages.get(&code).unwrap())
            .collect();

        let _ = client.language_to_user()
            .create_many(
                need_create
                    .iter()
                    .map(|language_id| language_to_user::create_unchecked(
                        *language_id,
                        user.id,
                        vec![]
                    ))
                    .collect()
            )
            .exec()
            .await;
    }
}