Rewrite to rust

This commit is contained in:
2023-07-24 21:54:32 +02:00
parent a7e8b74dd0
commit 9f70226a15
39 changed files with 11984 additions and 1925 deletions

View File

@@ -1,98 +0,0 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = ./app/alembic
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =
# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator"
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
# version path separator; As mentioned above, this is the character used to split
# version_locations. Valid values are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os # default: use os.pathsep
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

View File

@@ -1 +0,0 @@
Generic single-database configuration.

View File

@@ -1,67 +0,0 @@
import os
import sys
from alembic import context
from sqlalchemy.engine import create_engine
from core.db import DATABASE_URL
myPath = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, myPath + "/../../")
config = context.config
from app.models import BaseMeta
target_metadata = BaseMeta.metadata
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    Configures the context with just a URL rather than an Engine, so no
    DBAPI needs to be installed; ``context.execute()`` emits the SQL as
    text to the script output instead of hitting a database.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )
    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online():
    """Run migrations in 'online' mode.

    Creates a real Engine from ``DATABASE_URL`` and associates a live
    connection with the migration context.
    """
    engine = create_engine(DATABASE_URL)
    with engine.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            compare_type=True,
        )
        with context.begin_transaction():
            context.run_migrations()
# Entry point: the alembic CLI decides offline vs. online mode
# (e.g. `alembic upgrade head --sql` runs offline).
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

View File

@@ -1,24 +0,0 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
def upgrade():
${upgrades if upgrades else "pass"}
def downgrade():
${downgrades if downgrades else "pass"}

View File

@@ -1,31 +0,0 @@
"""empty message
Revision ID: 738a796c3f0a
Revises: 7a76c257df70
Create Date: 2021-12-13 01:34:21.957994
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "738a796c3f0a"
down_revision = "7a76c257df70"
branch_labels = None
depends_on = None
def upgrade():
    """Add the non-null boolean ``privileged`` column (default false) to ``services``."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column(
        "services",
        sa.Column("privileged", sa.Boolean(), server_default="f", nullable=False),
    )
    # ### end Alembic commands ###
def downgrade():
    """Drop the ``privileged`` column added by this revision."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column("services", "privileged")
    # ### end Alembic commands ###

View File

@@ -1,30 +0,0 @@
"""empty message
Revision ID: 738a796c3f0b
Revises: 85ece6cfed22
Create Date: 2021-12-13 01:34:21.957994
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "738a796c3f0b"
down_revision = "85ece6cfed22"
branch_labels = None
depends_on = None
def upgrade():
    """Add the nullable ``username`` column (max 64 chars) to ``services``."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column(
        "services", sa.Column("username", sa.String(length=64), nullable=True)
    )
    # ### end Alembic commands ###
def downgrade():
    """Drop the ``username`` column added by this revision."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column("services", "username")
    # ### end Alembic commands ###

View File

@@ -1,37 +0,0 @@
"""empty message
Revision ID: 7a76c257df70
Revises:
Create Date: 2021-12-04 00:46:17.770026
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "7a76c257df70"
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
    """Create the initial ``services`` table (id PK, unique token)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "services",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("token", sa.String(length=128), nullable=False),
        sa.Column("user", sa.BigInteger(), nullable=False),
        sa.Column("status", sa.String(length=12), nullable=True),
        sa.Column("created_time", sa.DateTime(timezone=True), nullable=True),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("token"),
    )
    # ### end Alembic commands ###
def downgrade():
    """Drop the ``services`` table created by this revision."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table("services")
    # ### end Alembic commands ###

View File

@@ -1,39 +0,0 @@
"""empty message
Revision ID: 85ece6cfed22
Revises: 738a796c3f0a
Create Date: 2021-12-27 23:08:26.124204
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "85ece6cfed22"
down_revision = "738a796c3f0a"
branch_labels = None
depends_on = None
def upgrade():
    """Add the ``cache`` string column and drop the ``privileged`` flag."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column("services", sa.Column("cache", sa.String(length=12), nullable=True))
    op.drop_column("services", "privileged")
    # ### end Alembic commands ###
def downgrade():
    """Restore the ``privileged`` flag and drop the ``cache`` column."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column(
        "services",
        sa.Column(
            "privileged",
            sa.BOOLEAN(),
            server_default=sa.text("false"),
            autoincrement=False,
            nullable=False,
        ),
    )
    op.drop_column("services", "cache")
    # ### end Alembic commands ###

View File

@@ -1,11 +0,0 @@
from fastapi import HTTPException, Security, status
from core.auth import default_security
from core.config import env_config
async def check_token(api_key: str = Security(default_security)):
    """FastAPI dependency: reject any request whose Authorization header
    does not exactly match the configured API key (responds 403)."""
    if api_key != env_config.API_KEY:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN, detail="Wrong api key!"
        )

View File

@@ -1,40 +0,0 @@
from datetime import datetime
from enum import Enum
import ormar
from core.db import database, metadata
class BaseMeta(ormar.ModelMeta):
    """Shared ormar metadata/database binding inherited by every model."""
    metadata = metadata
    database = database
class Statuses(str, Enum):
    """Lifecycle states a registered service can be in."""
    pending = "pending"
    approved = "approved"
    blocked = "blocked"
class CachePrivileges(str, Enum):
    """Caching privilege levels assignable to a service."""
    original = "original"
    buffer = "buffer"
    no_cache = "no_cache"
class Service(ormar.Model):
    """ORM model for a registered service: a unique token owned by a user id,
    with a moderation status and a cache privilege level."""
    class Meta(BaseMeta):
        tablename = "services"
    id: int = ormar.Integer(primary_key=True)  # type: ignore
    token: str = ormar.String(max_length=128, unique=True)  # type: ignore
    username: str = ormar.String(max_length=64, default="")  # type: ignore
    user: int = ormar.BigInteger()  # type: ignore
    status: str = ormar.String(
        max_length=12, choices=list(Statuses), default=Statuses.pending
    )  # type: ignore
    cache: str = ormar.String(
        max_length=12, choices=list(CachePrivileges), default=CachePrivileges.no_cache
    )  # type: ignore
    # Set at creation time; datetime.now is evaluated per-row by ormar.
    created_time = ormar.DateTime(timezone=True, default=datetime.now)

View File

@@ -1,24 +0,0 @@
from datetime import datetime
from typing import Optional
from pydantic import BaseModel, constr
from app.models import CachePrivileges, Statuses
class ServiceCreate(BaseModel):
    """Request schema for registering a service (POST /)."""
    token: constr(max_length=128)  # type: ignore
    user: int
    username: constr(max_length=64)  # type: ignore
    status: Statuses
    cache: CachePrivileges
class ServiceDetail(BaseModel):
    """Response schema for a service.

    NOTE(review): ``user`` is declared ``str`` although the model stores a
    BigInteger — pydantic will coerce the int to str on serialization;
    confirm this type mismatch is intentional.
    """
    id: int
    token: str
    username: Optional[str]
    user: str
    status: str
    cache: str
    created_time: datetime

View File

@@ -1,81 +0,0 @@
from fastapi import APIRouter, Depends, HTTPException, status
from app.depends import check_token
from app.models import CachePrivileges, Service, Statuses
from app.serializers import ServiceCreate, ServiceDetail
# TODO: add redis cache
router = APIRouter(dependencies=[Depends(check_token)])
@router.get("/", response_model=list[ServiceDetail])
async def get_services():
    """Return every registered service."""
    services = await Service.objects.all()
    return services
@router.get("/healthcheck")
async def healthcheck():
    # Liveness probe; always responds 200 with a literal body.
    return "Ok!"
@router.get("/{id}/", response_model=ServiceDetail)
async def get_service(id: int):
    """Fetch one service by primary key; 404 when it does not exist."""
    found = await Service.objects.get_or_none(id=id)
    if found is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
    return found
@router.delete("/{id}/", response_model=ServiceDetail)
async def delete_service(id: int):
    """Delete a service by primary key and return the deleted record; 404 if absent."""
    found = await Service.objects.get_or_none(id=id)
    if found is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
    await found.delete()
    return found
@router.post("/", response_model=ServiceDetail)
async def register_service(data: ServiceCreate):
    """Register a new service; each user may own at most three (402 otherwise)."""
    existing = await Service.objects.filter(user=data.user).count()
    if existing >= 3:
        raise HTTPException(status.HTTP_402_PAYMENT_REQUIRED)
    return await Service.objects.create(**data.dict())
@router.patch("/{id}/update_status", response_model=ServiceDetail)
async def update_service_state(id: int, new_status: Statuses):
    """Overwrite a service's status field; 404 when the id is unknown."""
    target = await Service.objects.get_or_none(id=id)
    if target is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
    target.status = new_status
    await target.update(["status"])
    return target
@router.patch("/{id}/update_cache", response_model=ServiceDetail)
async def update_service_cache(id: int, new_cache: CachePrivileges):
    """Overwrite a service's cache privilege; 404 when the id is unknown."""
    target = await Service.objects.get_or_none(id=id)
    if target is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
    target.cache = new_cache
    await target.update(["cache"])
    return target

40
src/config.rs Normal file
View File

@@ -0,0 +1,40 @@
use once_cell::sync::Lazy;
/// Application configuration, sourced entirely from environment variables
/// via [`Config::load`].
pub struct Config {
    pub api_key: String,
    pub postgres_user: String,
    pub postgres_password: String,
    pub postgres_host: String,
    pub postgres_port: u32,
    pub postgres_db: String,
    pub sentry_dsn: String
}
/// Read a required environment variable, panicking with the variable's
/// name in the message when it is unset or not valid unicode.
fn get_env(env: &'static str) -> String {
    match std::env::var(env) {
        Ok(value) => value,
        Err(_) => panic!("Cannot get the {} env variable", env),
    }
}
impl Config {
    /// Read every setting from the environment.
    ///
    /// # Panics
    /// Panics with a clear message when any variable is missing, or when
    /// `POSTGRES_PORT` is not a valid number (the previous
    /// `parse().unwrap()` panicked without saying which value failed).
    pub fn load() -> Config {
        Config {
            api_key: get_env("API_KEY"),
            postgres_user: get_env("POSTGRES_USER"),
            postgres_password: get_env("POSTGRES_PASSWORD"),
            postgres_host: get_env("POSTGRES_HOST"),
            postgres_port: get_env("POSTGRES_PORT")
                .parse()
                .unwrap_or_else(|e| panic!("POSTGRES_PORT is not a valid number: {}", e)),
            postgres_db: get_env("POSTGRES_DB"),
            sentry_dsn: get_env("SENTRY_DSN")
        }
    }
}
/// Lazily-initialised global configuration; loaded from the environment
/// on first access.
pub static CONFIG: Lazy<Config> = Lazy::new(Config::load);

View File

@@ -1,34 +0,0 @@
from fastapi import FastAPI
import sentry_sdk
from app.views import router
from core.config import env_config
from core.db import database
sentry_sdk.init(
env_config.SENTRY_DSN,
)
def start_app() -> FastAPI:
    """Build the FastAPI application: mount the service router, attach the
    shared database pool, and register connect/disconnect lifecycle hooks."""
    app = FastAPI()
    app.include_router(router)
    app.state.database = database
    # Connect the pool once at startup (guarded so a reused pool is not
    # reconnected).
    @app.on_event("startup")
    async def startup() -> None:
        database_ = app.state.database
        if not database_.is_connected:
            await database_.connect()
    # Mirror-image teardown on shutdown.
    @app.on_event("shutdown")
    async def shutdown() -> None:
        database_ = app.state.database
        if database_.is_connected:
            await database_.disconnect()
    return app

View File

@@ -1,4 +0,0 @@
from fastapi.security import APIKeyHeader
default_security = APIKeyHeader(name="Authorization")

View File

@@ -1,16 +0,0 @@
from pydantic import BaseSettings
class EnvConfig(BaseSettings):
    """Settings read from environment variables by pydantic; every field is
    required, so instantiation fails fast when one is missing."""
    API_KEY: str
    POSTGRES_USER: str
    POSTGRES_PASSWORD: str
    POSTGRES_HOST: str
    POSTGRES_PORT: int
    POSTGRES_DB: str
    SENTRY_DSN: str
# Module-level singleton: the environment is read once at import time.
env_config = EnvConfig()

View File

@@ -1,15 +0,0 @@
from urllib.parse import quote
from databases import Database
from sqlalchemy import MetaData
from core.config import env_config
# Postgres DSN; the password is URL-quoted because it may contain
# characters that are reserved in URLs (e.g. '@', ':', '/').
DATABASE_URL = (
    f"postgresql://{env_config.POSTGRES_USER}:{quote(env_config.POSTGRES_PASSWORD)}@"
    f"{env_config.POSTGRES_HOST}:{env_config.POSTGRES_PORT}/{env_config.POSTGRES_DB}"
)
metadata = MetaData()
# Small shared pool (1-2 connections) used by the whole application.
database = Database(DATABASE_URL, min_size=1, max_size=2)

19
src/db.rs Normal file
View File

@@ -0,0 +1,19 @@
use crate::{prisma::PrismaClient, config::CONFIG};
pub async fn get_prisma_client() -> PrismaClient {
let database_url: String = format!(
"postgresql://{}:{}@{}:{}/{}?connection_limit=2",
CONFIG.postgres_user,
CONFIG.postgres_password,
CONFIG.postgres_host,
CONFIG.postgres_port,
CONFIG.postgres_db
);
PrismaClient::_builder()
.with_url(database_url)
.build()
.await
.unwrap()
}

View File

@@ -1,4 +0,0 @@
from core.app import start_app
app = start_app()

33
src/main.rs Normal file
View File

@@ -0,0 +1,33 @@
pub mod config;
pub mod db;
pub mod prisma;
pub mod views;
use tracing::info;
use std::net::SocketAddr;
/// Bind the application router on 0.0.0.0:8080 and serve until shutdown.
async fn start_app() {
    let router = views::get_router().await;
    let address = SocketAddr::from(([0, 0, 0, 0], 8080));

    info!("Start webserver...");
    axum::Server::bind(&address)
        .serve(router.into_make_service())
        .await
        .unwrap();
    info!("Webserver shutdown...")
}
#[tokio::main]
async fn main() {
    // Compact log lines without the target module path.
    tracing_subscriber::fmt()
        .with_target(false)
        .compact()
        .init();
    // The sentry guard must stay alive for the whole process: dropping it
    // flushes pending events and shuts down delivery.
    let _guard = sentry::init(config::CONFIG.sentry_dsn.clone());
    start_app().await;
}

1585
src/prisma.rs Normal file

File diff suppressed because one or more lines are too long

198
src/views.rs Normal file
View File

@@ -0,0 +1,198 @@
use axum::{Router, response::{Response, IntoResponse}, http::{StatusCode, self, Request}, middleware::{Next, self}, Extension, routing::{get, delete, post, patch}, Json, extract::{Path, self}};
use axum_prometheus::PrometheusMetricLayer;
use serde::Deserialize;
use tower_http::trace::{TraceLayer, self};
use tracing::Level;
use std::sync::Arc;
use crate::{config::CONFIG, db::get_prisma_client, prisma::{PrismaClient, service}};
pub type Database = Extension<Arc<PrismaClient>>;
//
async fn get_services(
db: Database
) -> impl IntoResponse {
let services = db.service()
.find_many(vec![])
.exec()
.await
.unwrap();
Json(services).into_response()
}
/// GET /:id/ — fetch one service by primary key; 404 when absent.
///
/// A database failure now maps to 500 instead of panicking the handler
/// (the original `.unwrap()` on the query result).
async fn get_service(
    Path(id): Path<i32>,
    db: Database
) -> impl IntoResponse {
    let lookup = db.service()
        .find_unique(service::id::equals(id))
        .exec()
        .await;
    match lookup {
        Ok(Some(found)) => Json(found).into_response(),
        Ok(None) => StatusCode::NOT_FOUND.into_response(),
        // A query error is a server-side problem, not a missing record.
        Err(_) => StatusCode::INTERNAL_SERVER_ERROR.into_response(),
    }
}
/// DELETE /:id/ — delete a service by primary key and echo the deleted
/// record; 404 when it does not exist.
///
/// Issues a single `delete` query instead of the original find-then-delete
/// pair, which both raced concurrent deletes and panicked (`unwrap`) on
/// query errors. Prisma returns the deleted row on success and an error
/// (including "record not found") otherwise.
async fn delete_service(
    Path(id): Path<i32>,
    db: Database
) -> impl IntoResponse {
    match db.service()
        .delete(service::id::equals(id))
        .exec()
        .await
    {
        Ok(deleted) => Json(deleted).into_response(),
        Err(_) => StatusCode::NOT_FOUND.into_response(),
    }
}
#[derive(Deserialize)]
pub struct CreateServiceData {
#[serde(rename = "token")]
pub token: String,
#[serde(rename = "user")]
pub user: i64,
#[serde(rename = "status")]
pub status: String,
#[serde(rename = "cache")]
pub cache: String,
#[serde(rename = "username")]
pub username: String,
}
async fn create_service(
db: Database,
extract::Json(data): extract::Json<CreateServiceData>,
) -> impl IntoResponse {
let CreateServiceData { token, user, status, cache, username } = data;
let service = db.service()
.create(
token,
user,
status,
chrono::offset::Local::now().into(),
cache,
username,
vec![]
)
.exec()
.await
.unwrap();
Json(service).into_response()
}
/// PATCH /:id/update_status — overwrite the `status` field; 404 when the
/// update fails (e.g. unknown id).
async fn update_state(
    Path(id): Path<i32>,
    db: Database,
    extract::Json(state): extract::Json<String>
) -> impl IntoResponse {
    let updated = db.service()
        .update(service::id::equals(id), vec![service::status::set(state)])
        .exec()
        .await;
    match updated {
        Ok(record) => Json(record).into_response(),
        Err(_) => StatusCode::NOT_FOUND.into_response(),
    }
}
/// PATCH /:id/update_cache — overwrite the `cache` field; 404 when the
/// update fails (e.g. unknown id).
async fn update_cache(
    Path(id): Path<i32>,
    db: Database,
    extract::Json(cache): extract::Json<String>
) -> impl IntoResponse {
    let updated = db.service()
        .update(service::id::equals(id), vec![service::cache::set(cache)])
        .exec()
        .await;
    match updated {
        Ok(record) => Json(record).into_response(),
        Err(_) => StatusCode::NOT_FOUND.into_response(),
    }
}
//
/// Middleware: require the Authorization header to equal the configured
/// API key; otherwise short-circuit with 401.
async fn auth<B>(req: Request<B>, next: Next<B>) -> Result<Response, StatusCode> {
    let header_value = req.headers()
        .get(http::header::AUTHORIZATION)
        .and_then(|header| header.to_str().ok())
        .ok_or(StatusCode::UNAUTHORIZED)?;
    if header_value != CONFIG.api_key {
        return Err(StatusCode::UNAUTHORIZED);
    }
    Ok(next.run(req).await)
}
/// Assemble the full application router: authenticated CRUD routes, an
/// unauthenticated /metrics endpoint, and request tracing over everything.
pub async fn get_router() -> Router {
    let client = Arc::new(get_prisma_client().await);
    let (prometheus_layer, metric_handle) = PrometheusMetricLayer::pair();
    // Layers apply bottom-up to requests: prometheus -> db extension ->
    // auth, so every CRUD route requires the API key.
    let app_router = Router::new()
        .route("/", get(get_services))
        .route("/:id/", get(get_service))
        .route("/:id/", delete(delete_service))
        .route("/", post(create_service))
        .route("/:id/update_status", patch(update_state))
        .route("/:id/update_cache", patch(update_cache))
        .layer(middleware::from_fn(auth))
        .layer(Extension(client))
        .layer(prometheus_layer);
    // /metrics is nested separately, outside the auth layer.
    let metric_router = Router::new()
        .route("/metrics", get(|| async move { metric_handle.render() }));
    Router::new()
        .nest("/", app_router)
        .nest("/", metric_router)
        .layer(
            TraceLayer::new_for_http()
                .make_span_with(trace::DefaultMakeSpan::new()
                    .level(Level::INFO))
                .on_response(trace::DefaultOnResponse::new()
                    .level(Level::INFO)),
        )
}