mirror of
https://github.com/flibusta-apps/telegram_files_cache_server.git
synced 2025-12-06 06:35:38 +01:00
Move to sqlx
@@ -1,2 +0,0 @@
-[alias]
-prisma = "run -p prisma-cli --"
Cargo.lock (generated, 2349 lines changed)
File diff suppressed because it is too large
Cargo.toml (45 lines changed)
@@ -4,43 +4,40 @@ version = "0.1.0"
 edition = "2021"
 
-[workspace]
-members = [
-    "prisma-cli"
-]
-
 [dependencies]
-once_cell = "1.20.1"
+once_cell = "1.20.2"
 dotenvy = "0.15.0"
-prisma-client-rust = { git = "https://github.com/Brendonovich/prisma-client-rust", tag = "0.6.11", default-features = false, features = ["postgresql"] }
-serde = { version = "1.0.210", features = ["derive"] }
-serde_json = "1.0.128"
-reqwest = { version = "0.12.8", features = ["json", "stream", "multipart"] }
-tokio = { version = "1.40.0", features = ["full"] }
-tokio-util = { version = "0.7.12", features = ["compat"] }
-axum = { version = "0.7.7", features = ["json"] }
+tokio = { version = "1.42.0", features = ["full"] }
+tokio-util = { version = "0.7.13", features = ["compat"] }
+axum = { version = "0.7.9", features = ["json"] }
 axum-prometheus = "0.7.0"
-chrono = "0.4.38"
-sentry = { version = "0.34.0", features = ["debug-images"] }
-tracing = "0.1.40"
-tracing-subscriber = { version = "0.3.18", features = ["env-filter"]}
-sentry-tracing = "0.34.0"
-tower-http = { version = "0.6.1", features = ["trace"] }
+serde = { version = "1.0.216", features = ["derive"] }
+serde_json = "1.0.134"
+tracing = "0.1.41"
+tracing-subscriber = { version = "0.3.19", features = ["env-filter"]}
+sentry-tracing = "0.35.0"
+tower-http = { version = "0.6.2", features = ["trace"] }
+reqwest = { version = "0.12.9", features = ["json", "stream", "multipart"] }
+chrono = "0.4.39"
+sentry = { version = "0.35.0", features = ["debug-images"] }
 base64 = "0.22.1"
-futures = "0.3.30"
-futures-core = "0.3.30"
+futures = "0.3.31"
+futures-core = "0.3.31"
 async-stream = "0.3.6"
-tempfile = "3.13.0"
-bytes = "1.7.2"
+tempfile = "3.14.0"
+bytes = "1.9.0"
 teloxide = { version = "0.13.0", features = ["macros", "webhooks-axum", "cache-me", "throttle"] }
 moka = { version = "0.12.8", features = ["future"] }
+sqlx = { version = "0.8.2", features = ["runtime-tokio", "postgres", "macros"] }
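The `macros` feature enabled on sqlx means `sqlx::query!` and `sqlx::query_as!` are checked at compile time, either against a live database pointed to by `DATABASE_URL` or against metadata prepared with `cargo sqlx prepare`. A minimal sketch of the new stack with placeholder credentials (not from the repository; the real URL is assembled from `CONFIG` in src/db.rs below):

// Minimal sketch, not taken from this commit: connect with PgPoolOptions and run
// a runtime-checked sanity query. The connection URL below is a placeholder.
use sqlx::postgres::PgPoolOptions;

#[tokio::main]
async fn main() -> Result<(), sqlx::Error> {
    let pool = PgPoolOptions::new()
        .max_connections(5)
        .connect("postgres://user:password@localhost:5432/db")
        .await?;

    // query_scalar is checked at runtime; the query!/query_as! macros used in
    // the diffs below are additionally checked at compile time.
    let one: i32 = sqlx::query_scalar("SELECT 1").fetch_one(&pool).await?;
    assert_eq!(one, 1);
    Ok(())
}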
prisma-cli/.gitignore (vendored, 3 lines changed)
@@ -1,3 +0,0 @@
-node_modules
-# Keep environment variables out of version control
-.env
prisma-cli/Cargo.lock (generated, 3195 lines changed)
File diff suppressed because it is too large
@@ -1,9 +0,0 @@
-[package]
-name = "prisma-cli"
-version = "0.1.0"
-edition = "2021"
-
-# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
-
-[dependencies]
-prisma-client-rust-cli = { git = "https://github.com/Brendonovich/prisma-client-rust", tag = "0.6.10", default-features = false, features = ["postgresql"] }
@@ -1,3 +0,0 @@
-fn main() {
-    prisma_client_rust_cli::run();
-}
src/.DS_Store (vendored, binary)
Binary file not shown
src/db.rs (12 lines changed)
@@ -1,6 +1,8 @@
-use crate::{config::CONFIG, prisma::PrismaClient};
+use crate::config::CONFIG;
 
-pub async fn get_prisma_client() -> PrismaClient {
+use sqlx::{postgres::PgPoolOptions, PgPool};
+
+pub async fn get_pg_pool() -> PgPool {
     let database_url: String = format!(
         "postgresql://{}:{}@{}:{}/{}?connection_limit=10&pool_timeout=300",
         CONFIG.postgres_user,
@@ -10,9 +12,9 @@ pub async fn get_prisma_client() -> PrismaClient {
         CONFIG.postgres_db
     );
 
-    PrismaClient::_builder()
-        .with_url(database_url)
-        .build()
+    PgPoolOptions::new()
+        .max_connections(5)
+        .connect(&database_url)
         .await
         .unwrap()
 }
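A side effect of this change shows up in src/views.rs further down: `PgPool` is itself a cheap, reference-counted handle, so the old `Arc<PrismaClient>` wrapper can be dropped. A small sketch of that property (not part of the diff):

use sqlx::PgPool;

// Each clone is a handle to the same pool; connections are not duplicated.
async fn spawn_probes(pool: PgPool) {
    for _ in 0..4 {
        let pool = pool.clone();
        tokio::spawn(async move {
            let _ = sqlx::query("SELECT 1").execute(&pool).await;
        });
    }
}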
@@ -1,7 +1,7 @@
 pub mod config;
 pub mod db;
-pub mod prisma;
 pub mod repository;
+pub mod serializers;
 pub mod services;
 pub mod views;
 
src/prisma.rs (1351 lines changed)
File diff suppressed because one or more lines are too long
@@ -1,6 +1,4 @@
-use prisma_client_rust::QueryError;
-
-use crate::{prisma::cached_file, views::Database};
+use crate::{serializers::CachedFile, views::Database};
 
 pub struct CachedFileRepository {
     db: Database,
@@ -15,11 +13,18 @@ impl CachedFileRepository {
         &self,
         object_id: i32,
         object_type: String,
-    ) -> Result<cached_file::Data, QueryError> {
-        self.db
-            .cached_file()
-            .delete(cached_file::object_id_object_type(object_id, object_type))
-            .exec()
+    ) -> Result<CachedFile, sqlx::Error> {
+        sqlx::query_as!(
+            CachedFile,
+            r#"
+            DELETE FROM cached_files
+            WHERE object_id = $1 AND object_type = $2
+            RETURNING *
+            "#,
+            object_id,
+            object_type
+        )
+        .fetch_one(&self.db)
         .await
     }
 }
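`query_as!` maps the returned row onto `CachedFile` and verifies the SQL and column types at compile time, which requires `DATABASE_URL` (or prepared query metadata) during the build. For comparison, a sketch of the same delete using the runtime-checked API, an alternative this commit does not use:

// Sketch: runtime-checked variant of the delete above; assumes the CachedFile
// struct from src/serializers.rs, which derives sqlx::FromRow.
use crate::serializers::CachedFile;
use sqlx::PgPool;

pub async fn delete_cached_file(
    pool: &PgPool,
    object_id: i32,
    object_type: &str,
) -> Result<CachedFile, sqlx::Error> {
    sqlx::query_as::<_, CachedFile>(
        "DELETE FROM cached_files WHERE object_id = $1 AND object_type = $2 RETURNING *",
    )
    .bind(object_id)
    .bind(object_type)
    .fetch_one(pool)
    .await
}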
src/serializers.rs (new file, 8 lines)
@@ -0,0 +1,8 @@
+#[derive(sqlx::FromRow, serde::Serialize)]
+pub struct CachedFile {
+    pub id: i32,
+    pub object_id: i32,
+    pub object_type: String,
+    pub message_id: i64,
+    pub chat_id: i64,
+}
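The diff does not include a migration, so the table definition itself is not shown. The sketch below is only an inference from the struct fields and the queries elsewhere in the commit; the unique pair (object_id, object_type) mirrors the old Prisma `object_id_object_type` lookup key, and the real schema may differ:

// Inferred schema sketch (assumption, not from this commit).
use sqlx::PgPool;

pub async fn create_cached_files_table(pool: &PgPool) -> Result<(), sqlx::Error> {
    sqlx::query(
        r#"
        CREATE TABLE IF NOT EXISTS cached_files (
            id          SERIAL PRIMARY KEY,
            object_id   INTEGER NOT NULL,
            object_type TEXT    NOT NULL,
            message_id  BIGINT  NOT NULL,
            chat_id     BIGINT  NOT NULL,
            UNIQUE (object_id, object_type)
        )
        "#,
    )
    .execute(pool)
    .await?;
    Ok(())
}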
@@ -14,7 +14,7 @@ use teloxide::{
 };
 use tracing::log;
 
-use crate::{config, prisma::cached_file, repository::CachedFileRepository, views::Database};
+use crate::{config, repository::CachedFileRepository, serializers::CachedFile, views::Database};
 
 use self::{
     book_library::{get_book, get_books, types::BaseBook},
@@ -55,14 +55,16 @@ pub async fn get_cached_file_or_cache(
     object_id: i32,
     object_type: String,
     db: Database,
-) -> Option<cached_file::Data> {
-    let cached_file = db
-        .cached_file()
-        .find_unique(cached_file::object_id_object_type(
-            object_id,
-            object_type.clone(),
-        ))
-        .exec()
+) -> Option<CachedFile> {
+    let cached_file = sqlx::query_as!(
+        CachedFile,
+        r#"
+        SELECT * FROM cached_files
+        WHERE object_id = $1 AND object_type = $2"#,
+        object_id,
+        object_type
+    )
+    .fetch_optional(&db)
     .await
     .unwrap();
 
@@ -72,7 +74,7 @@ pub async fn get_cached_file_or_cache(
     }
 }
 
-pub async fn get_cached_file_copy(original: cached_file::Data, db: Database) -> CacheData {
+pub async fn get_cached_file_copy(original: CachedFile, db: Database) -> CacheData {
     let bot = ROUND_ROBIN_BOT.get_bot();
 
     let message_id = match bot
@@ -85,11 +87,16 @@ pub async fn get_cached_file_copy(original: cached_file::Data, db: Database) ->
     {
         Ok(v) => v,
         Err(_) => {
-            let _ = db
-                .cached_file()
-                .delete(cached_file::id::equals(original.id))
-                .exec()
-                .await;
+            sqlx::query!(
+                r#"
+                DELETE FROM cached_files
+                WHERE id = $1
+                "#,
+                original.id
+            )
+            .execute(&db)
+            .await
+            .unwrap();
 
             let new_original =
                 get_cached_file_or_cache(original.object_id, original.object_type.clone(), db)
@@ -117,11 +124,7 @@ pub async fn get_cached_file_copy(original: cached_file::Data, db: Database) ->
     }
 }
 
-pub async fn cache_file(
-    object_id: i32,
-    object_type: String,
-    db: Database,
-) -> Option<cached_file::Data> {
+pub async fn cache_file(object_id: i32, object_type: String, db: Database) -> Option<CachedFile> {
     let book = match get_book(object_id).await {
         Ok(v) => v,
         Err(err) => {
@@ -154,18 +157,23 @@ pub async fn cache_file(
     };
 
     Some(
-        db.cached_file()
-            .create(object_id, object_type, message_id, chat_id, vec![])
-            .exec()
+        sqlx::query_as!(
+            CachedFile,
+            r#"INSERT INTO cached_files (object_id, object_type, message_id, chat_id)
+            VALUES ($1, $2, $3, $4)
+            RETURNING *"#,
+            object_id,
+            object_type,
+            message_id,
+            chat_id
+        )
+        .fetch_one(&db)
        .await
        .unwrap(),
    )
 }
 
-pub async fn download_from_cache(
-    cached_data: cached_file::Data,
-    db: Database,
-) -> Option<DownloadResult> {
+pub async fn download_from_cache(cached_data: CachedFile, db: Database) -> Option<DownloadResult> {
     let response_task = tokio::task::spawn(download_from_telegram_files(
         cached_data.message_id,
         cached_data.chat_id,
@@ -300,13 +308,13 @@ pub async fn start_update_cache(db: Database) {
 
     for book in books {
         'types: for available_type in book.available_types {
-            let cached_file = match db
-                .cached_file()
-                .find_unique(cached_file::object_id_object_type(
-                    book.id,
-                    available_type.clone(),
-                ))
-                .exec()
+            let cached_file = match sqlx::query_as!(
+                CachedFile,
+                r#"SELECT * FROM cached_files WHERE object_id = $1 AND object_type = $2"#,
+                book.id,
+                available_type.clone()
+            )
+            .fetch_optional(&db)
             .await
             {
                 Ok(v) => v,
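For orientation, a sketch of how the rewritten lookup is called; the object_type value is illustrative (real values come from the book's available_types), and `Database` is now the `PgPool` alias defined in src/views.rs:

use crate::{serializers::CachedFile, services::get_cached_file_or_cache, views::Database};

// Hypothetical caller: on a cache miss, get_cached_file_or_cache presumably
// falls through to cache_file(), which uploads the book and inserts a row.
async fn example(db: Database) {
    let hit: Option<CachedFile> = get_cached_file_or_cache(42, "fb2".to_string(), db).await;
    if let Some(file) = hit {
        println!("cached as message {} in chat {}", file.message_id, file.chat_id);
    }
}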
src/views.rs (39 lines changed)
@@ -9,27 +9,26 @@ use axum::{
 };
 use axum_prometheus::PrometheusMetricLayer;
 use base64::{engine::general_purpose, Engine};
-use serde::Deserialize;
-use std::sync::Arc;
+use sqlx::PgPool;
 use tokio_util::io::ReaderStream;
 use tower_http::trace::{self, TraceLayer};
 use tracing::Level;
 
 use crate::{
     config::CONFIG,
-    db::get_prisma_client,
-    prisma::{cached_file, PrismaClient},
+    db::get_pg_pool,
+    serializers::CachedFile,
     services::{
         download_from_cache, download_utils::get_response_async_read, get_cached_file_copy,
         get_cached_file_or_cache, start_update_cache, CacheData,
     },
 };
 
-pub type Database = Arc<PrismaClient>;
+pub type Database = PgPool;
 
 //
 
-#[derive(Deserialize)]
+#[derive(serde::Deserialize)]
 pub struct GetCachedFileQuery {
     pub copy: bool,
 }
@@ -111,26 +110,20 @@ async fn delete_cached_file(
     Path((object_id, object_type)): Path<(i32, String)>,
     Extension(Ext { db, .. }): Extension<Ext>,
 ) -> impl IntoResponse {
-    let cached_file = db
-        .cached_file()
-        .find_unique(cached_file::object_id_object_type(
-            object_id,
-            object_type.clone(),
-        ))
-        .exec()
+    let cached_file: Option<CachedFile> = sqlx::query_as!(
+        CachedFile,
+        r#"DELETE FROM cached_files
+        WHERE object_id = $1 AND object_type = $2
+        RETURNING *"#,
+        object_id,
+        object_type
+    )
+    .fetch_optional(&db)
     .await
     .unwrap();
 
     match cached_file {
-        Some(v) => {
-            db.cached_file()
-                .delete(cached_file::object_id_object_type(object_id, object_type))
-                .exec()
-                .await
-                .unwrap();
-
-            Json(v).into_response()
-        }
+        Some(v) => Json::<CachedFile>(v).into_response(),
         None => StatusCode::NO_CONTENT.into_response(),
     }
 }
@@ -164,11 +157,11 @@ async fn auth(req: Request<axum::body::Body>, next: Next) -> Result<Response, St
 
 #[derive(Clone)]
 struct Ext {
-    pub db: Arc<PrismaClient>,
+    pub db: PgPool,
 }
 
 pub async fn get_router() -> Router {
-    let db = Arc::new(get_prisma_client().await);
+    let db = get_pg_pool().await;
 
     let ext = Ext { db };
 
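Two details worth noting in the handler above: the old lookup-then-delete pair of Prisma calls becomes a single atomic `DELETE ... RETURNING *`, and the returned row is serialized directly because `CachedFile` derives `serde::Serialize`. A sketch of the surrounding wiring, assuming it sits in src/views.rs next to `Ext` (the route registrations are elided):

use axum::{Extension, Router};

use crate::db::get_pg_pool;

// The pool is attached once as an Extension; axum clones it into each handler,
// which is cheap because PgPool is reference-counted.
pub async fn router_sketch() -> Router {
    let db = get_pg_pool().await;
    Router::new()
        // .route(...) registrations from get_router() go here
        .layer(Extension(Ext { db }))
}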